| column | type | range / values |
|---|---|---|
| code | string | length 5 to 1.03M |
| repo_name | string | length 5 to 90 |
| path | string | length 4 to 158 |
| license | string | 15 classes |
| size | int64 | 5 to 1.03M |
| n_ast_errors | int64 | 0 to 53.9k |
| ast_max_depth | int64 | 2 to 4.17k |
| n_whitespaces | int64 | 0 to 365k |
| n_ast_nodes | int64 | 3 to 317k |
| n_ast_terminals | int64 | 1 to 171k |
| n_ast_nonterminals | int64 | 1 to 146k |
| loc | int64 | -1 to 37.3k |
| cycloplexity | int64 | -1 to 1.31k |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TypeFamilies #-}
module SoOSiM.Components.PeriodicIO where
import Control.Concurrent.STM
import Control.Concurrent.STM.TQueue
import Control.Monad
import Data.List
import Data.Maybe
import SoOSiM
import SoOSiM.Components.Common
import SoOSiM.Components.Scheduler
newtype PeriodicIO = PeriodicIO String
newtype PeriodicIOS = PeriodicIOS ( Maybe (TVar [(TQueue (Int,Int),Int,Int,Int)])
, [(TQueue (Int,Int),Int,String,Int,Int)]
, ComponentId
)
data PIO_Cmd = PIO_Stop
deriving (Typeable,Show)
instance ComponentInterface PeriodicIO where
type State PeriodicIO = PeriodicIOS
type Receive PeriodicIO = PIO_Cmd
type Send PeriodicIO = ()
initState = const (PeriodicIOS (Nothing,[],(-1)))
componentName (PeriodicIO an) = ("<<" ++ an ++ ">>Periodic IO")
componentBehaviour = const periodicIO
periodicIO ::
PeriodicIOS
-> Input PIO_Cmd
-> Sim PeriodicIOS
periodicIO _ (Message _ PIO_Stop _) = stop
periodicIO s@(PeriodicIOS (Just qsS,ds,sId)) _ = do
currentTime <- getTime
qs <- runSTM $ readTVar qsS
qs' <- fmap catMaybes $ forM qs $ \(q,c,p,n) -> do
case n of
0 -> return Nothing
n | c == (p-1) -> do runSTM $ writeTQueue q (currentTime,currentTime)
newIOToken sId
return $ Just (q,0,p,n-1)
| otherwise -> return $ Just (q,c+1,p,n)
runSTM $ writeTVar qsS qs'
ds' <- forM ds $ \(q,n,fN,tid,latest) -> do { ds <- fmap sort $ runSTM $ peek q
; forM_ ds $ (\(d,l) -> when ((currentTime - d) > n && l > latest) (deadLineMissed d currentTime n fN tid))
; let latest' = case ds of { [] -> latest; _ -> maximum $ map snd ds }
; return (q,n,fN,tid,latest')
}
return (PeriodicIOS (Just qsS,ds',sId))
periodicIO s@(PeriodicIOS (Nothing,_,_)) _ = yield s
stopPIO :: ComponentId -> String -> Sim ()
stopPIO cId n = notify (PeriodicIO n) cId PIO_Stop
deadLineMissed st et n fN tid =
traceMsgTag (appThread ++ " missed deadline of " ++ show n ++ " by " ++ show (et - st - n) ++ " cycles")
("DeadlineMissed " ++ appThread ++ " " ++ show missed)
where
missed = et - st - n
appThread = fN ++ ".T" ++ show tid
peek :: TQueue a -> STM [a]
peek q = do
k <- fmap reverse $ peek' []
-- Unget back-to-front so the queue keeps its original front-to-back order.
forM_ (reverse k) (unGetTQueue q)
return k
where
peek' l = do v <- tryReadTQueue q
case v of
Nothing -> return l
Just v' -> peek' (v':l)
|
christiaanb/SoOSiM-components
|
src/SoOSiM/Components/PeriodicIO.hs
|
mit
| 2,872 | 56 | 16 | 1,010 | 913 | 510 | 403 | 62 | 3 |
module Y2017.M01.D26.Solution where
import Data.Function (on)
import Data.List (sortBy)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Monoid (getSum)
import Data.Ord
-- below imports available from 1HaskellADay git repository
import Data.Bag (Bag, add, emptyBag)
import Y2017.M01.D25.Solution
{--
So, yesterday we were able to find out what Haskell source files were in a
directory, then, as a bonus, we were also able to drill down into subdirectories.
Great!
Now, today, let's do a frequency analysis of the words of a Haskell file.
--}
wordCounts :: FilePath -> IO (Map String Int)
wordCounts = fmap (toInt . wc' emptyBag) . readFile
toInt :: Bag a -> Map a Int
toInt = Map.map getSum
wc' :: Bag String -> String -> Bag String
wc' bag = foldr add bag . words
-- wordCounts counts the words of a (Haskell) source file returning a
-- word -> occurrences map.
-- hint: Data.Bag counts occurrences of elements in a collection
-- Point wordCounts at this file. What are the top 5 words in this file?
{--
*Y2017.M01.D26.Solution> wordCounts "Y2017/M01/D26/Exercise.hs" ~> counts
*Y2017.M01.D26.Solution> take 5 (sortBy (compare `on` Down . snd) (Map.toList counts)) ~>
[("a",9),("the",9),("of",8),("--",5),("Haskell",5)]
The words 'a' and 'the' tie for first; 'of' is right after that. Note that it
also counts 'non-words', which we may wish to exclude.
--}
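-- A small helper (illustrative; not part of the original exercise) that
-- captures the GHCi expression used above, so the "top 5" query can be
-- reused on any counts map:
top5 :: Map String Int -> [(String, Int)]
top5 = take 5 . sortBy (compare `on` (Down . snd)) . Map.toList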
{-- BONUS -----------------------------------------------------------------
Now, one file doesn't give a good cross section of frequently used words
in the Haskell corpus, so, find a Haskell Corpus, such as the sources of
the GHC libraries, or the 1HaskellADay problem sets and libraries, or your
own sets of Haskell files.
Run wordCounts over those filesets. What are the top 5 words of the combined
files?
--}
wordsCounts :: [FilePath] -> IO (Map String Int)
wordsCounts = fmap (toInt . foldr (flip wc') emptyBag) . mapM readFile
{--
*Y2017.M01.D26.Solution> haskellFilesR "." ~> files ~> length ~> 409
*Y2017.M01.D26.Solution> wordsCounts files ~> bigcounts ~> length ~> 48281
*Y2017.M01.D26.Solution> take 5 (sortBy (compare `on` Down . snd) (Map.toList bigcounts))
[(",",26859),("|",14083),("County",12062),("the",4283),("=",4205)]
AHA! My word-counts results differ from 'them.' Just goes to show that I'm
not addicted to the 'use a to represent any type'-haskell-programming-style.
So there! ;)
--}
|
geophf/1HaskellADay
|
exercises/HAD/Y2017/M01/D26/Solution.hs
|
mit
| 2,395 | 0 | 11 | 378 | 254 | 142 | 112 | 17 | 1 |
{-# htermination nubBy :: (a -> a -> Bool) -> [a] -> [a] #-}
import List
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/List_nubBy_1.hs
|
mit
| 73 | 0 | 3 | 16 | 5 | 3 | 2 | 1 | 0 |
module Test.Scher.Generic
( M
, int
, integer
, range
, run
, assert
, assume
)
where
import Test.Scher.Klee.Common
#ifdef KLEE_IMPURE
import qualified Test.Scher.Klee.Impure as K
#endif
#ifdef KLEE_PURE
import qualified Test.Scher.Klee.Pure as K
#endif
#ifndef KLEE_PURE
#ifndef KLEE_IMPURE
import qualified Test.Scher.Klee.Void as K
#endif
#endif
type M a = K.M a
run :: M a -> IO a
run = K.run
int :: String -> M Int
int = K.int
integer :: String -> M Integer
integer = K.integer
range :: Int -> Int -> String -> M Int
range i j name = K.range i j name
|
m-alvarez/scher
|
Test/Scher/Generic.hs
|
mit
| 583 | 0 | 8 | 127 | 188 | 115 | 73 | 19 | 1 |
{-# LANGUAGE
TypeOperators
, FlexibleContexts #-}
module Render.Simple where
import Calculus.Connectives.Simple
import Calculus.Connectives.Linear
import Calculus.Expr
import Auxiliary.List
import Control.Monad.State
import Data.List
type UniqNames = String
initUniqNames = ['a'..'z']
type NameSupply a = State UniqNames a
freshName :: NameSupply Char
freshName = do xs <- get
put $ tail xs
return $ head xs
{- Simple rendering to String -}
class Render f where
-- We don't really need State for the list of names,
-- but I may use this type later with a better
-- set of unique names.
render :: Render g => f (Expr g) -> NameSupply String
instance (Render f, Render g) => Render (f :+: g) where
render (Inl f) = render f
render (Inr f) = render f
pretty :: Render f => Expr f -> String
pretty f = evalState (render $ out f) initUniqNames --render . out
{- Connectives rendering -}
binOpRender f g op =
do lft <- render (out f)
rht <- render (out g)
return $ "(" ++ lft ++ op ++ rht ++ ")"
instance Render SimConj where
render (SimConj f g) = binOpRender f g " & "
instance Render AltConj where
render (AltConj f g) = binOpRender f g " $ "
instance Render Disj where
render (Disj f g) = binOpRender f g " | "
instance Render ResImpl where
render (ResImpl f g) = binOpRender f g " -> "
instance Render ValImpl where
render (ValImpl f g) = binOpRender f g " => "
instance Render Unit where
render Unit = return "1"
instance Render Top where
render Top = return "T"
instance Render Zero where
render Zero = return "0"
instance Render Modal where
render (Modal f) = do inner <- (render $ out f)
return $ "!" ++ inner
{- Base rendering -}
showArgs :: [Term] -> String
showArgs = between "(" ")" . intercalate ", " . map showTerm
showTerm (Func name []) = name
showTerm (Func name args) = name ++ showArgs args
showTerm (Var name) = "Var " ++ name
instance Render Atom where
render (Atom name args) = return $ name ++ showArgs args
-- quantifiers
instance Render Exists where
render (Exists f) =
do uniq <- freshName
let var = Var [uniq]
body <- render (out $ f var)
return $ "Exists " ++ showTerm var ++ " . " ++ "[" ++ body ++ "]"
instance Render ForAll where
render (ForAll f) =
do uniq <- freshName
let var = Var [uniq]
body <- render (out $ f var)
return $ "ForAll " ++ showTerm var ++ " . " ++ "[" ++ body ++ "]"
|
wowofbob/calculus
|
Render/Simple.hs
|
mit
| 2,509 | 0 | 13 | 648 | 906 | 447 | 459 | -1 | -1 |
module Main where
import Test.Framework (defaultMain)
import qualified Dojo.Poker.Tests
main :: IO ()
main = defaultMain [ Dojo.Poker.Tests.tests ]
|
b52/kata-poker
|
tests/TestSuite.hs
|
mit
| 150 | 0 | 7 | 21 | 46 | 28 | 18 | 5 | 1 |
{-# OPTIONS_GHC -Wall #-}
module AsPatterns where
import Data.Char
isSubsequenceOf :: (Eq a) => [a] -> [a] -> Bool
isSubsequenceOf sub s = and $ map ((flip elem) s) sub
capitalizeWords :: String -> [(String, String)]
capitalizeWords s = map (\wd@(x:xs) -> (wd, (toUpper x):xs)) $ words s
capitalizeWord :: String -> String
capitalizeWord (x:xs) = (toUpper x):xs
splitSentences :: String -> [String]
splitSentences s =
let
positions = foldr (\(c,n) acc -> if c == '.' then n:acc else acc) [] $ zip s [0..]
(orig', splits') = foldr (\n (orig, splits) -> let (first, second) = splitAt n orig in (first, second:splits)) (s, []) positions
in
(orig':splits')
capitalizeSentence :: String -> String
capitalizeSentence [] = []
capitalizeSentence xs@(".") = xs
capitalizeSentence ('.':' ':x:xs) = ". " ++ (toUpper x):xs
capitalizeSentence ('.':x:xs) = '.':(toUpper x):xs
capitalizeSentence (x:xs) = (toUpper x):xs
capitalizeParagraph :: String -> String
capitalizeParagraph s = concat $ map capitalizeSentence $ splitSentences s
|
NickAger/LearningHaskell
|
HaskellProgrammingFromFirstPrinciples/Chapter11.hsproj/AsPatterns.hs
|
mit
| 1,054 | 0 | 16 | 190 | 499 | 268 | 231 | 23 | 2 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TemplateHaskell #-}
------------------------------------------------------------------------------
-- | This module defines our application's state type and an alias for its
-- handler monad.
module Application where
------------------------------------------------------------------------------
import Control.Lens
import Control.Monad.State.Class
import Snap.Snaplet
import Snap.Snaplet.Heist
import Snap.Snaplet.PostgresqlSimple
------------------------------------------------------------------------------
data App = App
{ _heist :: Snaplet (Heist App)
, _db :: Snaplet Postgres
}
makeLenses ''App
instance HasHeist App where
heistLens = subSnaplet heist
instance HasPostgres (Handler b App) where
getPostgresState = with db get
------------------------------------------------------------------------------
type AppHandler = Handler App App
|
sestrella/react-haskell
|
src/Application.hs
|
mit
| 917 | 0 | 11 | 107 | 135 | 78 | 57 | 17 | 0 |
module Language.Lips.LanguageDef where
--------------------
-- Global Imports --
import Control.Applicative
import Data.Monoid
----------
-- Code --
-- Error types
data ErrorType = VariableNotDefinedError
| InvalidFunctionApplicationError
| InvalidArrayIndexError
| InvalidTypeError
deriving (Eq, Show, Read)
-- Prettily displaying an ErrorType
displayErrorType :: ErrorType -> String
displayErrorType t = "Error thrown: " ++ show t
-- The actual error monad
data Error a = Success a | Error ErrorType
-- Showing an error
instance (Show a) => Show (Error a) where
show (Success a) = "Success " ++ show a
show (Error t) = "Error " ++ show t
-- A functor instance
instance Functor Error where
fmap fn (Success a) = Success $ fn a
fmap fn (Error t) = Error t
-- A monad instance
instance Monad Error where
return a = Success a
err >>= fn =
case err of
Success a -> fn a
Error et -> Error et
-- An applicative instance
instance Applicative Error where
pure a = return a
efn <*> err = do
fn <- efn
val <- err
return $ fn val
-- Checking which kind of Error we have
isSuccess :: Error a -> Bool
isSuccess (Success _) = True
isSuccess (Error _) = False
isError :: Error a -> Bool
isError (Success _) = False
isError (Error _) = True
-- LipsVal definition
data LipsVal = LAtom String
| LList [LipsVal]
| LDottedList [LipsVal] LipsVal
| LNumber Double
| LString String
| LChar Char
| LBool Bool
| LFunction [String] LipsVal
-- Show instance for LipsVal
instance Show LipsVal where
show (LAtom name ) = name
show (LList list ) = mconcat ["(", unwords $ map (show) list, ")"]
show (LDottedList list v) = mconcat ["(", show $ LList list, " . ", show v, ")"]
show (LNumber number) = show number
show (LString string) = show string
show (LChar char ) = show char
show (LBool True ) = "#t"
show (LBool False ) = "#f"
show (LFunction args v) = mconcat ["(lambda ", show $ LList $ map LAtom args, "\n ", show v, ")"]
-- A null definition
lNull :: LipsVal
lNull = LList []
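-- An illustration (not in the original module) of the Show instance above:
-- >>> show (LList [LAtom "+", LNumber 1, LNumber 2])
-- "(+ 1.0 2.0)"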
|
crockeo/lips
|
src/lib/Language/Lips/LanguageDef.hs
|
mit
| 2,251 | 0 | 10 | 660 | 714 | 371 | 343 | 55 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
module IHaskell.Display.Charts () where
import ClassyPrelude
import System.Directory
import Data.Default.Class
import Graphics.Rendering.Chart.Renderable
import Graphics.Rendering.Chart.Backend.Cairo
import qualified Data.ByteString.Char8 as Char
import System.IO.Unsafe
import IHaskell.Display
width :: Width
width = 450
height :: Height
height = 300
instance IHaskellDisplay (Renderable a) where
display renderable = do
pngDisp <- chartData renderable PNG
-- We can add `svg svgDisplay` to the output of `display`,
-- but SVGs are not resizable in the IPython notebook.
svgDisp <- chartData renderable SVG
return $ Display [pngDisp, svgDisp]
chartData :: Renderable a -> FileFormat -> IO DisplayData
chartData renderable format = do
switchToTmpDir
-- Write the chart image in the requested format.
let filename = ".ihaskell-chart.png"
opts = def{_fo_format = format, _fo_size = (width, height)}
renderableToFile opts renderable filename
-- Convert to base64.
imgData <- readFile $ fpFromString filename
return $ case format of
PNG -> png width height $ base64 imgData
SVG -> svg $ Char.unpack imgData
|
aostiles/LiveHaskell
|
ihaskell-display/ihaskell-charts/IHaskell/Display/Charts.hs
|
mit
| 1,173 | 0 | 13 | 211 | 280 | 152 | 128 | 29 | 2 |
module Backend.Prelude
( module X
, (<$$>)
) where
import RIO as X hiding (Handler, timeout)
import Control.Error.Util as X (hush, note)
import Control.Monad as X (replicateM)
import Control.Monad.Logger as X
( MonadLogger
, MonadLoggerIO
, runNoLoggingT
, runStderrLoggingT
, runStdoutLoggingT
)
import Control.Monad.State as X
(MonadState, State, evalState, execState, modify, runState)
import Data.Aeson as X
( (.:)
, (.=)
, FromJSON(..)
, KeyValue
, ToJSON(..)
, decode
, eitherDecode
, encode
, object
, pairs
, withObject
, withText
)
import Data.Coerce as X (Coercible, coerce)
import Data.Default as X
import Data.Kind as X (Constraint, Type)
import Data.Text as X (pack, unpack)
import Data.Text.Encoding as X (decodeUtf8)
import Database.Esqueleto.PostgreSQL.JSON as X (JSONB(..))
import Database.Persist as X
(Entity(..), Key, PersistEntity(..), PersistEntityBackend, PersistField)
import Database.Persist.Sql as X
(ConnectionPool, PersistFieldSql, SqlBackend, SqlPersistT)
import GHC.TypeLits as X (KnownNat, KnownSymbol, Nat, Symbol, natVal, symbolVal)
import Network.HTTP.Types.Header as X
import Network.HTTP.Types.Method as X
import Network.HTTP.Types.Status as X
import Network.Wai as X
(Application, Middleware, Request, Response, responseLBS)
import RIO.Orphans as X ()
import RIO.Seq as X ((<|), (|>))
import Web.HttpApiData as X (FromHttpApiData(..), ToHttpApiData(..))
import Web.PathPieces as X (PathPiece(..))
(<$$>) :: (Functor m, Functor n) => (a -> b) -> m (n a) -> m (n b)
f <$$> m = fmap f <$> m
{-# INLINE (<$$>) #-}
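-- A usage sketch (illustrative only): mapping a function beneath two
-- functor layers at once.
-- >>> length <$$> Just ["a", "bc"]
-- Just [1,2]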
|
cdparks/lambda-machine
|
backend/src/Backend/Prelude.hs
|
mit
| 1,602 | 0 | 10 | 268 | 526 | 352 | 174 | 50 | 1 |
main = putStrLn . show $ primes !! 1000
primes = sieve [2..]
where sieve (p:xs) = p : sieve [x | x <- xs, mod x p > 0]
|
benji6/project-euler-solutions
|
haskell/7.hs
|
mit
| 121 | 0 | 12 | 32 | 77 | 39 | 38 | 3 | 1 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.SVGPathSegLinetoAbs
(js_setX, setX, js_getX, getX, js_setY, setY, js_getY, getY,
SVGPathSegLinetoAbs, castToSVGPathSegLinetoAbs,
gTypeSVGPathSegLinetoAbs)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
foreign import javascript unsafe "$1[\"x\"] = $2;" js_setX ::
JSRef SVGPathSegLinetoAbs -> Float -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegLinetoAbs.x Mozilla SVGPathSegLinetoAbs.x documentation>
setX :: (MonadIO m) => SVGPathSegLinetoAbs -> Float -> m ()
setX self val = liftIO (js_setX (unSVGPathSegLinetoAbs self) val)
foreign import javascript unsafe "$1[\"x\"]" js_getX ::
JSRef SVGPathSegLinetoAbs -> IO Float
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegLinetoAbs.x Mozilla SVGPathSegLinetoAbs.x documentation>
getX :: (MonadIO m) => SVGPathSegLinetoAbs -> m Float
getX self = liftIO (js_getX (unSVGPathSegLinetoAbs self))
foreign import javascript unsafe "$1[\"y\"] = $2;" js_setY ::
JSRef SVGPathSegLinetoAbs -> Float -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegLinetoAbs.y Mozilla SVGPathSegLinetoAbs.y documentation>
setY :: (MonadIO m) => SVGPathSegLinetoAbs -> Float -> m ()
setY self val = liftIO (js_setY (unSVGPathSegLinetoAbs self) val)
foreign import javascript unsafe "$1[\"y\"]" js_getY ::
JSRef SVGPathSegLinetoAbs -> IO Float
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegLinetoAbs.y Mozilla SVGPathSegLinetoAbs.y documentation>
getY :: (MonadIO m) => SVGPathSegLinetoAbs -> m Float
getY self = liftIO (js_getY (unSVGPathSegLinetoAbs self))
|
plow-technologies/ghcjs-dom
|
src/GHCJS/DOM/JSFFI/Generated/SVGPathSegLinetoAbs.hs
|
mit
| 2,450 | 28 | 9 | 318 | 615 | 361 | 254 | 35 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Network.API.Mandrill.Whitelists where
import Network.API.Mandrill.Response
import Network.API.Mandrill.Types
import Network.API.Mandrill.Utils
-- | Adds an email to your email rejection whitelist. If the address is
-- currently on your blacklist, that blacklist entry will be removed
-- automatically.
add :: (MonadIO m) =>
Email ->
Comment ->
MandrillT m (Either ApiError Whitelist)
add e c =
performRequest "/whitelists/add.json" $
[ "email" .= e
, "comment" .= c]
-- | Retrieves your email rejection whitelist. You can provide an email address
-- or search prefix to limit the results. Returns up to 1000 results.
list :: (MonadIO m) =>
Email ->
MandrillT m (Either ApiError [Whitelist])
list e = performRequest "/whitelists/list.json" ["email" .= e]
-- | Removes an email address from the whitelist.
delete :: (MonadIO m) =>
Email ->
MandrillT m (Either ApiError Whitelist)
delete e = performRequest "/whitelists/delete.json" ["email" .= e]
|
krgn/hamdrill
|
src/Network/API/Mandrill/Whitelists.hs
|
mit
| 1,110 | 0 | 10 | 267 | 207 | 116 | 91 | 21 | 1 |
module Main (main) where
import Control.Applicative ((<$>), (<*>))
import Control.Monad (when)
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as BC
import Data.Monoid ((<>))
import Options.Applicative (eitherReader, Parser, strOption, long, short, metavar, help, option, ReadM, execParser, info, fullDesc, progDesc, header, helper)
import SSH.Key (PrivateKey(Ed25519PrivateKey), serialiseKey, parseKey, privateKeys)
import SSH.Key.Derived (deriveKey)
import System.Directory (doesFileExist)
import System.Entropy (getEntropy)
import System.Posix.Files (setFileCreationMask, groupModes, otherModes, unionFileModes)
data Args = Args
{ argsSeed :: FilePath
, argsMode :: Modes
, argsOutput :: FilePath
, argsHandle :: String
}
data Modes = Raw | Generate | PrivateKey
parseMode :: String -> Either String Modes
parseMode "raw" = Right Raw
parseMode "generate" = Right Generate
parseMode "key" = Right PrivateKey
parseMode _ = Left "unknown mode"
mode :: ReadM Modes
mode = eitherReader parseMode
parseArgs :: Parser Args
parseArgs = Args
<$> strOption
( long "seed"
<> short 's'
<> metavar "SEEDFILE"
<> help "File the seed is stored in" )
<*> option mode
( long "mode"
<> short 'm'
<> metavar "MODE"
<> help "Mode to handle the seed in: raw, generate, key" )
<*> strOption
( long "output"
<> short 'o'
<> metavar "OUTPUT"
<> help "File the generated key is stored in" )
<*> strOption
( long "handle"
<> short 'h'
<> metavar "HANDLE"
<> help "The key handle to use" )
getSeed :: Modes -> FilePath -> IO B.ByteString
getSeed Raw seedFile = do
seed <- B.readFile seedFile
when (B.length seed < 32) (fail "Seed is shorter than 32 bytes")
return seed
getSeed Generate seedFile = do
doesFileExist seedFile >>= flip when (fail "Refusing to overwrite seed")
seed <- getEntropy 32
B.writeFile seedFile seed
return seed
getSeed PrivateKey seedFile = do
Right box <- parseKey <$> B.readFile seedFile
let Ed25519PrivateKey _ key _ = head . privateKeys $ box
return $ B.take 32 key
main :: IO ()
main = do
args <- execParser $ info (helper <*> parseArgs) (
fullDesc
<> progDesc "Generate an Ed25519 SSH key deterministically"
<> header "ssh-key-generator - a deterministic SSH key generator")
let handle = BC.pack (argsHandle args)
seed <- getSeed (argsMode args) (argsSeed args)
_ <- setFileCreationMask $ groupModes `unionFileModes` otherModes
B.writeFile (argsOutput args) . serialiseKey . snd $ deriveKey seed handle
|
mithrandi/ssh-key-generator
|
app/KeyTool.hs
|
mit
| 2,728 | 0 | 14 | 658 | 806 | 415 | 391 | 71 | 1 |
module Language.Jass.ShowIndent(
ShowIndent(..),
makeIndent,
commaSep,
newlineSep,
sepWith
) where
-- | Printing with indentation
class ShowIndent a where
-- | Like 'show', but indents the output to the specified level
showIndent :: Int -> a -> String
-- | Default implementation of indentation
makeIndent :: Int -> String
makeIndent i = replicate i '\t'
-- | Print list separated by comma
commaSep :: [String] -> String
commaSep = sepWith ", "
-- | Print list separated by newline
newlineSep :: [String] -> String
newlineSep = sepWith "\n"
-- | Print list with specified separator
sepWith :: String -> [String] -> String
sepWith sep = go ""
where go acc [] = acc
go acc [x] = acc ++ x
go acc (x:xs) = go (acc ++ x ++ sep) xs
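-- A hypothetical example instance (the 'Item' type below is illustrative
-- only, not part of this library), showing how 'showIndent', 'makeIndent'
-- and 'newlineSep' are meant to compose:
data Item = Leaf String | Group [Item]

instance ShowIndent Item where
  showIndent lvl (Leaf s)   = makeIndent lvl ++ s
  showIndent lvl (Group xs) = newlineSep (map (showIndent (lvl + 1)) xs)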
|
NCrashed/hjass
|
src/library/Language/Jass/ShowIndent.hs
|
mit
| 766 | 0 | 10 | 176 | 209 | 116 | 93 | 19 | 3 |
import Test.HUnit
import qualified Data.Map as M
import Expr
import Nat
import Tree
-- Expr
expr = BinOp Minus (Var "y") (Const (I 3))
prog1 =
[ "x" $= Const (B True)
, "x" $= Const (I 3)
, "y" $= Var "x" `plus` Var "x"
, "x" $= Const (B False)
]
prog2 =
[ "x" $= Const (B True)
, "x" $= Const (I 3)
, "z" $= Const (B True)
, "y" $= Var "x" `plus` Var "z"
]
prog3 =
[ If (Var "x") [
"a" $= Var "y" `plus` Const (I 3)
] `else_` [
"b" $= Var "z" `plus` Const (I 3)
]
, "c" $= Var "a"
]
fac =
[ "i" $= Const (I 0)
, "r" $= Const (I 1)
, While (Var "i" `less` Var "n")
[ "i" $= Var "i" `plus` Const (I 1)
, "r" $= Var "r" `mul` Var "i"
]
]
testsExpr = [ evalExpr (M.fromList [("x", B False), ("y", I 5)]) expr ~?= Right (I 2)
, errorsCount (evalExpr (M.fromList [("y", B False)]) expr) ~?= 1
]
testsProgram = [ evalProgram M.empty prog1 ~?= Right (M.fromList [("x",B False),("y",I 6)])
, errorsCount (evalProgram M.empty prog2) ~?= 1
, evalProgram (M.fromList [("n",I 5)]) fac ~?= Right (M.fromList [("n",I 5),("i",I 5),("r",I 120)])
, evalProgram (M.fromList [("x",B True),("y",I 4),("z",B False)]) prog3 ~?= Right (M.fromList [("x",B True),("y",I 4),("z",B False),("a",I 7),("c",I 7)])
, errorsCount (evalProgram (M.fromList [("x",B False),("y",I 4),("z",B False),("a",I 7)]) prog3) ~?= 1
, evalProgram (M.fromList [("x",B True),("y",I 4),("z",I 5)]) prog3 ~?= Right (M.fromList [("x",B True),("y",I 4),("z",I 5),("a",I 7),("c",I 7)])
, errorsCount (evalProgram (M.fromList [("x",B False),("y",I 4),("z",I 5)]) prog3) ~?= 1
, errorsCount (evalProgram (M.fromList [("x",I 1),("y",I 4),("z",I 5),("a",I 6)]) prog3) ~?= 1
]
errorsCount = either length (const 0)
-- Tree
tree1 = Node "a" [Node "b" [Node "f" []], Node "c" [Node "d" []], Node "e" []]
tree2 = Node 1 [Node 2 [Node 4 [], Node 5 []], Node 3 []]
tree3 = Node "1" [Node "2" [Node "4" [], Node "5" []], Node "3" []]
testsTree =
[ tree2 ~?= tree2
, tree1 ~?/= tree3
, show tree1 ~?= "\"a\":{\"b\":{\"f\"},\"c\":{\"d\"},\"e\"}"
, show tree2 ~?= "1:{2:{4,5},3}"
, fmap show tree2 ~?= tree3
, (read "1:{2:{4,5},3}" :: Tree Int) ~?= tree2
, (reads "1:{2:{4,5},}" :: [(Tree Int,String)]) ~?= [(Node 1 [],":{2:{4,5},}")]
, (reads ",1:{2:{4,5},}" :: [(Tree Int,String)]) ~?= []
]
-- Nat
one = Suc Zero
two = Suc one
three = Suc two
testsNat =
[ two ~?= two
, three ~?/= two
, (two < three) ~?= True
, (one > three) ~?= False
, show two ~?= "2"
, show three ~?= "3"
, fromInteger 3 ~?= three
, three + fromInteger 7 ~?= fromInteger 10
, three * fromInteger 7 ~?= fromInteger 21
]
-- main
(~?/=) :: (Eq a, Show a) => a -> a -> Test
x ~?/= y = TestCase $ assertBool (show x ++ " should not be equal to " ++ show y) (x /= y)
main = fmap (\_ -> ()) $ runTestTT $ test $
label "evalExpr" testsExpr
++ label "evalProgram" testsProgram
++ label "Tree" testsTree
++ label "Nat" testsNat
where
label :: String -> [Test] -> [Test]
label l = map (\(i,t) -> TestLabel (l ++ " [" ++ show i ++ "]") t) . zip [1..]
|
nkartashov/haskell
|
hw06/Main.hs
|
gpl-2.0
| 3,329 | 0 | 15 | 936 | 1,765 | 941 | 824 | 73 | 1 |
main = do
a <- return "hell"
b <- return "yeah!"
putStrLn $ a ++ " " ++ b
|
softwaremechanic/Miscellaneous
|
Haskell/7.hs
|
gpl-2.0
| 81 | 0 | 9 | 26 | 41 | 18 | 23 | 4 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
-----------------------------------------------------------------------------
--
-- Module : IDE.Pane.WebKit.Inspect
-- Copyright : 2007-2014 Juergen Nicklisch-Franken, Hamish Mackenzie
-- License : GPL
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
module IDE.Pane.WebKit.Inspect (
IDEInspect(..)
, InspectState(..)
, getInspectPane
) where
import Graphics.UI.Frame.Panes
(RecoverablePane(..), PanePath, RecoverablePane, Pane(..))
import IDE.Utils.GUIUtils
import Data.Typeable (Typeable)
import IDE.Core.Types (IDEAction, IDEM, IDE(..))
import Control.Monad.IO.Class (MonadIO(..))
import Graphics.UI.Frame.ViewFrame (getNotebook)
import IDE.Core.State
(modifyIDE_, postSyncIDE, reifyIDE, leksahOrPackageDir)
import IDE.Core.State (reflectIDE)
import Graphics.UI.Editor.Basics (Connection(..))
import Text.Show.Pretty
(HtmlOpts(..), defaultHtmlOpts, valToHtmlPage, parseValue, getDataDir)
import System.FilePath ((</>))
import Data.IORef (writeIORef, newIORef, readIORef, IORef)
import Control.Applicative ((<$>))
import System.Log.Logger (debugM)
import Data.Text (Text)
import qualified Data.Text as T (unpack, pack)
import GI.Gtk.Objects.ScrolledWindow
(scrolledWindowSetPolicy, scrolledWindowSetShadowType,
scrolledWindowNew, ScrolledWindow(..))
import GI.WebKit.Objects.WebView
(setWebViewSettings, getWebViewSettings, webViewNew, WebView(..))
import GI.Gtk.Objects.Widget (afterWidgetFocusInEvent, toWidget)
import GI.Gtk.Objects.Adjustment (noAdjustment)
import GI.Gtk.Enums (PolicyType(..), ShadowType(..))
import GI.WebKit.Objects.WebSettings
(setWebSettingsMonospaceFontFamily)
import GI.Gtk.Objects.Container (containerAdd)
data IDEInspect = IDEInspect {
scrollWin :: ScrolledWindow
, inspectView :: WebView
} deriving Typeable
data InspectState = InspectState {
} deriving(Eq,Ord,Read,Show,Typeable)
instance Pane IDEInspect IDEM
where
primPaneName _ = "Inspect"
getAddedIndex _ = 0
getTopWidget = liftIO . toWidget . scrollWin
paneId b = "*Inspect"
instance RecoverablePane IDEInspect InspectState IDEM where
saveState p = return (Just InspectState{})
recoverState pp InspectState {} = do
nb <- getNotebook pp
buildPane pp nb builder
builder pp nb windows = reifyIDE $ \ ideR -> do
scrollWin <- scrolledWindowNew noAdjustment noAdjustment
scrolledWindowSetShadowType scrollWin ShadowTypeIn
inspectView <- webViewNew
settings <- getWebViewSettings inspectView
setWebSettingsMonospaceFontFamily settings "Consolas"
setWebViewSettings inspectView settings
alwaysHtmlRef <- newIORef False
containerAdd scrollWin inspectView
scrolledWindowSetPolicy scrollWin PolicyTypeAutomatic PolicyTypeAutomatic
let inspect = IDEInspect {..}
cid1 <- ConnectC inspectView <$> afterWidgetFocusInEvent inspectView ( \e -> do
liftIO $ reflectIDE (makeActive inspect) ideR
return True)
return (Just inspect, [cid1])
getInspectPane :: Maybe PanePath -> IDEM IDEInspect
getInspectPane Nothing = forceGetPane (Right "*Inspect")
getInspectPane (Just pp) = forceGetPane (Left pp)
|
JPMoresmau/leksah
|
src/IDE/Pane/WebKit/Inspect.hs
|
gpl-2.0
| 3,602 | 1 | 20 | 620 | 829 | 482 | 347 | 74 | 1 |
-- String.hs Copyright 2014-17 John F. Miller
-- | String data and functions
{-# LANGUAGE OverloadedStrings #-}
module String
( module Data.String
, SapString
, mkStringLiteral
, string
, stringLength
, sconcat
) where
import qualified Data.Text.Lazy as T
import Data.Monoid
import Data.String
import Control.Monad.Except
data SapString =
SapString
{ escapes :: [String]
, text :: T.Text
}
instance Show SapString where
show s = '"':(T.unpack $ text s) ++ ['"']
instance Eq SapString where
a == b = text a == text b
stringLength s = fromIntegral $ T.length $ text s
instance IsString SapString where
fromString str = mkStringLiteral str
mkStringLiteral :: String -> SapString
mkStringLiteral s = SapString { escapes = [], text = T.pack s }
string :: SapString -> String
string = T.unpack . text
sconcat :: (IsString e, MonadError e m) =>
SapString -> SapString -> m SapString
sconcat SapString{escapes=e1, text=x} SapString{escapes=e2, text=y}
| (e1 == e2) = return SapString{escapes = e1, text=mappend x y}
| otherwise = throwError $ fromString
"String Error:Concatenating Strings with different escapes!"
instance Monoid SapString where
mempty = SapString{escapes = [], text = T.empty}
mappend SapString{escapes=e1, text=x} SapString{escapes=e2, text=y}
| (e1 == e2) = SapString{escapes = e1, text=mappend x y}
| otherwise = error "Concatenating Strings with different escapes!" -- TODO fixme
|
antarestrader/sapphire
|
String.hs
|
gpl-3.0
| 1,488 | 0 | 10 | 306 | 487 | 267 | 220 | 38 | 1 |
{-# LANGUAGE OverloadedLists #-}
module Kalkulu.Builtin.AtomQ(atomQ) where
import Control.Monad (when)
import Kalkulu.Builtin
import Kalkulu.BuiltinSymbol as B
atomQ :: BuiltinDefinition
atomQ = defaultBuiltin {
downcode = downcodeAtomQ
}
downcodeAtomQ :: Expression -> Kernel Expression
downcodeAtomQ e@(Cmp _ args) = do
when (length args /= 1) undefined -- TODO: sendMessage
return $ pureAtomQ e
downcodeAtomQ _ = error "unreachable"
pureAtomQ :: Expression -> Expression
pureAtomQ (Cmp _ [Cmp _ _]) = SymbolB B.False
pureAtomQ (Cmp _ [_]) = SymbolB B.True
pureAtomQ e = e
|
vizietto/kalkulu
|
src/Kalkulu/Builtin/AtomQ.hs
|
gpl-3.0
| 612 | 0 | 10 | 119 | 194 | 103 | 91 | 17 | 1 |
module DependencyGraph (
module Graphs
, module Imports
, module Modules
, module Loaders
, startGraph
, displayGraph
, printableNode
, printGraph
) where
import Control.Applicative
import Data.String.Utils
import System.IO
import Paths_dependency_graph
import DependencyGraph.GraphModules as Graphs
import DependencyGraph.ImportLine as Imports
import DependencyGraph.Modules as Modules
import DependencyGraph.Loaders as Loaders
printEdge :: (String, String) -> String
printEdge (a, b) = "(" ++ a ++ ", " ++ b ++ ")\n"
printableNode :: Node -> String
printableNode nd
| null $ edges nd = ""
| otherwise = node nd ++ " Edges: \n" ++ concatMap printEdge (edges nd) ++ "\n\n"
printGraph :: IO [Node] -> IO ()
printGraph nds = do
nods <- nds
putStrLn $ concatMap printableNode nods
putStrLn "Modules discovered: "
putStrLn $ concatMap (\n -> node n ++ "\n") nods
sub :: String
sub = "//### EDGES ###//\n"
edgeToLink :: (String, String) -> String
edgeToLink (a, b) = "{source: \"" ++ a ++ "\", target: \"" ++ b ++ "\", type: \"direct\"},\n"
-- rework to use readerT Environment ...
startGraph :: FilePath -> EnvT [Node]
startGraph infile = do
firstNode <- makeNode (pure infile)
generateGraph (pure [firstNode])
displayGraph :: [Node] -> IO String
displayGraph nods = do
template <- getDataFileName "html/index.html"
content <- readFile template
let all_edges = (concat . filter (not . null)) $ map edges nods
let links = concatMap edgeToLink all_edges
replace sub <$> pure links <*> pure content
|
pellagic-puffbomb/haskpy-dependency-graphs
|
src/DependencyGraph.hs
|
gpl-3.0
| 1,547 | 0 | 15 | 288 | 493 | 257 | 236 | 44 | 1 |
{- |
Module : Tct.Encoding.Precedence
Copyright : (c) Martin Avanzini <[email protected]>,
Georg Moser <[email protected]>,
Andreas Schnabl <[email protected]>
License : LGPL (see COPYING)
Maintainer : Martin Avanzini <[email protected]>
Stability : unstable
Portability : unportable
This module implements a SAT encoding of quasi precedences.
-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
module Tct.Encoding.Precedence
(
validPrecedenceM
-- | Add this constraint for a valid SAT encoding.
, encodeRecDepthM
-- | Initial constraints on recursion depth encoding
, restrictRecDepthM
-- | Restrict the recursion depth to a given bound
, precGt
-- | 'f ``precGt`` g` asserts that 'f' is strictly
-- above 'g' in the precedence
, precEq
-- | Assert equivalence in the precedence.
, initial
-- | The initial argument filtering.
-- * Recursive Symbols
-- | This is used by "Tct.Method.PopStar" to
-- obtain a more precise complexity certificate
, RecursiveSymbols (..)
, initialRecursiveSymbols
, isRecursive)
where
import Data.Typeable (Typeable)
import qualified Data.Set as Set
import qualified Termlib.Precedence as Prec
import Termlib.Precedence (Precedence, Order(..))
import Qlogic.Formula hiding (size)
import Qlogic.Boolean
import Prelude hiding ((&&),(||),not)
import Qlogic.PropositionalFormula
import Qlogic.NatSat (NatMonad, mGrt,mEqu,Size(..),natAtom, toFormula, natToFormula, NatFormula)
import Termlib.FunctionSymbol (Symbol, Signature)
import Qlogic.SatSolver
instance PropAtom (Order Symbol)
data Rank = Rank Symbol deriving (Typeable, Show, Eq, Ord)
instance PropAtom Rank
data RecDepth = RecDepth Symbol deriving (Typeable, Show, Eq, Ord)
instance PropAtom RecDepth
data IsRecursive = IsRecursive Symbol deriving (Typeable, Show, Eq, Ord)
instance PropAtom IsRecursive
newtype RecursiveSymbols = RS (Set.Set Symbol)
initial :: Signature -> Precedence
initial = Prec.empty
initialRecursiveSymbols :: RecursiveSymbols
initialRecursiveSymbols = RS Set.empty
gt :: Eq l => Symbol -> Symbol -> PropFormula l
f `gt` g = propAtom $ f :>: g
eq :: Eq l => Symbol -> Symbol -> PropFormula l
f `eq` g = propAtom $ f :~: g
precGt :: Eq l => Symbol -> Symbol -> PropFormula l
f `precGt` g | f == g = Bot
| otherwise = f `gt` g
precEq :: Eq l => Symbol -> Symbol -> PropFormula l
f `precEq` g | f == g = Top
| f < g = f `eq` g
| otherwise = g `eq` f
isRecursive :: Eq l => Symbol -> PropFormula l
isRecursive = propAtom . IsRecursive
validPrecedenceM :: (Eq l, Monad s, Solver s l) => [Symbol] -> SatSolver s l (PropFormula l)
validPrecedenceM [] = return $ Top
validPrecedenceM syms = toFormula constraint
where rank sym = natAtom size (Rank sym)
size = Bound $ length syms
constraint = bigAnd [ bigAnd [ f `mgt` g --> rank f `mGrt` rank g
, g `mgt` f --> rank g `mGrt` rank f
, f `meq` g --> rank f `mEqu` rank g]
| f <- syms, g <- syms, f < g ]
f `mgt` g = return $ f `gt` g
f `meq` g = return $ f `eq` g
recDepth :: Eq l => Int -> Symbol -> NatFormula l
recDepth maxrd sym = natAtom (Bound $ max 1 maxrd) (RecDepth sym)
encodeRecDepthM :: (Eq l, Monad s, Solver s l) => [Symbol] -> Int -> NatMonad s l (PropFormula l)
encodeRecDepthM syms bound =
bigAnd [ isRecursiveM f --> recdepth f `mGrt` natToFormula 0 | f <- syms]
&&
bigAnd [ bigAnd [ f `mgt` g --> f `recGt` g
, g `mgt` f --> g `recGt` f
, f `meq` g --> (recdepth f `mEqu` recdepth g)]
| f <- syms, g <- syms, f < g ]
where
recdepth = recDepth bound
isRecursiveM = return . isRecursive
isNonRecursiveM = not . isRecursiveM
f `recGt` g = recdepth f `mGrt` recdepth g
|| (isNonRecursiveM f && recdepth f `mEqu` recdepth g)
f `mgt` g = return $ f `gt` g
f `meq` g = return $ f `eq` g
restrictRecDepthM :: (Eq l, Monad s, Solver s l) => [Symbol] -> Int -> NatMonad s l (PropFormula l)
restrictRecDepthM syms bound =
bigAnd [ natToFormula (bound + 1) `mGrt` recdepth f | f <- syms]
where
recdepth = recDepth bound
instance Decoder Precedence (Order Symbol) where
add = Prec.insert
instance Decoder RecursiveSymbols IsRecursive where
add (IsRecursive f) (RS fs) = RS (f `Set.insert` fs)
|
mzini/TcT
|
source/Tct/Encoding/Precedence.hs
|
gpl-3.0
| 4,787 | 0 | 13 | 1,307 | 1,431 | 776 | 655 | 87 | 1 |
{-# LANGUAGE BangPatterns #-}
-----------------------------------------------------------------------------
--
-- Fast write-buffered Handles
--
-- (c) The University of Glasgow 2005-2006
--
-- This is a simple abstraction over Handles that offers very fast write
-- buffering, but without the thread safety that Handles provide. It's used
-- to save time in Pretty.printDoc.
--
-----------------------------------------------------------------------------
module U.BufWrite (
BufHandle(..),
newBufHandle,
bPutChar,
bPutStr,
bPutFS,
bPutFZS,
bPutLitString,
bFlush,
) where
import Language.Haskell.Utility.FastString
import U.FastMutInt
import Control.Monad ( when )
import Data.ByteString (ByteString)
import qualified Data.ByteString.Unsafe as BS
import Data.Char ( ord )
import Foreign
import Foreign.C.String
import System.IO
-- -----------------------------------------------------------------------------
data BufHandle = BufHandle {-#UNPACK#-}!(Ptr Word8)
{-#UNPACK#-}!FastMutInt
Handle
newBufHandle :: Handle -> IO BufHandle
newBufHandle hdl = do
ptr <- mallocBytes buf_size
r <- newFastMutInt
writeFastMutInt r 0
return (BufHandle ptr r hdl)
buf_size :: Int
buf_size = 8192
bPutChar :: BufHandle -> Char -> IO ()
bPutChar b@(BufHandle buf r hdl) !c = do
i <- readFastMutInt r
if (i >= buf_size)
then do hPutBuf hdl buf buf_size
writeFastMutInt r 0
bPutChar b c
else do pokeElemOff buf i (fromIntegral (ord c) :: Word8)
writeFastMutInt r (i+1)
bPutStr :: BufHandle -> String -> IO ()
bPutStr (BufHandle buf r hdl) !str = do
i <- readFastMutInt r
loop str i
where loop _ i | i `seq` False = undefined
loop "" i = do writeFastMutInt r i; return ()
loop (c:cs) i
| i >= buf_size = do
hPutBuf hdl buf buf_size
loop (c:cs) 0
| otherwise = do
pokeElemOff buf i (fromIntegral (ord c))
loop cs (i+1)
bPutFS :: BufHandle -> FastString -> IO ()
bPutFS b fs = bPutBS b $ fastStringToByteString fs
bPutFZS :: BufHandle -> FastZString -> IO ()
bPutFZS b fs = bPutBS b $ fastZStringToByteString fs
bPutBS :: BufHandle -> ByteString -> IO ()
bPutBS b bs = BS.unsafeUseAsCStringLen bs $ bPutCStringLen b
bPutCStringLen :: BufHandle -> CStringLen -> IO ()
bPutCStringLen b@(BufHandle buf r hdl) cstr@(ptr, len) = do
i <- readFastMutInt r
if (i + len) >= buf_size
then do hPutBuf hdl buf i
writeFastMutInt r 0
if (len >= buf_size)
then hPutBuf hdl ptr len
else bPutCStringLen b cstr
else do
copyBytes (buf `plusPtr` i) ptr len
writeFastMutInt r (i + len)
bPutLitString :: BufHandle -> LitString -> Int -> IO ()
bPutLitString b@(BufHandle buf r hdl) a len = a `seq` do
i <- readFastMutInt r
if (i+len) >= buf_size
then do hPutBuf hdl buf i
writeFastMutInt r 0
if (len >= buf_size)
then hPutBuf hdl a len
else bPutLitString b a len
else do
copyBytes (buf `plusPtr` i) a len
writeFastMutInt r (i+len)
bFlush :: BufHandle -> IO ()
bFlush (BufHandle buf r hdl) = do
i <- readFastMutInt r
when (i > 0) $ hPutBuf hdl buf i
free buf
return ()
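-- A minimal usage sketch (added for illustration; not part of the original
-- module): buffer a string to stdout and flush it.
exampleWrite :: IO ()
exampleWrite = do
  b <- newBufHandle stdout
  bPutStr b "hello, world\n"
  bFlush b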
|
shayan-najd/HsParser
|
U/BufWrite.hs
|
gpl-3.0
| 3,511 | 0 | 14 | 1,048 | 1,080 | 541 | 539 | 88 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.CloudWatchLogs.DeleteLogStream
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Deletes a log stream and permanently deletes all the archived log events
-- associated with it.
--
-- <http://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_DeleteLogStream.html>
module Network.AWS.CloudWatchLogs.DeleteLogStream
(
-- * Request
DeleteLogStream
-- ** Request constructor
, deleteLogStream
-- ** Request lenses
, dlsLogGroupName
, dlsLogStreamName
-- * Response
, DeleteLogStreamResponse
-- ** Response constructor
, deleteLogStreamResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.CloudWatchLogs.Types
import qualified GHC.Exts
data DeleteLogStream = DeleteLogStream
{ _dlsLogGroupName :: Text
, _dlsLogStreamName :: Text
} deriving (Eq, Ord, Read, Show)
-- | 'DeleteLogStream' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dlsLogGroupName' @::@ 'Text'
--
-- * 'dlsLogStreamName' @::@ 'Text'
--
deleteLogStream :: Text -- ^ 'dlsLogGroupName'
-> Text -- ^ 'dlsLogStreamName'
-> DeleteLogStream
deleteLogStream p1 p2 = DeleteLogStream
{ _dlsLogGroupName = p1
, _dlsLogStreamName = p2
}
dlsLogGroupName :: Lens' DeleteLogStream Text
dlsLogGroupName = lens _dlsLogGroupName (\s a -> s { _dlsLogGroupName = a })
dlsLogStreamName :: Lens' DeleteLogStream Text
dlsLogStreamName = lens _dlsLogStreamName (\s a -> s { _dlsLogStreamName = a })
data DeleteLogStreamResponse = DeleteLogStreamResponse
deriving (Eq, Ord, Read, Show, Generic)
-- | 'DeleteLogStreamResponse' constructor.
deleteLogStreamResponse :: DeleteLogStreamResponse
deleteLogStreamResponse = DeleteLogStreamResponse
instance ToPath DeleteLogStream where
toPath = const "/"
instance ToQuery DeleteLogStream where
toQuery = const mempty
instance ToHeaders DeleteLogStream
instance ToJSON DeleteLogStream where
toJSON DeleteLogStream{..} = object
[ "logGroupName" .= _dlsLogGroupName
, "logStreamName" .= _dlsLogStreamName
]
instance AWSRequest DeleteLogStream where
type Sv DeleteLogStream = CloudWatchLogs
type Rs DeleteLogStream = DeleteLogStreamResponse
request = post "DeleteLogStream"
response = nullResponse DeleteLogStreamResponse
|
dysinger/amazonka
|
amazonka-cloudwatch-logs/gen/Network/AWS/CloudWatchLogs/DeleteLogStream.hs
|
mpl-2.0
| 3,342 | 0 | 9 | 719 | 408 | 247 | 161 | 54 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.SQL.Instances.Patch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates settings of a Cloud SQL instance. Caution: This is not a partial
-- update, so you must include values for all the settings that you want to
-- retain. For partial updates, use patch. This method supports patch
-- semantics.
--
-- /See:/ <https://cloud.google.com/sql/docs/reference/latest Cloud SQL Administration API Reference> for @sql.instances.patch@.
module Network.Google.Resource.SQL.Instances.Patch
(
-- * REST Resource
InstancesPatchResource
-- * Creating a Request
, instancesPatch
, InstancesPatch
-- * Request Lenses
, ipProject
, ipPayload
, ipInstance
) where
import Network.Google.Prelude
import Network.Google.SQLAdmin.Types
-- | A resource alias for @sql.instances.patch@ method which the
-- 'InstancesPatch' request conforms to.
type InstancesPatchResource =
"sql" :>
"v1beta4" :>
"projects" :>
Capture "project" Text :>
"instances" :>
Capture "instance" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] DatabaseInstance :>
Patch '[JSON] Operation
-- | Updates settings of a Cloud SQL instance. Caution: This is not a partial
-- update, so you must include values for all the settings that you want to
-- retain. For partial updates, use patch. This method supports patch
-- semantics.
--
-- /See:/ 'instancesPatch' smart constructor.
data InstancesPatch = InstancesPatch'
{ _ipProject :: !Text
, _ipPayload :: !DatabaseInstance
, _ipInstance :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'InstancesPatch' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ipProject'
--
-- * 'ipPayload'
--
-- * 'ipInstance'
instancesPatch
:: Text -- ^ 'ipProject'
-> DatabaseInstance -- ^ 'ipPayload'
-> Text -- ^ 'ipInstance'
-> InstancesPatch
instancesPatch pIpProject_ pIpPayload_ pIpInstance_ =
InstancesPatch'
{ _ipProject = pIpProject_
, _ipPayload = pIpPayload_
, _ipInstance = pIpInstance_
}
-- | Project ID of the project that contains the instance.
ipProject :: Lens' InstancesPatch Text
ipProject
= lens _ipProject (\ s a -> s{_ipProject = a})
-- | Multipart request metadata.
ipPayload :: Lens' InstancesPatch DatabaseInstance
ipPayload
= lens _ipPayload (\ s a -> s{_ipPayload = a})
-- | Cloud SQL instance ID. This does not include the project ID.
ipInstance :: Lens' InstancesPatch Text
ipInstance
= lens _ipInstance (\ s a -> s{_ipInstance = a})
instance GoogleRequest InstancesPatch where
type Rs InstancesPatch = Operation
type Scopes InstancesPatch =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/sqlservice.admin"]
requestClient InstancesPatch'{..}
= go _ipProject _ipInstance (Just AltJSON) _ipPayload
sQLAdminService
where go
= buildClient (Proxy :: Proxy InstancesPatchResource)
mempty
|
rueshyna/gogol
|
gogol-sqladmin/gen/Network/Google/Resource/SQL/Instances/Patch.hs
|
mpl-2.0
| 3,925 | 0 | 15 | 909 | 470 | 283 | 187 | 73 | 1 |
module Main where
data Expr = Expr | Stmt
main :: IO ()
main = putStrLn "Hello, GrCal"
|
gr-a-m/GrCal
|
src/Main.hs
|
mpl-2.0
| 89 | 0 | 6 | 20 | 32 | 18 | 14 | 4 | 1 |
ans :: [Int] -> String
ans (0:y:_)
| y <= 1911 = "M" ++ show(y-1867)
| y <= 1925 = "T" ++ show(y-1911)
| y <= 1988 = "S" ++ show(y-1925)
| otherwise = "H" ++ show(y-1988)
ans (1:y:_) = show(y + 1867)
ans (2:y:_) = show(y + 1911)
ans (3:y:_) = show(y + 1925)
ans (4:y:_) = show(y + 1988)
main = do
c <- getLine
let i = map read $ words c :: [Int]
o = ans i
putStrLn o
|
a143753/AOJ
|
0337.hs
|
apache-2.0
| 392 | 3 | 11 | 108 | 306 | 150 | 156 | 15 | 1 |
-----------------------------------------------------------------------------
-- Copyright 2019, Ideas project team. This file is distributed under the
-- terms of the Apache License 2.0. For more information, see the files
-- "LICENSE.txt" and "NOTICE.txt", which are included in the distribution.
-----------------------------------------------------------------------------
-- |
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable (depends on ghc)
--
-- Extensions to the QuickCheck library
--
-----------------------------------------------------------------------------
module Ideas.Utils.QuickCheck
( module Test.QuickCheck
-- * Data type
, ArbGen, generator, generators
-- * Constructors
, arbGen, constGen, constGens, unaryGen, unaryGens
, unaryArbGen, binaryGen, binaryGens, toArbGen
-- * Frequency combinators
, common, uncommon, rare, changeFrequency
) where
import Control.Arrow
import Control.Monad
import Data.Ratio
import Data.Semigroup as Sem
import Test.QuickCheck
---------------------------------------------------------
-- @ArbGen@ datatype
newtype ArbGen a = AG [(Rational, (Int, Gen ([a] -> a)))]
instance Sem.Semigroup (ArbGen a) where
AG xs <> AG ys = AG (xs <> ys)
instance Monoid (ArbGen a) where
mempty = AG mempty
mappend = (<>)
generator :: ArbGen a -> Gen a
generator (AG pairs) = sized rec
where
factor = foldr (lcm . denominator . fst) 1 pairs
rec n = frequency (map make (select pairs))
where
select
| n == 0 = filter ((==0) . fst . snd)
| otherwise = id
make (r, (a, gf)) =
let m = round (fromInteger factor*r)
xs = replicateM a $ rec $ n `div` 2
in (m, gf <*> xs)
generators :: [ArbGen a] -> Gen a
generators = generator . mconcat
---------------------------------------------------------
-- Constructors
arbGen :: Arbitrary b => (b -> a) -> ArbGen a
arbGen f = newGen 0 (const . f <$> arbitrary)
constGen :: a -> ArbGen a
constGen = pureGen 0 . const
constGens :: [a] -> ArbGen a
constGens = mconcat . map constGen
unaryGen :: (a -> a) -> ArbGen a
unaryGen f = pureGen 1 (f . head)
unaryArbGen :: Arbitrary b => (b -> a -> a) -> ArbGen a
unaryArbGen f = newGen 1 $ (\a -> f a . head) <$> arbitrary
unaryGens :: [a -> a] -> ArbGen a
unaryGens = mconcat . map unaryGen
binaryGen :: (a -> a -> a) -> ArbGen a
binaryGen f = pureGen 2 (\xs -> f (head xs) (xs !! 1))
binaryGens :: [a -> a -> a] -> ArbGen a
binaryGens = mconcat . map binaryGen
pureGen :: Int -> ([a] -> a) -> ArbGen a
pureGen n = newGen n . return
toArbGen :: Gen a -> ArbGen a
toArbGen = newGen 0 . fmap const
newGen :: Int -> Gen ([a] -> a) -> ArbGen a
newGen n f = AG [(1, (n, f))]
---------------------------------------------------------
-- Frequency combinators
common, uncommon, rare :: ArbGen a -> ArbGen a
common = changeFrequency 2
uncommon = changeFrequency (1/2)
rare = changeFrequency (1/5)
changeFrequency :: Rational -> ArbGen a -> ArbGen a
changeFrequency r (AG xs) = AG (map (first (*r)) xs)
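---------------------------------------------------------
-- Usage sketch (illustrative only; the expression type below is not part of
-- this module): combining the constructors and frequency combinators above
-- into a sized generator.

data E = Lit Int | Neg E | Add E E
   deriving Show

genE :: Gen E
genE = generators [arbGen Lit, unaryGen Neg, rare (binaryGen Add)]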
|
ideas-edu/ideas
|
src/Ideas/Utils/QuickCheck.hs
|
apache-2.0
| 3,204 | 0 | 16 | 739 | 1,023 | 551 | 472 | 58 | 1 |
module Github.Review.URISpec where
import Control.Applicative
import qualified Data.List as L
import Data.Maybe
import Github.Review
import Network.URI
import Test.Hspec
-- import Test.QuickCheck
testUrl :: String
testUrl = "https://api.github.com/repos/scholarslab/SolrSearch/commits/df5618356282acedec1d603b115e29b1a5065092"
expectedUrl :: String
expectedUrl = "https://github.com/scholarslab/SolrSearch/commit/df5618356282acedec1d603b115e29b1a5065092"
url :: Maybe URI
url = toGithubUri <$> parseURI testUrl
shouldNotContain :: Maybe URI -> (URI -> String) -> String -> Expectation
shouldNotContain uri getter s =
getter (fromJust uri) `shouldSatisfy` (not . L.isInfixOf s)
shouldContain :: Maybe URI -> (URI -> String) -> String -> Expectation
shouldContain uri getter s =
getter (fromJust uri) `shouldSatisfy` L.isInfixOf s
spec :: Spec
spec =
describe "getGithubUri" $ do
it "should properly parse the URL." $
url `shouldSatisfy` isJust
it "should return the expected output." $
show (fromJust url) `shouldBe` expectedUrl
context "the host" $
it "should not contain the string 'api'." $
(url `shouldNotContain` (uriRegName . fromJust . uriAuthority)) "api"
context "the path" $ do
it "should not contain the string 'repos'." $
(url `shouldNotContain` uriPath) "repos"
it "should not contain the string 'commits'." $
(url `shouldNotContain` uriPath) "commits"
it "should contain the string 'commit'." $
(url `shouldContain` uriPath) "commit"
|
erochest/gitreview-core
|
specs/Github/Review/URISpec.hs
|
apache-2.0
| 1,720 | 0 | 14 | 455 | 380 | 199 | 181 | 36 | 1 |
{-# LANGUAGE FlexibleContexts, GADTs, DeriveDataTypeable #-}
module Propellor.Property.Chroot (
debootstrapped,
bootstrapped,
provisioned,
hostChroot,
Chroot(..),
ChrootBootstrapper(..),
Debootstrapped(..),
ChrootTarball(..),
noServices,
inChroot,
exposeTrueLocaldir,
-- * Internal use
provisioned',
propagateChrootInfo,
propellChroot,
chain,
chrootSystem,
) where
import Propellor.Base
import Propellor.Container
import Propellor.Types.CmdLine
import Propellor.Types.Chroot
import Propellor.Types.Info
import Propellor.Types.Core
import Propellor.Property.Chroot.Util
import qualified Propellor.Property.Debootstrap as Debootstrap
import qualified Propellor.Property.Systemd.Core as Systemd
import qualified Propellor.Property.File as File
import qualified Propellor.Shim as Shim
import Propellor.Property.Mount
import Utility.FileMode
import qualified Data.Map as M
import Data.List.Utils
import System.Posix.Directory
import System.Console.Concurrent
-- | Specification of a chroot. Normally you'll use `debootstrapped` or
-- `bootstrapped` to construct a Chroot value.
data Chroot where
Chroot :: ChrootBootstrapper b => FilePath -> b -> InfoPropagator -> Host -> Chroot
instance IsContainer Chroot where
containerProperties (Chroot _ _ _ h) = containerProperties h
containerInfo (Chroot _ _ _ h) = containerInfo h
setContainerProperties (Chroot loc b p h) ps =
let h' = setContainerProperties h ps
in Chroot loc b p h'
chrootSystem :: Chroot -> Maybe System
chrootSystem = fromInfoVal . fromInfo . containerInfo
instance Show Chroot where
show c@(Chroot loc _ _ _) = "Chroot " ++ loc ++ " " ++ show (chrootSystem c)
-- | Class of things that can do initial bootstrapping of an operating
-- System in a chroot.
class ChrootBootstrapper b where
-- | Do initial bootstrapping of an operating system in a chroot.
-- If the operating System is not supported, return
-- Left error message.
buildchroot :: b -> Maybe System -> FilePath -> Either String (Property Linux)
-- | Use this to bootstrap a chroot by extracting a tarball.
--
-- The tarball is expected to contain a root directory (no top-level
-- directory, also known as a "tarbomb").
-- It may be optionally compressed with any format `tar` knows how to
-- detect automatically.
data ChrootTarball = ChrootTarball FilePath
instance ChrootBootstrapper ChrootTarball where
buildchroot (ChrootTarball tb) _ loc = Right $
tightenTargets $ extractTarball loc tb
extractTarball :: FilePath -> FilePath -> Property UnixLike
extractTarball target src = check (unpopulated target) $
cmdProperty "tar" params
`assume` MadeChange
`requires` File.dirExists target
where
params =
[ "-C"
, target
, "-xf"
, src
]
-- | Use this to bootstrap a chroot with debootstrap.
data Debootstrapped = Debootstrapped Debootstrap.DebootstrapConfig
instance ChrootBootstrapper Debootstrapped where
buildchroot (Debootstrapped cf) system loc = case system of
(Just s@(System (Debian _ _) _)) -> Right $ debootstrap s
(Just s@(System (Buntish _) _)) -> Right $ debootstrap s
(Just (System ArchLinux _)) -> Left "Arch Linux not supported by debootstrap."
(Just (System (FreeBSD _) _)) -> Left "FreeBSD not supported by debootstrap."
Nothing -> Left "Cannot debootstrap; OS not specified"
where
debootstrap s = Debootstrap.built loc s cf
-- | Defines a Chroot at the given location, built with debootstrap.
--
-- Properties can be added to configure the Chroot. At a minimum,
-- add a property such as `osDebian` to specify the operating system
-- to bootstrap.
--
-- > debootstrapped Debootstrap.BuildD "/srv/chroot/ghc-dev" $ props
-- > & osDebian Unstable X86_64
-- > & Apt.installed ["ghc", "haskell-platform"]
-- > & ...
debootstrapped :: Debootstrap.DebootstrapConfig -> FilePath -> Props metatypes -> Chroot
debootstrapped conf = bootstrapped (Debootstrapped conf)
-- | Defines a Chroot at the given location, bootstrapped with the
-- specified ChrootBootstrapper.
bootstrapped :: ChrootBootstrapper b => b -> FilePath -> Props metatypes -> Chroot
bootstrapped bootstrapper location ps = c
where
c = Chroot location bootstrapper propagateChrootInfo (host location ps)
-- | Ensures that the chroot exists and is provisioned according to its
-- properties.
--
-- Reverting this property removes the chroot. Anything mounted inside it
-- is first unmounted. Note that it does not ensure that any processes
-- that might be running inside the chroot are stopped.
provisioned :: Chroot -> RevertableProperty (HasInfo + Linux) Linux
provisioned c = provisioned' c False
provisioned'
:: Chroot
-> Bool
-> RevertableProperty (HasInfo + Linux) Linux
provisioned' c@(Chroot loc bootstrapper infopropagator _) systemdonly =
	(infopropagator c normalContainerInfo $ setup `describe` chrootDesc c "exists")
<!>
(teardown `describe` chrootDesc c "removed")
where
setup :: Property Linux
setup = propellChroot c (inChrootProcess (not systemdonly) c) systemdonly
`requires` built
built = case buildchroot bootstrapper (chrootSystem c) loc of
Right p -> p
Left e -> cantbuild e
cantbuild e = property (chrootDesc c "built") (error e)
teardown :: Property Linux
teardown = check (not <$> unpopulated loc) $
property ("removed " ++ loc) $
makeChange (removeChroot loc)
type InfoPropagator = Chroot -> (PropagateInfo -> Bool) -> Property Linux -> Property (HasInfo + Linux)
propagateChrootInfo :: InfoPropagator
propagateChrootInfo c@(Chroot location _ _ _) pinfo p =
propagateContainer location c pinfo $
p `setInfoProperty` chrootInfo c
chrootInfo :: Chroot -> Info
chrootInfo (Chroot loc _ _ h) = mempty `addInfo`
mempty { _chroots = M.singleton loc h }
-- | Propellor is run inside the chroot to provision it.
propellChroot :: Chroot -> ([String] -> IO (CreateProcess, IO ())) -> Bool -> Property UnixLike
propellChroot c@(Chroot loc _ _ _) mkproc systemdonly = property (chrootDesc c "provisioned") $ do
let d = localdir </> shimdir c
let me = localdir </> "propellor"
shim <- liftIO $ ifM (doesDirectoryExist d)
( pure (Shim.file me d)
, Shim.setup me Nothing d
)
ifM (liftIO $ bindmount shim)
( chainprovision shim
, return FailedChange
)
where
bindmount shim = ifM (doesFileExist (loc ++ shim))
( return True
, do
let mntpnt = loc ++ localdir
createDirectoryIfMissing True mntpnt
boolSystem "mount"
[ Param "--bind"
, File localdir, File mntpnt
]
)
chainprovision shim = do
parenthost <- asks hostName
cmd <- liftIO $ toChain parenthost c systemdonly
pe <- liftIO standardPathEnv
(p, cleanup) <- liftIO $ mkproc
[ shim
, "--continue"
, show cmd
]
let p' = p { env = Just pe }
r <- liftIO $ withHandle StdoutHandle createProcessSuccess p'
processChainOutput
liftIO cleanup
return r
toChain :: HostName -> Chroot -> Bool -> IO CmdLine
toChain parenthost (Chroot loc _ _ _) systemdonly = do
onconsole <- isConsole <$> getMessageHandle
return $ ChrootChain parenthost loc systemdonly onconsole
chain :: [Host] -> CmdLine -> IO ()
chain hostlist (ChrootChain hn loc systemdonly onconsole) =
case findHostNoAlias hostlist hn of
Nothing -> errorMessage ("cannot find host " ++ hn)
Just parenthost -> case M.lookup loc (_chroots $ fromInfo $ hostInfo parenthost) of
Nothing -> errorMessage ("cannot find chroot " ++ loc ++ " on host " ++ hn)
Just h -> go h
where
go h = do
changeWorkingDirectory localdir
when onconsole forceConsole
onlyProcess (provisioningLock loc) $ do
r <- runPropellor (setInChroot h) $ ensureChildProperties $
if systemdonly
then [toChildProperty Systemd.installed]
else hostProperties h
flushConcurrentOutput
putStrLn $ "\n" ++ show r
chain _ _ = errorMessage "bad chain command"
inChrootProcess :: Bool -> Chroot -> [String] -> IO (CreateProcess, IO ())
inChrootProcess keepprocmounted (Chroot loc _ _ _) cmd = do
mountproc
return (proc "chroot" (loc:cmd), cleanup)
where
-- /proc needs to be mounted in the chroot for the linker to use
-- /proc/self/exe which is necessary for some commands to work
mountproc = unlessM (elem procloc <$> mountPointsBelow loc) $
void $ mount "proc" "proc" procloc mempty
procloc = loc </> "proc"
cleanup
| keepprocmounted = noop
| otherwise = whenM (elem procloc <$> mountPointsBelow loc) $
umountLazy procloc
provisioningLock :: FilePath -> FilePath
provisioningLock containerloc = "chroot" </> mungeloc containerloc ++ ".lock"
shimdir :: Chroot -> FilePath
shimdir (Chroot loc _ _ _) = "chroot" </> mungeloc loc ++ ".shim"
mungeloc :: FilePath -> String
mungeloc = replace "/" "_"
chrootDesc :: Chroot -> String -> String
chrootDesc (Chroot loc _ _ _) desc = "chroot " ++ loc ++ " " ++ desc
-- | Adding this property to a chroot prevents daemons and other services
-- from being started, which is often something you want to prevent when
-- building a chroot.
--
-- On Debian, this is accomplished by installing a </usr/sbin/policy-rc.d>
-- script that does not let any daemons be started by packages that use
-- invoke-rc.d. Reverting the property removes the script.
--
-- This property has no effect on non-Debian systems.
noServices :: RevertableProperty UnixLike UnixLike
noServices = setup <!> teardown
where
f = "/usr/sbin/policy-rc.d"
script = [ "#!/bin/sh", "exit 101" ]
setup = combineProperties "no services started" $ toProps
[ File.hasContent f script
, File.mode f (combineModes (readModes ++ executeModes))
]
teardown = File.notPresent f
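-- A hedged usage sketch: adding this to a chroot's properties (the chroot
-- location and package list are hypothetical):
--
-- > debootstrapped Debootstrap.BuildD "/srv/chroot/build" $ props
-- >	& osDebian Unstable X86_64
-- >	& noServices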
-- | Check if propellor is currently running within a chroot.
--
-- This allows properties to check and avoid performing actions that
-- should not be done in a chroot.
inChroot :: Propellor Bool
inChroot = extract . fromMaybe (InChroot False) . fromInfoVal <$> askInfo
where
extract (InChroot b) = b
setInChroot :: Host -> Host
setInChroot h = h { hostInfo = hostInfo h `addInfo` InfoVal (InChroot True) }
newtype InChroot = InChroot Bool
deriving (Typeable, Show)
-- | Runs an action with the true localdir exposed,
-- not the one bind-mounted into a chroot. The action is passed the
-- path containing the contents of the localdir outside the chroot.
--
-- In a chroot, this is accomplished by temporarily bind mounting the localdir
-- to a temp directory, to preserve access to the original bind mount. Then
-- we unmount the localdir to expose the true localdir. Finally, to cleanup,
-- the temp directory is bind mounted back to the localdir.
exposeTrueLocaldir :: (FilePath -> Propellor a) -> Propellor a
exposeTrueLocaldir a = ifM inChroot
( withTmpDirIn (takeDirectory localdir) "propellor.tmp" $ \tmpdir ->
bracket_
(movebindmount localdir tmpdir)
(movebindmount tmpdir localdir)
(a tmpdir)
, a localdir
)
where
movebindmount from to = liftIO $ do
run "mount" [Param "--bind", File from, File to]
-- Have to lazy unmount, because the propellor process
-- is running in the localdir that it's unmounting..
run "umount" [Param "-l", File from]
-- We were in the old localdir; move to the new one after
-- flipping the bind mounts. Otherwise, commands that try
-- to access the cwd will fail because it got umounted out
-- from under.
changeWorkingDirectory "/"
changeWorkingDirectory localdir
run cmd ps = unlessM (boolSystem cmd ps) $
error $ "exposeTrueLocaldir failed to run " ++ show (cmd, ps)
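-- A hedged sketch of using this; the property name, file paths, and the use
-- of System.Directory's copyFile are illustrative, not part of this module:
--
-- > copyFromHost :: Property UnixLike
-- > copyFromHost = property "copy config from host localdir" $
-- >	exposeTrueLocaldir $ \d ->
-- >		makeChange $ copyFile (d </> "files/app.conf") "/etc/app.conf"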
-- | Generates a Chroot that has all the properties of a Host.
--
-- Note that it's possible to create loops using this, where a host
-- contains a Chroot containing itself etc. Such loops will be detected at
-- runtime.
hostChroot :: ChrootBootstrapper bootstrapper => Host -> bootstrapper -> FilePath -> Chroot
hostChroot h bootstrapper d = chroot
where
chroot = Chroot d bootstrapper pinfo h
pinfo = propagateHostChrootInfo h
-- This is different than propagateChrootInfo in that Info using
-- HostContext is not made to use the name of the chroot as its context,
-- but instead uses the hostname of the Host.
propagateHostChrootInfo :: Host -> InfoPropagator
propagateHostChrootInfo h c pinfo p =
propagateContainer (hostName h) c pinfo $
p `setInfoProperty` chrootInfo c
|
ArchiveTeam/glowing-computing-machine
|
src/Propellor/Property/Chroot.hs
|
bsd-2-clause
| 12,151 | 124 | 17 | 2,160 | 3,056 | 1,586 | 1,470 | -1 | -1 |
{-# LANGUAGE BangPatterns, CPP, GeneralizedNewtypeDeriving #-}
-- |
-- Module : Data.Text.Foreign
-- Copyright : (c) 2009, 2010 Bryan O'Sullivan
--
-- License : BSD-style
-- Maintainer : [email protected]
-- Portability : GHC
--
-- Support for using 'Text' data with native code via the Haskell
-- foreign function interface.
module Data.Text.Foreign
(
-- * Interoperability with native code
-- $interop
I16
-- * Safe conversion functions
, fromPtr
, useAsPtr
, asForeignPtr
-- ** Encoding as UTF-8
, peekCStringLen
, withCStringLen
-- * Unsafe conversion code
, lengthWord16
, unsafeCopyToPtr
-- * Low-level manipulation
-- $lowlevel
, dropWord16
, takeWord16
) where
#if defined(ASSERTS)
import Control.Exception (assert)
#endif
#if MIN_VERSION_base(4,4,0)
import Control.Monad.ST.Unsafe (unsafeIOToST)
#else
import Control.Monad.ST (unsafeIOToST)
#endif
import Data.ByteString.Unsafe (unsafePackCStringLen, unsafeUseAsCStringLen)
import Data.Text.Encoding (decodeUtf8, encodeUtf8)
import Data.Text.Internal (Text(..), empty)
import Data.Text.Internal.Functions (unsafeWithForeignPtr)
import Data.Text.Unsafe (lengthWord16)
import Data.Word (Word16)
import Foreign.C.String (CStringLen)
import Foreign.ForeignPtr (ForeignPtr, mallocForeignPtrArray)
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Ptr (Ptr, castPtr, plusPtr)
import Foreign.Storable (peek, poke)
import qualified Data.Text.Array as A
-- $interop
--
-- The 'Text' type is implemented using arrays that are not guaranteed
-- to have a fixed address in the Haskell heap. All communication with
-- native code must thus occur by copying data back and forth.
--
-- The 'Text' type's internal representation is UTF-16, using the
-- platform's native endianness. This makes copied data suitable for
-- use with native libraries that use a similar representation, such
-- as ICU. To interoperate with native libraries that use different
-- internal representations, such as UTF-8 or UTF-32, consider using
-- the functions in the 'Data.Text.Encoding' module.
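--
-- A hedged round-trip sketch using 'useAsPtr' and 'fromPtr' below; it merely
-- copies the text into a temporary buffer and back out again:
--
-- > copyRoundTrip :: Text -> IO Text
-- > copyRoundTrip t = useAsPtr t fromPtr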
-- | A type representing a number of UTF-16 code units.
newtype I16 = I16 Int
deriving (Bounded, Enum, Eq, Integral, Num, Ord, Read, Real, Show)
-- | /O(n)/ Create a new 'Text' from a 'Ptr' 'Word16' by copying the
-- contents of the array.
fromPtr :: Ptr Word16 -- ^ source array
-> I16 -- ^ length of source array (in 'Word16' units)
-> IO Text
fromPtr _ (I16 0) = return empty
fromPtr ptr (I16 len) =
#if defined(ASSERTS)
assert (len > 0) $
#endif
return $! Text arr 0 len
where
arr = A.run (A.new len >>= copy)
copy marr = loop ptr 0
where
loop !p !i | i == len = return marr
| otherwise = do
A.unsafeWrite marr i =<< unsafeIOToST (peek p)
loop (p `plusPtr` 2) (i + 1)
-- $lowlevel
--
-- Foreign functions that use UTF-16 internally may return indices in
-- units of 'Word16' instead of characters. These functions may
-- safely be used with such indices, as they will adjust offsets if
-- necessary to preserve the validity of a Unicode string.
-- | /O(1)/ Return the prefix of the 'Text' of @n@ 'Word16' units in
-- length.
--
-- If @n@ would cause the 'Text' to end inside a surrogate pair, the
-- end of the prefix will be advanced by one additional 'Word16' unit
-- to maintain its validity.
takeWord16 :: I16 -> Text -> Text
takeWord16 (I16 n) t@(Text arr off len)
| n <= 0 = empty
| n >= len || m >= len = t
| otherwise = Text arr off m
where
m | w < 0xD800 || w > 0xDBFF = n
| otherwise = n+1
w = A.unsafeIndex arr (off+n-1)
-- | /O(1)/ Return the suffix of the 'Text', with @n@ 'Word16' units
-- dropped from its beginning.
--
-- If @n@ would cause the 'Text' to begin inside a surrogate pair, the
-- beginning of the suffix will be advanced by one additional 'Word16'
-- unit to maintain its validity.
dropWord16 :: I16 -> Text -> Text
dropWord16 (I16 n) t@(Text arr off len)
| n <= 0 = t
| n >= len || m >= len = empty
| otherwise = Text arr (off+m) (len-m)
where
m | w < 0xD800 || w > 0xDBFF = n
| otherwise = n+1
w = A.unsafeIndex arr (off+n-1)
-- | /O(n)/ Copy a 'Text' to an array. The array is assumed to be big
-- enough to hold the contents of the entire 'Text'.
unsafeCopyToPtr :: Text -> Ptr Word16 -> IO ()
unsafeCopyToPtr (Text arr off len) ptr = loop ptr off
where
end = off + len
loop !p !i | i == end = return ()
| otherwise = do
poke p (A.unsafeIndex arr i)
loop (p `plusPtr` 2) (i + 1)
-- | /O(n)/ Perform an action on a temporary, mutable copy of a
-- 'Text'. The copy is freed as soon as the action returns.
useAsPtr :: Text -> (Ptr Word16 -> I16 -> IO a) -> IO a
useAsPtr t@(Text _arr _off len) action =
allocaBytes (len * 2) $ \buf -> do
unsafeCopyToPtr t buf
action (castPtr buf) (fromIntegral len)
-- | /O(n)/ Make a mutable copy of a 'Text'.
asForeignPtr :: Text -> IO (ForeignPtr Word16, I16)
asForeignPtr t@(Text _arr _off len) = do
fp <- mallocForeignPtrArray len
unsafeWithForeignPtr fp $ unsafeCopyToPtr t
return (fp, I16 len)
-- | /O(n)/ Decode a C string with explicit length, which is assumed
-- to have been encoded as UTF-8. If decoding fails, a
-- 'UnicodeException' is thrown.
--
-- @since 1.0.0.0
peekCStringLen :: CStringLen -> IO Text
peekCStringLen cs = do
bs <- unsafePackCStringLen cs
return $! decodeUtf8 bs
-- | Marshal a 'Text' into a C string encoded as UTF-8 in temporary
-- storage, with explicit length information. The encoded string may
-- contain NUL bytes, and is not followed by a trailing NUL byte.
--
-- The temporary storage is freed when the subcomputation terminates
-- (either normally or via an exception), so the pointer to the
-- temporary storage must /not/ be used after this function returns.
--
-- @since 1.0.0.0
withCStringLen :: Text -> (CStringLen -> IO a) -> IO a
withCStringLen t act = unsafeUseAsCStringLen (encodeUtf8 t) act
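-- A hedged sketch of a UTF-8 round trip through temporary C-string storage,
-- combining 'withCStringLen' and 'peekCStringLen':
--
-- > utf8RoundTrip :: Text -> IO Text
-- > utf8RoundTrip t = withCStringLen t peekCStringLen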
|
bos/text
|
src/Data/Text/Foreign.hs
|
bsd-2-clause
| 6,172 | 0 | 15 | 1,414 | 1,257 | 683 | 574 | 79 | 1 |
module Finance.Market where
import Data.Time
data MarketState = PreHours |
Open |
AfterHours |
Closed
deriving (Show, Read, Eq, Ord, Enum, Bounded)
-- | For use with the TradeKing `/market/clock` API call.
data MarketClock = MarketClock {
mcState :: MarketState,
mcNextState :: MarketState,
mcTime :: UTCTime,
  mcNextTime :: UTCTime -- ^ when the market will transition into the next state
} deriving (Show, Read, Eq, Ord)
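-- A hedged helper sketch built on this record (not part of the original API):
--
-- > isMarketOpen :: MarketClock -> Bool
-- > isMarketOpen = (== Open) . mcState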
|
tathougies/hstradeking
|
src/Finance/Market.hs
|
bsd-3-clause
| 504 | 0 | 10 | 148 | 128 | 73 | 55 | -1 | -1 |
-- Idea: the CSV/ENVI/TFW parsers and other file-reading helpers live here,
-- so that they stay separate from the IO machinery.
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE RecordWildCards #-}
module Parsers where
import qualified Data.Attoparsec.Text as P
import qualified Data.Attoparsec.Combinator as P
import Control.Applicative
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Types
import CV.Image
comma = P.char ','
csInt s = P.decimal <* comma P.<?> s
csDouble s = P.double <* comma P.<?> s
puu :: P.Parser Puu
puu = do
puu_koealaid <- csInt "koeala"
nro <- csInt "nro"
puulaji <- csInt "puulaji"
latvkerr <- csInt "latvkerr"
lpm <- csInt "lpm"
pituus <- csInt "pituus"
puu_x_euref <- csDouble "puu_x_euref"
puu_y_euref <- csDouble "puu_y_euref" -- P.double P.<?> "puu_y_euref"
P.skipSpace
return $! Puu{..}
koeala :: P.Parser Koeala
koeala = do
koealaid <- csInt "koealaid"
x_euref <- csDouble "x_euref"
y_euref <- csDouble "y_euref"
vma <- csDouble "vma"
vku <- csDouble "vku"
vlp <- csDouble "vlp"
vtot <- csDouble "vtot"
gma <- csDouble "gma"
gku <- csDouble "gku"
gle <- csDouble "gle"
g <- csDouble "g"
nma <- csDouble "nma"
nku <- csDouble "nku"
nle <- csDouble "nle"
n <- csDouble "n"
dgm <- csDouble "dgm"
hgm <- csDouble "hgm"
xykj <- csDouble "x_ykj"
yykj <- P.double P.<?> "y_ykj"
P.skipSpace
return $! Koeala{..}
parseEither s = P.parseOnly
-- A pity that attoparsec's Either doesn't return a more detailed error
parseEither' :: String -> P.Parser a -> T.Text -> Either String a
parseEither' s p t = case (P.parse (p P.<?> s) t) of
P.Fail ct c e -> Left $ unlines [show t, show e, show c, show ct]
P.Done ct r -> Right r
P.Partial x -> Left $ unlines ["Partial "++s++" match with:", show t]
-- Parses GeoTiff TFW header file
-- http://gis.ess.washington.edu/data/raster/drg/tfw.html
parseTFWHeader :: (Int,Int) -> a -> String -> ImageInWorld a
parseTFWHeader (w,h) im s =
let [a,b,c,d,e,f] = map read . lines . filter (/='\r') $ s
in ImageInWorld{..}
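-- A hedged call sketch: @img@ stands for an already-loaded image and the six
-- numbers (x-scale, two rotation terms, negative y-scale, upper-left x/y) are
-- made up:
--
-- > parseTFWHeader (200, 200) img "0.2\n0\n0\n-0.2\n400898.6\n6786343.1\n"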
-- Parses some header info from ENVI header file
-- http://geol.hu/data/online_help/ENVI_Header_Format.html
parseENVIHeader :: a -> String -> ImageInWorld a
parseENVIHeader im (T.pack->s) =
let (P.Done _ res) = P.parse (mapLinePref *> P.count 6 num) $ s
[_,_,lrx,lry,xUnit,yUnit] = res
        h = 200 -- XXX: the real dimensions are available in the ENVI header's samples/lines fields
w = 200 --
[a,b,c,d,e,f] = [xUnit,0,0,-yUnit,lrx,lry]
in ImageInWorld{..}
where
-- find and parse nums from line in format of
-- "map info = {Unknown, 1, 1, 400898.697548, 6786343.147276, 0.2, 0.2, 0, North}"
num = P.double <* P.char ',' <* P.skipSpace
mapLinePref = P.manyTill P.anyChar (P.try (pS "map info")) >>
P.skipSpace >>
P.char '=' >>
P.skipSpace >>
P.char '{' >>
P.manyTill P.anyChar (P.try (P.char ',')) >>
P.skipSpace
pS = P.string . T.pack
|
deggis/guesswork
|
src/Guesswork/Import/Parsers.hs
|
bsd-3-clause
| 3,194 | 0 | 17 | 848 | 1,020 | 513 | 507 | 75 | 3 |
module Sexy.Instances.Eq.Either () where
import Sexy.Classes (Eq(..), BoolC(..))
import Sexy.Data (Either(..))
import Sexy.Instances.BoolC.Bool ()
instance (Eq a, Eq b) => Eq (Either a b) where
Left x == Left y = x == y
Right x == Right y = x == y
_ == _ = false
|
DanBurton/sexy
|
src/Sexy/Instances/Eq/Either.hs
|
bsd-3-clause
| 286 | 0 | 7 | 71 | 140 | 77 | 63 | 8 | 0 |
module SVG
( generatePlayingFieldSVG
, renderPiece
) where
import Data.List (intersperse)
import qualified Data.Map.Strict as Map
import Types
generatePlayingFieldSVG :: PlayingField -> String
generatePlayingFieldSVG pf = header ++ pieces ++ "\n" ++ footer
where
header =
unlines
[ "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>"
, "<svg width=\"" ++
show sideLengthX ++
"\" height=\"" ++
show sideLengthY ++
"\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\">"
]
pieces =
concat $
intersperse "\n" $
map
(\(cs, (PlacedPiece char _)) -> renderPiece sideLength minXoffset minYoffset cs char)
(Map.toList pf)
footer = "</svg>"
-- length of one side of a piece in pixels
sideLength = 50
(xs, ys) = unzip $ map cCoordinates $ Map.keys pf
minX = minimum xs
minY = minimum ys
maxX = maximum xs
maxY = maximum ys
-- TODO: Fix these offsets properly
minXoffset = (abs minX) * sideLength
minYoffset = (abs minY) * sideLength
sideLengthX = minXoffset + sideLength * (maxX + 2)
sideLengthY = minYoffset + sideLength * (maxY + 2)
renderPiece :: Int -> Int -> Int -> Coordinates -> Char -> String
renderPiece sideLength minXoffset minYoffset (C (x, y)) char =
wrapInSvg $ rectangle ++ "\n" ++ shapeText
where
startX = show $ minXoffset + x * sideLength
startY = show $ minYoffset + y * sideLength
-- TODO: Draw the stroke on the inside?
rectangle =
"<rect x=\"0\" y=\"0\" width=\"" ++
show sideLength ++
"\" height=\"" ++
show sideLength ++
"\" fill=\"white\"" ++ " stroke-width=\"1\" stroke=\"black\" stroke-linejoin=\"round\" />"
shapeText =
"<text x=\"60%\" y=\"60%\" alignment-baseline=\"middle\" text-anchor=\"middle\" font-size=\"2em\">" ++
[char] ++ "</text>"
-- this wrapper is necessary to center the text...
wrapInSvg text =
"<svg x=\"" ++
startX ++
"\" y=\"" ++
startY ++
"\" width=\"" ++
show sideLength ++ "\" height=\"" ++ show sideLength ++ "\">\n" ++ text ++ "\n</svg>"
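-- A hedged usage sketch, assuming the 'C' constructor wraps an (x, y) pair as
-- in the pattern above:
--
-- > putStrLn $ renderPiece 50 0 0 (C (1, 2)) 'A'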
|
rkrzr/shcrabble
|
src/SVG.hs
|
bsd-3-clause
| 2,191 | 0 | 16 | 574 | 509 | 270 | 239 | 54 | 1 |
{-# LANGUAGE OverloadedLists, OverloadedStrings, DeriveGeneric #-}
module AWS.IAM (tests) where
import AWS.Aeson
import Control.Concurrent (threadDelay)
import Control.Lens hiding ((.=))
import Data.Aeson (encode)
import Data.Aeson.Lens (key, _String, values, _Value)
import Data.Char (toUpper)
import Data.IORef (IORef, readIORef, writeIORef)
import Data.Text as T (Text, pack, unpack, split)
import Data.Text.Encoding (encodeUtf8)
import Data.Text.Lazy as LT (toStrict)
import Data.Text.Lazy.Encoding as E (decodeUtf8)
import GHC.Generics
import Network.Wreq
import Test.Framework (Test, testGroup)
import Test.Framework.Providers.HUnit (testCase)
import Test.HUnit (assertBool)
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as DAT
tests :: String -> String -> Options -> IORef String -> Test
tests prefix region baseopts iamTestState = testGroup "iam" [
testCase "listUsers" $ listUsers prefix region baseopts
, testCase "createRole" $ createRole prefix region baseopts iamTestState
, testCase "listRoles" $ listRoles prefix region baseopts
, testCase "putRolePolicy" $ putRolePolicy prefix region baseopts
, testCase "stsAssumeRole" $ stsAssumeRole prefix region baseopts iamTestState
, testCase "deleteRolePolicy" $ deleteRolePolicy prefix region baseopts
, testCase "deleteRole" $ deleteRole prefix region baseopts
]
listUsers :: String -> String -> Options -> IO ()
listUsers _prefix region baseopts = do
let opts = baseopts
& param "Action" .~ ["ListUsers"]
& param "Version" .~ ["2010-05-08"]
& header "Accept" .~ ["application/json"]
r <- getWith opts (iamUrl region)
assertBool "listUsers 200" $ r ^. responseStatus . statusCode == 200
assertBool "listUsers OK" $ r ^. responseStatus . statusMessage == "OK"
createRole :: String -> String -> Options -> IORef String -> IO ()
createRole prefix region baseopts iamTestState = do
let opts = baseopts
& param "Action" .~ ["CreateRole"]
& param "Version" .~ ["2010-05-08"]
& param "RoleName" .~ [T.pack $ prefix ++ roleName]
& param "AssumeRolePolicyDocument" .~ [rolePolicyDoc]
& header "Accept" .~ ["application/json"]
r <- getWith opts (iamUrl region)
assertBool "createRole 200" $ r ^. responseStatus . statusCode == 200
assertBool "createRole OK" $ r ^. responseStatus . statusMessage == "OK"
let [arn] = r ^.. responseBody . key "CreateRoleResponse"
. key "CreateRoleResult"
. key "Role"
. key "Arn" . _String
writeIORef iamTestState $ T.unpack arn
putRolePolicy :: String -> String -> Options -> IO ()
putRolePolicy prefix region baseopts = do
let opts = baseopts
& param "Action" .~ ["PutRolePolicy"]
& param "Version" .~ ["2010-05-08"]
& param "RoleName" .~ [T.pack $ prefix ++ roleName]
& param "PolicyName" .~ [testPolicyName]
& param "PolicyDocument" .~ [policyDoc]
& header "Accept" .~ ["application/json"]
r <- getWith opts (iamUrl region)
assertBool "putRolePolicy 200" $ r ^. responseStatus . statusCode == 200
assertBool "putRolePolicy OK" $ r ^. responseStatus . statusMessage == "OK"
  threadDelay $ 30*1000*1000 -- 30 second sleep, to allow the change to propagate to the region
deleteRolePolicy :: String -> String -> Options -> IO ()
deleteRolePolicy prefix region baseopts = do
let opts = baseopts
& param "Action" .~ ["DeleteRolePolicy"]
& param "Version" .~ ["2010-05-08"]
& param "RoleName" .~ [T.pack $ prefix ++ roleName]
& param "PolicyName" .~ [testPolicyName]
& param "PolicyDocument" .~ [policyDoc]
& header "Accept" .~ ["application/json"]
r <- getWith opts (iamUrl region)
assertBool "deleteRolePolicy 200" $ r ^. responseStatus . statusCode == 200
assertBool "deleteRolePolicy OK" $ r ^. responseStatus . statusMessage == "OK"
deleteRole :: String -> String -> Options -> IO ()
deleteRole prefix region baseopts = do
let opts = baseopts
& param "Action" .~ ["DeleteRole"]
& param "Version" .~ ["2010-05-08"]
& param "RoleName" .~ [T.pack $ prefix ++ roleName]
& header "Accept" .~ ["application/json"]
r <- getWith opts (iamUrl region)
assertBool "deleteRole 200" $ r ^. responseStatus . statusCode == 200
assertBool "deleteRole OK" $ r ^. responseStatus . statusMessage == "OK"
listRoles :: String -> String -> Options -> IO ()
listRoles prefix region baseopts = do
let opts = baseopts
& param "Action" .~ ["ListRoles"]
& param "Version" .~ ["2010-05-08"]
& header "Accept" .~ ["application/json"]
r <- getWith opts (iamUrl region)
assertBool "listRoles 200" $ r ^. responseStatus . statusCode == 200
assertBool "listRoles OK" $ r ^. responseStatus . statusMessage == "OK"
let arns = r ^.. responseBody . key "ListRolesResponse" .
key "ListRolesResult" .
key "Roles" .
values .
key "Arn" . _String
-- arns are of form: "arn:aws:iam::<acct>:role/ec2-role"
let arns' = map (T.unpack . last . T.split (=='/')) arns
assertBool "listRoles contains test role" $
elem (prefix ++ roleName) arns'
-- Security Token Service (STS)
data Cred = Cred {
accessKeyId :: T.Text,
secretAccessKey :: T.Text,
sessionToken :: T.Text,
expiration :: Int -- Unix epoch
} deriving (Generic, Show, Eq)
instance A.FromJSON Cred where
parseJSON = DAT.genericParseJSON $ DAT.defaultOptions {
DAT.fieldLabelModifier = \(h:t) -> toUpper h:t
}
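-- A hedged example of the JSON shape this instance accepts (field names get
-- their first letter upper-cased by the modifier above; the values are made up):
--
-- > {"AccessKeyId":"AKIA...","SecretAccessKey":"...","SessionToken":"...","Expiration":1500000000}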
stsAssumeRole :: String -> String -> Options -> IORef String -> IO ()
stsAssumeRole prefix region baseopts iamTestState = do
arn <- readIORef iamTestState
let opts = baseopts
& param "Action" .~ ["AssumeRole"]
& param "Version" .~ ["2011-06-15"]
& param "RoleArn" .~ [T.pack arn]
& param "ExternalId" .~ [externalId]
& param "RoleSessionName" .~ ["Bob"]
& header "Accept" .~ ["application/json"]
r <- getWith opts (stsUrl region) -- STS call (part of IAM service family)
let v = r ^? responseBody
. key "AssumeRoleResponse"
. key "AssumeRoleResult"
. key "Credentials"
. _Value
assertBool "stsAssumeRole 200" $ r ^. responseStatus . statusCode == 200
assertBool "stsAssumeRole OK" $ r ^. responseStatus . statusMessage == "OK"
-- Now, use the temporary credentials to call an AWS service
let cred = conv v :: Cred
let key' = encodeUtf8 $ accessKeyId cred
let secret' = encodeUtf8 $ secretAccessKey cred
let token' = encodeUtf8 $ sessionToken cred
let baseopts2 = defaults
& auth ?~ awsSessionTokenAuth AWSv4 key' secret' token'
let opts2 = baseopts2
& param "Action" .~ ["ListRoles"]
& param "Version" .~ ["2010-05-08"]
& header "Accept" .~ ["application/json"]
r2 <- getWith opts2 (iamUrl region)
assertBool "listRoles 200" $ r2 ^. responseStatus . statusCode == 200
assertBool "listRoles OK" $ r2 ^. responseStatus . statusMessage == "OK"
let arns = r2 ^.. responseBody . key "ListRolesResponse" .
key "ListRolesResult" .
key "Roles" .
values .
key "Arn" . _String
-- arns are of form: "arn:aws:iam::<acct>:role/ec2-role"
let arns' = map (T.unpack . last . T.split (=='/')) arns
assertBool "listRoles contains test role" $
elem (prefix ++ roleName) arns'
where
conv :: DAT.FromJSON a => Maybe DAT.Value -> a
conv v = case v of
      Nothing -> error "stsAssumeRole: no Credentials in AssumeRole response"
Just x ->
case A.fromJSON x of
A.Success r ->
r
A.Error e ->
error $ show e
iamUrl :: String -> String
iamUrl _ =
"https://iam.amazonaws.com/" -- IAM is not region specific
stsUrl :: String -> String
stsUrl _region =
"https://sts.amazonaws.com/" -- keep from needing to enable STS in regions
-- To test region specific behavior, uncomment the line below
-- "https://sts." ++ _region ++ ".amazonaws.com/" -- region specific
-- Note: to access AWS STS in any region other than us-east-1, or the default
-- region (sts.amazonaws.com), STS needs to be enabled in the
-- AWS Management Console under
-- Account Settings > Security Token Service Region
-- If you forget, the AssumeRole call will return a 403 error with:
-- "STS is not activated in this region for account:<acct>.
-- Your account administrator can activate STS in this region using
-- the IAM Console."
roleName :: String
roleName = "test"
testPolicyName :: T.Text
testPolicyName = "testPolicy"
-- Note that ExternalId is a concept used for cross account use cases
-- with 3rd parties. But the check works for same-account as well, which
-- makes it more convenient to test.
-- For more, see:
-- http://docs.aws.amazon.com/STS/latest/UsingSTS/sts-delegating-externalid.html
externalId :: T.Text
externalId = "someExternalId"
rolePolicyDoc :: T.Text
rolePolicyDoc = LT.toStrict . E.decodeUtf8 . encode $
object [
"Version" .= "2012-10-17",
"Statement" .= [
object [
"Effect" .= "Allow",
"Action" .= "sts:AssumeRole",
"Principal" .= object ["AWS" .= "*"],
"Condition" .= object ["StringEquals" .=
object ["sts:ExternalId" .= string externalId]]
]
]
]
policyDoc :: T.Text
policyDoc = LT.toStrict . E.decodeUtf8 . encode $
object [
"Version" .= "2012-10-17",
"Statement" .= [
object [
"Effect" .= "Allow",
"Action" .= ["*"],
"Resource" .= ["*"]
]
]
]
|
bos/wreq
|
tests/AWS/IAM.hs
|
bsd-3-clause
| 10,107 | 0 | 22 | 2,743 | 2,667 | 1,341 | 1,326 | -1 | -1 |
--------------------------------------------------------------------------------
module WhatMorphism.TemplateHaskell
( deriveFoldBuildFusion
, deriveFold
, deriveBuild
, deriveFusion
) where
--------------------------------------------------------------------------------
import Control.Monad (forM, mapM)
import Data.Char (toLower)
import Language.Haskell.TH
--------------------------------------------------------------------------------
deriveFoldBuildFusion :: Name -> String -> String -> Q [Dec]
deriveFoldBuildFusion typeName foldName buildName = do
fold <- deriveFold typeName foldName
build <- deriveBuild typeName buildName
fusion <- deriveFusion typeName foldName buildName
return $ concat [fold, build, fusion]
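-- A hedged usage sketch (the data type and generated names are hypothetical):
--
-- > data Tree a = Leaf | Node (Tree a) a (Tree a)
-- > $(deriveFoldBuildFusion ''Tree "foldTree" "buildTree")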
--------------------------------------------------------------------------------
deriveFold :: Name -> String -> Q [Dec]
deriveFold typeName foldName = do
info <- reify typeName
case info of
TyConI (DataD _ctx name bndrs cs _derives) ->
mkFold foldName name bndrs cs
_ -> fail $
"WhatMorphism.TemplateHaskell.deriveFold: " ++
"can only derive simple data declarations"
--------------------------------------------------------------------------------
mkFold :: String -> Name -> [TyVarBndr] -> [Con] -> Q [Dec]
mkFold foldName typeName typeBndrs cons = do
a <- newName "a" -- Result type of the fold
consFs <- forM cons $ \c -> do
f <- newName "f"
tys <- forM (conTypes c) $ \t -> do
y <- newName "y"
return (y, t)
return $ (c, f, tys)
go <- newName (foldName ++ "_go") -- Worker
return
[ SigD foldName' $ ForallT (typeBndrs ++ [PlainTV a]) [] $
mkFunTy
([mkFunTy
[ if isRecursive t then (VarT a) else t
| t <- conTypes con
]
(VarT a)
| con <- cons
] ++ [typ])
(VarT a)
, FunD foldName'
[ Clause
([VarP f | (_, f, _) <- consFs])
(NormalB (VarE go))
[ FunD go
[ Clause
[ConP (conName c) (map VarP $ map fst ys)]
(NormalB $ mkAppE (VarE f)
[ if isRecursive t
then AppE (VarE go) (VarE y)
else VarE y
| (y, t) <- ys]
)
[]
| (c, f, ys) <- consFs
]
]
]
, inlineFromPhase foldName' 0
-- , PragmaD (InlineP foldName' NoInline FunLike AllPhases)
]
where
foldName' = mkName foldName
typ = mkAppTy typeName typeBndrs
isRecursive t = typ == t
--------------------------------------------------------------------------------
deriveBuild :: Name -> String -> Q [Dec]
deriveBuild typeName buildName = do
info <- reify typeName
case info of
TyConI (DataD _ctx name bndrs cs _derives) ->
mkBuild buildName name bndrs cs
_ -> fail $
"WhatMorphism.TemplateHaskell.deriveBuild: " ++
"can only derive simple data declarations"
--------------------------------------------------------------------------------
mkBuild :: String -> Name -> [TyVarBndr] -> [Con] -> Q [Dec]
mkBuild buildName typeName typeBndrs cons = do
b <- newName "b" -- Internal return type
g <- newName "g" -- Function given by the user
gTy <- mkGTy typeName typeBndrs cons
return
[ SigD buildName' $ ForallT (typeBndrs) [] $
mkFunTy [gTy] typ
, FunD buildName'
[ Clause
[VarP g]
(NormalB (mkAppE
(VarE g)
[ConE (conName con) | con <- cons]))
[]
]
, inlineFromPhase buildName' 0
-- , PragmaD (InlineP buildName' NoInline FunLike AllPhases)
]
where
buildName' = mkName buildName
typ = mkAppTy typeName typeBndrs
isRecursive t = typ == t
--------------------------------------------------------------------------------
deriveFusion :: Name -> String -> String -> Q [Dec]
deriveFusion typeName foldName buildName = do
info <- reify typeName
case info of
TyConI (DataD _ctx name bndrs cs _derives) ->
mkFusion foldName buildName name bndrs cs
    _ -> fail $
        "WhatMorphism.TemplateHaskell.deriveFusion: " ++
"can only derive simple data declarations"
--------------------------------------------------------------------------------
mkFusion :: String -> String -> Name -> [TyVarBndr] -> [Con] -> Q [Dec]
mkFusion foldName buildName typeName typeBndrs cons = do
cvars <- mapM (newName . map toLower . nameBase . conName) cons
g <- newName "g"
gTy <- mkGTy typeName typeBndrs cons
return
[ PragmaD $
RuleP
(foldName ++ "/" ++ buildName ++ "-fusion")
(map RuleVar cvars ++ [TypedRuleVar g gTy])
(mkAppE (VarE foldName')
(map VarE cvars ++ [AppE (VarE buildName') (VarE g)]))
(mkAppE (VarE g) (map VarE cvars))
AllPhases
]
where
foldName' = mkName foldName
buildName' = mkName buildName
--------------------------------------------------------------------------------
mkGTy :: Name -> [TyVarBndr] -> [Con] -> Q Type
mkGTy typeName typeBndrs cons = do
b <- newName "b"
return $ ForallT [PlainTV b] [] $ mkFunTy
[ mkFunTy
[ if isRecursive t then (VarT b) else t
| t <- conTypes con
]
(VarT b)
| con <- cons
]
(VarT b)
where
typ = mkAppTy typeName typeBndrs
isRecursive t = typ == t
--------------------------------------------------------------------------------
mkAppTy :: Name -> [TyVarBndr] -> Type
mkAppTy name = foldl (\t tv -> AppT t (VarT (getTv tv))) (ConT name)
where
getTv (PlainTV x) = x
getTv (KindedTV x _) = x
--------------------------------------------------------------------------------
mkAppE :: Exp -> [Exp] -> Exp
mkAppE f [] = f
mkAppE f (a : as) = mkAppE (AppE f a) as
--------------------------------------------------------------------------------
mkFunTy :: [Type] -> Type -> Type
mkFunTy (x : xs) y = AppT (AppT ArrowT x) (mkFunTy xs y)
mkFunTy [] y = y
--------------------------------------------------------------------------------
conName :: Con -> Name
conName (NormalC n _) = n
conName (RecC n _) = n
conName (InfixC _ n _) = n
conName (ForallC _ _ _) = error $
    "WhatMorphism.TemplateHaskell.conName: " ++
"cannot yet define folds for forall'd types"
--------------------------------------------------------------------------------
conTypes :: Con -> [Type]
conTypes (NormalC _ ts) = [t | (_, t) <- ts]
conTypes (RecC _ ts) = [t | (_, _, t) <- ts]
conTypes (InfixC t1 _ t2) = map snd [t1, t2]
conTypes (ForallC _ _ _) = error $
"WhatMorphism.TemplateHaskell.conTypes: " ++
"cannot yet define folds for forall'd types"
--------------------------------------------------------------------------------
inlineFromPhase :: Name -> Int -> Dec
inlineFromPhase name n = PragmaD (InlineP name Inline FunLike (FromPhase n))
|
jaspervdj/what-morphism
|
src/WhatMorphism/TemplateHaskell.hs
|
bsd-3-clause
| 7,737 | 0 | 23 | 2,514 | 2,075 | 1,052 | 1,023 | 152 | 3 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
module Syntax.Pretty where
import qualified Data.Map as Map
import Data.String (IsString, fromString)
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Vector (Vector)
import qualified Data.Vector as Vector
import Syntax.Common
import Syntax.Decl
import Syntax.Internal
import Syntax.Subst
import Utils
data Doc = Atom Text | Group Int Doc | Doc :$$ Doc
| Doc :<%> Doc | Doc :<%%> Doc | Doc :<> Doc
instance IsString Doc where
fromString = Atom . fromString
instance Monoid Doc where
mempty = ""
mappend = (:<>)
type PrettyT a = a -> Doc
class Pretty a where
pretty :: PrettyT a
useParen :: a -> Bool
prettyParen :: Pretty a => a -> Doc
prettyParen x | useParen x = "(" <> pretty x <> ")"
| otherwise = pretty x
intersperse :: Doc -> [Doc] -> Doc
intersperse _ [] = Atom ""
intersperse _ [x] = x
intersperse sep (x : xs) = x :<> sep :<> intersperse sep xs
instance Pretty Int where
pretty = Atom . Text.pack . show
useParen _ = False
instance Pretty QName where
pretty (QName _xs name) = name' -- foldr (\x rs -> Atom x :<> "." :<> rs) name' xs
where name' = Atom name
useParen _ = False
{-# INLINE useParen #-}
instance Pretty Definition where
pretty (Definition x) = pretty x
useParen _ = False
{-# INLINE useParen #-}
instance Pretty Variable where
pretty (Variable x) = Atom x
useParen _ = False
{-# INLINE useParen #-}
instance Pretty Binder where
pretty (Binder x) = Atom x
useParen _ = False
{-# INLINE useParen #-}
instance Pretty NTVariable where
pretty (NTVariable x) = Atom x
useParen _ = False
{-# INLINE useParen #-}
instance Pretty NTBinder where
pretty (NTBinder x) = Atom x
useParen _ = False
{-# INLINE useParen #-}
instance Pretty PTVariable where
pretty (PTVariable x) = Atom x
useParen _ = False
{-# INLINE useParen #-}
instance Pretty Constructor where
pretty (Constructor x) = pretty x
useParen _ = False
{-# INLINE useParen #-}
instance Pretty TConstructor where
pretty (TConstructor x) = pretty x
useParen _ = False
{-# INLINE useParen #-}
instance Pretty Projection where
pretty (Projection p) = pretty p
useParen _ = False
{-# INLINE useParen #-}
instance Pretty a => Pretty (Vector a) where
pretty = foldr (:$$) "" . fmap pretty
useParen x = Vector.length x > 1
{-# INLINE useParen #-}
instance Pretty TLit where
pretty TInt = "Int"
pretty TString = "String"
useParen _ = False
{-# INLINE useParen #-}
instance Pretty PType where
pretty (PVar v) = pretty v
pretty (PLit l) = pretty l
pretty (PCon c as) = pretty c :<> args as
pretty (PCoProduct xs) = "[" :<%> intersperse " | " (fmap (\(c, x) -> pretty c :<%> ":" :<%> pretty x) (Map.toList xs)) :<%> "]"
pretty (PStruct xs) = "(" :<> intersperse ", " (Vector.toList $ fmap pretty xs) :<> ")"
pretty (Ptr n) = "Ptr" :<%> prettyParen n
useParen (PVar{}) = False
useParen (PLit l) = useParen l
useParen (PCon _ as) = not $ null as
useParen (PCoProduct{}) = False
useParen (PStruct{}) = False
useParen (Ptr{}) = True
{-# INLINE useParen #-}
instance Pretty Kind where
pretty (KFun p n) = pretty p :<%> "→" :<%> pretty n
pretty (KForall b n) = "forall" :<%> pretty b <> "." :<%> pretty n
pretty (KObject xs) = "<" :<%> intersperse " & " (fmap (\(p, x) -> pretty p :<%> "as" :<%> pretty x) (Map.toList xs)) :<%> ">"
pretty (KVar v) = pretty v
pretty (KUniverse) = "Type"
useParen (KFun _ _) = True
useParen (KForall _ _) = True
useParen (KObject _) = False
useParen (KVar v) = useParen v
useParen (KUniverse) = False
{-# INLINE useParen #-}
instance Pretty NType where
pretty (Fun p n) = pretty p :<%> "→" :<%> pretty n
pretty (Forall b n) = "forall" :<%> pretty b <> "." :<%> pretty n
pretty (NObject xs) = "<" :<%> intersperse " & " (fmap (\(p, x) -> pretty p :<%> "as" :<%> pretty x) (Map.toList xs)) :<%> ">"
pretty (NCon c ns) = pretty c :<> args ns
pretty (NVar v) = pretty v
pretty (Mon p) = "{" :<> pretty p :<> "}"
useParen (Fun _ _) = True
useParen (Forall _ _) = True
useParen (NObject _) = False
useParen (NCon _ as) = not $ null as
useParen (NVar v) = useParen v
useParen (Mon _) = False
{-# INLINE useParen #-}
instance Pretty CallFun where
pretty (CDef q) = pretty q
pretty (CVar x) = "*" :<> pretty x
useParen (CDef q) = useParen q
useParen (CVar x) = useParen x
{-# INLINE useParen #-}
instance Pretty Call where
pretty (Apply c as) = pretty c :<> args as
useParen (Apply _ as) = not $ null as
{-# INLINE useParen #-}
instance Pretty Literal where
pretty (LInt x) = pretty x
pretty (LStr x) = "\"" <> Atom x <> "\""
useParen _ = False
{-# INLINE useParen #-}
instance Pretty Val where
pretty (Var x) = pretty x
pretty (Lit l) = pretty l
pretty (Con c xs) = pretty c :<%> prettyParen xs
pretty (Struct xs) = "⦃" :<> intersperse ", " (Vector.toList $ fmap pretty xs) :<> "⦄"
pretty (Thunk ct) = "Thunk{" :<> pretty ct :<> "}"
pretty (ThunkVal ct) = "$" :<> prettyParen ct
useParen (Var x) = useParen x
useParen (Lit l) = useParen l
useParen (Con _ _) = True
useParen (Struct _) = False
useParen (Thunk _) = False
useParen (ThunkVal _) = False
{-# INLINE useParen #-}
instance Pretty Arg where
pretty (Push x) = pretty x
pretty (Proj p) = "." :<> prettyParen p
pretty (Type n) = "#" :<> prettyParen n
useParen (Push x) = useParen x
useParen (Proj p) = useParen p
useParen (Type n) = useParen n
{-# INLINE useParen #-}
{-
instance (Pretty nb, Pretty pf, Pretty nf, Pretty pb, Eq f, Convert b f,Pretty f) => Pretty (WhereClause pb pf nb nf b f) where
pretty (WhereClause _name decls) = "where" :$$ pretty decls
useParen _ = True -- ever used?
{-# INLINE useParen #-}
-}
-- we should have a way of only putting parentheses on things that need it
args :: (Traversable t, Pretty a) => t a -> Doc
args = foldr (\a as -> " " :<> prettyParen a :<> as) ""
instance Pretty Act where
pretty (PutStrLn s) = "PutStrLn" :<%> pretty s
pretty ReadLn = "ReadLn"
useParen (PutStrLn _) = True
useParen ReadLn = False
{-
instance (Pretty d, Pretty pf, Pretty nb, Pretty nf, Pretty f) => Pretty (RHS d pf nb nf b f) where
pretty (Call c) = pretty c
pretty (Return v) = pretty v
pretty (Act a) = pretty a
useParen (Call c) = useParen c
useParen (Return v) = useParen v
useParen (Act a) = useParen a
-}
data Equation mon
= Equation Call mon -- (CMonad d pf nb nf b f)
splitLines :: Vector Doc -> Doc
splitLines xs | null xs = ""
| length xs == 1 = Vector.head xs
| otherwise = foldr1 (:$$) xs
equations :: Definition -> Term mon -> Vector (Equation mon)
equations name = go Vector.empty
where
append xs y = xs <> Vector.singleton y
mkCa = Apply (CDef name)
prv = Push . Var . convert
-- goL xs (With v b t) = Vector.singleton $ EqWith (mkCa xs) v $ go (append xs $ prv b) t
goL xs (Derefence v t) = go (fmap (substValOne v (ThunkVal $ Var $ v)) xs) t
goL xs (Split x bs t) = go (fmap (substValOne x (Struct $ fmap (Var . convert) bs)) xs) t
goL xs (Case x bs) = bs >>= \ (Branch c t) ->
let xs' = fmap (substValOne x (Con c (Var x))) xs
in go xs' t
goR xs (Lam x t) = go (append xs $ prv x) t
goR xs (TLam x t) = go (append xs $ Type $ NVar $ convert x) t
goR xs (New bs) = bs >>= \ (CoBranch p t) -> go (append xs $ Proj p) t
-- go :: (Eq f, Convert b f) => Args d pf nb nf b f -> Term d pf nb nf b f -> Vector (Equation d pf nb nf b f)
go xs (RightTerm t) = goR xs t
go xs (LeftTerm t) = goL xs t
go xs (Do c) = Vector.singleton (Equation (mkCa xs) c)
instance Pretty CMonad where
pretty (Act a) = pretty a
pretty (TCall c) = pretty c
pretty (Return r) = "return" :<%> prettyParen r
pretty (Bind a b m) = pretty b :<%> "<-" :<%> pretty a :<> ";" :<%> pretty m
pretty (With c b m) = pretty b :<%> ":=" :<%> pretty c :<> ";" :<%> pretty m
useParen (Act a) = useParen a
useParen (TCall c) = useParen c
useParen (Return _) = True
useParen (Bind _ _ _) = True
useParen (With{}) = True
useParen (CLeftTerm{}) = True
instance (Pretty mon) => Pretty (Equation mon) where
pretty (Equation c r) = pretty c :<%> "= do {" :<%> pretty r :<%> "}"
{-
pretty (EqLet ca (v,p) eqs) = Group 2 $ pretty ca :<%> "let"
:<%> pretty v :<%> ":" :<%> pretty p
:$$ splitLines (fmap pretty eqs)
-}
useParen _ = True
instance Pretty Using where
pretty (Using vs) = "using" :<%> "(" :<> args vs :<> ")"
pretty (Except vs) | null vs = ""
| otherwise = "hiding" :<%> "(" :<> args vs :<> ")"
useParen _ = True
instance Pretty Atom where
pretty (AtomName n) = pretty n
pretty (AtomModule n) = "module" :<%> pretty n
useParen (AtomName n) = useParen n
useParen (AtomModule _) = True
instance Pretty Renaming where
pretty (Renaming xs)
| null xs = ""
| otherwise = "renaming" :<%>
"(" :<> foldr (\ a r -> " " :<> inner a :<> r) "" xs :<> ")"
where
inner (atm , n) = pretty atm :<%> "to" :<%> pretty n :<> ";"
useParen _ = True
instance Pretty ModuleOps where
pretty (ModuleOps use ren) = pretty use :<%%> pretty ren
useParen _ = True
instance Pretty Decl where
pretty (DData name ki ty) = Group 2 $ "data" :<%> pretty name :<%> ":" :<%> pretty ki :$$
pretty (equations (Definition name) ty)
pretty (CoData name ki ty) = Group 2 $ "codata" :<%> pretty name :<%> ":" :<%> pretty ki :$$
pretty (equations (Definition name) ty)
pretty (Module ns) = prettyNs "module" "" ns
pretty (Template ns) = prettyNs "template" "" ns
pretty (Specialise name temp tybinds _tele ren) =
"module" :<%> pretty name :<%> pretty temp :<> args tybinds :<%%> pretty ren
pretty (DDef name typ ter) =
pretty name :<%> ":" :<%> pretty typ :$$
pretty (equations (Definition name) ter)
useParen _ = True
{-# INLINE useParen #-}
prettyNs :: Text -> Doc -> PrettyT NameSpace
prettyNs modu pArgs (Namespace name decls) = Group 2 $
Atom modu :<%> pretty name :<> pArgs :<%> "where" :$$
pretty decls
instance Pretty Program where
pretty (Program ns) = prettyNs "module" "" ns
useParen _ = True
{-# INLINE useParen #-}
toText :: Int -> Doc -> Text
toText _ (Atom t) = t
toText i (Group i' d) = toText (i + i') d
toText i (d :$$ d') = toText i d <> "\n" <> Text.replicate i " " <> toText i d'
toText i (d :<%> d') = toText i d <> " " <> toText i d'
toText i (d :<%%> d') = let pre = toText i d
pos = toText i d'
in if " " `Text.isSuffixOf` pre
then pre <> pos
else pre <> " " <> pos
toText i (d :<> d') = toText i d <> toText i d'
pprint :: Pretty a => a -> Text
pprint = toText 0 . pretty
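-- A hedged rendering sketch: a 'Group' widens the indentation used after each
-- line break inside it (output shown informally):
--
-- > toText 0 (Group 2 ("foo" :$$ "bar")) == "foo\n  bar"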
|
Danten/lejf
|
src/Syntax/Pretty.hs
|
bsd-3-clause
| 11,095 | 0 | 19 | 2,917 | 4,226 | 2,092 | 2,134 | 261 | 7 |
-----------------------------------------------------------------------------
--
-- Module : FNIStash.Logic.Env
-- Copyright : 2013 Daniel Austin
-- License : AllRightsReserved
--
-- Maintainer : [email protected]
-- Stability : Development
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE BangPatterns #-}
module FNIStash.Logic.Env
( buildEnv
, searchAncestryFor
, sharedStashPath
, Env (..)
, EffectKey (..)
) where
-- An ENV is the data environment that is passed around by the reader monad. It has all the reference
-- data we need to do computations like lookups.
-- FNIStash stuff
import FNIStash.File.PAK
import FNIStash.File.DAT
import FNIStash.File.Variables
import FNIStash.File.General
import FNIStash.File.Item (LocationBytes(..))
import FNIStash.Logic.Config
-- General stuff
import qualified Data.Text as T
import Data.Maybe
import Data.Configurator
import Data.Binary.Get
import Data.Word
import Data.Int
import qualified Data.Map as M
import qualified Data.List as L
import Database.HDBC.Sqlite3
import Data.Configurator.Types
import Data.Configurator
import System.FilePath.Windows
import qualified Filesystem.Path.CurrentOS as F
import Debug.Trace
-- Env is the lookup environment we pass around manually. (I suppose we could use a Reader monad
-- but I tried it out and found it to be more complicated than simple argument passing)
data Env = Env
{ lkupEffect :: EffectKey -> Maybe DATNode
, lkupAffix :: T.Text -> Maybe DATNode
, lkupSkill :: T.Text -> Maybe DATNode
, lkupMonster :: T.Text -> Maybe DATNode
, lkupLocNodes :: LocationBytes -> (DATNode, Maybe DATNode) -- location, containerID -> Container node, slot node
, lkupLocIDs :: String -> String -> (Maybe SlotID, Maybe ContainerID)
, lkupItemGUID :: GUID -> Maybe DATNode
, lkupTriggerable :: T.Text -> Maybe DATNode
, lkupStat :: T.Text -> Maybe DATNode
, lkupPath :: T.Text -> Maybe DATNode
, lkupGraph :: T.Text -> Float -> Float
, lkupSpawnClass :: T.Text -> Maybe DATNode -- map of spawnclass object's UNIT variable to the spawnclass itself
, lkupSet :: T.Text -> Maybe DATNode
, allItems :: DATFiles GUID -- map of GUID to item nodes
, dbConn :: Connection
, config :: Config
, dbPath :: F.FilePath
}
data EffectKey = EffectIndex
{ effectIndexVal :: Word32
}
| EffectName
{ effectName :: String
} deriving (Eq, Ord, Show)
-- build the lookup environment needed for app operations
buildEnv pak conn cfg dbPath =
let effects = effectLookup pak
skills = skillLookup pak
(bytesToNodesFxn, nodesToBytesFxn) = locLookup pak
(itemsGUID, allItemsMap) = itemLookupGUID pak
byPath = lookupPath pak
graph = graphLookup byPath
affixes = affixLookup pak
monsters = monsterLookup pak
trigs = triggerableLookup pak
stats = statLookup pak
spawn = spawnclassLookup pak
sets = setLookup pak
in Env effects affixes skills monsters bytesToNodesFxn
nodesToBytesFxn itemsGUID trigs stats byPath graph
spawn sets
allItemsMap conn
cfg
dbPath
sharedStashPath Env{..} = do
searchHead <- require config $ T.pack $ show SHAREDSTASHLOCATION
searchName <- require config $ T.pack $ show SHAREDSTASHFILE
rightFile <- simpleFind ((searchName ==) . takeFileName) searchHead
return (rightFile, searchName, searchHead)
-- Each of the functions below returns a lookup function. This is how we can keep the loaded PAK
-- handy for repeated lookups since we cannot have a global. The PAK stays on the stack.
itemLookupGUID pak =
let guidFinder = \x -> vUNIT_GUID x
dat = readDATFiles pak "MEDIA/UNITS/ITEMS" guidFinder -- p is pak
in (\idInt64 -> lkupDATFile dat idInt64, dat)
lookupPath pak =
let ffp p = T.replace "\\" "/" p -- fix file path
in \name -> lkupPAKFile (ffp name) pak >>= return . (runGetSuppress getDAT)
effectLookup pak =
\effID -> lkupPAKFile "MEDIA/EFFECTSLIST.DAT" pak >>=
return . (runGetSuppress getDAT) >>= case effID of
EffectIndex i -> subNodeAt i
EffectName n -> searchNodeTreeWith (\node ->
let mName = return node >>= vNAME
in case mName of
Nothing -> False
Just name -> n == T.unpack name)
-- Given a prefix path, makes a lookup table of pak files with NAME as lookup key
makeLookupByName path pak =
let nameFinder = \x -> vNAME x >>= return . T.toUpper
dat = readDATFiles pak path nameFinder
in (\name -> lkupDATFile dat $ T.toUpper name)
affixLookup = makeLookupByName "MEDIA/AFFIXES/ITEMS"
skillLookup = makeLookupByName "MEDIA/SKILLS/"
monsterLookup = makeLookupByName "MEDIA/UNITS/MONSTERS/PETS/"
setLookup = makeLookupByName "MEDIA/SETS/"
spawnclassLookup pak =
let dat = readDATFiles pak "MEDIA/SPAWNCLASSES" vNAME
collectTuples node@DATNode{..} = map (\sn -> (vUNIT sn, node)) $ datSubNodes
tuples = concatMap collectTuples $ M.elems dat
filteredTuples = mapMaybe (\(k, n) -> case k of
Just l -> Just (T.toUpper l, n)
_ -> Nothing) tuples
lkupMap = M.fromList filteredTuples
in (\unitName -> let k = M.lookup (T.toUpper unitName) lkupMap
in k)
priceIsRightSearch :: LocationBytes -> (DATNode -> SlotID) -> [DATNode] -> DATNode
--priceIsRightSearch realPrice [] = who knows
priceIsRightSearch (LocationBytes {..}) pricer (guess:guesses) =
let realPrice = lBytesSlotIndex
i = fromIntegral . slotIDVal
startDiff = realPrice - (i . pricer) guess
helper p f (diff, gBest) [] = gBest
helper p f (diff, gBest) (g:gs) =
if p-(f g) < diff && p-(f g) >= 0 then helper p f (p-(f g), g) gs else helper p f (diff, gBest) gs
in helper realPrice (i . pricer) (startDiff, guess) guesses
locLookup pak =
-- ok, this is a little messy because Runic made an update in early April 2013 that changed
-- how the slots are organized. Originally they were all separate nodes in a INVENTORYSLOTS.DAT
-- file, but now each slot is its own dat file. I did as little as I needed to adapt the old
-- algorithm to the new organizational scheme
let invenSlotFiles = M.filterWithKey (\k _ -> T.isInfixOf "MEDIA/INVENTORY" k && not (T.isInfixOf "MEDIA/INVENTORY/CONTAINERS" k)) pak
getName = vNAME
slotsDatFiles = readDATFiles invenSlotFiles "MEDIA/INVENTORY" getName
allSlotTypesList = M.elems slotsDatFiles
-- allSlotTypeList is a list of all Dat files for slots. SlotDatFiles is a map of slot name
-- to Dat file
-- containers is a map of container ID to DATNode for the container
containers = readDATFiles pak "MEDIA/INVENTORY/CONTAINERS" vContainerID
-- make a search function for finding the slot type with unique ID closest but no greater
-- than the locBytes Word16
getID = fromJust . vSlotID
winningSlot locBytes = priceIsRightSearch locBytes getID allSlotTypesList
-- Now to piece it all together
locBytesToSlotCont (locBytes@LocationBytes {..}) =
let cont = lkupDATFile containers $ ContainerID lBytesContainer
slot = winningSlot locBytes
in (slot, cont)
-- Now create the reverse lookup: Given container and slot name, get container ID and slot ID
slotNameToId :: String -> Maybe SlotID
slotNameToId name = M.lookup (T.pack name) slotsDatFiles >>= vSlotID
revContMap = M.fromList $ map (\(a,b) -> (fromJust $ getName b, a)) $ M.toList containers
slotContToLocBytesContID slotName contName = (slotNameToId slotName, M.lookup (T.pack contName) revContMap)
in (locBytesToSlotCont, slotContToLocBytesContID)
interp sortedPairs val
| (not . isJust) topEndFind = snd $ last sortedPairs
| (not . isJust) lowEndFind = snd $ head sortedPairs
| otherwise = interpVal
where
topEndFind = L.find (\(x,y) -> x > val) sortedPairs
lowEndFind = L.find (\(x,y) -> x <= val) $ reverse sortedPairs
pointA = fromJust lowEndFind
pointB = fromJust topEndFind
yOf = snd
xOf = fst
rise = yOf pointB - (yOf pointA)
run = xOf pointB - (xOf pointA)
interpVal = yOf pointA + (rise/run * (val - xOf pointA))
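-- A hedged worked example: a value halfway between two graph points is
-- interpolated linearly.
--
-- > interp [(0, 0), (10, 100)] 5 == 50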
getPoints byPathFxn file =
let Just dat = byPathFxn file
pointNodes = datSubNodes dat
mkPair pointNode = (fromJust $ vX pointNode, fromJust $ vY pointNode)
points = map mkPair pointNodes
sortFxn (x,y) (x',y') = compare x x'
sortedPoints = L.sortBy sortFxn points
in sortedPoints
graphLookup byPathFxn =
(\graphFile value -> interp (getPoints byPathFxn graphFile) value)
triggerableLookup = makeLookupByName "MEDIA/TRIGGERABLES/"
statLookup = makeLookupByName "MEDIA/STATS/"
-- Recurses down from a Dat Node (usually an Item dat node) looking for a particular variable.
-- Looks deeper into each BASEFILE, if it exists, until it has to give up.
searchAncestryFor (env@Env{..}) findMeVar itemDat =
let foundVar = return itemDat >>= findMeVar
itemBase = return itemDat >>= vBASEFILE
in case foundVar of
Just var -> foundVar -- we found the data we want!
Nothing ->
itemBase >>= lkupPath >>= searchAncestryFor env findMeVar
|
fluffynukeit/FNIStash
|
src/FNIStash/Logic/Env.hs
|
bsd-3-clause
| 9,608 | 0 | 20 | 2,306 | 2,309 | 1,229 | 1,080 | 168 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module Agon.Data.Instances where
import Agon.Data.Types
import Control.Applicative
import Control.Monad
import Data.Aeson
instance FromJSON UUIDs where
parseJSON (Object v) = UUIDs <$> v .: "uuids"
parseJSON _ = mzero
instance FromJSON CouchUpdateResult where
parseJSON (Object v) = CouchUpdateResult <$> v .: "rev"
parseJSON _ = mzero
instance FromJSON e => FromJSON (CouchList e) where
parseJSON (Object v) = CouchList <$> v .: "rows"
parseJSON _ = mzero
instance FromJSON e => FromJSON (CouchListItem e) where
parseJSON (Object v) = CouchListItem <$> v .: "value"
parseJSON _ = mzero
|
Feeniks/Agon
|
app/Agon/Data/Instances.hs
|
bsd-3-clause
| 661 | 0 | 8 | 130 | 207 | 107 | 100 | 18 | 0 |
{-# LANGUAGE ConstraintKinds #-}
{-# OPTIONS_GHC -Wall #-}
-- | Calculate incremental changes of data structures.
--
-- The 'Incremental' class provides a set of functions that work like the unix
-- utilities 'diff' and 'patch'. 'changes' generates an incremental diff between
-- two data values. The incremental diff can then be applied by the
-- 'applyChanges' function.
--
-- The primary intention of this library is to support efficient serialization.
-- As such, default 'Increment' types are automatically provided with
-- 'Beamable' instances.
--
-- > {-# LANGUAGE DeriveGenerics #-}
-- > {-# LANGUAGE ConstraintKinds #-}
-- > {-# LANGUAGE FlexibleContexts #-}
-- > {-# LANGUAGE UndecidableInstances #-}
-- >
-- > import GHC.Generics
-- > import Data.Beamable
-- > import Data.ByteString as B
-- >
-- > data Foo a b = Foo Int (Maybe a) b deriving (Generic, Eq, Show)
-- >
-- > -- If a 'Generic' instance is available, the default definition is
-- > -- fine, after adding some constraints.
-- > instance (IncrementalCnstr a, IncrementalCnstr b) => Incremental (Foo a b)
-- >
-- > -- generate some test data
-- > foo1 = Foo 1 Nothing "foo1"
-- > foo2 = Foo 1 (Just "foo2") "foo1"
-- >
-- > -- the 'changes' function calculates an incremental changeset from
-- > -- 'foo1' to 'foo2'
-- > diff = changes foo1 foo2
-- >
-- > -- 'applyChanges' applies the changes in an incremental patch to some data
-- > -- applyChanges foo1 diff == foo2
-- > -- True
-- >
-- > -- incremental changes can be smaller (sometimes significantly smaller)
-- > -- than the data source
-- > -- B.length $ encode diff
-- > -- 8
-- > -- B.length $ encode foo2
-- > -- 12
--
-- Incremental changes are not in general commutative or optional, and
-- it can be an error to apply a change to a data structure that doesn't match
-- the originating structure. For example:
--
-- > *Data.Increments> let diff = changes (Left 1) (Left 2 :: Either Int Char)
-- > *Data.Increments> applyChanges (Right 'a') diff
-- > *** Exception: Data.Increments: malformed Increment Rep
--
module Data.Increments (
Incremental (..)
, Changed (..)
, IncrementalCnstr
) where
import Data.Increments.Containers ()
import Data.Increments.Internal
|
JohnLato/increments
|
src/Data/Increments.hs
|
bsd-3-clause
| 2,207 | 0 | 5 | 395 | 95 | 82 | 13 | 8 | 0 |
{-# LANGUAGE PatternGuards #-}
-----------------------------------------------------------------------------
-- |
-- Module : Language.C.System.Gcc
-- Copyright : (c) 2008 Benedikt Huber
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Invoking gcc for preprocessing and compiling.
-----------------------------------------------------------------------------
module Language.C.System.GCC (
GCC,newGCC,
)
where
import Language.C.Data.RList as RList
import Language.C.System.Preprocess
import Data.Maybe
import System.Process
import System.Directory
import Data.List
-- | @GCC@ represents a reference to the gcc compiler
newtype GCC = GCC { gccPath :: FilePath }
-- | create a reference to @gcc@
newGCC :: FilePath -> GCC
newGCC = GCC
instance Preprocessor GCC where
parseCPPArgs _ = gccParseCPPArgs
runCPP gcc cpp_args =
do -- copy the input to the outputfile, because in case the input is preprocessed,
-- gcc -E will do nothing.
maybe (return()) (copyWritable (inputFile cpp_args)) (outputFile cpp_args)
rawSystem (gccPath gcc) (buildCppArgs cpp_args)
where copyWritable source target = do copyFile source target
p <- getPermissions target
setPermissions target p{writable=True}
-- | Parse arguments for preprocessing via GCC.
-- At least one .c, .hc or .h file has to be present.
-- For now we only support the most important gcc options.
--
-- 1) Parse all flags relevant to CppArgs
-- 2) Move -c,-S,-M? to other_args
-- 3) Strip -E
-- 4) The rest goes into extra_args
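-- As a rough illustration (hypothetical flag list, not taken from any test):
-- an argument list such as
--   ["-I/usr/include", "-DDEBUG", "-c", "-o", "foo.o", "foo.c", "-Wall"]
-- should end up with "foo.c" as the input file, "foo.o" as the output file,
-- the -I/-D flags as cppOptions, "-c" moved to other_args, and the
-- unrecognised "-Wall" kept in extra_args.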
gccParseCPPArgs :: [String] -> Either String (CppArgs, [String])
gccParseCPPArgs args =
case mungeArgs ((Nothing,Nothing,RList.empty),(RList.empty,RList.empty)) args of
Left err -> Left err
Right ((Nothing,_,_),_) -> Left "No .c / .hc / .h source file given"
Right ((Just input_file,output_file_opt,cpp_opts),(extra_args,other_args))
-> Right ((rawCppArgs (RList.reverse extra_args) input_file)
{ outputFile = output_file_opt, cppOptions = RList.reverse cpp_opts },
RList.reverse other_args)
where
mungeArgs :: ParseArgsState -> [String] -> Either String ParseArgsState
mungeArgs parsed@( cpp_args@(inp,out,cpp_opts),
unparsed@(extra,other))
unparsed_args =
case unparsed_args of
("-E":rest) -> mungeArgs parsed rest
(flag:rest) | flag == "-c"
|| flag == "-S"
|| "-M" `isPrefixOf` flag
-> mungeArgs (cpp_args,(extra,other `snoc` flag)) rest
("-o":file:rest) | isJust out -> Left "two output files given"
| otherwise -> mungeArgs ((inp,Just file,cpp_opts),unparsed) rest
(cpp_opt:rest) | Just (opt,rest') <- getArgOpt cpp_opt rest
-> mungeArgs ((inp,out,cpp_opts `snoc` opt),unparsed) rest'
(cfile:rest) | any (flip isSuffixOf cfile) (words ".c .hc .h")
-> if isJust inp
then Left "two input files given"
else mungeArgs ((Just cfile,out,cpp_opts),unparsed) rest
(unknown:rest) -> mungeArgs (cpp_args,(extra `snoc` unknown,other)) rest
[] -> Right parsed
getArgOpt cpp_opt rest | "-I" `isPrefixOf` cpp_opt = Just (IncludeDir (drop 2 cpp_opt),rest)
| "-U" `isPrefixOf` cpp_opt = Just (Undefine (drop 2 cpp_opt),rest)
| "-D" `isPrefixOf` cpp_opt = Just (getDefine (drop 2 cpp_opt),rest)
getArgOpt "-include" (f:rest') = Just (IncludeFile f, rest')
getArgOpt _ _ = Nothing
getDefine opt = let (key,val) = break (== '=') opt in Define key (if null val then "" else tail val)
type ParseArgsState = ((Maybe FilePath, Maybe FilePath, RList CppOption), (RList String, RList String))
buildCppArgs :: CppArgs -> [String]
buildCppArgs (CppArgs options extra_args _tmpdir input_file output_file_opt) = do
(concatMap tOption options)
++ outputFileOpt
++ ["-E", input_file]
++ extra_args
where
tOption (IncludeDir incl) = ["-I",incl]
tOption (Define key value) = [ "-D" ++ key ++ (if null value then "" else "=" ++ value) ]
tOption (Undefine key) = [ "-U" ++ key ]
tOption (IncludeFile f) = [ "-include", f]
outputFileOpt = concat [ ["-o",output_file] | output_file <- maybeToList output_file_opt ]
|
ian-ross/language-c
|
src/Language/C/System/GCC.hs
|
bsd-3-clause
| 4,824 | 0 | 17 | 1,430 | 1,267 | 688 | 579 | 68 | 13 |
{-# LANGUAGE CPP, FlexibleInstances, BangPatterns, ViewPatterns #-}
#if __GLASGOW_HASKELL__ >= 700
{-# OPTIONS -fllvm #-}
#endif
module Data.TrieMap.RadixTrie.Split () where
import Data.TrieMap.RadixTrie.Base
#define V(f) f (VVector) (k)
#define U(f) f (PVector) (Word)
#define LOC(args) !(locView -> Loc args)
#define DEEP(args) !(pView -> Deep args)
instance TrieKey k => Splittable (TrieMap (VVector k)) where
before (Hole LOC(_ _ path)) = Radix (beforeE path)
after (Hole LOC(ks ts path)) = case cEdge ks Nothing ts of
Nothing -> Radix (afterE path)
Just e -> Radix (Just (afterWithE e path))
beforeWith a (Hole LOC(ks _ path)) = Radix (Just (beforeWithE (singletonEdge ks a) path))
afterWith a (Hole LOC(ks ts path)) = Radix (Just (afterWithE (edge ks (Just a) ts) path))
instance Splittable (TrieMap (PVector Word)) where
before (WHole LOC(_ _ path)) = WRadix (beforeE path)
after (WHole LOC(ks ts path)) = case cEdge ks Nothing ts of
Nothing -> WRadix (afterE path)
Just e -> WRadix (Just (afterWithE e path))
beforeWith a (WHole LOC(ks _ path)) = WRadix (Just (beforeWithE (singletonEdge ks a) path))
afterWith a (WHole LOC(ks ts path)) = WRadix (Just (afterWithE (edge ks (Just a) ts) path))
{-# SPECIALIZE beforeE ::
(TrieKey k, Sized a) => V(Path) a -> V(MEdge) a,
Sized a => U(Path) a -> U(MEdge) a #-}
{-# SPECIALIZE afterE ::
(TrieKey k, Sized a) => V(Path) a -> V(MEdge) a,
Sized a => U(Path) a -> U(MEdge) a #-}
{-# SPECIALIZE beforeWithE ::
(TrieKey k, Sized a) => V(Edge) a -> V(Path) a -> V(Edge) a,
Sized a => U(Edge) a -> U(Path) a -> U(Edge) a #-}
{-# SPECIALIZE afterWithE ::
(TrieKey k, Sized a) => V(Edge) a -> V(Path) a -> V(Edge) a,
Sized a => U(Edge) a -> U(Path) a -> U(Edge) a #-}
beforeE, afterE :: (Sized a, Label v k) => Path v k a -> MEdge v k a
beforeWithE, afterWithE :: (Sized a, Label v k) => Edge v k a -> Path v k a -> Edge v k a
beforeE DEEP(path ks v tHole) = case cEdge ks v (before tHole) of
Nothing -> beforeE path
Just e -> Just $ beforeWithE e path
beforeE _ = Nothing
beforeWithE e DEEP(path ks v tHole)
= beforeWithE (edge ks v (beforeWith e tHole)) path
beforeWithE e _ = e
afterE DEEP(path ks _ tHole) = case cEdge ks Nothing (after tHole) of
Nothing -> afterE path
Just e -> Just $ afterWithE e path
afterE _ = Nothing
afterWithE e DEEP(path ks _ tHole)
= afterWithE (edge ks Nothing (afterWith e tHole)) path
afterWithE e _ = e
|
lowasser/TrieMap
|
Data/TrieMap/RadixTrie/Split.hs
|
bsd-3-clause
| 2,493 | 38 | 13 | 542 | 881 | 445 | 436 | -1 | -1 |
module Ariadne.Types where
import Data.Generics.Zipper
type Query h x = Zipper h -> Maybe x
type Move h = Zipper h -> Maybe (Zipper h)
|
apsk/ariadne
|
src/Ariadne/Types.hs
|
bsd-3-clause
| 147 | 0 | 8 | 34 | 53 | 30 | 23 | 4 | 0 |
{-# LANGUAGE PackageImports #-}
-- import Prelude hiding (take)
import Control.Applicative
import "monads-tf" Control.Monad.Trans
import Data.Pipe
import Data.Pipe.Lazy
import Data.Pipe.List
import Data.Pipe.ByteString hiding (toLazy)
import qualified Data.Pipe.ByteString as PBS
import Data.Time
import System.IO
import System.IO.Unsafe
import qualified Data.ByteString.Char8 as BSC
import qualified Data.ByteString.Lazy as LBS
timePipe :: Pipe () UTCTime IO ()
timePipe = lift getCurrentTime >>= yield >> timePipe
{-
take :: Monad m => Int -> Pipe a a m ()
take 0 = return ()
take n = await >>= maybe (return ()) ((>> take (n - 1)) . yield)
-}
hGetLines :: Handle -> IO [String]
hGetLines h = unsafeInterleaveIO $ (:) <$> hGetLine h <*> hGetLines h
hGetLines' :: Handle -> IO [String]
hGetLines' h = hGetLines h >>=
(toLazy :: Pipe () String IO () -> IO [String]) . fromList
bsHGetLine :: Handle -> IO LBS.ByteString
bsHGetLine = PBS.toLazy . (fromHandleLn :: Handle -> Pipe () BSC.ByteString IO ())
|
YoshikuniJujo/simple-pipe
|
try/testToLazy.hs
|
bsd-3-clause
| 1,012 | 2 | 10 | 165 | 280 | 159 | 121 | 22 | 1 |
import Control.Monad (replicateM)
import Data.Bits
import qualified Data.ByteString.Lazy as B
import Data.Char (ord)
import Data.List (nub, (\\))
import Data.Maybe
import Data.Word
import qualified Data.UUID as U
import qualified Data.UUID.V1 as U
import qualified Data.UUID.V3 as U3
import qualified Data.UUID.V5 as U5
import Test.HUnit
import Test.QuickCheck hiding ((.&.))
import System.IO
import System.Random
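-- A version-v UUID is accepted here when its encoding is 16 bytes long,
-- octet 8 carries the RFC 4122 variant bits (10xx....), and the high nibble
-- of octet 6 equals the requested version number.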
isValidVersion :: Int -> U.UUID -> Bool
isValidVersion v u = lenOK && variantOK && versionOK
where bs = U.toByteString u
lenOK = B.length bs == 16
variantOK = (B.index bs 8) .&. 0xc0 == 0x80
versionOK = (B.index bs 6) .&. 0xf0 == fromIntegral (v `shiftL` 4)
instance Arbitrary U.UUID where
-- the UUID random instance ignores bounds
arbitrary = choose (U.nil, U.nil)
-- instance Arbitrary Word8 where
-- arbitrary = (fromIntegral . fst . randomR (0,255::Int)) `fmap` rand
test_null :: Test
test_null = TestList [
"nil is null" ~: assertBool "" (U.null U.nil),
"namespaceDNS is not null" ~: assertBool "" (not $ U.null U3.namespaceDNS)
]
test_nil :: Test
test_nil = TestList [
"nil string" ~: U.toString U.nil @?= "00000000-0000-0000-0000-000000000000",
"nil bytes" ~: U.toByteString U.nil @?= B.pack (replicate 16 0)
]
test_conv :: Test
test_conv = TestList [
"conv bytes to string" ~:
maybe "" (U.toString) (U.fromByteString b16) @?= s16,
"conv string to bytes" ~:
maybe B.empty (U.toByteString) (U.fromString s16) @?= b16
]
where b16 = B.pack [1..16]
s16 = "01020304-0506-0708-090a-0b0c0d0e0f10"
test_v1 :: [Maybe U.UUID] -> Test
test_v1 v1s = TestList [
"V1 unique" ~: nub (v1s \\ nub v1s) @?= [],
"V1 not null" ~: TestList $ map (testUUID (not . U.null)) v1s,
"V1 valid" ~: TestList $ map (testUUID (isValidVersion 1)) v1s
]
where testUUID :: (U.UUID -> Bool) -> Maybe U.UUID -> Test
testUUID p u = maybe False p u ~? show u
test_v3 :: Test
test_v3 = TestList [
"V3 computation" ~:
U3.generateNamed U3.namespaceDNS name @?= uV3
]
where name = map (fromIntegral . ord) "www.widgets.com" :: [Word8]
uV3 = fromJust $ U.fromString "3d813cbb-47fb-32ba-91df-831e1593ac29"
test_v5 :: Test
test_v5 = TestList [
"V5 computation" ~:
U5.generateNamed U5.namespaceDNS name @?= uV5
]
where name = map (fromIntegral . ord) "www.widgets.com" :: [Word8]
uV5 = fromJust $ U.fromString "21f7f8de-8051-5b89-8680-0195ef798b6a"
prop_stringRoundTrip :: Property
prop_stringRoundTrip = label "String round trip" stringRoundTrip
where stringRoundTrip :: U.UUID -> Bool
stringRoundTrip u = maybe False (== u) $ U.fromString (U.toString u)
prop_byteStringRoundTrip :: Property
prop_byteStringRoundTrip = label "ByteString round trip" byteStringRoundTrip
where byteStringRoundTrip :: U.UUID -> Bool
byteStringRoundTrip u = maybe False (== u)
$ U.fromByteString (U.toByteString u)
prop_stringLength :: Property
prop_stringLength = label "String length" stringLength
where stringLength :: U.UUID -> Bool
stringLength u = length (U.toString u) == 36
prop_byteStringLength :: Property
prop_byteStringLength = label "ByteString length" byteStringLength
where byteStringLength :: U.UUID -> Bool
byteStringLength u = B.length (U.toByteString u) == 16
prop_randomsDiffer :: Property
prop_randomsDiffer = label "Randoms differ" randomsDiffer
where randomsDiffer :: (U.UUID, U.UUID) -> Bool
randomsDiffer (u1, u2) = u1 /= u2
prop_randomNotNull :: Property
prop_randomNotNull = label "Random not null" randomNotNull
where randomNotNull :: U.UUID -> Bool
randomNotNull = not. U.null
prop_randomsValid :: Property
prop_randomsValid = label "Random valid" randomsValid
where randomsValid :: U.UUID -> Bool
randomsValid = isValidVersion 4
prop_v3NotNull :: Property
prop_v3NotNull = label "V3 not null" v3NotNull
where v3NotNull :: [Word8] -> Bool
v3NotNull = not . U.null . U3.generateNamed U3.namespaceDNS
prop_v3Valid :: Property
prop_v3Valid = label "V3 valid" v3Valid
where v3Valid :: [Word8] -> Bool
v3Valid = isValidVersion 3 . U3.generateNamed U3.namespaceDNS
prop_v5NotNull :: Property
prop_v5NotNull = label "V5 not null" v5NotNull
where v5NotNull :: [Word8] -> Bool
v5NotNull = not . U.null . U5.generateNamed U5.namespaceDNS
prop_v5Valid :: Property
prop_v5Valid = label "V5 valid" v5Valid
where v5Valid :: [Word8] -> Bool
v5Valid = isValidVersion 5 . U5.generateNamed U5.namespaceDNS
prop_readShowRoundTrip :: Property
prop_readShowRoundTrip = label "Read/Show round-trip" prop
where -- we're using 'Maybe UUID' to add a bit of
-- real-world complexity.
prop :: U.UUID -> Bool
prop uuid = read (show (Just uuid)) == Just uuid
main :: IO ()
main = do
v1s <- replicateM 100 U.nextUUID
runTestText (putTextToHandle stderr False) (TestList [
test_null,
test_nil,
test_conv,
test_v1 v1s,
test_v3,
test_v5
])
mapM_ quickCheck $ [
prop_stringRoundTrip,
prop_readShowRoundTrip,
prop_byteStringRoundTrip,
prop_stringLength,
prop_byteStringLength,
prop_randomsDiffer,
prop_randomNotNull,
prop_randomsValid,
prop_v3NotNull,
prop_v3Valid,
prop_v5NotNull,
prop_v5Valid
]
|
alphaHeavy/uuid
|
tests/TestUUID.hs
|
bsd-3-clause
| 5,592 | 0 | 13 | 1,333 | 1,536 | 822 | 714 | 130 | 1 |
area :: Double -> Double
area x = c + s
where
c = (x / 2) ^ 2 * pi / 2
s = x ^ 2
|
YoshikuniJujo/funpaala
|
samples/12_syntaxes/where.hs
|
bsd-3-clause
| 84 | 0 | 11 | 31 | 57 | 30 | 27 | 4 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Snap.Utilities.Configuration.Types (
ConfigPair
) where
import qualified Data.Configurator.Types as CT
import Data.Text (Text)
------------------------------------------------------------------------------
-- | Look up a value.
type ConfigPair = (Text, CT.Value)
|
anchor/snap-configuration-utilities
|
lib/Snap/Utilities/Configuration/Types.hs
|
bsd-3-clause
| 309 | 2 | 6 | 34 | 51 | 35 | 16 | 6 | 0 |
{-# LANGUAGE ScopedTypeVariables #-}
-----------------------------------------------------------------------------
-- |
--
-- This is basically a semaphore. I made this because 'sem_wait(3)'
-- terminates the application without throwing an exception and there
-- is no chance to clean up resources. There is also a version implemented
-- with real semaphores, in Development.Redo.TokenSem, which works well when
-- redo uses delegate processes rather than threads.
--
-----------------------------------------------------------------------------
module Development.Redo.TokenServer (createProcessorTokens,
destroyProcessorTokens,
acquireProcessorToken,
releaseProcessorToken,
withProcessorToken,
withoutProcessorToken
) where
import qualified Development.Redo.Config as C
import Control.Applicative
import Control.Concurrent
import Control.Exception
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Trans.State.Lazy
import qualified Data.PQueue.Prio.Max as Q
import Network.Socket
import System.Directory
import System.FilePath.Posix
import System.IO.Unsafe
import System.Posix.Process
import System.Posix.Types
createProcessorTokens :: Int -> IO ()
createProcessorTokens n = when (C.parallelBuild > 1) $ do
_ <- forkOS $ server n
return ()
{-# NOINLINE clientSocket #-}
clientSocket :: Socket
clientSocket = unsafePerformIO $ do
s <- socket AF_UNIX Datagram defaultProtocol
bind s clientAddr
return s
sendToServer :: String -> IO ()
sendToServer msg = do
_ <- sendTo clientSocket msg serverAddr
return ()
recvFromServer :: String -> IO String
recvFromServer msg = do
_ <- sendTo clientSocket msg serverAddr
(msg', _, _) <- recvFrom clientSocket 10
return msg'
destroyProcessorTokens :: IO ()
destroyProcessorTokens = when (C.parallelBuild > 1) $ do
sendToServer "shutdown"
takeMVar mvar
removeDirectoryRecursive socketDirPath `catch` (\(_::IOException) -> return ())
acquireProcessorToken :: IO ()
acquireProcessorToken = when (C.parallelBuild > 1) $ do
msg <- recvFromServer ('w':show C.callDepth) -- send wait using call depth as a priority.
assert (msg == "ack") $ return ()
releaseProcessorToken :: IO ()
releaseProcessorToken = when (C.parallelBuild > 1) $ sendToServer "post"
withProcessorToken :: IO a -> IO a
withProcessorToken = bracket_ acquireProcessorToken releaseProcessorToken
withoutProcessorToken :: IO a -> IO a
withoutProcessorToken = bracket_ releaseProcessorToken acquireProcessorToken
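-- A rough usage sketch ('buildTarget', 'buildLocally' and 'waitForSubBuilds'
-- are hypothetical callers, not part of this module): hold a token while
-- doing real work, and hand it back while merely waiting on sub-builds.
--
-- > buildTarget target = withProcessorToken $ do
-- >   buildLocally target
-- >   withoutProcessorToken (waitForSubBuilds target)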
{-# NOINLINE processID #-}
processID :: ProcessID
processID = unsafePerformIO getProcessID
-- | This is the directory where temporary files are created.
{-# NOINLINE socketDirPath #-}
socketDirPath :: FilePath
socketDirPath = unsafePerformIO $ do
let p = C.tempDirPath </> "uds"
createDirectoryIfMissing True p
return p
serverName :: String
serverName = socketDirPath </> "token_" ++ C.sessionID
clientName :: String
clientName = serverName ++ "_" ++ show processID
serverAddr :: SockAddr
serverAddr = SockAddrUnix serverName
clientAddr :: SockAddr
clientAddr = SockAddrUnix clientName
{-# NOINLINE mvar #-}
mvar :: MVar ()
mvar = unsafePerformIO newEmptyMVar
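-- Protocol handled by the loop below: a client sends "w<depth>" to wait for
-- a token (requests queue in a max-priority queue keyed on call depth),
-- "post" to hand a token back, and anything else ("shutdown" in practice)
-- makes the loop ack every queued client and terminate.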
server :: Int -> IO ()
server value = bracket enter exit $ \s -> do
(_, (v, _)) <- runStateT (go s) (value, Q.empty)
liftIO . C.printDebug $ "final tokens = " ++ show v
return ()
where
enter = do
s <- liftIO $ socket AF_UNIX Datagram defaultProtocol
liftIO $ bind s serverAddr
liftIO $ C.printDebug "token server start"
return s
exit s = do
liftIO $ close s
liftIO $ C.printDebug "token server stop"
putMVar mvar ()
go s = do
(msg, _, client) <- liftIO $ recvFrom s 128 `onException` C.printDebug "error on recvFrom"
case msg of
('w':priority) -> do
v <- countTokens
if v == 0
then enqueueRequest (read priority, client)
else do _ <- liftIO $ ack s client
decreaseToken
go s
"post" -> do
addr' <- dequeueRequest
case addr' of
Nothing -> increaseToken
Just addr -> do
when C.debugMode $ do
increaseToken
decreaseToken
_ <- liftIO $ ack s addr
return ()
go s
_ -> get >>= mapM_ (ack s) . Q.elems . snd
ack s client = liftIO $ sendTo s "ack" client `catch` \(_::IOException) -> return (-1)
type ClientQueue = Q.MaxPQueue Int SockAddr
countTokens :: StateT (Int, ClientQueue) IO Int
countTokens = fst <$> get
decreaseToken :: StateT (Int, ClientQueue) IO ()
decreaseToken = do
(v, q) <- get
put (v - 1, q)
liftIO . C.printDebug $ "tokens = " ++ show (v - 1) ++ "(-)"
increaseToken :: StateT (Int, ClientQueue) IO ()
increaseToken = do
(v, q) <- get
put (v + 1, q)
liftIO . C.printDebug $ "tokens = " ++ show (v + 1) ++ "(+)"
enqueueRequest :: (Int, SockAddr) -> StateT (Int, ClientQueue) IO ()
enqueueRequest (p, r) = do
(v, q) <- get
put (v, Q.insert p r q)
dequeueRequest :: StateT (Int, ClientQueue) IO (Maybe SockAddr)
dequeueRequest = do
(v, q) <- get
if Q.null q
then return Nothing
else do let (_, r) = Q.findMax q
put (v, Q.deleteMax q)
return $ Just r
|
comatose/redo
|
src/Development/Redo/TokenServer.hs
|
bsd-3-clause
| 5,462 | 0 | 21 | 1,325 | 1,591 | 810 | 781 | 137 | 5 |
module Karamaan.Opaleye.Applicative where
import Control.Applicative (Applicative, (<$>), (<*>), pure)
-- vv TODO: don't want to have to import all those explicitly. What to do?
import Data.Profunctor.Product.Flatten
-- vv and these
import Data.Profunctor.Product.Tuples
liftA0 :: Applicative f => r -> f r
liftA0 = pure
liftA1 :: Applicative f => (a1 -> r) -> (f a1 -> f r)
liftA1 f x1 = f <$> x1
liftA2 :: Applicative f => (a1 -> a2 -> r)
-> (f a1 -> f a2 -> f r)
liftA2 f x1 x2 = f <$> x1 <*> x2
liftA3 :: Applicative f => (a1 -> a2 -> a3 -> r)
-> (f a1 -> f a2 -> f a3 -> f r)
liftA3 f x1 x2 x3 = f <$> x1 <*> x2 <*> x3
liftA4 :: Applicative f => (a1 -> a2 -> a3 -> a4 -> r)
-> (f a1 -> f a2 -> f a3 -> f a4 -> f r)
liftA4 f x1 x2 x3 x4 = f <$> x1 <*> x2 <*> x3 <*> x4
liftA5 :: Applicative f => (a1 -> a2 -> a3 -> a4 -> a5 -> r)
-> (f a1 -> f a2 -> f a3 -> f a4 -> f a5 -> f r)
liftA5 f x1 x2 x3 x4 x5 = f <$> x1 <*> x2 <*> x3 <*> x4 <*> x5
liftA6 :: Applicative f => (a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> r)
-> (f a1 -> f a2 -> f a3 -> f a4 -> f a5 -> f a6 -> f r)
liftA6 f x1 x2 x3 x4 x5 x6 = f <$> x1 <*> x2 <*> x3 <*> x4 <*> x5 <*> x6
liftA7 :: Applicative f => (a1 -> a2 -> a3 -> a4 -> a5 -> a6
-> a7 -> r)
-> (f a1 -> f a2 -> f a3 -> f a4 -> f a5 -> f a6
-> f a7 -> f r)
liftA7 f x1 x2 x3 x4 x5 x6 x7 = f <$> x1 <*> x2 <*> x3 <*> x4 <*> x5 <*> x6
<*> x7
liftA8 :: Applicative f => (a1 -> a2 -> a3 -> a4 -> a5 -> a6
-> a7 -> a8 -> r)
-> (f a1 -> f a2 -> f a3 -> f a4 -> f a5 -> f a6
-> f a7 -> f a8 -> f r)
liftA8 f x1 x2 x3 x4 x5 x6 x7 x8 = f <$> x1 <*> x2 <*> x3 <*> x4 <*> x5 <*> x6
<*> x7 <*> x8
liftA9 :: Applicative f => (a1 -> a2 -> a3 -> a4 -> a5 -> a6
-> a7 -> a8 -> a9 -> r)
-> (f a1 -> f a2 -> f a3 -> f a4 -> f a5 -> f a6
-> f a7 -> f a8 -> f a9 -> f r)
liftA9 f x1 x2 x3 x4 x5 x6 x7 x8 x9
= f <$> x1 <*> x2 <*> x3 <*> x4 <*> x5 <*> x6
<*> x7 <*> x8 <*> x9
liftA10 :: Applicative f => (a1 -> a2 -> a3 -> a4 -> a5 -> a6
-> a7 -> a8 -> a9 -> a10 -> r)
-> (f a1 -> f a2 -> f a3 -> f a4 -> f a5 -> f a6
-> f a7 -> f a8 -> f a9 -> f a10 -> f r)
liftA10 f x1 x2 x3 x4 x5 x6 x7 x8 x9 x10
= f <$> x1 <*> x2 <*> x3 <*> x4 <*> x5 <*> x6
<*> x7 <*> x8 <*> x9 <*> x10
colspecApp :: Applicative f => (b -> a) -> (a -> b) -> f a -> f b
colspecApp _ = fmap
aT1 :: Applicative f => T1 (f a1) -> f (T1 a1)
aT1 = id
aT2 :: Applicative f => T2 (f a1) (f a2) -> f (T2 a1 a2)
aT2 (c, c') = (,) <$> c <*> c'
chain :: Applicative f => (t -> f b) -> (f a, t) -> f (a, b)
chain rest (a, as) = aT2 (a, rest as)
aT3 :: Applicative f => T3 (f a1) (f a2) (f a3) -> f (T3 a1 a2 a3)
aT3 = chain aT2
aT4 :: Applicative f => T4 (f a1) (f a2) (f a3) (f a4)
-> f (T4 a1 a2 a3 a4)
aT4 = chain aT3
aT5 :: Applicative f => T5 (f a1) (f a2) (f a3) (f a4) (f a5)
-> f (T5 a1 a2 a3 a4 a5)
aT5 = chain aT4
aT6 :: Applicative f => T6 (f a1) (f a2) (f a3) (f a4) (f a5)
(f a6)
-> f (T6 a1 a2 a3 a4 a5 a6)
aT6 = chain aT5
aT7 :: Applicative f => T7 (f a1) (f a2) (f a3) (f a4) (f a5)
(f a6) (f a7)
-> f (T7 a1 a2 a3 a4 a5 a6 a7)
aT7 = chain aT6
aT8 :: Applicative f => T8 (f a1) (f a2) (f a3) (f a4) (f a5)
(f a6) (f a7) (f a8)
-> f (T8 a1 a2 a3 a4 a5 a6 a7 a8)
aT8 = chain aT7
aT9 :: Applicative f => T9 (f a1) (f a2) (f a3) (f a4) (f a5)
(f a6) (f a7) (f a8) (f a9)
-> f (T9 a1 a2 a3 a4 a5 a6 a7 a8 a9)
aT9 = chain aT8
aT10 :: Applicative f => T10 (f a1) (f a2) (f a3) (f a4) (f a5)
(f a6) (f a7) (f a8) (f a9) (f a10)
-> f (T10 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10)
aT10 = chain aT9
convert :: Applicative f => (b -> a1) -> (a -> b1) -> (a1 -> b) -> (b1 -> f a1)
-> a -> f b
convert u u' f c = colspecApp u f . c . u'
a1 :: Applicative f => f a -> f a
a1 = convert unflatten1 unflatten1 flatten1 aT1
a2 :: Applicative f => (f a, f b) -> f (a, b)
a2 = convert unflatten2 unflatten2 flatten2 aT2
a3 :: Applicative f => (f a, f b, f a3) -> f (a, b, a3)
a3 = convert unflatten3 unflatten3 flatten3 aT3
a4 :: Applicative f => (f a, f b, f a3, f a4)
-> f (a, b, a3, a4)
a4 = convert unflatten4 unflatten4 flatten4 aT4
a5 :: Applicative f => (f a, f b, f a3, f a4, f a5)
-> f (a, b, a3, a4, a5)
a5 = convert unflatten5 unflatten5 flatten5 aT5
a6 :: Applicative f => (f a, f b, f a3, f a4, f a5, f a6)
-> f (a, b, a3, a4, a5, a6)
a6 = convert unflatten6 unflatten6 flatten6 aT6
a7 :: Applicative f => (f a, f b, f a3, f a4, f a5, f a6,
f a7)
-> f (a, b, a3, a4, a5, a6, a7)
a7 = convert unflatten7 unflatten7 flatten7 aT7
a8 :: Applicative f => (f a, f b, f a3, f a4, f a5, f a6,
f a7, f a8)
-> f (a, b, a3, a4, a5, a6, a7, a8)
a8 = convert unflatten8 unflatten8 flatten8 aT8
a9 :: Applicative f => (f a1, f a2, f a3, f a4, f a5,f a6,
f a7, f a8, f a9)
-> f (a1, a2, a3, a4, a5, a6, a7, a8, a9)
a9 = convert unflatten9 unflatten9 flatten9 aT9
a10 :: Applicative f => (f a1, f a2, f a3, f a4,f a5,f a6,
f a7, f a8, f a9, f a10)
-> f (a1, a2, a3, a4, a5, a6, a7, a8, a9, a10)
a10 = convert unflatten10 unflatten10 flatten10 aT10
|
dbp/karamaan-opaleye
|
Karamaan/Opaleye/Applicative.hs
|
bsd-3-clause
| 5,491 | 0 | 18 | 1,901 | 3,259 | 1,633 | 1,626 | 119 | 1 |
{-- snippet chanExample --}
import Control.Concurrent
import Control.Concurrent.Chan
chanExample = do
ch <- newChan
forkIO $ do
writeChan ch "hello world"
writeChan ch "now i quit"
readChan ch >>= print
readChan ch >>= print
{-- /snippet chanExample --}
|
binesiyu/ifl
|
examples/ch24/Chan.hs
|
mit
| 271 | 0 | 10 | 55 | 69 | 32 | 37 | 9 | 1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Haddock.Doc (
docAppend
, docParagraph
, combineDocumentation
) where
import Data.Maybe
import Data.Monoid
import Haddock.Types
import Data.Char (isSpace)
-- We put it here so that we can avoid a circular import;
-- anything relevant imports this module anyway.
instance Monoid (Doc id) where
mempty = DocEmpty
mappend = docAppend
combineDocumentation :: Documentation name -> Maybe (Doc name)
combineDocumentation (Documentation Nothing Nothing) = Nothing
combineDocumentation (Documentation mDoc mWarning) = Just (fromMaybe mempty mWarning `mappend` fromMaybe mempty mDoc)
docAppend :: Doc id -> Doc id -> Doc id
docAppend (DocDefList ds1) (DocDefList ds2) = DocDefList (ds1++ds2)
docAppend (DocDefList ds1) (DocAppend (DocDefList ds2) d) = DocAppend (DocDefList (ds1++ds2)) d
docAppend (DocOrderedList ds1) (DocOrderedList ds2) = DocOrderedList (ds1 ++ ds2)
docAppend (DocUnorderedList ds1) (DocUnorderedList ds2) = DocUnorderedList (ds1 ++ ds2)
docAppend DocEmpty d = d
docAppend d DocEmpty = d
docAppend (DocString s1) (DocString s2) = DocString (s1 ++ s2)
docAppend (DocAppend d (DocString s1)) (DocString s2) = DocAppend d (DocString (s1 ++ s2))
docAppend (DocString s1) (DocAppend (DocString s2) d) = DocAppend (DocString (s1 ++ s2)) d
docAppend d1 d2 = DocAppend d1 d2
-- again to make parsing easier - we spot a paragraph whose only item
-- is a DocMonospaced and make it into a DocCodeBlock
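-- For example (informal sketch): docParagraph (DocMonospaced (DocString "foo "))
-- yields DocCodeBlock (DocString "foo"), with the trailing blank stripped by
-- docCodeBlock below.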
docParagraph :: Doc id -> Doc id
docParagraph (DocMonospaced p)
= DocCodeBlock (docCodeBlock p)
docParagraph (DocAppend (DocString s1) (DocMonospaced p))
| all isSpace s1
= DocCodeBlock (docCodeBlock p)
docParagraph (DocAppend (DocString s1)
(DocAppend (DocMonospaced p) (DocString s2)))
| all isSpace s1 && all isSpace s2
= DocCodeBlock (docCodeBlock p)
docParagraph (DocAppend (DocMonospaced p) (DocString s2))
| all isSpace s2
= DocCodeBlock (docCodeBlock p)
docParagraph p
= DocParagraph p
-- Drop trailing whitespace from @..@ code blocks. Otherwise this:
--
-- -- @
-- -- foo
-- -- @
--
-- turns into (DocCodeBlock "\nfoo\n ") which when rendered in HTML
-- gives an extra vertical space after the code block. The single space
-- on the final line seems to trigger the extra vertical space.
--
docCodeBlock :: Doc id -> Doc id
docCodeBlock (DocString s)
= DocString (reverse $ dropWhile (`elem` " \t") $ reverse s)
docCodeBlock (DocAppend l r)
= DocAppend l (docCodeBlock r)
docCodeBlock d = d
|
jwiegley/ghc-release
|
utils/haddock/src/Haddock/Doc.hs
|
gpl-3.0
| 2,496 | 0 | 11 | 421 | 791 | 399 | 392 | 47 | 1 |
{-# LANGUAGE PatternGuards, ViewPatterns #-}
{-
map f [] = []
map f (x:xs) = f x : map f xs
foldr f z [] = z
foldr f z (x:xs) = f x (foldr f z xs)
foldl f z [] = z
foldl f z (x:xs) = foldl f (f z x) xs
-}
{-
<TEST>
f (x:xs) = negate x + f xs ; f [] = 0 -- f xs = foldr ((+) . negate) 0 xs
f (x:xs) = x + 1 : f xs ; f [] = [] -- f xs = map (+ 1) xs
f z (x:xs) = f (z*x) xs ; f z [] = z -- f z xs = foldl (*) z xs
f a (x:xs) b = x + a + b : f a xs b ; f a [] b = [] -- f a xs b = map (\ x -> x + a + b) xs
f [] a = return a ; f (x:xs) a = a + x >>= \fax -> f xs fax -- f xs a = foldM (+) a xs
foos [] x = x; foos (y:ys) x = foo y $ foos ys x -- foos ys x = foldr foo x ys
f [] y = y; f (x:xs) y = f xs $ g x y -- f xs y = foldl (flip g) y xs
f [] y = y; f (x : xs) y = let z = g x y in f xs z -- f xs y = foldl (flip g) y xs
f [] y = y; f (x:xs) y = f xs (f xs z)
</TEST>
-}
module Hint.ListRec(listRecHint) where
import Hint.Type
import Util
import Hint.Util
import Data.List
import Data.Maybe
import Data.Ord
import Data.Either
import Control.Monad
listRecHint :: DeclHint
listRecHint _ _ = concatMap f . universe
where
f o = maybeToList $ do
let x = o
(x, addCase) <- findCase x
(use,severity,x) <- matchListRec x
let y = addCase x
guard $ recursiveStr `notElem` varss y
return $ idea severity ("Use " ++ use) o y
recursiveStr = "_recursive_"
recursive = toNamed recursiveStr
-- recursion parameters, nil-case, (x,xs,cons-case)
-- for cons-case delete any recursive calls with xs from them
-- any recursive calls are marked "_recursive_"
data ListCase = ListCase [String] Exp_ (String,String,Exp_)
deriving Show
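-- A rough illustration (mirroring the map TEST case above): for
--   f (x:xs) = x + 1 : f xs ; f [] = []
-- the analysis corresponds, informally, to a ListCase with no extra
-- parameters, a nil-case of [], and a cons-case ("x","xs", x + 1 : _recursive_)
-- in which the recursive call has been replaced by the marker.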
data BList = BNil | BCons String String
deriving (Eq,Ord,Show)
-- function name, parameters, list-position, list-type, body (unmodified)
data Branch = Branch String [String] Int BList Exp_
deriving Show
---------------------------------------------------------------------
-- MATCH THE RECURSION
matchListRec :: ListCase -> Maybe (String,Severity,Exp_)
matchListRec o@(ListCase vs nil (x,xs,cons))
| [] <- vs, nil ~= "[]", InfixApp _ lhs c rhs <- cons, opExp c ~= ":"
, fromParen rhs =~= recursive, xs `notElem` vars lhs
= Just $ (,,) "map" Error $ appsBracket
[toNamed "map", niceLambda [x] lhs, toNamed xs]
| [] <- vs, App2 op lhs rhs <- view cons
, vars op `disjoint` [x,xs]
, fromParen rhs == recursive, xs `notElem` vars lhs
= Just $ (,,) "foldr" Warning $ appsBracket
[toNamed "foldr", niceLambda [x] $ appsBracket [op,lhs], nil, toNamed xs]
| [v] <- vs, view nil == Var_ v, App _ r lhs <- cons, r =~= recursive
, xs `notElem` vars lhs
= Just $ (,,) "foldl" Warning $ appsBracket
[toNamed "foldl", niceLambda [v,x] lhs, toNamed v, toNamed xs]
| [v] <- vs, App _ ret res <- nil, ret ~= "return", res ~= "()" || view res == Var_ v
, [Generator _ (view -> PVar_ b1) e, Qualifier _ (fromParen -> App _ r (view -> Var_ b2))] <- asDo cons
, b1 == b2, r == recursive, xs `notElem` vars e
, name <- "foldM" ++ ['_'|res ~= "()"]
= Just $ (,,) name Warning $ appsBracket
[toNamed name, niceLambda [v,x] e, toNamed v, toNamed xs]
| otherwise = Nothing
-- Very limited attempt to convert >>= to do, only useful for foldM/foldM_
asDo :: Exp_ -> [Stmt S]
asDo (view -> App2 bind lhs (Lambda _ [v] rhs)) = [Generator an v lhs, Qualifier an rhs]
asDo (Do _ x) = x
asDo x = [Qualifier an x]
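-- Rough example (taken from the foldM TEST case above): for
--   a + x >>= \fax -> f xs fax
-- asDo yields, informally, [Generator fax (a + x), Qualifier (f xs fax)].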
---------------------------------------------------------------------
-- FIND THE CASE ANALYSIS
findCase :: Decl_ -> Maybe (ListCase, Exp_ -> Decl_)
findCase x = do
FunBind _ [x1,x2] <- return x
Branch name1 ps1 p1 c1 b1 <- findBranch x1
Branch name2 ps2 p2 c2 b2 <- findBranch x2
guard (name1 == name2 && ps1 == ps2 && p1 == p2)
[(BNil, b1), (BCons x xs, b2)] <- return $ sortBy (comparing fst) [(c1,b1), (c2,b2)]
b2 <- transformAppsM (delCons name1 p1 xs) b2
(ps,b2) <- return $ eliminateArgs ps1 b2
let ps12 = let (a,b) = splitAt p1 ps1 in map toNamed $ a ++ xs : b
return (ListCase ps b1 (x,xs,b2)
,\e -> FunBind an [Match an (toNamed name1) ps12 (UnGuardedRhs an e) Nothing])
delCons :: String -> Int -> String -> Exp_ -> Maybe Exp_
delCons func pos var (fromApps -> (view -> Var_ x):xs) | func == x = do
(pre, (view -> Var_ v):post) <- return $ splitAt pos xs
guard $ v == var
return $ apps $ recursive : pre ++ post
delCons _ _ _ x = return x
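-- Drop parameters that every recursive call passes through unchanged, both
-- from the parameter list and from the marked recursive calls themselves.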
eliminateArgs :: [String] -> Exp_ -> ([String], Exp_)
eliminateArgs ps cons = (remove ps, transform f cons)
where
args = [zs | z:zs <- map fromApps $ universeApps cons, z =~= recursive]
elim = [all (\xs -> length xs > i && view (xs !! i) == Var_ p) args | (i,p) <- zip [0..] ps] ++ repeat False
remove = concat . zipWith (\b x -> [x | not b]) elim
f (fromApps -> x:xs) | x == recursive = apps $ x : remove xs
f x = x
---------------------------------------------------------------------
-- FIND A BRANCH
findBranch :: Match S -> Maybe Branch
findBranch x = do
Match _ name ps (UnGuardedRhs _ bod) Nothing <- return x
(a,b,c) <- findPat ps
return $ Branch (fromNamed name) a b c $ simplifyExp bod
findPat :: [Pat_] -> Maybe ([String], Int, BList)
findPat ps = do
ps <- mapM readPat ps
[i] <- return $ findIndices isRight_ ps
let (left,[right]) = partitionEithers ps
return (left, i, right)
readPat :: Pat_ -> Maybe (Either String BList)
readPat (view -> PVar_ x) = Just $ Left x
readPat (PParen _ (PInfixApp _ (view -> PVar_ x) (Special _ Cons{}) (view -> PVar_ xs))) = Just $ Right $ BCons x xs
readPat (PList _ []) = Just $ Right BNil
readPat _ = Nothing
|
bergmark/hlint
|
src/Hint/ListRec.hs
|
bsd-3-clause
| 5,799 | 9 | 17 | 1,503 | 2,095 | 1,076 | 1,019 | 94 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
module Ros.Nav_msgs.Path where
import qualified Prelude as P
import Prelude ((.), (+), (*))
import qualified Data.Typeable as T
import Control.Applicative
import Ros.Internal.RosBinary
import Ros.Internal.Msg.MsgInfo
import qualified GHC.Generics as G
import qualified Data.Default.Generics as D
import Ros.Internal.Msg.HeaderSupport
import qualified Data.Vector.Storable as V
import qualified Ros.Geometry_msgs.PoseStamped as PoseStamped
import qualified Ros.Std_msgs.Header as Header
import Lens.Family.TH (makeLenses)
import Lens.Family (view, set)
data Path = Path { _header :: Header.Header
, _poses :: [PoseStamped.PoseStamped]
} deriving (P.Show, P.Eq, P.Ord, T.Typeable, G.Generic)
$(makeLenses ''Path)
instance RosBinary Path where
put obj' = put (_header obj') *> putList (_poses obj')
get = Path <$> get <*> getList
putMsg = putStampedMsg
instance HasHeader Path where
getSequence = view (header . Header.seq)
getFrame = view (header . Header.frame_id)
getStamp = view (header . Header.stamp)
setSequence = set (header . Header.seq)
instance MsgInfo Path where
sourceMD5 _ = "6227e2b7e9cce15051f669a5e197bbf7"
msgTypeName _ = "nav_msgs/Path"
instance D.Default Path
|
acowley/roshask
|
msgs/Nav_msgs/Ros/Nav_msgs/Path.hs
|
bsd-3-clause
| 1,378 | 1 | 10 | 228 | 376 | 224 | 152 | 36 | 0 |
{-# LANGUAGE UndecidableInstances #-}
module Compiler.Generics where
import Data.Foldable
import Data.Monoid
newtype FixA a f = In { out :: a f (FixA a f) }
newtype Id f a = Id { unId :: f a }
deriving Eq
instance Eq (a f (FixA a f)) => Eq (FixA a f) where
In x == In y = x == y
type Fix f = FixA Id f
foldA :: Functor f => (FixA a f -> f (FixA a f)) -> (f c -> c) -> FixA a f -> c
foldA un f = f . fmap (foldA un f) . un
foldId :: Functor f => (f c -> c) -> FixA Id f -> c
foldId = foldA (unId . out)
reduce :: (Monoid a, Foldable f) => (f (Fix f) -> a) -> Fix f -> a
reduce f = foldMap (\x -> f (unId (out x)) `mappend` reduce f x) . unId . out
fixpoint :: Eq a => (a -> a) -> a -> a
fixpoint f a = let fa = f a in if a == fa then a else fixpoint f fa
|
tomlokhorst/AwesomePrelude
|
src/Compiler/Generics.hs
|
bsd-3-clause
| 768 | 0 | 15 | 207 | 447 | 231 | 216 | -1 | -1 |
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-----------------------------------------------------------------------------
-- |
-- Module : Test.StateMachine.Types.GenSym
-- Copyright : (C) 2018, HERE Europe B.V.
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : Stevan Andjelkovic <[email protected]>
-- Stability : provisional
-- Portability : non-portable (GHC extensions)
--
-----------------------------------------------------------------------------
module Test.StateMachine.Types.GenSym
( GenSym
, runGenSym
, genSym
, Counter
, newCounter
)
where
import Control.Monad.State
(State, get, put, runState)
import Data.Typeable
(Typeable)
import Prelude
import Test.StateMachine.Types.References
------------------------------------------------------------------------
newtype GenSym a = GenSym (State Counter a)
deriving newtype (Functor, Applicative, Monad)
runGenSym :: GenSym a -> Counter -> (a, Counter)
runGenSym (GenSym m) = runState m
genSym :: Typeable a => GenSym (Reference a Symbolic)
genSym = GenSym $ do
Counter i <- get
put (Counter (i + 1))
return (Reference (Symbolic (Var i)))
newtype Counter = Counter Int
deriving stock Show
newCounter :: Counter
newCounter = Counter 0
|
advancedtelematic/quickcheck-state-machine-model
|
src/Test/StateMachine/Types/GenSym.hs
|
bsd-3-clause
| 1,399 | 0 | 14 | 290 | 263 | 151 | 112 | 27 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.LocalBuildInfo
-- Copyright : Isaac Jones 2003-2004
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- Once a package has been configured we have resolved conditionals and
-- dependencies, configured the compiler and other needed external programs.
-- The 'LocalBuildInfo' is used to hold all this information. It holds the
-- install dirs, the compiler, the exact package dependencies, the configured
-- programs, the package database to use and a bunch of miscellaneous configure
-- flags. It gets saved and reloaded from a file (@dist\/setup-config@). It gets
-- passed in to very many subsequent build actions.
{- All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Isaac Jones nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
module Distribution.Simple.LocalBuildInfo (
LocalBuildInfo(..),
externalPackageDeps,
inplacePackageId,
-- * Buildable package components
Component(..),
ComponentName(..),
showComponentName,
ComponentLocalBuildInfo(..),
LibraryName(..),
foldComponent,
componentName,
componentBuildInfo,
componentEnabled,
componentDisabledReason,
ComponentDisabledReason(..),
pkgComponents,
pkgEnabledComponents,
lookupComponent,
getComponent,
getComponentLocalBuildInfo,
allComponentsInBuildOrder,
componentsInBuildOrder,
checkComponentsCyclic,
withAllComponentsInBuildOrder,
withComponentsInBuildOrder,
withComponentsLBI,
withLibLBI,
withExeLBI,
withTestLBI,
-- * Installation directories
module Distribution.Simple.InstallDirs,
absoluteInstallDirs, prefixRelativeInstallDirs,
substPathTemplate
) where
import Distribution.Simple.InstallDirs hiding (absoluteInstallDirs,
prefixRelativeInstallDirs,
substPathTemplate, )
import qualified Distribution.Simple.InstallDirs as InstallDirs
import Distribution.Simple.Program (ProgramConfiguration)
import Distribution.PackageDescription
( PackageDescription(..), withLib, Library(libBuildInfo), withExe
, Executable(exeName, buildInfo), withTest, TestSuite(..)
, BuildInfo(buildable), Benchmark(..) )
import Distribution.Package
( PackageId, Package(..), InstalledPackageId(..) )
import Distribution.Simple.Compiler
( Compiler(..), PackageDBStack, OptimisationLevel )
import Distribution.Simple.PackageIndex
( PackageIndex )
import Distribution.Simple.Setup
( ConfigFlags )
import Distribution.Text
( display )
import Distribution.System
( Platform )
import Data.List (nub, find)
import Data.Graph
import Data.Tree (flatten)
import Data.Array ((!))
import Data.Maybe
-- | Data cached after configuration step. See also
-- 'Distribution.Simple.Setup.ConfigFlags'.
data LocalBuildInfo = LocalBuildInfo {
configFlags :: ConfigFlags,
-- ^ Options passed to the configuration step.
-- Needed to re-run configuration when .cabal is out of date
extraConfigArgs :: [String],
-- ^ Extra args on the command line for the configuration step.
-- Needed to re-run configuration when .cabal is out of date
installDirTemplates :: InstallDirTemplates,
        -- ^ The installation directories for the various different
-- kinds of files
--TODO: inplaceDirTemplates :: InstallDirs FilePath
compiler :: Compiler,
-- ^ The compiler we're building with
hostPlatform :: Platform,
-- ^ The platform we're building for
buildDir :: FilePath,
-- ^ Where to build the package.
--TODO: eliminate hugs's scratchDir, use builddir
scratchDir :: FilePath,
-- ^ Where to put the result of the Hugs build.
componentsConfigs :: [(ComponentName, ComponentLocalBuildInfo, [ComponentName])],
-- ^ All the components to build, ordered by topological sort, and with their dependencies
-- over the intrapackage dependency graph
installedPkgs :: PackageIndex,
-- ^ All the info about the installed packages that the
-- current package depends on (directly or indirectly).
pkgDescrFile :: Maybe FilePath,
-- ^ the filename containing the .cabal file, if available
localPkgDescr :: PackageDescription,
-- ^ The resolved package description, that does not contain
-- any conditionals.
withPrograms :: ProgramConfiguration, -- ^Location and args for all programs
withPackageDB :: PackageDBStack, -- ^What package database to use, global\/user
withVanillaLib:: Bool, -- ^Whether to build normal libs.
withProfLib :: Bool, -- ^Whether to build profiling versions of libs.
withSharedLib :: Bool, -- ^Whether to build shared versions of libs.
withDynExe :: Bool, -- ^Whether to link executables dynamically
withProfExe :: Bool, -- ^Whether to build executables for profiling.
withOptimization :: OptimisationLevel, -- ^Whether to build with optimization (if available).
withGHCiLib :: Bool, -- ^Whether to build libs suitable for use with GHCi.
splitObjs :: Bool, -- ^Use -split-objs with GHC, if available
stripExes :: Bool, -- ^Whether to strip executables during install
progPrefix :: PathTemplate, -- ^Prefix to be prepended to installed executables
progSuffix :: PathTemplate -- ^Suffix to be appended to installed executables
} deriving (Read, Show)
-- | External package dependencies for the package as a whole. This is the
-- union of the individual 'componentPackageDeps', less any internal deps.
externalPackageDeps :: LocalBuildInfo -> [(InstalledPackageId, PackageId)]
externalPackageDeps lbi =
-- TODO: what about non-buildable components?
nub [ (ipkgid, pkgid)
| (_,clbi,_) <- componentsConfigs lbi
, (ipkgid, pkgid) <- componentPackageDeps clbi
, not (internal pkgid) ]
where
-- True if this dependency is an internal one (depends on the library
-- defined in the same package).
internal pkgid = pkgid == packageId (localPkgDescr lbi)
-- | The installed package Id we use for local packages registered in the local
-- package db. This is what is used for intra-package deps between components.
--
inplacePackageId :: PackageId -> InstalledPackageId
inplacePackageId pkgid = InstalledPackageId (display pkgid ++ "-inplace")
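-- For example, a package whose id renders as "foo-0.1" (an illustrative name
-- only) gets the InstalledPackageId "foo-0.1-inplace".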
-- -----------------------------------------------------------------------------
-- Buildable components
data Component = CLib Library
| CExe Executable
| CTest TestSuite
| CBench Benchmark
deriving (Show, Eq, Read)
data ComponentName = CLibName -- currently only a single lib
| CExeName String
| CTestName String
| CBenchName String
deriving (Show, Eq, Ord, Read)
showComponentName :: ComponentName -> String
showComponentName CLibName = "library"
showComponentName (CExeName name) = "executable '" ++ name ++ "'"
showComponentName (CTestName name) = "test suite '" ++ name ++ "'"
showComponentName (CBenchName name) = "benchmark '" ++ name ++ "'"
data ComponentLocalBuildInfo
= LibComponentLocalBuildInfo {
-- | Resolved internal and external package dependencies for this component.
-- The 'BuildInfo' specifies a set of build dependencies that must be
-- satisfied in terms of version ranges. This field fixes those dependencies
-- to the specific versions available on this machine for this compiler.
componentPackageDeps :: [(InstalledPackageId, PackageId)],
componentLibraries :: [LibraryName]
}
| ExeComponentLocalBuildInfo {
componentPackageDeps :: [(InstalledPackageId, PackageId)]
}
| TestComponentLocalBuildInfo {
componentPackageDeps :: [(InstalledPackageId, PackageId)]
}
| BenchComponentLocalBuildInfo {
componentPackageDeps :: [(InstalledPackageId, PackageId)]
}
deriving (Read, Show)
foldComponent :: (Library -> a)
-> (Executable -> a)
-> (TestSuite -> a)
-> (Benchmark -> a)
-> Component
-> a
foldComponent f _ _ _ (CLib lib) = f lib
foldComponent _ f _ _ (CExe exe) = f exe
foldComponent _ _ f _ (CTest tst) = f tst
foldComponent _ _ _ f (CBench bch) = f bch
data LibraryName = LibraryName String
deriving (Read, Show)
componentBuildInfo :: Component -> BuildInfo
componentBuildInfo =
foldComponent libBuildInfo buildInfo testBuildInfo benchmarkBuildInfo
componentName :: Component -> ComponentName
componentName =
foldComponent (const CLibName)
(CExeName . exeName)
(CTestName . testName)
(CBenchName . benchmarkName)
-- | All the components in the package (libs, exes, or test suites).
--
pkgComponents :: PackageDescription -> [Component]
pkgComponents pkg =
[ CLib lib | Just lib <- [library pkg] ]
++ [ CExe exe | exe <- executables pkg ]
++ [ CTest tst | tst <- testSuites pkg ]
++ [ CBench bm | bm <- benchmarks pkg ]
-- | All the components in the package that are buildable and enabled.
-- Thus this excludes non-buildable components and test suites or benchmarks
-- that have been disabled.
--
pkgEnabledComponents :: PackageDescription -> [Component]
pkgEnabledComponents = filter componentEnabled . pkgComponents
componentEnabled :: Component -> Bool
componentEnabled = isNothing . componentDisabledReason
data ComponentDisabledReason = DisabledComponent
| DisabledAllTests
| DisabledAllBenchmarks
componentDisabledReason :: Component -> Maybe ComponentDisabledReason
componentDisabledReason (CLib lib)
| not (buildable (libBuildInfo lib)) = Just DisabledComponent
componentDisabledReason (CExe exe)
| not (buildable (buildInfo exe)) = Just DisabledComponent
componentDisabledReason (CTest tst)
| not (buildable (testBuildInfo tst)) = Just DisabledComponent
| not (testEnabled tst) = Just DisabledAllTests
componentDisabledReason (CBench bm)
| not (buildable (benchmarkBuildInfo bm)) = Just DisabledComponent
| not (benchmarkEnabled bm) = Just DisabledAllBenchmarks
componentDisabledReason _ = Nothing
lookupComponent :: PackageDescription -> ComponentName -> Maybe Component
lookupComponent pkg CLibName =
fmap CLib $ library pkg
lookupComponent pkg (CExeName name) =
fmap CExe $ find ((name ==) . exeName) (executables pkg)
lookupComponent pkg (CTestName name) =
fmap CTest $ find ((name ==) . testName) (testSuites pkg)
lookupComponent pkg (CBenchName name) =
fmap CBench $ find ((name ==) . benchmarkName) (benchmarks pkg)
getComponent :: PackageDescription -> ComponentName -> Component
getComponent pkg cname =
case lookupComponent pkg cname of
Just cpnt -> cpnt
Nothing -> missingComponent
where
missingComponent =
error $ "internal error: the package description contains no "
++ "component corresponding to " ++ show cname
getComponentLocalBuildInfo :: LocalBuildInfo -> ComponentName -> ComponentLocalBuildInfo
getComponentLocalBuildInfo lbi cname =
case [ clbi
| (cname', clbi, _) <- componentsConfigs lbi
, cname == cname' ] of
[clbi] -> clbi
_ -> missingComponent
where
missingComponent =
error $ "internal error: there is no configuration data "
++ "for component " ++ show cname
-- |If the package description has a library section, call the given
-- function with the library build info as argument. Extended version of
-- 'withLib' that also gives corresponding build info.
withLibLBI :: PackageDescription -> LocalBuildInfo
-> (Library -> ComponentLocalBuildInfo -> IO ()) -> IO ()
withLibLBI pkg_descr lbi f =
withLib pkg_descr $ \lib ->
f lib (getComponentLocalBuildInfo lbi CLibName)
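-- A small usage sketch (the hook body below is hypothetical): a custom
-- Setup.hs could print the library's resolved package dependencies with
--
-- > withLibLBI pkg_descr lbi $ \_lib clbi ->
-- >   print (componentPackageDeps clbi)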
-- | Perform the action on each buildable 'Executable' in the package
-- description. Extended version of 'withExe' that also gives corresponding
-- build info.
withExeLBI :: PackageDescription -> LocalBuildInfo
-> (Executable -> ComponentLocalBuildInfo -> IO ()) -> IO ()
withExeLBI pkg_descr lbi f =
withExe pkg_descr $ \exe ->
f exe (getComponentLocalBuildInfo lbi (CExeName (exeName exe)))
withTestLBI :: PackageDescription -> LocalBuildInfo
-> (TestSuite -> ComponentLocalBuildInfo -> IO ()) -> IO ()
withTestLBI pkg_descr lbi f =
withTest pkg_descr $ \test ->
f test (getComponentLocalBuildInfo lbi (CTestName (testName test)))
{-# DEPRECATED withComponentsLBI "Use withAllComponentsInBuildOrder" #-}
withComponentsLBI :: PackageDescription -> LocalBuildInfo
-> (Component -> ComponentLocalBuildInfo -> IO ())
-> IO ()
withComponentsLBI = withAllComponentsInBuildOrder
-- | Perform the action on each buildable 'Library' or 'Executable' (Component)
-- in the PackageDescription, subject to the build order specified by the
-- 'compBuildOrder' field of the given 'LocalBuildInfo'
withAllComponentsInBuildOrder :: PackageDescription -> LocalBuildInfo
-> (Component -> ComponentLocalBuildInfo -> IO ())
-> IO ()
withAllComponentsInBuildOrder pkg lbi f =
sequence_
[ f (getComponent pkg cname) clbi
| (cname, clbi) <- allComponentsInBuildOrder lbi ]
withComponentsInBuildOrder :: PackageDescription -> LocalBuildInfo
-> [ComponentName]
-> (Component -> ComponentLocalBuildInfo -> IO ())
-> IO ()
withComponentsInBuildOrder pkg lbi cnames f =
sequence_
[ f (getComponent pkg cname') clbi
| (cname', clbi) <- componentsInBuildOrder lbi cnames ]
allComponentsInBuildOrder :: LocalBuildInfo
-> [(ComponentName, ComponentLocalBuildInfo)]
allComponentsInBuildOrder lbi =
componentsInBuildOrder lbi
[ cname | (cname, _, _) <- componentsConfigs lbi ]
componentsInBuildOrder :: LocalBuildInfo -> [ComponentName]
-> [(ComponentName, ComponentLocalBuildInfo)]
componentsInBuildOrder lbi cnames =
map ((\(clbi,cname,_) -> (cname,clbi)) . vertexToNode)
. postOrder graph
. map (\cname -> fromMaybe (noSuchComp cname) (keyToVertex cname))
$ cnames
where
(graph, vertexToNode, keyToVertex) =
graphFromEdges (map (\(a,b,c) -> (b,a,c)) (componentsConfigs lbi))
noSuchComp cname = error $ "internal error: componentsInBuildOrder: "
++ "no such component: " ++ show cname
postOrder :: Graph -> [Vertex] -> [Vertex]
postOrder g vs = postorderF (dfs g vs) []
postorderF :: Forest a -> [a] -> [a]
postorderF ts = foldr (.) id $ map postorderT ts
postorderT :: Tree a -> [a] -> [a]
postorderT (Node a ts) = postorderF ts . (a :)
checkComponentsCyclic :: Ord key => [(node, key, [key])]
-> Maybe [(node, key, [key])]
checkComponentsCyclic es =
let (graph, vertexToNode, _) = graphFromEdges es
cycles = [ flatten c | c <- scc graph, isCycle c ]
isCycle (Node v []) = selfCyclic v
isCycle _ = True
selfCyclic v = v `elem` graph ! v
in case cycles of
[] -> Nothing
(c:_) -> Just (map vertexToNode c)
-- -----------------------------------------------------------------------------
-- Wrappers for a couple functions from InstallDirs
-- |See 'InstallDirs.absoluteInstallDirs'
absoluteInstallDirs :: PackageDescription -> LocalBuildInfo -> CopyDest
-> InstallDirs FilePath
absoluteInstallDirs pkg lbi copydest =
InstallDirs.absoluteInstallDirs
(packageId pkg)
(compilerId (compiler lbi))
copydest
(hostPlatform lbi)
(installDirTemplates lbi)
-- |See 'InstallDirs.prefixRelativeInstallDirs'
prefixRelativeInstallDirs :: PackageId -> LocalBuildInfo
-> InstallDirs (Maybe FilePath)
prefixRelativeInstallDirs pkg_descr lbi =
InstallDirs.prefixRelativeInstallDirs
(packageId pkg_descr)
(compilerId (compiler lbi))
(hostPlatform lbi)
(installDirTemplates lbi)
substPathTemplate :: PackageId -> LocalBuildInfo
-> PathTemplate -> FilePath
substPathTemplate pkgid lbi = fromPathTemplate
. ( InstallDirs.substPathTemplate env )
where env = initialPathTemplateEnv
pkgid
(compilerId (compiler lbi))
(hostPlatform lbi)
|
jwiegley/ghc-release
|
libraries/Cabal/cabal/Distribution/Simple/LocalBuildInfo.hs
|
gpl-3.0
| 18,620 | 0 | 13 | 4,629 | 3,274 | 1,811 | 1,463 | 285 | 3 |
{-
(c) The University of Glasgow 2006
(c) The AQUA Project, Glasgow University, 1993-1998
TcRules: Typechecking transformation rules
-}
{-# LANGUAGE ViewPatterns #-}
module TcRules ( tcRules ) where
import HsSyn
import TcRnMonad
import TcSimplify
import TcMType
import TcType
import TcHsType
import TcExpr
import TcEnv
import TcEvidence
import TcUnify( buildImplicationFor )
import Type
import Id
import Var ( EvVar )
import Name
import BasicTypes ( RuleName )
import SrcLoc
import Outputable
import FastString
import Bag
import Data.List( partition )
{-
Note [Typechecking rules]
~~~~~~~~~~~~~~~~~~~~~~~~~
We *infer* the type of the LHS, and use that type to *check* the type of
the RHS. That means that higher-rank rules work reasonably well. Here's
an example (test simplCore/should_compile/rule2.hs) produced by Roman:
foo :: (forall m. m a -> m b) -> m a -> m b
foo f = ...
bar :: (forall m. m a -> m a) -> m a -> m a
bar f = ...
{-# RULES "foo/bar" foo = bar #-}
He wanted the rule to typecheck.
-}
tcRules :: [LRuleDecls Name] -> TcM [LRuleDecls TcId]
tcRules decls = mapM (wrapLocM tcRuleDecls) decls
tcRuleDecls :: RuleDecls Name -> TcM (RuleDecls TcId)
tcRuleDecls (HsRules src decls)
= do { tc_decls <- mapM (wrapLocM tcRule) decls
; return (HsRules src tc_decls) }
tcRule :: RuleDecl Name -> TcM (RuleDecl TcId)
tcRule (HsRule name act hs_bndrs lhs fv_lhs rhs fv_rhs)
= addErrCtxt (ruleCtxt $ snd $ unLoc name) $
do { traceTc "---- Rule ------" (pprFullRuleName name)
-- Note [Typechecking rules]
; (vars, bndr_wanted) <- captureConstraints $
tcRuleBndrs hs_bndrs
-- bndr_wanted constraints can include wildcard hole
-- constraints, which we should not forget about.
-- It may mention the skolem type variables bound by
-- the RULE. c.f. Trac #10072
; let (id_bndrs, tv_bndrs) = partition isId vars
; (lhs', lhs_wanted, rhs', rhs_wanted, rule_ty)
<- tcExtendTyVarEnv tv_bndrs $
tcExtendIdEnv id_bndrs $
do { -- See Note [Solve order for RULES]
((lhs', rule_ty), lhs_wanted) <- captureConstraints (tcInferRho lhs)
; (rhs', rhs_wanted) <- captureConstraints $
tcMonoExpr rhs (mkCheckExpType rule_ty)
; return (lhs', lhs_wanted, rhs', rhs_wanted, rule_ty) }
; traceTc "tcRule 1" (vcat [ pprFullRuleName name
, ppr lhs_wanted
, ppr rhs_wanted ])
; let all_lhs_wanted = bndr_wanted `andWC` lhs_wanted
; lhs_evs <- simplifyRule (snd $ unLoc name)
all_lhs_wanted
rhs_wanted
-- Now figure out what to quantify over
-- c.f. TcSimplify.simplifyInfer
-- We quantify over any tyvars free in *either* the rule
-- *or* the bound variables. The latter is important. Consider
-- ss (x,(y,z)) = (x,z)
-- RULE: forall v. fst (ss v) = fst v
-- The type of the rhs of the rule is just a, but v::(a,(b,c))
--
       -- We also need to get the completely-unconstrained tyvars of
-- the LHS, lest they otherwise get defaulted to Any; but we do that
-- during zonking (see TcHsSyn.zonkRule)
; let tpl_ids = lhs_evs ++ id_bndrs
forall_tkvs = splitDepVarsOfTypes $
rule_ty : map idType tpl_ids
; gbls <- tcGetGlobalTyCoVars -- Even though top level, there might be top-level
-- monomorphic bindings from the MR; test tc111
; qtkvs <- quantifyTyVars gbls forall_tkvs
; traceTc "tcRule" (vcat [ pprFullRuleName name
, ppr forall_tkvs
, ppr qtkvs
, ppr rule_ty
, vcat [ ppr id <+> dcolon <+> ppr (idType id) | id <- tpl_ids ]
])
-- Simplify the RHS constraints
; let skol_info = RuleSkol (snd $ unLoc name)
; (rhs_implic, rhs_binds) <- buildImplicationFor topTcLevel skol_info qtkvs
lhs_evs rhs_wanted
-- For the LHS constraints we must solve the remaining constraints
-- (a) so that we report insoluble ones
-- (b) so that we bind any soluble ones
; (lhs_implic, lhs_binds) <- buildImplicationFor topTcLevel skol_info qtkvs
lhs_evs
(all_lhs_wanted { wc_simple = emptyBag })
-- simplifyRule consumed all simple
-- constraints
; emitImplications (lhs_implic `unionBags` rhs_implic)
; return (HsRule name act
(map (noLoc . RuleBndr . noLoc) (qtkvs ++ tpl_ids))
(mkHsDictLet lhs_binds lhs') fv_lhs
(mkHsDictLet rhs_binds rhs') fv_rhs) }
tcRuleBndrs :: [LRuleBndr Name] -> TcM [Var]
tcRuleBndrs []
= return []
tcRuleBndrs (L _ (RuleBndr (L _ name)) : rule_bndrs)
= do { ty <- newOpenFlexiTyVarTy
; vars <- tcRuleBndrs rule_bndrs
; return (mkLocalId name ty : vars) }
tcRuleBndrs (L _ (RuleBndrSig (L _ name) rn_ty) : rule_bndrs)
-- e.g x :: a->a
-- The tyvar 'a' is brought into scope first, just as if you'd written
-- a::*, x :: a->a
= do { let ctxt = RuleSigCtxt name
; (id_ty, tvs, _) <- tcHsPatSigType ctxt rn_ty
; let id = mkLocalIdOrCoVar name id_ty
-- See Note [Pattern signature binders] in TcHsType
-- The type variables scope over subsequent bindings; yuk
; vars <- tcExtendTyVarEnv tvs $
tcRuleBndrs rule_bndrs
; return (tvs ++ id : vars) }
ruleCtxt :: FastString -> SDoc
ruleCtxt name = text "When checking the transformation rule" <+>
doubleQuotes (ftext name)
{-
*********************************************************************************
* *
Constraint simplification for rules
* *
***********************************************************************************
Note [Simplifying RULE constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Example. Consider the following left-hand side of a rule
f (x == y) (y > z) = ...
If we typecheck this expression we get constraints
d1 :: Ord a, d2 :: Eq a
We do NOT want to "simplify" to the LHS
forall x::a, y::a, z::a, d1::Ord a.
f ((==) (eqFromOrd d1) x y) ((>) d1 y z) = ...
Instead we want
forall x::a, y::a, z::a, d1::Ord a, d2::Eq a.
f ((==) d2 x y) ((>) d1 y z) = ...
Here is another example:
fromIntegral :: (Integral a, Num b) => a -> b
{-# RULES "foo" fromIntegral = id :: Int -> Int #-}
In the rule, a=b=Int, and Num Int is a superclass of Integral Int. But
we *dont* want to get
forall dIntegralInt.
fromIntegral Int Int dIntegralInt (scsel dIntegralInt) = id Int
because the scsel will mess up RULE matching. Instead we want
forall dIntegralInt, dNumInt.
fromIntegral Int Int dIntegralInt dNumInt = id Int
Even if we have
g (x == y) (y == z) = ..
where the two dictionaries are *identical*, we do NOT WANT
forall x::a, y::a, z::a, d1::Eq a
f ((==) d1 x y) ((>) d1 y z) = ...
because that will only match if the dict args are (visibly) equal.
Instead we want to quantify over the dictionaries separately.
In short, simplifyRuleLhs must *only* squash equalities, leaving
all dicts unchanged, with absolutely no sharing.
Also note that we can't solve the LHS constraints in isolation:
Example foo :: Ord a => a -> a
foo_spec :: Int -> Int
{-# RULE "foo" foo = foo_spec #-}
Here, it's the RHS that fixes the type variable
HOWEVER, under a nested implication things are different
Consider
f :: (forall a. Eq a => a->a) -> Bool -> ...
{-# RULES "foo" forall (v::forall b. Eq b => b->b).
f b True = ...
#-}
Here we *must* solve the wanted (Eq a) from the given (Eq a)
resulting from skolemising the argument type of f. So we
revert to SimplCheck when going under an implication.
------------------------ So the plan is this -----------------------
* Step 0: typecheck the LHS and RHS to get constraints from each
* Step 1: Simplify the LHS and RHS constraints all together in one bag
We do this to discover all unification equalities
* Step 2: Zonk the ORIGINAL (unsimplified) lhs constraints, to take
advantage of those unifications, and partition them into the
ones we will quantify over, and the others
See Note [RULE quantification over equalities]
* Step 3: Decide on the type variables to quantify over
* Step 4: Simplify the LHS and RHS constraints separately, using the
quantified constraints as givens
Note [Solve order for RULES]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In step 1 above, we need to be a bit careful about solve order.
Consider
f :: Int -> T Int
type instance T Int = Bool
RULE f 3 = True
From the RULE we get
lhs-constraints: T Int ~ alpha
rhs-constraints: Bool ~ alpha
where 'alpha' is the type that connects the two. If we glom them
all together, and solve the RHS constraint first, we might solve
with alpha := Bool. But then we'd end up with a RULE like
     RULE: f 3 |> (co :: T Int ~ Bool) = True
which is terrible. We want
RULE: f 3 = True |> (sym co :: Bool ~ T Int)
So we are careful to solve the LHS constraints first, and *then* the
RHS constraints. Actually much of this is done by the on-the-fly
constraint solving, so the same order must be observed in
tcRule.
Note [RULE quantification over equalities]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Deciding which equalities to quantify over is tricky:
* We do not want to quantify over insoluble equalities (Int ~ Bool)
(a) because we prefer to report a LHS type error
(b) because if such things end up in 'givens' we get a bogus
"inaccessible code" error
* But we do want to quantify over things like (a ~ F b), where
F is a type function.
The difficulty is that it's hard to tell what is insoluble!
So we see whether the simplification step yielded any type errors,
and if so refrain from quantifying over *any* equalities.
Note [Quantifying over coercion holes]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Equality constraints from the LHS will emit coercion hole Wanteds.
These don't have a name, so we can't quantify over them directly.
Instead, because we really do want to quantify here, invent a new
EvVar for the coercion, fill the hole with the invented EvVar, and
then quantify over the EvVar. Not too tricky -- just some
impedance matching, really.
Note [Simplify *derived* constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
At this stage, we're simplifying constraints only for insolubility
and for unification. Note that all the evidence is quickly discarded.
We make this explicit by working over derived constraints, for which
there is no evidence. Using derived constraints also prevents solved
equalities from being written to coercion holes. If we don't do this,
then RHS coercion-hole constraints get filled in, only to get filled
in *again* when solving the implications emitted from tcRule. That's
terrible, so we avoid the problem by using derived constraints.
-}
simplifyRule :: RuleName
-> WantedConstraints -- Constraints from LHS
-> WantedConstraints -- Constraints from RHS
-> TcM [EvVar] -- LHS evidence variables,
-- See Note [Simplifying RULE constraints] in TcRule
-- NB: This consumes all simple constraints on the LHS, but not
-- any LHS implication constraints.
simplifyRule name lhs_wanted rhs_wanted
= do { -- We allow ourselves to unify environment
-- variables: runTcS runs with topTcLevel
; tc_lvl <- getTcLevel
; insoluble <- runTcSDeriveds $
do { -- First solve the LHS and *then* solve the RHS
-- See Note [Solve order for RULES]
-- See Note [Simplify *derived* constraints]
lhs_resid <- solveWanteds $ toDerivedWC lhs_wanted
; rhs_resid <- solveWanteds $ toDerivedWC rhs_wanted
; return ( insolubleWC tc_lvl lhs_resid ||
insolubleWC tc_lvl rhs_resid ) }
; zonked_lhs_simples <- zonkSimples (wc_simple lhs_wanted)
; ev_ids <- mapMaybeM (quantify_ct insoluble) $
bagToList zonked_lhs_simples
; traceTc "simplifyRule" $
vcat [ text "LHS of rule" <+> doubleQuotes (ftext name)
                 , text "lhs_wanted" <+> ppr lhs_wanted
                 , text "rhs_wanted" <+> ppr rhs_wanted
, text "zonked_lhs_simples" <+> ppr zonked_lhs_simples
, text "ev_ids" <+> ppr ev_ids
]
; return ev_ids }
where
quantify_ct insol -- Note [RULE quantification over equalities]
| insol = quantify_insol
| otherwise = quantify_normal
quantify_insol ct
| isEqPred (ctPred ct)
= return Nothing
| otherwise
= return $ Just $ ctEvId $ ctEvidence ct
quantify_normal (ctEvidence -> CtWanted { ctev_dest = dest
, ctev_pred = pred })
= case dest of -- See Note [Quantifying over coercion holes]
HoleDest hole
| EqPred NomEq t1 t2 <- classifyPredType pred
, t1 `tcEqType` t2
-> do { -- These are trivial. Don't quantify. But do fill in
-- the hole.
; fillCoercionHole hole (mkTcNomReflCo t1)
; return Nothing }
| otherwise
-> do { ev_id <- newEvVar pred
; fillCoercionHole hole (mkTcCoVarCo ev_id)
; return (Just ev_id) }
EvVarDest evar -> return (Just evar)
quantify_normal ct = pprPanic "simplifyRule.quantify_normal" (ppr ct)
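-- Editor's sketch (hedged, commented out, not part of the original module):
-- a minimal source-level program of the shape discussed in
-- Note [Solve order for RULES] above; the module and rule names are made up
-- purely for illustration.
--
--   {-# LANGUAGE TypeFamilies #-}
--   module RuleOrderDemo where
--
--   type family T a
--   type instance T Int = Bool
--
--   f :: Int -> T Int
--   f _ = True
--
--   {-# RULES "f3" f 3 = True #-}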
|
oldmanmike/ghc
|
compiler/typecheck/TcRules.hs
|
bsd-3-clause
| 14,503 | 2 | 17 | 4,474 | 1,640 | 849 | 791 | 134 | 3 |
module TreeParser
( parseTree
)
where
import BasicPrelude ( Maybe(Just, Nothing)
, Either(..)
, error
, head
, ($)
)
import qualified Data.HashMap.Lazy as HM
import Text.InterpolatedString.Perl6 ( qq )
import ChapterFile ( ChapterMap
, parseChapter
)
import HtmlUtil ( Html )
import IndexFile ( parseTitlesAndChapters )
import Config
import Models.Chapter
import Models.Tree
parseTree :: Html -> ChapterMap -> Tree
parseTree indexFile chapterMap = Tree
{ chapter0 = parseChapterZero chapterMap
, titles = parseTitlesAndChapters indexFile (allButChapterZero chapterMap)
}
parseChapterZero :: ChapterMap -> Chapter
parseChapterZero chapterMap =
let path = chapterZeroPathname
in case HM.lookup path chapterMap of
Just html -> case parseChapter html of
Left message -> error [qq| Can't parse chap. zero: $message |]
Right aChapter -> aChapter
Nothing -> error
[qq| Chap. Zero $path not found in {head $ HM.keys chapterMap} |]
allButChapterZero :: ChapterMap -> ChapterMap
allButChapterZero = HM.delete chapterZeroPathname
|
dogweather/nevada-revised-statutes-parser
|
src/TreeParser.hs
|
bsd-3-clause
| 1,600 | 0 | 13 | 723 | 264 | 152 | 112 | -1 | -1 |
-- | Make sure this program runs without leaking memory
import FRP.Sodium
import Control.Applicative
import Control.Exception
import Control.Monad
import System.Timeout
verbose = False
main = do
(et, _) <- sync newEvent
(eChange, pushC) <- sync $ newEvent
out <- sync $ hold 0 eChange
kill <- sync $ listen (value out) $ \x ->
if verbose then print (x :: Int) else (evaluate x >> return ())
timeout 4000000 $ forM_ [0..] $ \i -> do
sync $ pushC i
kill
|
kevintvh/sodium
|
haskell/examples/tests/memory-test-5.hs
|
bsd-3-clause
| 495 | 0 | 14 | 124 | 185 | 94 | 91 | 15 | 2 |
-----------------------------------------------------------------------------
--
-- Object-file symbols (called CLabel for histerical raisins).
--
-- (c) The University of Glasgow 2004-2006
--
-----------------------------------------------------------------------------
module CLabel (
CLabel, -- abstract type
ForeignLabelSource(..),
pprDebugCLabel,
mkClosureLabel,
mkSRTLabel,
mkTopSRTLabel,
mkInfoTableLabel,
mkEntryLabel,
mkSlowEntryLabel,
mkConEntryLabel,
mkStaticConEntryLabel,
mkRednCountsLabel,
mkConInfoTableLabel,
mkStaticInfoTableLabel,
mkLargeSRTLabel,
mkApEntryLabel,
mkApInfoTableLabel,
mkClosureTableLabel,
mkLocalClosureLabel,
mkLocalInfoTableLabel,
mkLocalEntryLabel,
mkLocalConEntryLabel,
mkLocalStaticConEntryLabel,
mkLocalConInfoTableLabel,
mkLocalStaticInfoTableLabel,
mkLocalClosureTableLabel,
mkReturnPtLabel,
mkReturnInfoLabel,
mkAltLabel,
mkDefaultLabel,
mkBitmapLabel,
mkStringLitLabel,
mkAsmTempLabel,
mkPlainModuleInitLabel,
mkSplitMarkerLabel,
mkDirty_MUT_VAR_Label,
mkUpdInfoLabel,
mkBHUpdInfoLabel,
mkIndStaticInfoLabel,
mkMainCapabilityLabel,
mkMAP_FROZEN_infoLabel,
mkMAP_FROZEN0_infoLabel,
mkMAP_DIRTY_infoLabel,
mkSMAP_FROZEN_infoLabel,
mkSMAP_FROZEN0_infoLabel,
mkSMAP_DIRTY_infoLabel,
mkEMPTY_MVAR_infoLabel,
mkArrWords_infoLabel,
mkTopTickyCtrLabel,
mkCAFBlackHoleInfoTableLabel,
mkCAFBlackHoleEntryLabel,
mkRtsPrimOpLabel,
mkRtsSlowFastTickyCtrLabel,
mkSelectorInfoLabel,
mkSelectorEntryLabel,
mkCmmInfoLabel,
mkCmmEntryLabel,
mkCmmRetInfoLabel,
mkCmmRetLabel,
mkCmmCodeLabel,
mkCmmDataLabel,
mkCmmClosureLabel,
mkRtsApFastLabel,
mkPrimCallLabel,
mkForeignLabel,
addLabelSize,
foreignLabelStdcallInfo,
mkCCLabel, mkCCSLabel,
DynamicLinkerLabelInfo(..),
mkDynamicLinkerLabel,
dynamicLinkerLabelInfo,
mkPicBaseLabel,
mkDeadStripPreventer,
mkHpcTicksLabel,
hasCAF,
needsCDecl, isAsmTemp, maybeAsmTemp, externallyVisibleCLabel,
isMathFun,
isCFunctionLabel, isGcPtrLabel, labelDynamic,
-- * Conversions
toClosureLbl, toSlowEntryLbl, toEntryLbl, toInfoLbl, toRednCountsLbl, hasHaskellName,
pprCLabel
) where
import IdInfo
import BasicTypes
import Packages
import Module
import Name
import Unique
import PrimOp
import Config
import CostCentre
import Outputable
import FastString
import DynFlags
import Platform
import UniqSet
-- -----------------------------------------------------------------------------
-- The CLabel type
{-
| CLabel is an abstract type that supports the following operations:
- Pretty printing
- In a C file, does it need to be declared before use? (i.e. is it
guaranteed to be already in scope in the places we need to refer to it?)
- If it needs to be declared, what type (code or data) should it be
declared to have?
- Is it visible outside this object file or not?
- Is it "dynamic" (see details below)
- Eq and Ord, so that we can make sets of CLabels (currently only
used in outputting C as far as I can tell, to avoid generating
more than one declaration for any given label).
- Converting an info table label into an entry label.
-}
data CLabel
= -- | A label related to the definition of a particular Id or Con in a .hs file.
IdLabel
Name
CafInfo
IdLabelInfo -- encodes the suffix of the label
-- | A label from a .cmm file that is not associated with a .hs level Id.
| CmmLabel
PackageId -- what package the label belongs to.
FastString -- identifier giving the prefix of the label
CmmLabelInfo -- encodes the suffix of the label
-- | A label with a baked-in \/ algorithmically generated name that definitely
-- comes from the RTS. The code for it must compile into libHSrts.a \/ libHSrts.so
-- If it doesn't have an algorithmically generated name then use a CmmLabel
-- instead and give it an appropriate PackageId argument.
| RtsLabel
RtsLabelInfo
-- | A 'C' (or otherwise foreign) label.
--
| ForeignLabel
FastString -- name of the imported label.
(Maybe Int) -- possible '@n' suffix for stdcall functions
-- When generating C, the '@n' suffix is omitted, but when
-- generating assembler we must add it to the label.
ForeignLabelSource -- what package the foreign label is in.
FunctionOrData
-- | A family of labels related to a particular case expression.
| CaseLabel
{-# UNPACK #-} !Unique -- Unique says which case expression
CaseLabelInfo
| AsmTempLabel
{-# UNPACK #-} !Unique
| StringLitLabel
{-# UNPACK #-} !Unique
| PlainModuleInitLabel -- without the version & way info
Module
| CC_Label CostCentre
| CCS_Label CostCentreStack
-- | These labels are generated and used inside the NCG only.
-- They are special variants of a label used for dynamic linking
-- see module PositionIndependentCode for details.
| DynamicLinkerLabel DynamicLinkerLabelInfo CLabel
-- | This label is generated and used inside the NCG only.
-- It is used as a base for PIC calculations on some platforms.
-- It takes the form of a local numeric assembler label '1'; and
-- is pretty-printed as 1b, referring to the previous definition
-- of 1: in the assembler source file.
| PicBaseLabel
-- | A label before an info table to prevent excessive dead-stripping on darwin
| DeadStripPreventer CLabel
-- | Per-module table of tick locations
| HpcTicksLabel Module
-- | Static reference table
| SRTLabel !Unique
-- | Label of an StgLargeSRT
| LargeSRTLabel
{-# UNPACK #-} !Unique
-- | A bitmap (function or case return)
| LargeBitmapLabel
{-# UNPACK #-} !Unique
deriving (Eq, Ord)
-- | Record where a foreign label is stored.
data ForeignLabelSource
-- | Label is in a named package
= ForeignLabelInPackage PackageId
-- | Label is in some external, system package that doesn't also
-- contain compiled Haskell code, and is not associated with any .hi files.
-- We don't have to worry about Haskell code being inlined from
-- external packages. It is safe to treat the RTS package as "external".
| ForeignLabelInExternalPackage
        -- | Label is in the package currently being compiled.
-- This is only used for creating hacky tmp labels during code generation.
-- Don't use it in any code that might be inlined across a package boundary
-- (ie, core code) else the information will be wrong relative to the
-- destination module.
| ForeignLabelInThisPackage
deriving (Eq, Ord)
-- | For debugging problems with the CLabel representation.
-- We can't make a Show instance for CLabel because lots of its components don't have instances.
-- The regular Outputable instance only shows the label name, and not its other info.
--
pprDebugCLabel :: CLabel -> SDoc
pprDebugCLabel lbl
= case lbl of
IdLabel{} -> ppr lbl <> (parens $ text "IdLabel")
CmmLabel pkg _name _info
-> ppr lbl <> (parens $ text "CmmLabel" <+> ppr pkg)
RtsLabel{} -> ppr lbl <> (parens $ text "RtsLabel")
ForeignLabel _name mSuffix src funOrData
-> ppr lbl <> (parens $ text "ForeignLabel"
<+> ppr mSuffix
<+> ppr src
<+> ppr funOrData)
        _               -> ppr lbl <> (parens $ text "other CLabel")
data IdLabelInfo
= Closure -- ^ Label for closure
| SRT -- ^ Static reference table (TODO: could be removed
-- with the old code generator, but might be needed
-- when we implement the New SRT Plan)
| InfoTable -- ^ Info tables for closures; always read-only
| Entry -- ^ Entry point
| Slow -- ^ Slow entry point
| LocalInfoTable -- ^ Like InfoTable but not externally visible
| LocalEntry -- ^ Like Entry but not externally visible
| RednCounts -- ^ Label of place to keep Ticky-ticky info for this Id
| ConEntry -- ^ Constructor entry point
| ConInfoTable -- ^ Corresponding info table
| StaticConEntry -- ^ Static constructor entry point
| StaticInfoTable -- ^ Corresponding info table
| ClosureTable -- ^ Table of closures for Enum tycons
deriving (Eq, Ord)
data CaseLabelInfo
= CaseReturnPt
| CaseReturnInfo
| CaseAlt ConTag
| CaseDefault
deriving (Eq, Ord)
data RtsLabelInfo
= RtsSelectorInfoTable Bool{-updatable-} Int{-offset-} -- ^ Selector thunks
| RtsSelectorEntry Bool{-updatable-} Int{-offset-}
| RtsApInfoTable Bool{-updatable-} Int{-arity-} -- ^ AP thunks
| RtsApEntry Bool{-updatable-} Int{-arity-}
| RtsPrimOp PrimOp
| RtsApFast FastString -- ^ _fast versions of generic apply
| RtsSlowFastTickyCtr String
deriving (Eq, Ord)
-- NOTE: Eq on LitString compares the pointer only, so this isn't
-- a real equality.
-- | What type of Cmm label we're dealing with.
-- Determines the suffix appended to the name when a CLabel.CmmLabel
-- is pretty printed.
data CmmLabelInfo
  = CmmInfo                       -- ^ misc rts info tables, suffix _info
| CmmEntry -- ^ misc rts entry points, suffix _entry
| CmmRetInfo -- ^ misc rts ret info tables, suffix _info
| CmmRet -- ^ misc rts return points, suffix _ret
| CmmData -- ^ misc rts data bits, eg CHARLIKE_closure
| CmmCode -- ^ misc rts code
| CmmClosure -- ^ closures eg CHARLIKE_closure
| CmmPrimCall -- ^ a prim call to some hand written Cmm code
deriving (Eq, Ord)
data DynamicLinkerLabelInfo
= CodeStub -- MachO: Lfoo$stub, ELF: foo@plt
| SymbolPtr -- MachO: Lfoo$non_lazy_ptr, Windows: __imp_foo
| GotSymbolPtr -- ELF: foo@got
| GotSymbolOffset -- ELF: foo@gotoff
deriving (Eq, Ord)
-- -----------------------------------------------------------------------------
-- Constructing CLabels
-- -----------------------------------------------------------------------------
-- Constructing IdLabels
-- These are always local:
mkSlowEntryLabel :: Name -> CafInfo -> CLabel
mkSlowEntryLabel name c = IdLabel name c Slow
mkTopSRTLabel :: Unique -> CLabel
mkTopSRTLabel u = SRTLabel u
mkSRTLabel :: Name -> CafInfo -> CLabel
mkRednCountsLabel :: Name -> CLabel
mkSRTLabel name c = IdLabel name c SRT
mkRednCountsLabel name =
IdLabel name NoCafRefs RednCounts -- Note [ticky for LNE]
-- These have local & (possibly) external variants:
mkLocalClosureLabel :: Name -> CafInfo -> CLabel
mkLocalInfoTableLabel :: Name -> CafInfo -> CLabel
mkLocalEntryLabel :: Name -> CafInfo -> CLabel
mkLocalClosureTableLabel :: Name -> CafInfo -> CLabel
mkLocalClosureLabel name c = IdLabel name c Closure
mkLocalInfoTableLabel name c = IdLabel name c LocalInfoTable
mkLocalEntryLabel name c = IdLabel name c LocalEntry
mkLocalClosureTableLabel name c = IdLabel name c ClosureTable
mkClosureLabel :: Name -> CafInfo -> CLabel
mkInfoTableLabel :: Name -> CafInfo -> CLabel
mkEntryLabel :: Name -> CafInfo -> CLabel
mkClosureTableLabel :: Name -> CafInfo -> CLabel
mkLocalConInfoTableLabel :: CafInfo -> Name -> CLabel
mkLocalConEntryLabel :: CafInfo -> Name -> CLabel
mkLocalStaticInfoTableLabel :: CafInfo -> Name -> CLabel
mkLocalStaticConEntryLabel :: CafInfo -> Name -> CLabel
mkConInfoTableLabel :: Name -> CafInfo -> CLabel
mkStaticInfoTableLabel :: Name -> CafInfo -> CLabel
mkClosureLabel name c = IdLabel name c Closure
mkInfoTableLabel name c = IdLabel name c InfoTable
mkEntryLabel name c = IdLabel name c Entry
mkClosureTableLabel name c = IdLabel name c ClosureTable
mkLocalConInfoTableLabel c con = IdLabel con c ConInfoTable
mkLocalConEntryLabel c con = IdLabel con c ConEntry
mkLocalStaticInfoTableLabel c con = IdLabel con c StaticInfoTable
mkLocalStaticConEntryLabel c con = IdLabel con c StaticConEntry
mkConInfoTableLabel name c = IdLabel name c ConInfoTable
mkStaticInfoTableLabel name c = IdLabel name c StaticInfoTable
mkConEntryLabel :: Name -> CafInfo -> CLabel
mkStaticConEntryLabel :: Name -> CafInfo -> CLabel
mkConEntryLabel name c = IdLabel name c ConEntry
mkStaticConEntryLabel name c = IdLabel name c StaticConEntry
-- Constructing Cmm Labels
mkDirty_MUT_VAR_Label, mkSplitMarkerLabel, mkUpdInfoLabel,
mkBHUpdInfoLabel, mkIndStaticInfoLabel, mkMainCapabilityLabel,
mkMAP_FROZEN_infoLabel, mkMAP_FROZEN0_infoLabel, mkMAP_DIRTY_infoLabel,
mkEMPTY_MVAR_infoLabel, mkTopTickyCtrLabel,
mkCAFBlackHoleInfoTableLabel, mkCAFBlackHoleEntryLabel,
mkArrWords_infoLabel, mkSMAP_FROZEN_infoLabel, mkSMAP_FROZEN0_infoLabel,
mkSMAP_DIRTY_infoLabel :: CLabel
mkDirty_MUT_VAR_Label = mkForeignLabel (fsLit "dirty_MUT_VAR") Nothing ForeignLabelInExternalPackage IsFunction
mkSplitMarkerLabel = CmmLabel rtsPackageId (fsLit "__stg_split_marker") CmmCode
mkUpdInfoLabel = CmmLabel rtsPackageId (fsLit "stg_upd_frame") CmmInfo
mkBHUpdInfoLabel = CmmLabel rtsPackageId (fsLit "stg_bh_upd_frame" ) CmmInfo
mkIndStaticInfoLabel = CmmLabel rtsPackageId (fsLit "stg_IND_STATIC") CmmInfo
mkMainCapabilityLabel = CmmLabel rtsPackageId (fsLit "MainCapability") CmmData
mkMAP_FROZEN_infoLabel = CmmLabel rtsPackageId (fsLit "stg_MUT_ARR_PTRS_FROZEN") CmmInfo
mkMAP_FROZEN0_infoLabel = CmmLabel rtsPackageId (fsLit "stg_MUT_ARR_PTRS_FROZEN0") CmmInfo
mkMAP_DIRTY_infoLabel = CmmLabel rtsPackageId (fsLit "stg_MUT_ARR_PTRS_DIRTY") CmmInfo
mkEMPTY_MVAR_infoLabel = CmmLabel rtsPackageId (fsLit "stg_EMPTY_MVAR") CmmInfo
mkTopTickyCtrLabel = CmmLabel rtsPackageId (fsLit "top_ct") CmmData
mkCAFBlackHoleInfoTableLabel = CmmLabel rtsPackageId (fsLit "stg_CAF_BLACKHOLE") CmmInfo
mkCAFBlackHoleEntryLabel = CmmLabel rtsPackageId (fsLit "stg_CAF_BLACKHOLE") CmmEntry
mkArrWords_infoLabel = CmmLabel rtsPackageId (fsLit "stg_ARR_WORDS") CmmInfo
mkSMAP_FROZEN_infoLabel = CmmLabel rtsPackageId (fsLit "stg_SMALL_MUT_ARR_PTRS_FROZEN") CmmInfo
mkSMAP_FROZEN0_infoLabel = CmmLabel rtsPackageId (fsLit "stg_SMALL_MUT_ARR_PTRS_FROZEN0") CmmInfo
mkSMAP_DIRTY_infoLabel = CmmLabel rtsPackageId (fsLit "stg_SMALL_MUT_ARR_PTRS_DIRTY") CmmInfo
-----
mkCmmInfoLabel, mkCmmEntryLabel, mkCmmRetInfoLabel, mkCmmRetLabel,
mkCmmCodeLabel, mkCmmDataLabel, mkCmmClosureLabel
:: PackageId -> FastString -> CLabel
mkCmmInfoLabel pkg str = CmmLabel pkg str CmmInfo
mkCmmEntryLabel pkg str = CmmLabel pkg str CmmEntry
mkCmmRetInfoLabel pkg str = CmmLabel pkg str CmmRetInfo
mkCmmRetLabel pkg str = CmmLabel pkg str CmmRet
mkCmmCodeLabel pkg str = CmmLabel pkg str CmmCode
mkCmmDataLabel pkg str = CmmLabel pkg str CmmData
mkCmmClosureLabel pkg str = CmmLabel pkg str CmmClosure
-- Constructing RtsLabels
mkRtsPrimOpLabel :: PrimOp -> CLabel
mkRtsPrimOpLabel primop = RtsLabel (RtsPrimOp primop)
mkSelectorInfoLabel :: Bool -> Int -> CLabel
mkSelectorEntryLabel :: Bool -> Int -> CLabel
mkSelectorInfoLabel upd off = RtsLabel (RtsSelectorInfoTable upd off)
mkSelectorEntryLabel upd off = RtsLabel (RtsSelectorEntry upd off)
mkApInfoTableLabel :: Bool -> Int -> CLabel
mkApEntryLabel :: Bool -> Int -> CLabel
mkApInfoTableLabel upd off = RtsLabel (RtsApInfoTable upd off)
mkApEntryLabel upd off = RtsLabel (RtsApEntry upd off)
-- A call to some primitive hand written Cmm code
mkPrimCallLabel :: PrimCall -> CLabel
mkPrimCallLabel (PrimCall str pkg)
= CmmLabel pkg str CmmPrimCall
-- Constructing ForeignLabels
-- | Make a foreign label
mkForeignLabel
:: FastString -- name
-> Maybe Int -- size prefix
-> ForeignLabelSource -- what package it's in
-> FunctionOrData
-> CLabel
mkForeignLabel str mb_sz src fod
= ForeignLabel str mb_sz src fod
-- | Update the label size field in a ForeignLabel
addLabelSize :: CLabel -> Int -> CLabel
addLabelSize (ForeignLabel str _ src fod) sz
= ForeignLabel str (Just sz) src fod
addLabelSize label _
= label
-- | Get the label size field from a ForeignLabel
foreignLabelStdcallInfo :: CLabel -> Maybe Int
foreignLabelStdcallInfo (ForeignLabel _ info _ _) = info
foreignLabelStdcallInfo _lbl = Nothing
-- Constructing Large*Labels
mkLargeSRTLabel :: Unique -> CLabel
mkBitmapLabel :: Unique -> CLabel
mkLargeSRTLabel uniq = LargeSRTLabel uniq
mkBitmapLabel uniq = LargeBitmapLabel uniq
-- Constructing CaseLabels
mkReturnPtLabel :: Unique -> CLabel
mkReturnInfoLabel :: Unique -> CLabel
mkAltLabel :: Unique -> ConTag -> CLabel
mkDefaultLabel :: Unique -> CLabel
mkReturnPtLabel uniq = CaseLabel uniq CaseReturnPt
mkReturnInfoLabel uniq = CaseLabel uniq CaseReturnInfo
mkAltLabel uniq tag = CaseLabel uniq (CaseAlt tag)
mkDefaultLabel uniq = CaseLabel uniq CaseDefault
-- Constructing Cost Center Labels
mkCCLabel :: CostCentre -> CLabel
mkCCSLabel :: CostCentreStack -> CLabel
mkCCLabel cc = CC_Label cc
mkCCSLabel ccs = CCS_Label ccs
mkRtsApFastLabel :: FastString -> CLabel
mkRtsApFastLabel str = RtsLabel (RtsApFast str)
mkRtsSlowFastTickyCtrLabel :: String -> CLabel
mkRtsSlowFastTickyCtrLabel pat = RtsLabel (RtsSlowFastTickyCtr pat)
-- Constructing Code Coverage Labels
mkHpcTicksLabel :: Module -> CLabel
mkHpcTicksLabel = HpcTicksLabel
-- Constructing labels used for dynamic linking
mkDynamicLinkerLabel :: DynamicLinkerLabelInfo -> CLabel -> CLabel
mkDynamicLinkerLabel = DynamicLinkerLabel
dynamicLinkerLabelInfo :: CLabel -> Maybe (DynamicLinkerLabelInfo, CLabel)
dynamicLinkerLabelInfo (DynamicLinkerLabel info lbl) = Just (info, lbl)
dynamicLinkerLabelInfo _ = Nothing
mkPicBaseLabel :: CLabel
mkPicBaseLabel = PicBaseLabel
-- Constructing miscellaneous other labels
mkDeadStripPreventer :: CLabel -> CLabel
mkDeadStripPreventer lbl = DeadStripPreventer lbl
mkStringLitLabel :: Unique -> CLabel
mkStringLitLabel = StringLitLabel
mkAsmTempLabel :: Uniquable a => a -> CLabel
mkAsmTempLabel a = AsmTempLabel (getUnique a)
mkPlainModuleInitLabel :: Module -> CLabel
mkPlainModuleInitLabel mod = PlainModuleInitLabel mod
-- -----------------------------------------------------------------------------
-- Convert between different kinds of label
toClosureLbl :: CLabel -> CLabel
toClosureLbl (IdLabel n c _) = IdLabel n c Closure
toClosureLbl (CmmLabel m str _) = CmmLabel m str CmmClosure
toClosureLbl l = pprPanic "toClosureLbl" (ppr l)
toSlowEntryLbl :: CLabel -> CLabel
toSlowEntryLbl (IdLabel n c _) = IdLabel n c Slow
toSlowEntryLbl l = pprPanic "toSlowEntryLbl" (ppr l)
toEntryLbl :: CLabel -> CLabel
toEntryLbl (IdLabel n c LocalInfoTable) = IdLabel n c LocalEntry
toEntryLbl (IdLabel n c ConInfoTable) = IdLabel n c ConEntry
toEntryLbl (IdLabel n c StaticInfoTable) = IdLabel n c StaticConEntry
toEntryLbl (IdLabel n c _) = IdLabel n c Entry
toEntryLbl (CaseLabel n CaseReturnInfo) = CaseLabel n CaseReturnPt
toEntryLbl (CmmLabel m str CmmInfo) = CmmLabel m str CmmEntry
toEntryLbl (CmmLabel m str CmmRetInfo) = CmmLabel m str CmmRet
toEntryLbl l = pprPanic "toEntryLbl" (ppr l)
toInfoLbl :: CLabel -> CLabel
toInfoLbl (IdLabel n c Entry) = IdLabel n c InfoTable
toInfoLbl (IdLabel n c LocalEntry) = IdLabel n c LocalInfoTable
toInfoLbl (IdLabel n c ConEntry) = IdLabel n c ConInfoTable
toInfoLbl (IdLabel n c StaticConEntry) = IdLabel n c StaticInfoTable
toInfoLbl (IdLabel n c _) = IdLabel n c InfoTable
toInfoLbl (CaseLabel n CaseReturnPt) = CaseLabel n CaseReturnInfo
toInfoLbl (CmmLabel m str CmmEntry) = CmmLabel m str CmmInfo
toInfoLbl (CmmLabel m str CmmRet) = CmmLabel m str CmmRetInfo
toInfoLbl l = pprPanic "CLabel.toInfoLbl" (ppr l)
toRednCountsLbl :: CLabel -> Maybe CLabel
toRednCountsLbl = fmap mkRednCountsLabel . hasHaskellName
hasHaskellName :: CLabel -> Maybe Name
hasHaskellName (IdLabel n _ _) = Just n
hasHaskellName _ = Nothing
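-- Editor's note (illustrative only, not part of the original module): for a
-- plain Id label the entry/info conversions above invert each other, e.g.
--
--   toInfoLbl (toEntryLbl (mkInfoTableLabel name cafs)) == mkInfoTableLabel name cafs
--
-- while constructor labels keep their flavour: ConInfoTable converts to
-- ConEntry and back again, not to the plain Entry/InfoTable pair.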
-- -----------------------------------------------------------------------------
-- Does a CLabel's referent itself refer to a CAF?
hasCAF :: CLabel -> Bool
hasCAF (IdLabel _ _ RednCounts) = False -- Note [ticky for LNE]
hasCAF (IdLabel _ MayHaveCafRefs _) = True
hasCAF _ = False
-- Note [ticky for LNE]
-- ~~~~~~~~~~~~~~~~~~~~~
-- Until 14 Feb 2013, every ticky counter was associated with a
-- closure. Thus, ticky labels used IdLabel. It is odd that
-- CmmBuildInfoTables.cafTransfers would consider such a ticky label
-- reason to add the name to the CAFEnv (and thus eventually the SRT),
-- but it was harmless because the ticky was only used if the closure
-- was also.
--
-- Since we now have ticky counters for LNEs, it is no longer the case
-- that every ticky counter has an actual closure. So I changed the
-- generation of ticky counters' CLabels to not result in their
-- associated id ending up in the SRT.
--
-- NB IdLabel is still appropriate for ticky ids (as opposed to
-- CmmLabel) because the LNE's counter is still related to an .hs Id,
-- that Id just isn't for a proper closure.
-- -----------------------------------------------------------------------------
-- Does a CLabel need declaring before use or not?
--
-- See wiki:Commentary/Compiler/Backends/PprC#Prototypes
needsCDecl :: CLabel -> Bool
-- False <=> it's pre-declared; don't bother
-- don't bother declaring Bitmap labels, we always make sure
-- they are defined before use.
needsCDecl (SRTLabel _) = True
needsCDecl (LargeSRTLabel _) = False
needsCDecl (LargeBitmapLabel _) = False
needsCDecl (IdLabel _ _ _) = True
needsCDecl (CaseLabel _ _) = True
needsCDecl (PlainModuleInitLabel _) = True
needsCDecl (StringLitLabel _) = False
needsCDecl (AsmTempLabel _) = False
needsCDecl (RtsLabel _) = False
needsCDecl (CmmLabel pkgId _ _)
-- Prototypes for labels defined in the runtime system are imported
-- into HC files via includes/Stg.h.
| pkgId == rtsPackageId = False
-- For other labels we inline one into the HC file directly.
| otherwise = True
needsCDecl l@(ForeignLabel{}) = not (isMathFun l)
needsCDecl (CC_Label _) = True
needsCDecl (CCS_Label _) = True
needsCDecl (HpcTicksLabel _) = True
needsCDecl (DynamicLinkerLabel {}) = panic "needsCDecl DynamicLinkerLabel"
needsCDecl PicBaseLabel = panic "needsCDecl PicBaseLabel"
needsCDecl (DeadStripPreventer {}) = panic "needsCDecl DeadStripPreventer"
-- | Check whether a label is a local temporary for native code generation
isAsmTemp :: CLabel -> Bool
isAsmTemp (AsmTempLabel _) = True
isAsmTemp _ = False
-- | If a label is a local temporary used for native code generation
-- then return just its unique, otherwise nothing.
maybeAsmTemp :: CLabel -> Maybe Unique
maybeAsmTemp (AsmTempLabel uq) = Just uq
maybeAsmTemp _ = Nothing
-- | Check whether a label corresponds to a C function that has
-- a prototype in a system header somewhere, or is built-in
-- to the C compiler. For these labels we avoid generating our
-- own C prototypes.
isMathFun :: CLabel -> Bool
isMathFun (ForeignLabel fs _ _ _) = fs `elementOfUniqSet` math_funs
isMathFun _ = False
math_funs :: UniqSet FastString
math_funs = mkUniqSet [
-- _ISOC99_SOURCE
(fsLit "acos"), (fsLit "acosf"), (fsLit "acosh"),
(fsLit "acoshf"), (fsLit "acoshl"), (fsLit "acosl"),
(fsLit "asin"), (fsLit "asinf"), (fsLit "asinl"),
(fsLit "asinh"), (fsLit "asinhf"), (fsLit "asinhl"),
(fsLit "atan"), (fsLit "atanf"), (fsLit "atanl"),
(fsLit "atan2"), (fsLit "atan2f"), (fsLit "atan2l"),
(fsLit "atanh"), (fsLit "atanhf"), (fsLit "atanhl"),
(fsLit "cbrt"), (fsLit "cbrtf"), (fsLit "cbrtl"),
(fsLit "ceil"), (fsLit "ceilf"), (fsLit "ceill"),
(fsLit "copysign"), (fsLit "copysignf"), (fsLit "copysignl"),
(fsLit "cos"), (fsLit "cosf"), (fsLit "cosl"),
(fsLit "cosh"), (fsLit "coshf"), (fsLit "coshl"),
(fsLit "erf"), (fsLit "erff"), (fsLit "erfl"),
(fsLit "erfc"), (fsLit "erfcf"), (fsLit "erfcl"),
(fsLit "exp"), (fsLit "expf"), (fsLit "expl"),
(fsLit "exp2"), (fsLit "exp2f"), (fsLit "exp2l"),
(fsLit "expm1"), (fsLit "expm1f"), (fsLit "expm1l"),
(fsLit "fabs"), (fsLit "fabsf"), (fsLit "fabsl"),
(fsLit "fdim"), (fsLit "fdimf"), (fsLit "fdiml"),
(fsLit "floor"), (fsLit "floorf"), (fsLit "floorl"),
(fsLit "fma"), (fsLit "fmaf"), (fsLit "fmal"),
(fsLit "fmax"), (fsLit "fmaxf"), (fsLit "fmaxl"),
(fsLit "fmin"), (fsLit "fminf"), (fsLit "fminl"),
(fsLit "fmod"), (fsLit "fmodf"), (fsLit "fmodl"),
(fsLit "frexp"), (fsLit "frexpf"), (fsLit "frexpl"),
(fsLit "hypot"), (fsLit "hypotf"), (fsLit "hypotl"),
(fsLit "ilogb"), (fsLit "ilogbf"), (fsLit "ilogbl"),
(fsLit "ldexp"), (fsLit "ldexpf"), (fsLit "ldexpl"),
(fsLit "lgamma"), (fsLit "lgammaf"), (fsLit "lgammal"),
(fsLit "llrint"), (fsLit "llrintf"), (fsLit "llrintl"),
(fsLit "llround"), (fsLit "llroundf"), (fsLit "llroundl"),
(fsLit "log"), (fsLit "logf"), (fsLit "logl"),
(fsLit "log10l"), (fsLit "log10"), (fsLit "log10f"),
(fsLit "log1pl"), (fsLit "log1p"), (fsLit "log1pf"),
(fsLit "log2"), (fsLit "log2f"), (fsLit "log2l"),
(fsLit "logb"), (fsLit "logbf"), (fsLit "logbl"),
(fsLit "lrint"), (fsLit "lrintf"), (fsLit "lrintl"),
(fsLit "lround"), (fsLit "lroundf"), (fsLit "lroundl"),
(fsLit "modf"), (fsLit "modff"), (fsLit "modfl"),
(fsLit "nan"), (fsLit "nanf"), (fsLit "nanl"),
(fsLit "nearbyint"), (fsLit "nearbyintf"), (fsLit "nearbyintl"),
(fsLit "nextafter"), (fsLit "nextafterf"), (fsLit "nextafterl"),
(fsLit "nexttoward"), (fsLit "nexttowardf"), (fsLit "nexttowardl"),
(fsLit "pow"), (fsLit "powf"), (fsLit "powl"),
(fsLit "remainder"), (fsLit "remainderf"), (fsLit "remainderl"),
(fsLit "remquo"), (fsLit "remquof"), (fsLit "remquol"),
(fsLit "rint"), (fsLit "rintf"), (fsLit "rintl"),
(fsLit "round"), (fsLit "roundf"), (fsLit "roundl"),
(fsLit "scalbln"), (fsLit "scalblnf"), (fsLit "scalblnl"),
(fsLit "scalbn"), (fsLit "scalbnf"), (fsLit "scalbnl"),
(fsLit "sin"), (fsLit "sinf"), (fsLit "sinl"),
(fsLit "sinh"), (fsLit "sinhf"), (fsLit "sinhl"),
(fsLit "sqrt"), (fsLit "sqrtf"), (fsLit "sqrtl"),
(fsLit "tan"), (fsLit "tanf"), (fsLit "tanl"),
(fsLit "tanh"), (fsLit "tanhf"), (fsLit "tanhl"),
(fsLit "tgamma"), (fsLit "tgammaf"), (fsLit "tgammal"),
(fsLit "trunc"), (fsLit "truncf"), (fsLit "truncl"),
-- ISO C 99 also defines these function-like macros in math.h:
-- fpclassify, isfinite, isinf, isnormal, signbit, isgreater,
-- isgreaterequal, isless, islessequal, islessgreater, isunordered
-- additional symbols from _BSD_SOURCE
(fsLit "drem"), (fsLit "dremf"), (fsLit "dreml"),
(fsLit "finite"), (fsLit "finitef"), (fsLit "finitel"),
(fsLit "gamma"), (fsLit "gammaf"), (fsLit "gammal"),
(fsLit "isinf"), (fsLit "isinff"), (fsLit "isinfl"),
(fsLit "isnan"), (fsLit "isnanf"), (fsLit "isnanl"),
(fsLit "j0"), (fsLit "j0f"), (fsLit "j0l"),
(fsLit "j1"), (fsLit "j1f"), (fsLit "j1l"),
(fsLit "jn"), (fsLit "jnf"), (fsLit "jnl"),
(fsLit "lgamma_r"), (fsLit "lgammaf_r"), (fsLit "lgammal_r"),
(fsLit "scalb"), (fsLit "scalbf"), (fsLit "scalbl"),
(fsLit "significand"), (fsLit "significandf"), (fsLit "significandl"),
(fsLit "y0"), (fsLit "y0f"), (fsLit "y0l"),
(fsLit "y1"), (fsLit "y1f"), (fsLit "y1l"),
(fsLit "yn"), (fsLit "ynf"), (fsLit "ynl")
]
-- -----------------------------------------------------------------------------
-- | Is a CLabel visible outside this object file or not?
-- From the point of view of the code generator, a name is
-- externally visible if it has to be declared as exported
-- in the .o file's symbol table; that is, made non-static.
externallyVisibleCLabel :: CLabel -> Bool -- not C "static"
externallyVisibleCLabel (CaseLabel _ _) = False
externallyVisibleCLabel (StringLitLabel _) = False
externallyVisibleCLabel (AsmTempLabel _) = False
externallyVisibleCLabel (PlainModuleInitLabel _)= True
externallyVisibleCLabel (RtsLabel _) = True
externallyVisibleCLabel (CmmLabel _ _ _) = True
externallyVisibleCLabel (ForeignLabel{}) = True
externallyVisibleCLabel (IdLabel name _ info) = isExternalName name && externallyVisibleIdLabel info
externallyVisibleCLabel (CC_Label _) = True
externallyVisibleCLabel (CCS_Label _) = True
externallyVisibleCLabel (DynamicLinkerLabel _ _) = False
externallyVisibleCLabel (HpcTicksLabel _) = True
externallyVisibleCLabel (LargeBitmapLabel _) = False
externallyVisibleCLabel (SRTLabel _) = False
externallyVisibleCLabel (LargeSRTLabel _) = False
externallyVisibleCLabel (PicBaseLabel {}) = panic "externallyVisibleCLabel PicBaseLabel"
externallyVisibleCLabel (DeadStripPreventer {}) = panic "externallyVisibleCLabel DeadStripPreventer"
externallyVisibleIdLabel :: IdLabelInfo -> Bool
externallyVisibleIdLabel SRT = False
externallyVisibleIdLabel LocalInfoTable = False
externallyVisibleIdLabel LocalEntry = False
externallyVisibleIdLabel _ = True
-- -----------------------------------------------------------------------------
-- Finding the "type" of a CLabel
-- For generating correct types in label declarations:
data CLabelType
= CodeLabel -- Address of some executable instructions
| DataLabel -- Address of data, not a GC ptr
| GcPtrLabel -- Address of a (presumably static) GC object
isCFunctionLabel :: CLabel -> Bool
isCFunctionLabel lbl = case labelType lbl of
CodeLabel -> True
_other -> False
isGcPtrLabel :: CLabel -> Bool
isGcPtrLabel lbl = case labelType lbl of
GcPtrLabel -> True
_other -> False
-- | Work out the general type of data at the address of this label
-- whether it be code, data, or static GC object.
labelType :: CLabel -> CLabelType
labelType (CmmLabel _ _ CmmData) = DataLabel
labelType (CmmLabel _ _ CmmClosure) = GcPtrLabel
labelType (CmmLabel _ _ CmmCode) = CodeLabel
labelType (CmmLabel _ _ CmmInfo) = DataLabel
labelType (CmmLabel _ _ CmmEntry) = CodeLabel
labelType (CmmLabel _ _ CmmRetInfo) = DataLabel
labelType (CmmLabel _ _ CmmRet) = CodeLabel
labelType (RtsLabel (RtsSelectorInfoTable _ _)) = DataLabel
labelType (RtsLabel (RtsApInfoTable _ _)) = DataLabel
labelType (RtsLabel (RtsApFast _)) = CodeLabel
labelType (CaseLabel _ CaseReturnInfo) = DataLabel
labelType (CaseLabel _ _) = CodeLabel
labelType (PlainModuleInitLabel _) = CodeLabel
labelType (SRTLabel _) = DataLabel
labelType (LargeSRTLabel _) = DataLabel
labelType (LargeBitmapLabel _) = DataLabel
labelType (ForeignLabel _ _ _ IsFunction) = CodeLabel
labelType (IdLabel _ _ info) = idInfoLabelType info
labelType _ = DataLabel
idInfoLabelType :: IdLabelInfo -> CLabelType
idInfoLabelType info =
case info of
InfoTable -> DataLabel
LocalInfoTable -> DataLabel
Closure -> GcPtrLabel
ConInfoTable -> DataLabel
StaticInfoTable -> DataLabel
ClosureTable -> DataLabel
RednCounts -> DataLabel
_ -> CodeLabel
-- -----------------------------------------------------------------------------
-- Does a CLabel need dynamic linkage?
-- When referring to data in code, we need to know whether
-- that data resides in a DLL or not. [Win32 only.]
-- @labelDynamic@ returns @True@ if the label is located
-- in a DLL, be it a data reference or not.
labelDynamic :: DynFlags -> PackageId -> Module -> CLabel -> Bool
labelDynamic dflags this_pkg this_mod lbl =
case lbl of
-- is the RTS in a DLL or not?
RtsLabel _ -> not (gopt Opt_Static dflags) && (this_pkg /= rtsPackageId)
IdLabel n _ _ -> isDllName dflags this_pkg this_mod n
-- When compiling in the "dyn" way, each package is to be linked into
-- its own shared library.
CmmLabel pkg _ _
| os == OSMinGW32 ->
not (gopt Opt_Static dflags) && (this_pkg /= pkg)
| otherwise ->
True
ForeignLabel _ _ source _ ->
if os == OSMinGW32
then case source of
-- Foreign label is in some un-named foreign package (or DLL).
ForeignLabelInExternalPackage -> True
-- Foreign label is linked into the same package as the
-- source file currently being compiled.
ForeignLabelInThisPackage -> False
-- Foreign label is in some named package.
-- When compiling in the "dyn" way, each package is to be
-- linked into its own DLL.
ForeignLabelInPackage pkgId ->
(not (gopt Opt_Static dflags)) && (this_pkg /= pkgId)
else -- On Mac OS X and on ELF platforms, false positives are OK,
-- so we claim that all foreign imports come from dynamic
-- libraries
True
PlainModuleInitLabel m -> not (gopt Opt_Static dflags) && this_pkg /= (modulePackageId m)
-- Note that DynamicLinkerLabels do NOT require dynamic linking themselves.
_ -> False
where os = platformOS (targetPlatform dflags)
{-
OLD?: These GRAN functions are needed for spitting out GRAN_FETCH() at the
right places. It is used to detect when the abstractC statement of a
CCodeBlock actually contains the code for a slow entry point. -- HWL
We need at least @Eq@ for @CLabels@, because we want to avoid
duplicate declarations in generating C (see @labelSeenTE@ in
@PprAbsC@).
-}
-----------------------------------------------------------------------------
-- Printing out CLabels.
{-
Convention:
<name>_<type>
where <name> is <Module>_<name> for external names and <unique> for
internal names. <type> is one of the following:
info Info table
srt Static reference table
srtd Static reference table descriptor
entry Entry code (function, closure)
slow Slow entry code (if any)
ret Direct return address
vtbl Vector table
<n>_alt Case alternative (tag n)
dflt Default case alternative
btm Large bitmap vector
closure Static closure
con_entry Dynamic Constructor entry code
con_info Dynamic Constructor info table
static_entry Static Constructor entry code
static_info Static Constructor info table
sel_info Selector info table
sel_entry Selector entry code
cc Cost centre
ccs Cost centre stack
Many of these distinctions are only for documentation reasons. For
example, _ret is only distinguished from _entry to make it easy to
tell whether a code fragment is a return point or a closure/function
entry.
Note [Closure and info labels]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For a function 'foo, we have:
foo_info : Points to the info table describing foo's closure
(and entry code for foo with tables next to code)
foo_closure : Static (no-free-var) closure only:
points to the statically-allocated closure
For a data constructor (such as Just or Nothing), we have:
Just_con_info: Info table for the data constructor itself
the first word of a heap-allocated Just
Just_info: Info table for the *worker function*, an
ordinary Haskell function of arity 1 that
allocates a (Just x) box:
Just = \x -> Just x
Just_closure: The closure for this worker
Nothing_closure: a statically allocated closure for Nothing
Nothing_static_info: info table for Nothing_closure
All these must be exported symbols, EXCEPT Just_info.  We don't need to
export this because in other modules we either have
* A reference to 'Just'; use Just_closure
* A saturated call 'Just x'; allocate using Just_con_info
Not exporting these Just_info labels reduces the number of symbols
somewhat.
-}
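-- Editor's note (illustrative, not part of the original module): the suffixes
-- listed in the convention above are produced by ppIdFlavor further down, so
-- e.g. an IdLabel with flavour ConInfoTable prints as <name>_con_info and one
-- with flavour Closure prints as <name>_closure.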
instance Outputable CLabel where
ppr c = sdocWithPlatform $ \platform -> pprCLabel platform c
pprCLabel :: Platform -> CLabel -> SDoc
pprCLabel platform (AsmTempLabel u)
| cGhcWithNativeCodeGen == "YES"
= getPprStyle $ \ sty ->
if asmStyle sty then
ptext (asmTempLabelPrefix platform) <> pprUnique u
else
char '_' <> pprUnique u
pprCLabel platform (DynamicLinkerLabel info lbl)
| cGhcWithNativeCodeGen == "YES"
= pprDynamicLinkerAsmLabel platform info lbl
pprCLabel _ PicBaseLabel
| cGhcWithNativeCodeGen == "YES"
= ptext (sLit "1b")
pprCLabel platform (DeadStripPreventer lbl)
| cGhcWithNativeCodeGen == "YES"
= pprCLabel platform lbl <> ptext (sLit "_dsp")
pprCLabel platform lbl
= getPprStyle $ \ sty ->
if cGhcWithNativeCodeGen == "YES" && asmStyle sty
then maybe_underscore (pprAsmCLbl platform lbl)
else pprCLbl lbl
maybe_underscore :: SDoc -> SDoc
maybe_underscore doc
| underscorePrefix = pp_cSEP <> doc
| otherwise = doc
pprAsmCLbl :: Platform -> CLabel -> SDoc
pprAsmCLbl platform (ForeignLabel fs (Just sz) _ _)
| platformOS platform == OSMinGW32
-- In asm mode, we need to put the suffix on a stdcall ForeignLabel.
-- (The C compiler does this itself).
= ftext fs <> char '@' <> int sz
pprAsmCLbl _ lbl
= pprCLbl lbl
pprCLbl :: CLabel -> SDoc
pprCLbl (StringLitLabel u)
= pprUnique u <> ptext (sLit "_str")
pprCLbl (CaseLabel u CaseReturnPt)
= hcat [pprUnique u, ptext (sLit "_ret")]
pprCLbl (CaseLabel u CaseReturnInfo)
= hcat [pprUnique u, ptext (sLit "_info")]
pprCLbl (CaseLabel u (CaseAlt tag))
= hcat [pprUnique u, pp_cSEP, int tag, ptext (sLit "_alt")]
pprCLbl (CaseLabel u CaseDefault)
= hcat [pprUnique u, ptext (sLit "_dflt")]
pprCLbl (SRTLabel u)
= pprUnique u <> pp_cSEP <> ptext (sLit "srt")
pprCLbl (LargeSRTLabel u) = pprUnique u <> pp_cSEP <> ptext (sLit "srtd")
pprCLbl (LargeBitmapLabel u) = text "b" <> pprUnique u <> pp_cSEP <> ptext (sLit "btm")
-- Some bitmaps for tuple constructors have a numeric tag (e.g. '7')
-- until that gets resolved we'll just force them to start
-- with a letter so the label will be legal assembly code.
pprCLbl (CmmLabel _ str CmmCode) = ftext str
pprCLbl (CmmLabel _ str CmmData) = ftext str
pprCLbl (CmmLabel _ str CmmPrimCall) = ftext str
pprCLbl (RtsLabel (RtsApFast str)) = ftext str <> ptext (sLit "_fast")
pprCLbl (RtsLabel (RtsSelectorInfoTable upd_reqd offset))
= hcat [ptext (sLit "stg_sel_"), text (show offset),
ptext (if upd_reqd
then (sLit "_upd_info")
else (sLit "_noupd_info"))
]
pprCLbl (RtsLabel (RtsSelectorEntry upd_reqd offset))
= hcat [ptext (sLit "stg_sel_"), text (show offset),
ptext (if upd_reqd
then (sLit "_upd_entry")
else (sLit "_noupd_entry"))
]
pprCLbl (RtsLabel (RtsApInfoTable upd_reqd arity))
= hcat [ptext (sLit "stg_ap_"), text (show arity),
ptext (if upd_reqd
then (sLit "_upd_info")
else (sLit "_noupd_info"))
]
pprCLbl (RtsLabel (RtsApEntry upd_reqd arity))
= hcat [ptext (sLit "stg_ap_"), text (show arity),
ptext (if upd_reqd
then (sLit "_upd_entry")
else (sLit "_noupd_entry"))
]
pprCLbl (CmmLabel _ fs CmmInfo)
= ftext fs <> ptext (sLit "_info")
pprCLbl (CmmLabel _ fs CmmEntry)
= ftext fs <> ptext (sLit "_entry")
pprCLbl (CmmLabel _ fs CmmRetInfo)
= ftext fs <> ptext (sLit "_info")
pprCLbl (CmmLabel _ fs CmmRet)
= ftext fs <> ptext (sLit "_ret")
pprCLbl (CmmLabel _ fs CmmClosure)
= ftext fs <> ptext (sLit "_closure")
pprCLbl (RtsLabel (RtsPrimOp primop))
= ptext (sLit "stg_") <> ppr primop
pprCLbl (RtsLabel (RtsSlowFastTickyCtr pat))
= ptext (sLit "SLOW_CALL_fast_") <> text pat <> ptext (sLit "_ctr")
pprCLbl (ForeignLabel str _ _ _)
= ftext str
pprCLbl (IdLabel name _cafs flavor) = ppr name <> ppIdFlavor flavor
pprCLbl (CC_Label cc) = ppr cc
pprCLbl (CCS_Label ccs) = ppr ccs
pprCLbl (PlainModuleInitLabel mod)
= ptext (sLit "__stginit_") <> ppr mod
pprCLbl (HpcTicksLabel mod)
= ptext (sLit "_hpc_tickboxes_") <> ppr mod <> ptext (sLit "_hpc")
pprCLbl (AsmTempLabel {}) = panic "pprCLbl AsmTempLabel"
pprCLbl (DynamicLinkerLabel {}) = panic "pprCLbl DynamicLinkerLabel"
pprCLbl (PicBaseLabel {}) = panic "pprCLbl PicBaseLabel"
pprCLbl (DeadStripPreventer {}) = panic "pprCLbl DeadStripPreventer"
ppIdFlavor :: IdLabelInfo -> SDoc
ppIdFlavor x = pp_cSEP <>
(case x of
Closure -> ptext (sLit "closure")
SRT -> ptext (sLit "srt")
InfoTable -> ptext (sLit "info")
LocalInfoTable -> ptext (sLit "info")
Entry -> ptext (sLit "entry")
LocalEntry -> ptext (sLit "entry")
Slow -> ptext (sLit "slow")
RednCounts -> ptext (sLit "ct")
ConEntry -> ptext (sLit "con_entry")
ConInfoTable -> ptext (sLit "con_info")
StaticConEntry -> ptext (sLit "static_entry")
StaticInfoTable -> ptext (sLit "static_info")
ClosureTable -> ptext (sLit "closure_tbl")
)
pp_cSEP :: SDoc
pp_cSEP = char '_'
instance Outputable ForeignLabelSource where
ppr fs
= case fs of
ForeignLabelInPackage pkgId -> parens $ text "package: " <> ppr pkgId
ForeignLabelInThisPackage -> parens $ text "this package"
ForeignLabelInExternalPackage -> parens $ text "external package"
-- -----------------------------------------------------------------------------
-- Machine-dependent knowledge about labels.
underscorePrefix :: Bool -- leading underscore on assembler labels?
underscorePrefix = (cLeadingUnderscore == "YES")
asmTempLabelPrefix :: Platform -> LitString -- for formatting labels
asmTempLabelPrefix platform =
if platformOS platform == OSDarwin
then sLit "L"
else sLit ".L"
pprDynamicLinkerAsmLabel :: Platform -> DynamicLinkerLabelInfo -> CLabel -> SDoc
pprDynamicLinkerAsmLabel platform dllInfo lbl
= if platformOS platform == OSDarwin
then if platformArch platform == ArchX86_64
then case dllInfo of
CodeStub -> char 'L' <> ppr lbl <> text "$stub"
SymbolPtr -> char 'L' <> ppr lbl <> text "$non_lazy_ptr"
GotSymbolPtr -> ppr lbl <> text "@GOTPCREL"
GotSymbolOffset -> ppr lbl
else case dllInfo of
CodeStub -> char 'L' <> ppr lbl <> text "$stub"
SymbolPtr -> char 'L' <> ppr lbl <> text "$non_lazy_ptr"
_ -> panic "pprDynamicLinkerAsmLabel"
else if osElfTarget (platformOS platform)
then if platformArch platform == ArchPPC
then case dllInfo of
CodeStub -> ppr lbl <> text "@plt"
SymbolPtr -> text ".LC_" <> ppr lbl
_ -> panic "pprDynamicLinkerAsmLabel"
else if platformArch platform == ArchX86_64
then case dllInfo of
CodeStub -> ppr lbl <> text "@plt"
GotSymbolPtr -> ppr lbl <> text "@gotpcrel"
GotSymbolOffset -> ppr lbl
SymbolPtr -> text ".LC_" <> ppr lbl
else case dllInfo of
CodeStub -> ppr lbl <> text "@plt"
SymbolPtr -> text ".LC_" <> ppr lbl
GotSymbolPtr -> ppr lbl <> text "@got"
GotSymbolOffset -> ppr lbl <> text "@gotoff"
else if platformOS platform == OSMinGW32
then case dllInfo of
SymbolPtr -> text "__imp_" <> ppr lbl
_ -> panic "pprDynamicLinkerAsmLabel"
else panic "pprDynamicLinkerAsmLabel"
|
tibbe/ghc
|
compiler/cmm/CLabel.hs
|
bsd-3-clause
| 47,943 | 0 | 16 | 13,560 | 9,447 | 4,985 | 4,462 | 728 | 21 |
module Init
(
createSugarScape
) where
import Control.Monad.Random
import Data.List
import Agent
import Common
import Discrete
import Model
createSugarScape :: RandomGen g
=> Int
-> Discrete2dDimension
-> Bool
-> Rand g ([(AgentId, SugAgent g)], SugEnvironment)
createSugarScape agentCount dims@(_dx, _dy) rebirthFlag = do
randCoords <- randomCoords (0,0) dims agentCount
let ais = [1..agentCount]
ras <- mapM (\(aid, coord) -> randomAgent (aid, coord) (sugAgent rebirthFlag) id) (zip ais randCoords)
let as = map (\(aid, (a, _)) -> (aid, a)) (zip ais ras)
let occupations = map (\(aid, (_, s)) -> (sugAgCoord s, (aid, s))) (zip ais ras)
initRandomCells <- createCells dims occupations
let cells' = addSugar initRandomCells
let e = createDiscrete2d
dims
neumann
WrapBoth
cells'
return (as, e)
addSugar :: [(Discrete2dCoord, SugEnvCell)]
-> [(Discrete2dCoord, SugEnvCell)]
addSugar cells = cellsWithSugarLevel4
where
cellsWithSugarLevel1 = initSugar cells (circlesSugar 1 [((35, 35), 20.0), ((15, 15), 20.0)])
cellsWithSugarLevel2 = initSugar cellsWithSugarLevel1 (circlesSugar 2 [((35, 35), 15.0), ((15, 15), 15.0)])
cellsWithSugarLevel3 = initSugar cellsWithSugarLevel2 (circlesSugar 3 [((35, 35), 10.0), ((15, 15), 10.0)])
cellsWithSugarLevel4 = initSugar cellsWithSugarLevel3 (circlesSugar 4 [((35, 35), 5.0), ((15, 15), 5.0)])
initSugar :: [(Discrete2dCoord, SugEnvCell)]
-> ((Discrete2dCoord, SugEnvCell) -> Double)
-> [(Discrete2dCoord, SugEnvCell)]
initSugar cs sugarFunc = map initSugarAux cs
where
initSugarAux :: (Discrete2dCoord, SugEnvCell)
-> (Discrete2dCoord, SugEnvCell)
initSugarAux cp@(coord, cell) = (coord, cell')
where
sugar = sugarFunc cp
cell' = cell { sugEnvSugarLevel = sugar
, sugEnvSugarCapacity = sugar }
createCells :: RandomGen g
=> Discrete2dDimension
-> [(Discrete2dCoord, (AgentId, SugAgentState))]
-> Rand g [(Discrete2dCoord, SugEnvCell)]
createCells (maxX, maxY) occupations
= mapM (initRandomCell occupations) coords
where
coords = [ (x, y) | x <- [0..maxX-1], y <- [0..maxY-1] ]
initRandomCell :: RandomGen g
=> [(Discrete2dCoord, (AgentId, SugAgentState))]
-> Discrete2dCoord
-> Rand g (Discrete2dCoord, SugEnvCell)
initRandomCell os coord = do
let mayOccupier = Data.List.find ((==coord) . fst) os
occ = maybe Nothing (\(_, (aid, s)) -> (Just (cellOccupier aid s))) mayOccupier
let c = SugEnvCell {
sugEnvSugarCapacity = 0
, sugEnvSugarLevel = 0
, sugEnvOccupier = occ
}
return (coord, c)
-- NOTE: draws random coords between the given minimum and maximum WITHOUT repeating any coordinate
randomCoords :: RandomGen g
=> Discrete2dDimension
-> Discrete2dDimension
-> Int
-> Rand g [Discrete2dCoord]
randomCoords (minX, minY) (maxX, maxY) n0
| n0 > totalCoords = error "Logical error: can't draw more elements from a finite set than there are elements in the set"
| otherwise = drawRandomCoordsAux n0 []
where
totalCoords = (maxX - minX) * (maxY - minY)
drawRandomCoordsAux :: RandomGen g
=> Int
-> [Discrete2dCoord]
-> Rand g [Discrete2dCoord]
drawRandomCoordsAux 0 acc = return acc
drawRandomCoordsAux n acc = do
randX <- getRandomR (minX, maxX - 1)
randY <- getRandomR (minY, maxY - 1)
let c = (randX, randY)
if c `elem` acc
then drawRandomCoordsAux n acc
else drawRandomCoordsAux (n-1) (c : acc)
_allZeroSugar :: (Discrete2dCoord, SugEnvCell) -> Double
_allZeroSugar _ = 0.0
circlesSugar :: Double
-> [(Discrete2dCoord, Double)]
-> (Discrete2dCoord, SugEnvCell)
-> Double
circlesSugar sugarLevel circles (coord, cell)
| withinRadius = sugarLevel
| otherwise = sugEnvSugarLevel cell -- NOTE: keep the level of before
where
withinRadius = any (\(p, r) -> distanceEuclideanDisc2d p coord <= r) circles
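-- Editor's sketch (hedged, commented out, not part of the original module):
-- the coordinate generator above can be run in isolation with a fixed seed
-- via Control.Monad.Random, which is already imported, e.g.
--
--   exampleCoords :: [Discrete2dCoord]
--   exampleCoords = evalRand (randomCoords (0, 0) (50, 50) 10) (mkStdGen 42)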
|
thalerjonathan/phd
|
thesis/code/concurrent/sugarscape/SugarScapeSTMTVar/src/Init.hs
|
gpl-3.0
| 4,319 | 0 | 16 | 1,197 | 1,394 | 771 | 623 | 95 | 3 |
-- Mutable and immutable byte arrays (identical internally), usable for
-- unboxed arrays, and built from FFI primitives.
module Hugs.ByteArray (
MutableByteArray,
newMutableByteArray, readMutableByteArray, writeMutableByteArray,
ByteArray,
unsafeFreezeMutableByteArray, thawByteArray, readByteArray
) where
import Data.Word ( Word8 )
import Foreign.ForeignPtr ( ForeignPtr,
mallocForeignPtrBytes, withForeignPtr )
import Foreign.Marshal.Utils ( copyBytes )
import Foreign.Ptr ( castPtr )
import Foreign.Storable ( Storable( peekElemOff, pokeElemOff ))
import Hugs.IOExts ( unsafeCoerce )
import Hugs.ST ( ST, unsafeRunST )
-- This implementation is based on the principle that the FFI primitives
-- used, though declared as IO actions, actually only manipulate local
-- state, and thus could have been declared in the strict ST monad:
--
-- mallocForeignPtrBytes :: Int -> ST s (STForeignPtr s a)
-- withForeignPtr :: STForeignPtr s a -> (STPtr s a -> ST s b) -> ST s b
-- copyBytes :: STPtr s a -> STPtr s a -> Int -> ST s ()
-- castPtr :: STPtr s a -> STPtr s b
-- peekElemOff :: Storable a => STPtr s a -> Int -> ST s a
-- pokeElemOff :: Storable a => STPtr s a -> Int -> a -> ST s ()
--
-- (where STPtr s and STForeignPtr s are just like Ptr and ForeignPtr,
-- but confined to the region s)
--
-- Since the strict ST monad has the same representation as the IO monad,
-- we are justified in coercing such actions to the ST monad.
-- This conversion may be safely applied to computations that manipulate
-- only local state, but will give a runtime error if the IO action does
-- any concurrency.
specialIOToST :: IO a -> ST s a
specialIOToST = unsafeCoerce
type BytePtr = ForeignPtr Word8
data MutableByteArray s = MutableByteArray !Int !BytePtr
newMutableByteArray :: Int -> ST s (MutableByteArray s)
newMutableByteArray size = do
fp <- specialIOToST (mallocForeignPtrBytes size)
return (MutableByteArray size fp)
readMutableByteArray :: Storable e => MutableByteArray s -> Int -> ST s e
readMutableByteArray (MutableByteArray _ fp) i =
specialIOToST $ withForeignPtr fp $ \a -> peekElemOff (castPtr a) i
writeMutableByteArray :: Storable e => MutableByteArray s -> Int -> e -> ST s ()
writeMutableByteArray (MutableByteArray _ fp) i e =
specialIOToST $ withForeignPtr fp $ \a -> pokeElemOff (castPtr a) i e
data ByteArray = ByteArray !Int !BytePtr
-- Don't change the MutableByteArray after calling this.
unsafeFreezeMutableByteArray :: MutableByteArray s -> ST s ByteArray
unsafeFreezeMutableByteArray (MutableByteArray size fp) =
return (ByteArray size fp)
thawByteArray :: ByteArray -> ST s (MutableByteArray s)
thawByteArray (ByteArray size fp) = specialIOToST $ do
fp' <- mallocForeignPtrBytes size
withForeignPtr fp $ \p ->
withForeignPtr fp' $ \p' ->
copyBytes p' p size
return (MutableByteArray size fp')
-- This one is safe because ByteArrays are immutable
-- (cf. unsafeFreezeMutableByteArray)
readByteArray :: Storable a => ByteArray -> Int -> a
readByteArray (ByteArray _ fp) i = unsafeRunST $ specialIOToST $
withForeignPtr fp $ \p -> peekElemOff (castPtr p) i
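-- Editor's sketch (hedged, commented out, not part of the original module):
-- a round trip through the API above; 'demoByte' is an illustrative name only.
--
--   demoByte :: Word8
--   demoByte = readByteArray frozen 0
--     where
--       frozen = unsafeRunST $ do
--         arr <- newMutableByteArray 4
--         writeMutableByteArray arr 0 (42 :: Word8)
--         unsafeFreezeMutableByteArray arr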
|
kaoskorobase/mescaline
|
resources/hugs/packages/hugsbase/Hugs/ByteArray.hs
|
gpl-3.0
| 3,153 | 28 | 12 | 561 | 653 | 345 | 308 | 49 | 1 |
module Main (main) where
import Graphics.UI.Gtk
main :: IO ()
main = do
initGUI
-- Create a new window
window <- windowNew
-- Here we connect the "destroy" event to a signal handler.
on window objectDestroy mainQuit
-- Sets the border width of the window.
set window [ containerBorderWidth := 10 ]
hbuttonbox <- hButtonBoxNew
set window [ containerChild := hbuttonbox ]
button1 <- buttonNewWithLabel "One"
button2 <- buttonNewWithLabel "Two"
button3 <- buttonNewWithLabel "Three"
-- Add each button to the button box with the default packing and padding
set hbuttonbox [ containerChild := button
| button <- [button1, button2, button3] ]
-- This sets button3 to be a so called 'secondary child'. When the layout
-- stlye is ButtonboxStart or ButtonboxEnd, the secondary children are
-- grouped seperately from the others. Resize the window to see the effect.
--
-- This is not interesting in itself but shows how to set child attributes.
-- Note that the child attribute 'buttonBoxChildSecondary' takes the
-- button box container child 'button3' as a parameter.
set hbuttonbox [ buttonBoxLayoutStyle := ButtonboxStart
, buttonBoxChildSecondary button3 := True ]
-- The final step is to display everything (the window and all the widgets
-- contained within it)
widgetShowAll window
-- All Gtk+ applications must run the main event loop. Control ends here and
-- waits for an event to occur (like a key press or mouse event).
mainGUI
|
k0001/gtk2hs
|
gtk/demo/buttonbox/ButtonBox.hs
|
gpl-3.0
| 1,536 | 0 | 11 | 334 | 198 | 102 | 96 | 19 | 1 |
module ShouldCompile where
postInlineUnconditionally
= case Just "Hey" of
-- The point of examining occ_info here is that for *non-values*
-- that occur outside a lambda, the call-site inliner won't have
-- a chance (because it doesn't know that the thing
-- only occurs once). The pre-inliner won't have gotten
-- it either, if the thing occurs in more than one branch
-- So the main target is things like
-- let x = f y in
-- case v of
-- True -> case x of ...
-- False -> case x of ...
-- I'm not sure how important this is in practice
Just a -- OneOcc => no work-duplication issue
-> True -- Small enough to dup
-- ToDo: consider discount on smallEnoughToInline if int_cxt is true
--
-- NB: Do NOT inline arbitrarily big things, even if one_br is True
-- Reason: doing so risks exponential behaviour. We simplify a big
-- expression, inline it, and simplify it again. But if the
-- very same thing happens in the big expression, we get
-- exponential cost!
-- PRINCIPLE: when we've already simplified an expression once,
-- make sure that we only inline it if it's reasonably small.
_ -> False
-- Here's an example that we don't handle well:
-- let f = if b then Left (\x.BIG) else Right (\y.BIG)
-- in \y. ....case f of {...} ....
-- Here f is used just once, and duplicating the case work is fine (exprIsCheap).
-- But
-- * We can't preInlineUnconditionally because that would invalidate
-- the occ info for b.
-- * We can't postInlineUnconditionally because the RHS is big, and
-- that risks exponential behaviour
-- * We can't call-site inline, because the rhs is big
-- Alas!
where
x = id
|
sdiehl/ghc
|
testsuite/tests/haddock/should_compile_noflag_haddock/haddockSimplUtilsBug.hs
|
bsd-3-clause
| 2,007 | 0 | 8 | 707 | 71 | 53 | 18 | 7 | 2 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Utility functions on @Core@ syntax
-}
{-# LANGUAGE CPP #-}
module CoreSubst (
-- * Main data types
Subst(..), -- Implementation exported for supercompiler's Renaming.hs only
TvSubstEnv, IdSubstEnv, InScopeSet,
-- ** Substituting into expressions and related types
deShadowBinds, substSpec, substRulesForImportedIds,
substTy, substCo, substExpr, substExprSC, substBind, substBindSC,
substUnfolding, substUnfoldingSC,
lookupIdSubst, lookupTvSubst, lookupCvSubst, substIdOcc,
substTickish, substVarSet,
-- ** Operations on substitutions
emptySubst, mkEmptySubst, mkSubst, mkOpenSubst, substInScope, isEmptySubst,
extendIdSubst, extendIdSubstList, extendTvSubst, extendTvSubstList,
extendCvSubst, extendCvSubstList,
extendSubst, extendSubstList, extendSubstWithVar, zapSubstEnv,
addInScopeSet, extendInScope, extendInScopeList, extendInScopeIds,
isInScope, setInScope,
delBndr, delBndrs,
-- ** Substituting and cloning binders
substBndr, substBndrs, substRecBndrs,
cloneBndr, cloneBndrs, cloneIdBndr, cloneIdBndrs, cloneRecIdBndrs,
-- ** Simple expression optimiser
simpleOptPgm, simpleOptExpr, simpleOptExprWith,
exprIsConApp_maybe, exprIsLiteral_maybe, exprIsLambda_maybe,
) where
#include "HsVersions.h"
import CoreSyn
import CoreFVs
import CoreUtils
import Literal ( Literal(MachStr) )
import qualified Data.ByteString as BS
import OccurAnal( occurAnalyseExpr, occurAnalysePgm )
import qualified Type
import qualified Coercion
-- We are defining local versions
import Type hiding ( substTy, extendTvSubst, extendTvSubstList
, isInScope, substTyVarBndr, cloneTyVarBndr )
import Coercion hiding ( substTy, substCo, extendTvSubst, substTyVarBndr, substCoVarBndr )
import TyCon ( tyConArity )
import DataCon
import PrelNames ( eqBoxDataConKey, coercibleDataConKey, unpackCStringIdKey
, unpackCStringUtf8IdKey )
import OptCoercion ( optCoercion )
import PprCore ( pprCoreBindings, pprRules )
import Module ( Module )
import VarSet
import VarEnv
import Id
import Name ( Name )
import Var
import IdInfo
import Unique
import UniqSupply
import Maybes
import ErrUtils
import DynFlags
import BasicTypes ( isAlwaysActive )
import Util
import Pair
import Outputable
import PprCore () -- Instances
import FastString
import Data.List
import TysWiredIn
{-
************************************************************************
* *
\subsection{Substitutions}
* *
************************************************************************
-}
-- | A substitution environment, containing both 'Id' and 'TyVar' substitutions.
--
-- Some invariants apply to how you use the substitution:
--
-- 1. #in_scope_invariant# The in-scope set contains at least those 'Id's and 'TyVar's that will be in scope /after/
-- applying the substitution to a term. Precisely, the in-scope set must be a superset of the free vars of the
-- substitution range that might possibly clash with locally-bound variables in the thing being substituted in.
--
-- 2. #apply_once# You may apply the substitution only /once/
--
-- There are various ways of setting up the in-scope set such that the first of these invariants hold:
--
-- * Arrange that the in-scope set really is all the things in scope
--
-- * Arrange that it's the free vars of the range of the substitution
--
-- * Make it empty, if you know that all the free vars of the substitution are fresh, and hence can't possibly clash
data Subst
  = Subst InScopeSet -- Variables in scope (both Ids and TyVars) /after/
-- applying the substitution
IdSubstEnv -- Substitution for Ids
TvSubstEnv -- Substitution from TyVars to Types
CvSubstEnv -- Substitution from CoVars to Coercions
-- INVARIANT 1: See #in_scope_invariant#
-- This is what lets us deal with name capture properly
-- It's a hard invariant to check...
--
-- INVARIANT 2: The substitution is apply-once; see Note [Apply once] with
-- Types.TvSubstEnv
--
-- INVARIANT 3: See Note [Extending the Subst]
{-
Note [Extending the Subst]
~~~~~~~~~~~~~~~~~~~~~~~~~~
For a core Subst, which binds Ids as well, we make a different choice for Ids
than we do for TyVars.
For TyVars, see Note [Extending the TvSubst] with Type.TvSubstEnv
For Ids, we have a different invariant
The IdSubstEnv is extended *only* when the Unique on an Id changes
Otherwise, we just extend the InScopeSet
In consequence:
* If the TvSubstEnv and IdSubstEnv are both empty, substExpr would be a
no-op, so substExprSC ("short cut") does nothing.
However, substExpr still goes ahead and substitutes. Reason: we may
want to replace existing Ids with new ones from the in-scope set, to
avoid space leaks.
* In substIdBndr, we extend the IdSubstEnv only when the unique changes
* If the CvSubstEnv, TvSubstEnv and IdSubstEnv are all empty,
substExpr does nothing (Note that the above rule for substIdBndr
maintains this property. If the incoming envts are both empty, then
substituting the type and IdInfo can't change anything.)
* In lookupIdSubst, we *must* look up the Id in the in-scope set, because
it may contain non-trivial changes. Example:
(/\a. \x:a. ...x...) Int
We extend the TvSubstEnv with [a |-> Int]; but x's unique does not change
so we only extend the in-scope set. Then we must look up in the in-scope
set when we find the occurrence of x.
* The requirement to look up the Id in the in-scope set means that we
must NOT take no-op short cut when the IdSubst is empty.
We must still look up every Id in the in-scope set.
* (However, we don't need to do so for expressions found in the IdSubst
itself, whose range is assumed to be correct wrt the in-scope set.)
Why do we make a different choice for the IdSubstEnv than the
TvSubstEnv and CvSubstEnv?
* For Ids, we change the IdInfo all the time (e.g. deleting the
unfolding), and adding it back later, so using the TyVar convention
would entail extending the substitution almost all the time
* The simplifier wants to look up in the in-scope set anyway, in case it
can see a better unfolding from an enclosing case expression
* For TyVars, only coercion variables can possibly change, and they are
easy to spot
-}
-- | An environment for substituting for 'Id's
type IdSubstEnv = IdEnv CoreExpr
----------------------------
isEmptySubst :: Subst -> Bool
isEmptySubst (Subst _ id_env tv_env cv_env)
= isEmptyVarEnv id_env && isEmptyVarEnv tv_env && isEmptyVarEnv cv_env
emptySubst :: Subst
emptySubst = Subst emptyInScopeSet emptyVarEnv emptyVarEnv emptyVarEnv
mkEmptySubst :: InScopeSet -> Subst
mkEmptySubst in_scope = Subst in_scope emptyVarEnv emptyVarEnv emptyVarEnv
mkSubst :: InScopeSet -> TvSubstEnv -> CvSubstEnv -> IdSubstEnv -> Subst
mkSubst in_scope tvs cvs ids = Subst in_scope ids tvs cvs
-- | Find the in-scope set: see "CoreSubst#in_scope_invariant"
substInScope :: Subst -> InScopeSet
substInScope (Subst in_scope _ _ _) = in_scope
-- | Remove all substitutions for 'Id's and 'Var's that might have been built up
-- while preserving the in-scope set
zapSubstEnv :: Subst -> Subst
zapSubstEnv (Subst in_scope _ _ _) = Subst in_scope emptyVarEnv emptyVarEnv emptyVarEnv
-- | Add a substitution for an 'Id' to the 'Subst': you must ensure that the in-scope set is
-- such that the "CoreSubst#in_scope_invariant" is true after extending the substitution like this
extendIdSubst :: Subst -> Id -> CoreExpr -> Subst
-- ToDo: add an ASSERT that fvs(subst-result) is already in the in-scope set
extendIdSubst (Subst in_scope ids tvs cvs) v r = Subst in_scope (extendVarEnv ids v r) tvs cvs
-- | Adds multiple 'Id' substitutions to the 'Subst': see also 'extendIdSubst'
extendIdSubstList :: Subst -> [(Id, CoreExpr)] -> Subst
extendIdSubstList (Subst in_scope ids tvs cvs) prs = Subst in_scope (extendVarEnvList ids prs) tvs cvs
-- | Add a substitution for a 'TyVar' to the 'Subst': you must ensure that the in-scope set is
-- such that the "CoreSubst#in_scope_invariant" is true after extending the substitution like this
extendTvSubst :: Subst -> TyVar -> Type -> Subst
extendTvSubst (Subst in_scope ids tvs cvs) v r = Subst in_scope ids (extendVarEnv tvs v r) cvs
-- | Adds multiple 'TyVar' substitutions to the 'Subst': see also 'extendTvSubst'
extendTvSubstList :: Subst -> [(TyVar,Type)] -> Subst
extendTvSubstList (Subst in_scope ids tvs cvs) prs = Subst in_scope ids (extendVarEnvList tvs prs) cvs
-- | Add a substitution from a 'CoVar' to a 'Coercion' to the 'Subst': you must ensure that the in-scope set is
-- such that the "CoreSubst#in_scope_invariant" is true after extending the substitution like this
extendCvSubst :: Subst -> CoVar -> Coercion -> Subst
extendCvSubst (Subst in_scope ids tvs cvs) v r = Subst in_scope ids tvs (extendVarEnv cvs v r)
-- | Adds multiple 'CoVar' -> 'Coercion' substitutions to the
-- 'Subst': see also 'extendCvSubst'
extendCvSubstList :: Subst -> [(CoVar,Coercion)] -> Subst
extendCvSubstList (Subst in_scope ids tvs cvs) prs = Subst in_scope ids tvs (extendVarEnvList cvs prs)
-- | Add a substitution appropriate to the thing being substituted
-- (whether an expression, type, or coercion). See also
-- 'extendIdSubst', 'extendTvSubst', and 'extendCvSubst'.
extendSubst :: Subst -> Var -> CoreArg -> Subst
extendSubst subst var arg
= case arg of
Type ty -> ASSERT( isTyVar var ) extendTvSubst subst var ty
Coercion co -> ASSERT( isCoVar var ) extendCvSubst subst var co
_ -> ASSERT( isId var ) extendIdSubst subst var arg
extendSubstWithVar :: Subst -> Var -> Var -> Subst
extendSubstWithVar subst v1 v2
| isTyVar v1 = ASSERT( isTyVar v2 ) extendTvSubst subst v1 (mkTyVarTy v2)
| isCoVar v1 = ASSERT( isCoVar v2 ) extendCvSubst subst v1 (mkCoVarCo v2)
| otherwise = ASSERT( isId v2 ) extendIdSubst subst v1 (Var v2)
-- | Add a substitution as appropriate to each of the terms being
-- substituted (whether expressions, types, or coercions). See also
-- 'extendSubst'.
extendSubstList :: Subst -> [(Var,CoreArg)] -> Subst
extendSubstList subst [] = subst
extendSubstList subst ((var,rhs):prs) = extendSubstList (extendSubst subst var rhs) prs
-- | Find the substitution for an 'Id' in the 'Subst'
lookupIdSubst :: SDoc -> Subst -> Id -> CoreExpr
lookupIdSubst doc (Subst in_scope ids _ _) v
| not (isLocalId v) = Var v
| Just e <- lookupVarEnv ids v = e
| Just v' <- lookupInScope in_scope v = Var v'
-- Vital! See Note [Extending the Subst]
| otherwise = WARN( True, ptext (sLit "CoreSubst.lookupIdSubst") <+> doc <+> ppr v
$$ ppr in_scope)
Var v
-- | Find the substitution for a 'TyVar' in the 'Subst'
lookupTvSubst :: Subst -> TyVar -> Type
lookupTvSubst (Subst _ _ tvs _) v = ASSERT( isTyVar v) lookupVarEnv tvs v `orElse` Type.mkTyVarTy v
-- | Find the coercion substitution for a 'CoVar' in the 'Subst'
lookupCvSubst :: Subst -> CoVar -> Coercion
lookupCvSubst (Subst _ _ _ cvs) v = ASSERT( isCoVar v ) lookupVarEnv cvs v `orElse` mkCoVarCo v
delBndr :: Subst -> Var -> Subst
delBndr (Subst in_scope ids tvs cvs) v
| isCoVar v = Subst in_scope ids tvs (delVarEnv cvs v)
| isTyVar v = Subst in_scope ids (delVarEnv tvs v) cvs
| otherwise = Subst in_scope (delVarEnv ids v) tvs cvs
delBndrs :: Subst -> [Var] -> Subst
delBndrs (Subst in_scope ids tvs cvs) vs
= Subst in_scope (delVarEnvList ids vs) (delVarEnvList tvs vs) (delVarEnvList cvs vs)
-- Easiest thing is just delete all from all!
-- | Simultaneously substitute for a bunch of variables
-- No left-right shadowing
-- ie the substitution for (\x \y. e) a1 a2
-- so neither x nor y scope over a1 a2
mkOpenSubst :: InScopeSet -> [(Var,CoreArg)] -> Subst
mkOpenSubst in_scope pairs = Subst in_scope
(mkVarEnv [(id,e) | (id, e) <- pairs, isId id])
(mkVarEnv [(tv,ty) | (tv, Type ty) <- pairs])
(mkVarEnv [(v,co) | (v, Coercion co) <- pairs])
------------------------------
isInScope :: Var -> Subst -> Bool
isInScope v (Subst in_scope _ _ _) = v `elemInScopeSet` in_scope
-- | Add the 'Var's to the in-scope set, but do not remove
-- any existing substitutions for them
addInScopeSet :: Subst -> VarSet -> Subst
addInScopeSet (Subst in_scope ids tvs cvs) vs
= Subst (in_scope `extendInScopeSetSet` vs) ids tvs cvs
-- | Add the 'Var' to the in-scope set and, as a side effect,
-- remove any existing substitutions for it
extendInScope :: Subst -> Var -> Subst
extendInScope (Subst in_scope ids tvs cvs) v
= Subst (in_scope `extendInScopeSet` v)
(ids `delVarEnv` v) (tvs `delVarEnv` v) (cvs `delVarEnv` v)
-- | Add the 'Var's to the in-scope set: see also 'extendInScope'
extendInScopeList :: Subst -> [Var] -> Subst
extendInScopeList (Subst in_scope ids tvs cvs) vs
= Subst (in_scope `extendInScopeSetList` vs)
(ids `delVarEnvList` vs) (tvs `delVarEnvList` vs) (cvs `delVarEnvList` vs)
-- | Optimized version of 'extendInScopeList' that can be used if you are certain
-- all the things being added are 'Id's and hence none are 'TyVar's or 'CoVar's
extendInScopeIds :: Subst -> [Id] -> Subst
extendInScopeIds (Subst in_scope ids tvs cvs) vs
= Subst (in_scope `extendInScopeSetList` vs)
(ids `delVarEnvList` vs) tvs cvs
setInScope :: Subst -> InScopeSet -> Subst
setInScope (Subst _ ids tvs cvs) in_scope = Subst in_scope ids tvs cvs
-- Pretty printing, for debugging only
instance Outputable Subst where
ppr (Subst in_scope ids tvs cvs)
= ptext (sLit "<InScope =") <+> braces (fsep (map ppr (varEnvElts (getInScopeVars in_scope))))
$$ ptext (sLit " IdSubst =") <+> ppr ids
$$ ptext (sLit " TvSubst =") <+> ppr tvs
$$ ptext (sLit " CvSubst =") <+> ppr cvs
<> char '>'
{-
************************************************************************
* *
Substituting expressions
* *
************************************************************************
-}
-- | Apply a substitution to an entire 'CoreExpr'. Remember, you may only
-- apply the substitution /once/: see "CoreSubst#apply_once"
--
-- Do *not* attempt to short-cut in the case of an empty substitution!
-- See Note [Extending the Subst]
substExprSC :: SDoc -> Subst -> CoreExpr -> CoreExpr
substExprSC _doc subst orig_expr
| isEmptySubst subst = orig_expr
| otherwise = -- pprTrace "enter subst-expr" (doc $$ ppr orig_expr) $
subst_expr subst orig_expr
substExpr :: SDoc -> Subst -> CoreExpr -> CoreExpr
substExpr _doc subst orig_expr = subst_expr subst orig_expr
subst_expr :: Subst -> CoreExpr -> CoreExpr
subst_expr subst expr
= go expr
where
go (Var v) = lookupIdSubst (text "subst_expr") subst v
go (Type ty) = Type (substTy subst ty)
go (Coercion co) = Coercion (substCo subst co)
go (Lit lit) = Lit lit
go (App fun arg) = App (go fun) (go arg)
go (Tick tickish e) = mkTick (substTickish subst tickish) (go e)
go (Cast e co) = Cast (go e) (substCo subst co)
-- Do not optimise even identity coercions
-- Reason: substitution applies to the LHS of RULES, and
-- if you "optimise" an identity coercion, you may
-- lose a binder. We optimise the LHS of rules at
-- construction time
go (Lam bndr body) = Lam bndr' (subst_expr subst' body)
where
(subst', bndr') = substBndr subst bndr
go (Let bind body) = Let bind' (subst_expr subst' body)
where
(subst', bind') = substBind subst bind
go (Case scrut bndr ty alts) = Case (go scrut) bndr' (substTy subst ty) (map (go_alt subst') alts)
where
(subst', bndr') = substBndr subst bndr
go_alt subst (con, bndrs, rhs) = (con, bndrs', subst_expr subst' rhs)
where
(subst', bndrs') = substBndrs subst bndrs
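-- A minimal usage sketch (hypothetical helper, not part of GHC): substitute a
-- single Id by an expression throughout a CoreExpr. The caller must supply an
-- in-scope set satisfying the invariant documented on 'Subst' above.
substOneIdExample :: InScopeSet -> Id -> CoreExpr -> CoreExpr -> CoreExpr
substOneIdExample in_scope v rhs body
  = substExpr (text "substOneIdExample") subst body
  where
    subst = extendIdSubst (mkEmptySubst in_scope) v rhs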
-- | Apply a substitution to an entire 'CoreBind', additionally returning an updated 'Subst'
-- that should be used by subsequent substitutions.
substBind, substBindSC :: Subst -> CoreBind -> (Subst, CoreBind)
substBindSC subst bind -- Short-cut if the substitution is empty
| not (isEmptySubst subst)
= substBind subst bind
| otherwise
= case bind of
NonRec bndr rhs -> (subst', NonRec bndr' rhs)
where
(subst', bndr') = substBndr subst bndr
Rec pairs -> (subst', Rec (bndrs' `zip` rhss'))
where
(bndrs, rhss) = unzip pairs
(subst', bndrs') = substRecBndrs subst bndrs
rhss' | isEmptySubst subst' = rhss
| otherwise = map (subst_expr subst') rhss
substBind subst (NonRec bndr rhs) = (subst', NonRec bndr' (subst_expr subst rhs))
where
(subst', bndr') = substBndr subst bndr
substBind subst (Rec pairs) = (subst', Rec (bndrs' `zip` rhss'))
where
(bndrs, rhss) = unzip pairs
(subst', bndrs') = substRecBndrs subst bndrs
rhss' = map (subst_expr subst') rhss
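-- Another hypothetical sketch (not part of GHC): thread the updated
-- substitution returned by 'substBind' through a list of bindings and then
-- into a final body expression, as the comment above prescribes.
substBindsAndBody :: Subst -> [CoreBind] -> CoreExpr -> ([CoreBind], CoreExpr)
substBindsAndBody subst binds body
  = (binds', substExpr (text "substBindsAndBody") subst' body)
  where
    (subst', binds') = mapAccumL substBind subst binds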
-- | De-shadowing the program is sometimes a useful pre-pass. It can be done simply
-- by running over the bindings with an empty substitution, because substitution
-- returns a result that has no-shadowing guaranteed.
--
-- (Actually, within a single /type/ there might still be shadowing, because
-- 'substTy' is a no-op for the empty substitution, but that's probably OK.)
--
-- [Aug 09] This function is not used in GHC at the moment, but seems so
-- short and simple that I'm going to leave it here
deShadowBinds :: CoreProgram -> CoreProgram
deShadowBinds binds = snd (mapAccumL substBind emptySubst binds)
{-
************************************************************************
* *
Substituting binders
* *
************************************************************************
Remember that substBndr and friends are used when doing expression
substitution only. Their only business is substitution, so they
preserve all IdInfo (suitably substituted). For example, we *want* to
preserve occ info in rules.
-}
-- | Substitutes a 'Var' for another one according to the 'Subst' given, returning
-- the result and an updated 'Subst' that should be used by subsequent substitutions.
-- 'IdInfo' is preserved by this process, although it is substituted into appropriately.
substBndr :: Subst -> Var -> (Subst, Var)
substBndr subst bndr
| isTyVar bndr = substTyVarBndr subst bndr
| isCoVar bndr = substCoVarBndr subst bndr
| otherwise = substIdBndr (text "var-bndr") subst subst bndr
-- | Applies 'substBndr' to a number of 'Var's, accumulating a new 'Subst' left-to-right
substBndrs :: Subst -> [Var] -> (Subst, [Var])
substBndrs subst bndrs = mapAccumL substBndr subst bndrs
-- | Substitute in a mutually recursive group of 'Id's
substRecBndrs :: Subst -> [Id] -> (Subst, [Id])
substRecBndrs subst bndrs
= (new_subst, new_bndrs)
where -- Here's the reason we need to pass rec_subst to subst_id
(new_subst, new_bndrs) = mapAccumL (substIdBndr (text "rec-bndr") new_subst) subst bndrs
substIdBndr :: SDoc
-> Subst -- ^ Substitution to use for the IdInfo
-> Subst -> Id -- ^ Substitution and Id to transform
-> (Subst, Id) -- ^ Transformed pair
-- NB: unfolding may be zapped
substIdBndr _doc rec_subst subst@(Subst in_scope env tvs cvs) old_id
= -- pprTrace "substIdBndr" (doc $$ ppr old_id $$ ppr in_scope) $
(Subst (in_scope `extendInScopeSet` new_id) new_env tvs cvs, new_id)
where
id1 = uniqAway in_scope old_id -- id1 is cloned if necessary
id2 | no_type_change = id1
| otherwise = setIdType id1 (substTy subst old_ty)
old_ty = idType old_id
no_type_change = isEmptyVarEnv tvs ||
isEmptyVarSet (Type.tyVarsOfType old_ty)
-- new_id has the right IdInfo
-- The lazy-set is because we're in a loop here, with
-- rec_subst, when dealing with a mutually-recursive group
new_id = maybeModifyIdInfo mb_new_info id2
mb_new_info = substIdInfo rec_subst id2 (idInfo id2)
-- NB: unfolding info may be zapped
-- Extend the substitution if the unique has changed
-- See the notes with substTyVarBndr for the delVarEnv
new_env | no_change = delVarEnv env old_id
| otherwise = extendVarEnv env old_id (Var new_id)
no_change = id1 == old_id
-- See Note [Extending the Subst]
-- it's /not/ necessary to check mb_new_info and no_type_change
{-
Now a variant that unconditionally allocates a new unique.
It also unconditionally zaps the OccInfo.
-}
-- | Very similar to 'substBndr', but it always allocates a new 'Unique' for
-- each variable in its output. It substitutes the IdInfo though.
cloneIdBndr :: Subst -> UniqSupply -> Id -> (Subst, Id)
cloneIdBndr subst us old_id
= clone_id subst subst (old_id, uniqFromSupply us)
-- | Applies 'cloneIdBndr' to a number of 'Id's, accumulating a final
-- substitution from left to right
cloneIdBndrs :: Subst -> UniqSupply -> [Id] -> (Subst, [Id])
cloneIdBndrs subst us ids
= mapAccumL (clone_id subst) subst (ids `zip` uniqsFromSupply us)
cloneBndrs :: Subst -> UniqSupply -> [Var] -> (Subst, [Var])
-- Works for all kinds of variables (typically case binders)
-- not just Ids
cloneBndrs subst us vs
= mapAccumL (\subst (v, u) -> cloneBndr subst u v) subst (vs `zip` uniqsFromSupply us)
cloneBndr :: Subst -> Unique -> Var -> (Subst, Var)
cloneBndr subst uniq v
| isTyVar v = cloneTyVarBndr subst v uniq
| otherwise = clone_id subst subst (v,uniq) -- Works for coercion variables too
-- | Clone a mutually recursive group of 'Id's
cloneRecIdBndrs :: Subst -> UniqSupply -> [Id] -> (Subst, [Id])
cloneRecIdBndrs subst us ids
= (subst', ids')
where
(subst', ids') = mapAccumL (clone_id subst') subst
(ids `zip` uniqsFromSupply us)
-- Just like substIdBndr, except that it always makes a new unique
-- It is given the unique to use
clone_id :: Subst -- Substitution for the IdInfo
-> Subst -> (Id, Unique) -- Substitution and Id to transform
-> (Subst, Id) -- Transformed pair
clone_id rec_subst subst@(Subst in_scope idvs tvs cvs) (old_id, uniq)
= (Subst (in_scope `extendInScopeSet` new_id) new_idvs tvs new_cvs, new_id)
where
id1 = setVarUnique old_id uniq
id2 = substIdType subst id1
new_id = maybeModifyIdInfo (substIdInfo rec_subst id2 (idInfo old_id)) id2
(new_idvs, new_cvs) | isCoVar old_id = (idvs, extendVarEnv cvs old_id (mkCoVarCo new_id))
| otherwise = (extendVarEnv idvs old_id (Var new_id), cvs)
{-
************************************************************************
* *
Types and Coercions
* *
************************************************************************
For types and coercions we just call the corresponding functions in
Type and Coercion, but we have to repackage the substitution, from a
Subst to a TvSubst.
-}
substTyVarBndr :: Subst -> TyVar -> (Subst, TyVar)
substTyVarBndr (Subst in_scope id_env tv_env cv_env) tv
= case Type.substTyVarBndr (TvSubst in_scope tv_env) tv of
(TvSubst in_scope' tv_env', tv')
-> (Subst in_scope' id_env tv_env' cv_env, tv')
cloneTyVarBndr :: Subst -> TyVar -> Unique -> (Subst, TyVar)
cloneTyVarBndr (Subst in_scope id_env tv_env cv_env) tv uniq
= case Type.cloneTyVarBndr (TvSubst in_scope tv_env) tv uniq of
(TvSubst in_scope' tv_env', tv')
-> (Subst in_scope' id_env tv_env' cv_env, tv')
substCoVarBndr :: Subst -> TyVar -> (Subst, TyVar)
substCoVarBndr (Subst in_scope id_env tv_env cv_env) cv
= case Coercion.substCoVarBndr (CvSubst in_scope tv_env cv_env) cv of
(CvSubst in_scope' tv_env' cv_env', cv')
-> (Subst in_scope' id_env tv_env' cv_env', cv')
-- | See 'Type.substTy'
substTy :: Subst -> Type -> Type
substTy subst ty = Type.substTy (getTvSubst subst) ty
getTvSubst :: Subst -> TvSubst
getTvSubst (Subst in_scope _ tenv _) = TvSubst in_scope tenv
getCvSubst :: Subst -> CvSubst
getCvSubst (Subst in_scope _ tenv cenv) = CvSubst in_scope tenv cenv
-- | See 'Coercion.substCo'
substCo :: Subst -> Coercion -> Coercion
substCo subst co = Coercion.substCo (getCvSubst subst) co
{-
************************************************************************
* *
\section{IdInfo substitution}
* *
************************************************************************
-}
substIdType :: Subst -> Id -> Id
substIdType subst@(Subst _ _ tv_env cv_env) id
| (isEmptyVarEnv tv_env && isEmptyVarEnv cv_env) || isEmptyVarSet (Type.tyVarsOfType old_ty) = id
| otherwise = setIdType id (substTy subst old_ty)
-- The tyVarsOfType is cheaper than it looks
-- because we cache the free tyvars of the type
-- in a Note in the id's type itself
where
old_ty = idType id
------------------
-- | Substitute into some 'IdInfo' with regard to the supplied new 'Id'.
substIdInfo :: Subst -> Id -> IdInfo -> Maybe IdInfo
substIdInfo subst new_id info
| nothing_to_do = Nothing
| otherwise = Just (info `setSpecInfo` substSpec subst new_id old_rules
`setUnfoldingInfo` substUnfolding subst old_unf)
where
old_rules = specInfo info
old_unf = unfoldingInfo info
nothing_to_do = isEmptySpecInfo old_rules && isClosedUnfolding old_unf
------------------
-- | Substitutes for the 'Id's within an unfolding
substUnfolding, substUnfoldingSC :: Subst -> Unfolding -> Unfolding
-- Seq'ing on the returned Unfolding is enough to cause
-- all the substitutions to happen completely
substUnfoldingSC subst unf -- Short-cut version
| isEmptySubst subst = unf
| otherwise = substUnfolding subst unf
substUnfolding subst df@(DFunUnfolding { df_bndrs = bndrs, df_args = args })
= df { df_bndrs = bndrs', df_args = args' }
where
(subst',bndrs') = substBndrs subst bndrs
args' = map (substExpr (text "subst-unf:dfun") subst') args
substUnfolding subst unf@(CoreUnfolding { uf_tmpl = tmpl, uf_src = src })
-- Retain an InlineRule!
| not (isStableSource src) -- Zap an unstable unfolding, to save substitution work
= NoUnfolding
| otherwise -- But keep a stable one!
= seqExpr new_tmpl `seq`
unf { uf_tmpl = new_tmpl }
where
new_tmpl = substExpr (text "subst-unf") subst tmpl
substUnfolding _ unf = unf -- NoUnfolding, OtherCon
------------------
substIdOcc :: Subst -> Id -> Id
-- These Ids should not be substituted to non-Ids
substIdOcc subst v = case lookupIdSubst (text "substIdOcc") subst v of
Var v' -> v'
other -> pprPanic "substIdOcc" (vcat [ppr v <+> ppr other, ppr subst])
------------------
-- | Substitutes for the 'Id's within the 'SpecInfo' given the new function 'Id'
substSpec :: Subst -> Id -> SpecInfo -> SpecInfo
substSpec subst new_id (SpecInfo rules rhs_fvs)
= seqSpecInfo new_spec `seq` new_spec
where
subst_ru_fn = const (idName new_id)
new_spec = SpecInfo (map (substRule subst subst_ru_fn) rules)
(substVarSet subst rhs_fvs)
------------------
substRulesForImportedIds :: Subst -> [CoreRule] -> [CoreRule]
substRulesForImportedIds subst rules
= map (substRule subst not_needed) rules
where
not_needed name = pprPanic "substRulesForImportedIds" (ppr name)
------------------
substRule :: Subst -> (Name -> Name) -> CoreRule -> CoreRule
-- The subst_ru_fn argument is applied to substitute the ru_fn field
-- of the rule:
-- - Rules for *imported* Ids never change ru_fn
-- - Rules for *local* Ids are in the IdInfo for that Id,
-- and the ru_fn field is simply replaced by the new name
-- of the Id
substRule _ _ rule@(BuiltinRule {}) = rule
substRule subst subst_ru_fn rule@(Rule { ru_bndrs = bndrs, ru_args = args
, ru_fn = fn_name, ru_rhs = rhs
, ru_local = is_local })
= rule { ru_bndrs = bndrs',
ru_fn = if is_local
then subst_ru_fn fn_name
else fn_name,
ru_args = map (substExpr (text "subst-rule" <+> ppr fn_name) subst') args,
ru_rhs = simpleOptExprWith subst' rhs }
-- Do simple optimisation on RHS, in case substitution lets
-- you improve it. The real simplifier never gets to look at it.
where
(subst', bndrs') = substBndrs subst bndrs
------------------
substVects :: Subst -> [CoreVect] -> [CoreVect]
substVects subst = map (substVect subst)
------------------
substVect :: Subst -> CoreVect -> CoreVect
substVect subst (Vect v rhs) = Vect v (simpleOptExprWith subst rhs)
substVect _subst vd@(NoVect _) = vd
substVect _subst vd@(VectType _ _ _) = vd
substVect _subst vd@(VectClass _) = vd
substVect _subst vd@(VectInst _) = vd
------------------
substVarSet :: Subst -> VarSet -> VarSet
substVarSet subst fvs
= foldVarSet (unionVarSet . subst_fv subst) emptyVarSet fvs
where
subst_fv subst fv
| isId fv = exprFreeVars (lookupIdSubst (text "substVarSet") subst fv)
| otherwise = Type.tyVarsOfType (lookupTvSubst subst fv)
------------------
substTickish :: Subst -> Tickish Id -> Tickish Id
substTickish subst (Breakpoint n ids) = Breakpoint n (map do_one ids)
where do_one = getIdFromTrivialExpr . lookupIdSubst (text "subst_tickish") subst
substTickish _subst other = other
{- Note [substTickish]
A Breakpoint contains a list of Ids. What happens if we ever want to
substitute an expression for one of these Ids?
First, we ensure that we only ever substitute trivial expressions for
these Ids, by marking them as NoOccInfo in the occurrence analyser.
Then, when substituting for the Id, we unwrap any type applications
and abstractions to get back to an Id, with getIdFromTrivialExpr.
Second, we have to ensure that we never try to substitute a literal
for an Id in a breakpoint. We ensure this by never storing an Id with
an unlifted type in a Breakpoint - see Coverage.mkTickish.
Breakpoints can't handle free variables with unlifted types anyway.
-}
{-
Note [Worker inlining]
~~~~~~~~~~~~~~~~~~~~~~
A worker can get substituted away entirely.
- it might be trivial
- it might simply be very small
We do not treat an InlWrapper as an 'occurrence' in the occurrence
analyser, so it's possible that the worker is not even in scope any more.
In all these cases we simply drop the special case, returning to
InlVanilla. The WARN is just so I can see if it happens a lot.
************************************************************************
* *
The Very Simple Optimiser
* *
************************************************************************
Note [Optimise coercion boxes agressively]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The simple expression optimiser needs to deal with Eq# boxes as follows:
1. If the result of optimising the RHS of a non-recursive binding is an
Eq# box, that box is substituted rather than turned into a let, just as
if it were trivial.
let eqv = Eq# co in e ==> e[Eq# co/eqv]
2. If the result of optimising a case scrutinee is a Eq# box and the case
deconstructs it in a trivial way, we evaluate the case then and there.
case Eq# co of Eq# cov -> e ==> e[co/cov]
We do this for two reasons:
1. Bindings/case scrutinisation of this form is often created by the
     evidence-binding mechanism and we need them to be inlined to be able to
desugar RULE LHSes that involve equalities (see e.g. T2291)
2. The test T4356 fails Lint because it creates a coercion between types
of kind (* -> * -> *) and (?? -> ? -> *), which differ. If we do this
   inlining aggressively we can collapse away the intermediate coercion between
these two types and hence pass Lint again. (This is a sort of a hack.)
In fact, our implementation uses slightly liberalised versions of the second rule
so that the optimisations are a bit more generally applicable. Precisely:
2a. We reduce any situation where we can spot a case-of-known-constructor
As a result, the only time we should get residual coercion boxes in the code is
when the type checker generates something like:
\eqv -> let eqv' = Eq# (case eqv of Eq# cov -> ... cov ...)
However, the case of lambda-bound equality evidence is fairly rare, so these two
rules should suffice for solving the rule LHS problem for now.
Annoyingly, we cannot use this modified rule 1a instead of 1:
1a. If we come across a let-bound constructor application with trivial arguments,
add an appropriate unfolding to the let binder. We spot constructor applications
by using exprIsConApp_maybe, so this would actually let rule 2a reduce more.
The reason is that we REALLY NEED coercion boxes to be substituted away. With rule 1a
we wouldn't simplify this expression at all:
let eqv = Eq# co
in foo eqv (bar eqv)
The rule LHS desugarer can't deal with Let at all, so we need to push that box into
the use sites.
-}
simpleOptExpr :: CoreExpr -> CoreExpr
-- Do simple optimisation on an expression
-- The optimisation is very straightforward: just
-- inline non-recursive bindings that are used only once,
-- or where the RHS is trivial
--
-- We also inline bindings that bind an Eq# box:
-- See Note [Optimise coercion boxes agressively].
--
-- The result is NOT guaranteed occurrence-analysed, because
-- in (let x = y in ....) we substitute for x; so y's occ-info
-- may change radically
simpleOptExpr expr
= -- pprTrace "simpleOptExpr" (ppr init_subst $$ ppr expr)
simpleOptExprWith init_subst expr
where
init_subst = mkEmptySubst (mkInScopeSet (exprFreeVars expr))
-- It's potentially important to make a proper in-scope set
-- Consider let x = ..y.. in \y. ...x...
-- Then we should remember to clone y before substituting
-- for x. It's very unlikely to occur, because we probably
-- won't *be* substituting for x if it occurs inside a
-- lambda.
--
-- It's a bit painful to call exprFreeVars, because it makes
-- three passes instead of two (occ-anal, and go)
simpleOptExprWith :: Subst -> InExpr -> OutExpr
simpleOptExprWith subst expr = simple_opt_expr subst (occurAnalyseExpr expr)
----------------------
simpleOptPgm :: DynFlags -> Module
-> CoreProgram -> [CoreRule] -> [CoreVect]
-> IO (CoreProgram, [CoreRule], [CoreVect])
simpleOptPgm dflags this_mod binds rules vects
= do { dumpIfSet_dyn dflags Opt_D_dump_occur_anal "Occurrence analysis"
(pprCoreBindings occ_anald_binds $$ pprRules rules );
; return (reverse binds', substRulesForImportedIds subst' rules, substVects subst' vects) }
where
occ_anald_binds = occurAnalysePgm this_mod (\_ -> False) {- No rules active -}
rules vects emptyVarEnv binds
(subst', binds') = foldl do_one (emptySubst, []) occ_anald_binds
do_one (subst, binds') bind
= case simple_opt_bind subst bind of
(subst', Nothing) -> (subst', binds')
(subst', Just bind') -> (subst', bind':binds')
----------------------
type InVar = Var
type OutVar = Var
type InId = Id
type OutId = Id
type InExpr = CoreExpr
type OutExpr = CoreExpr
-- In these functions the substitution maps InVar -> OutExpr
----------------------
simple_opt_expr :: Subst -> InExpr -> OutExpr
simple_opt_expr subst expr
= go expr
where
in_scope_env = (substInScope subst, simpleUnfoldingFun)
go (Var v) = lookupIdSubst (text "simpleOptExpr") subst v
go (App e1 e2) = simple_app subst e1 [go e2]
go (Type ty) = Type (substTy subst ty)
go (Coercion co) = Coercion (optCoercion (getCvSubst subst) co)
go (Lit lit) = Lit lit
go (Tick tickish e) = mkTick (substTickish subst tickish) (go e)
go (Cast e co) | isReflCo co' = go e
| otherwise = Cast (go e) co'
where
co' = optCoercion (getCvSubst subst) co
go (Let bind body) = case simple_opt_bind subst bind of
(subst', Nothing) -> simple_opt_expr subst' body
(subst', Just bind) -> Let bind (simple_opt_expr subst' body)
go lam@(Lam {}) = go_lam [] subst lam
go (Case e b ty as)
-- See Note [Optimise coercion boxes agressively]
| isDeadBinder b
, Just (con, _tys, es) <- exprIsConApp_maybe in_scope_env e'
, Just (altcon, bs, rhs) <- findAlt (DataAlt con) as
= case altcon of
DEFAULT -> go rhs
_ -> mkLets (catMaybes mb_binds) $ simple_opt_expr subst' rhs
where (subst', mb_binds) = mapAccumL simple_opt_out_bind subst
(zipEqual "simpleOptExpr" bs es)
| otherwise
= Case e' b' (substTy subst ty)
(map (go_alt subst') as)
where
e' = go e
(subst', b') = subst_opt_bndr subst b
----------------------
go_alt subst (con, bndrs, rhs)
= (con, bndrs', simple_opt_expr subst' rhs)
where
(subst', bndrs') = subst_opt_bndrs subst bndrs
----------------------
-- go_lam tries eta reduction
go_lam bs' subst (Lam b e)
= go_lam (b':bs') subst' e
where
(subst', b') = subst_opt_bndr subst b
go_lam bs' subst e
| Just etad_e <- tryEtaReduce bs e' = etad_e
| otherwise = mkLams bs e'
where
bs = reverse bs'
e' = simple_opt_expr subst e
----------------------
-- simple_app collects arguments for beta reduction
simple_app :: Subst -> InExpr -> [OutExpr] -> CoreExpr
simple_app subst (App e1 e2) as
= simple_app subst e1 (simple_opt_expr subst e2 : as)
simple_app subst (Lam b e) (a:as)
= case maybe_substitute subst b a of
Just ext_subst -> simple_app ext_subst e as
Nothing -> Let (NonRec b2 a) (simple_app subst' e as)
where
(subst', b') = subst_opt_bndr subst b
b2 = add_info subst' b b'
simple_app subst (Var v) as
| isCompulsoryUnfolding (idUnfolding v)
-- See Note [Unfold compulsory unfoldings in LHSs]
= simple_app subst (unfoldingTemplate (idUnfolding v)) as
simple_app subst (Tick t e) as
-- Okay to do "(Tick t e) x ==> Tick t (e x)"?
| t `tickishScopesLike` SoftScope
= mkTick t $ simple_app subst e as
simple_app subst e as
= foldl App (simple_opt_expr subst e) as
----------------------
simple_opt_bind,simple_opt_bind' :: Subst -> CoreBind -> (Subst, Maybe CoreBind)
simple_opt_bind s b -- Can add trace stuff here
= simple_opt_bind' s b
simple_opt_bind' subst (Rec prs)
= (subst'', res_bind)
where
res_bind = Just (Rec (reverse rev_prs'))
(subst', bndrs') = subst_opt_bndrs subst (map fst prs)
(subst'', rev_prs') = foldl do_pr (subst', []) (prs `zip` bndrs')
do_pr (subst, prs) ((b,r), b')
= case maybe_substitute subst b r2 of
Just subst' -> (subst', prs)
Nothing -> (subst, (b2,r2):prs)
where
b2 = add_info subst b b'
r2 = simple_opt_expr subst r
simple_opt_bind' subst (NonRec b r)
= simple_opt_out_bind subst (b, simple_opt_expr subst r)
----------------------
simple_opt_out_bind :: Subst -> (InVar, OutExpr) -> (Subst, Maybe CoreBind)
simple_opt_out_bind subst (b, r')
| Just ext_subst <- maybe_substitute subst b r'
= (ext_subst, Nothing)
| otherwise
= (subst', Just (NonRec b2 r'))
where
(subst', b') = subst_opt_bndr subst b
b2 = add_info subst' b b'
----------------------
maybe_substitute :: Subst -> InVar -> OutExpr -> Maybe Subst
-- (maybe_substitute subst in_var out_rhs)
-- either extends subst with (in_var -> out_rhs)
-- or returns Nothing
maybe_substitute subst b r
| Type ty <- r -- let a::* = TYPE ty in <body>
= ASSERT( isTyVar b )
Just (extendTvSubst subst b ty)
| Coercion co <- r
= ASSERT( isCoVar b )
Just (extendCvSubst subst b co)
| isId b -- let x = e in <body>
, not (isCoVar b) -- See Note [Do not inline CoVars unconditionally]
-- in SimplUtils
, safe_to_inline (idOccInfo b)
, isAlwaysActive (idInlineActivation b) -- Note [Inline prag in simplOpt]
, not (isStableUnfolding (idUnfolding b))
, not (isExportedId b)
, not (isUnLiftedType (idType b)) || exprOkForSpeculation r
= Just (extendIdSubst subst b r)
| otherwise
= Nothing
where
-- Unconditionally safe to inline
safe_to_inline :: OccInfo -> Bool
safe_to_inline (IAmALoopBreaker {}) = False
safe_to_inline IAmDead = True
safe_to_inline (OneOcc in_lam one_br _) = (not in_lam && one_br) || trivial
safe_to_inline NoOccInfo = trivial
trivial | exprIsTrivial r = True
| (Var fun, args) <- collectArgs r
, Just dc <- isDataConWorkId_maybe fun
, dc `hasKey` eqBoxDataConKey || dc `hasKey` coercibleDataConKey
, all exprIsTrivial args = True -- See Note [Optimise coercion boxes agressively]
| otherwise = False
----------------------
subst_opt_bndr :: Subst -> InVar -> (Subst, OutVar)
subst_opt_bndr subst bndr
| isTyVar bndr = substTyVarBndr subst bndr
| isCoVar bndr = substCoVarBndr subst bndr
| otherwise = subst_opt_id_bndr subst bndr
subst_opt_id_bndr :: Subst -> InId -> (Subst, OutId)
-- Nuke all fragile IdInfo, unfolding, and RULES;
-- it gets added back later by add_info
-- Rather like SimplEnv.substIdBndr
--
-- It's important to zap fragile OccInfo (which CoreSubst.substIdBndr
-- carefully does not do) because simplOptExpr invalidates it
subst_opt_id_bndr subst@(Subst in_scope id_subst tv_subst cv_subst) old_id
= (Subst new_in_scope new_id_subst tv_subst cv_subst, new_id)
where
id1 = uniqAway in_scope old_id
id2 = setIdType id1 (substTy subst (idType old_id))
new_id = zapFragileIdInfo id2 -- Zaps rules, worker-info, unfolding
-- and fragile OccInfo
new_in_scope = in_scope `extendInScopeSet` new_id
-- Extend the substitution if the unique has changed,
-- or there's some useful occurrence information
-- See the notes with substTyVarBndr for the delSubstEnv
new_id_subst | new_id /= old_id
= extendVarEnv id_subst old_id (Var new_id)
| otherwise
= delVarEnv id_subst old_id
----------------------
subst_opt_bndrs :: Subst -> [InVar] -> (Subst, [OutVar])
subst_opt_bndrs subst bndrs
= mapAccumL subst_opt_bndr subst bndrs
----------------------
add_info :: Subst -> InVar -> OutVar -> OutVar
add_info subst old_bndr new_bndr
| isTyVar old_bndr = new_bndr
| otherwise = maybeModifyIdInfo mb_new_info new_bndr
where mb_new_info = substIdInfo subst new_bndr (idInfo old_bndr)
simpleUnfoldingFun :: IdUnfoldingFun
simpleUnfoldingFun id
| isAlwaysActive (idInlineActivation id) = idUnfolding id
| otherwise = noUnfolding
{-
Note [Inline prag in simplOpt]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If there's an INLINE/NOINLINE pragma that restricts the phase in
which the binder can be inlined, we don't inline here; after all,
we don't know what phase we're in. Here's an example
foo :: Int -> Int -> Int
{-# INLINE foo #-}
foo m n = inner m
where
{-# INLINE [1] inner #-}
inner m = m+n
bar :: Int -> Int
bar n = foo n 1
When inlining 'foo' in 'bar' we want the let-binding for 'inner'
to remain visible until Phase 1
Note [Unfold compulsory unfoldings in LHSs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When the user writes `map coerce = coerce` as a rule, the rule will only ever
match if we replace coerce by its unfolding on the LHS, because that is the
core that the rule matching engine will find. So do that for everything that
has a compulsory unfolding. Also see Note [Desugaring coerce as cast] in Desugar
************************************************************************
* *
exprIsConApp_maybe
* *
************************************************************************
Note [exprIsConApp_maybe]
~~~~~~~~~~~~~~~~~~~~~~~~~
exprIsConApp_maybe is a very important function. There are two principal
uses:
* case e of { .... }
* cls_op e, where cls_op is a class operation
In both cases you want to know if e is of form (C e1..en) where C is
a data constructor.
However e might not *look* as if
Note [exprIsConApp_maybe on literal strings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See #9400.
Conceptually, a string literal "abc" is just ('a':'b':'c':[]), but in Core
they are represented as unpackCString# "abc"# by MkCore.mkStringExprFS, or
unpackCStringUtf8# when the literal contains multi-byte UTF8 characters.
For optimizations we want to be able to treat it as a list, so they can be
decomposed when used in a case-statement. exprIsConApp_maybe detects those
calls to unpackCString# and returns:
Just (':', [Char], ['a', unpackCString# "bc"]).
We need to be careful about UTF8 strings here. ""# contains a ByteString, so
we must parse it back into a FastString to split off the first character.
That way we can treat unpackCString# and unpackCStringUtf8# in the same way.
-}
data ConCont = CC [CoreExpr] Coercion
-- Substitution already applied
-- | Returns @Just (dc, [t1..tk], [x1..xn])@ if the argument expression is
-- a *saturated* constructor application of the form @dc t1..tk x1 .. xn@,
-- where t1..tk are the *universally-quantified* type args of 'dc'
exprIsConApp_maybe :: InScopeEnv -> CoreExpr -> Maybe (DataCon, [Type], [CoreExpr])
exprIsConApp_maybe (in_scope, id_unf) expr
= go (Left in_scope) expr (CC [] (mkReflCo Representational (exprType expr)))
where
go :: Either InScopeSet Subst
-> CoreExpr -> ConCont
-> Maybe (DataCon, [Type], [CoreExpr])
go subst (Tick t expr) cont
| not (tickishIsCode t) = go subst expr cont
go subst (Cast expr co1) (CC [] co2)
= go subst expr (CC [] (subst_co subst co1 `mkTransCo` co2))
go subst (App fun arg) (CC args co)
= go subst fun (CC (subst_arg subst arg : args) co)
go subst (Lam var body) (CC (arg:args) co)
| exprIsTrivial arg -- Don't duplicate stuff!
= go (extend subst var arg) body (CC args co)
go (Right sub) (Var v) cont
= go (Left (substInScope sub))
(lookupIdSubst (text "exprIsConApp" <+> ppr expr) sub v)
cont
go (Left in_scope) (Var fun) cont@(CC args co)
| Just con <- isDataConWorkId_maybe fun
, count isValArg args == idArity fun
= dealWithCoercion co con args
-- Look through dictionary functions; see Note [Unfolding DFuns]
| DFunUnfolding { df_bndrs = bndrs, df_con = con, df_args = dfun_args } <- unfolding
, bndrs `equalLength` args -- See Note [DFun arity check]
, let subst = mkOpenSubst in_scope (bndrs `zip` args)
= dealWithCoercion co con (map (substExpr (text "exprIsConApp1") subst) dfun_args)
-- Look through unfoldings, but only arity-zero one;
-- if arity > 0 we are effectively inlining a function call,
-- and that is the business of callSiteInline.
-- In practice, without this test, most of the "hits" were
-- CPR'd workers getting inlined back into their wrappers,
| idArity fun == 0
, Just rhs <- expandUnfolding_maybe unfolding
, let in_scope' = extendInScopeSetSet in_scope (exprFreeVars rhs)
= go (Left in_scope') rhs cont
| (fun `hasKey` unpackCStringIdKey)
|| (fun `hasKey` unpackCStringUtf8IdKey)
, [Lit (MachStr str)] <- args
= dealWithStringLiteral fun str co
where
unfolding = id_unf fun
go _ _ _ = Nothing
----------------------------
-- Operations on the (Either InScopeSet CoreSubst)
-- The Left case is wildly dominant
subst_co (Left {}) co = co
subst_co (Right s) co = CoreSubst.substCo s co
subst_arg (Left {}) e = e
subst_arg (Right s) e = substExpr (text "exprIsConApp2") s e
extend (Left in_scope) v e = Right (extendSubst (mkEmptySubst in_scope) v e)
extend (Right s) v e = Right (extendSubst s v e)
-- See Note [exprIsConApp_maybe on literal strings]
dealWithStringLiteral :: Var -> BS.ByteString -> Coercion
-> Maybe (DataCon, [Type], [CoreExpr])
-- This is not possible with user-supplied empty literals, as
-- MkCore.mkStringExprFS turns those into [] automatically; but we handle it
-- just in case something else in GHC generates a string literal directly.
dealWithStringLiteral _ str co
| BS.null str
= dealWithCoercion co nilDataCon [Type charTy]
dealWithStringLiteral fun str co
= let strFS = mkFastStringByteString str
char = mkConApp charDataCon [mkCharLit (headFS strFS)]
charTail = fastStringToByteString (tailFS strFS)
        -- In singleton strings, just add [] instead of unpackCString# ""#.
rest = if BS.null charTail
then mkConApp nilDataCon [Type charTy]
else App (Var fun)
(Lit (MachStr charTail))
in dealWithCoercion co consDataCon [Type charTy, char, rest]
dealWithCoercion :: Coercion -> DataCon -> [CoreExpr]
-> Maybe (DataCon, [Type], [CoreExpr])
dealWithCoercion co dc dc_args
| isReflCo co
, let (univ_ty_args, rest_args) = splitAtList (dataConUnivTyVars dc) dc_args
= Just (dc, stripTypeArgs univ_ty_args, rest_args)
| Pair _from_ty to_ty <- coercionKind co
, Just (to_tc, to_tc_arg_tys) <- splitTyConApp_maybe to_ty
, to_tc == dataConTyCon dc
-- These two tests can fail; we might see
-- (C x y) `cast` (g :: T a ~ S [a]),
-- where S is a type function. In fact, exprIsConApp
-- will probably not be called in such circumstances,
        -- but there's nothing wrong with it
= -- Here we do the KPush reduction rule as described in the FC paper
-- The transformation applies iff we have
-- (C e1 ... en) `cast` co
-- where co :: (T t1 .. tn) ~ to_ty
-- The left-hand one must be a T, because exprIsConApp returned True
-- but the right-hand one might not be. (Though it usually will.)
let
tc_arity = tyConArity to_tc
dc_univ_tyvars = dataConUnivTyVars dc
dc_ex_tyvars = dataConExTyVars dc
arg_tys = dataConRepArgTys dc
non_univ_args = dropList dc_univ_tyvars dc_args
(ex_args, val_args) = splitAtList dc_ex_tyvars non_univ_args
-- Make the "theta" from Fig 3 of the paper
gammas = decomposeCo tc_arity co
theta_subst = liftCoSubstWith Representational
(dc_univ_tyvars ++ dc_ex_tyvars)
-- existentials are at role N
(gammas ++ map (mkReflCo Nominal)
(stripTypeArgs ex_args))
-- Cast the value arguments (which include dictionaries)
new_val_args = zipWith cast_arg arg_tys val_args
cast_arg arg_ty arg = mkCast arg (theta_subst arg_ty)
dump_doc = vcat [ppr dc, ppr dc_univ_tyvars, ppr dc_ex_tyvars,
ppr arg_tys, ppr dc_args,
ppr ex_args, ppr val_args, ppr co, ppr _from_ty, ppr to_ty, ppr to_tc ]
in
ASSERT2( eqType _from_ty (mkTyConApp to_tc (stripTypeArgs $ takeList dc_univ_tyvars dc_args))
, dump_doc )
ASSERT2( all isTypeArg ex_args, dump_doc )
ASSERT2( equalLength val_args arg_tys, dump_doc )
Just (dc, to_tc_arg_tys, ex_args ++ new_val_args)
| otherwise
= Nothing
stripTypeArgs :: [CoreExpr] -> [Type]
stripTypeArgs args = ASSERT2( all isTypeArg args, ppr args )
[ty | Type ty <- args]
-- We really do want isTypeArg here, not isTyCoArg!
{-
Note [Unfolding DFuns]
~~~~~~~~~~~~~~~~~~~~~~
DFuns look like
df :: forall a b. (Eq a, Eq b) -> Eq (a,b)
df a b d_a d_b = MkEqD (a,b) ($c1 a b d_a d_b)
($c2 a b d_a d_b)
So to split it up we just need to apply the ops $c1, $c2 etc
to the very same args as the dfun. It takes a little more work
to compute the type arguments to the dictionary constructor.
Note [DFun arity check]
~~~~~~~~~~~~~~~~~~~~~~~
Here we check that the total number of supplied arguments (including
type args) matches what the dfun is expecting. This may be *less*
than the ordinary arity of the dfun: see Note [DFun unfoldings] in CoreSyn
-}
exprIsLiteral_maybe :: InScopeEnv -> CoreExpr -> Maybe Literal
-- Same deal as exprIsConApp_maybe, but much simpler
-- Nevertheless we do need to look through unfoldings for
-- Integer literals, which are vigorously hoisted to top level
-- and not subsequently inlined
exprIsLiteral_maybe env@(_, id_unf) e
= case e of
Lit l -> Just l
Tick _ e' -> exprIsLiteral_maybe env e' -- dubious?
Var v | Just rhs <- expandUnfolding_maybe (id_unf v)
-> exprIsLiteral_maybe env rhs
_ -> Nothing
{-
Note [exprIsLambda_maybe]
~~~~~~~~~~~~~~~~~~~~~~~~~~
exprIsLambda_maybe will, given an expression `e`, try to turn it into the form
`Lam v e'` (returned as `Just (v,e')`). Besides using lambdas, it looks through
casts (using the Push rule), and it unfolds function calls if the unfolding
has a greater arity than the number of arguments present.
Currently, it is used in Rules.match, and is required to make
"map coerce = coerce" match.
-}
exprIsLambda_maybe :: InScopeEnv -> CoreExpr
-> Maybe (Var, CoreExpr,[Tickish Id])
-- See Note [exprIsLambda_maybe]
-- The simple case: It is a lambda already
exprIsLambda_maybe _ (Lam x e)
= Just (x, e, [])
-- Still straightforward: Ticks that we can float out of the way
exprIsLambda_maybe (in_scope_set, id_unf) (Tick t e)
| tickishFloatable t
, Just (x, e, ts) <- exprIsLambda_maybe (in_scope_set, id_unf) e
= Just (x, e, t:ts)
-- Also possible: A casted lambda. Push the coercion inside
exprIsLambda_maybe (in_scope_set, id_unf) (Cast casted_e co)
| Just (x, e,ts) <- exprIsLambda_maybe (in_scope_set, id_unf) casted_e
-- Only do value lambdas.
-- this implies that x is not in scope in gamma (makes this code simpler)
, not (isTyVar x) && not (isCoVar x)
, ASSERT( not $ x `elemVarSet` tyCoVarsOfCo co) True
, Just (x',e') <- pushCoercionIntoLambda in_scope_set x e co
, let res = Just (x',e',ts)
    = --pprTrace "exprIsLambda_maybe:Cast" (vcat [ppr casted_e,ppr co,ppr res])
res
-- Another attempt: See if we find a partial unfolding
exprIsLambda_maybe (in_scope_set, id_unf) e
| (Var f, as, ts) <- collectArgsTicks tickishFloatable e
, idArity f > length (filter isValArg as)
-- Make sure there is hope to get a lambda
, Just rhs <- expandUnfolding_maybe (id_unf f)
-- Optimize, for beta-reduction
, let e' = simpleOptExprWith (mkEmptySubst in_scope_set) (rhs `mkApps` as)
-- Recurse, because of possible casts
, Just (x', e'', ts') <- exprIsLambda_maybe (in_scope_set, id_unf) e'
, let res = Just (x', e'', ts++ts')
= -- pprTrace "exprIsLambda_maybe:Unfold" (vcat [ppr e, ppr (x',e'')])
res
exprIsLambda_maybe _ _e
= -- pprTrace "exprIsLambda_maybe:Fail" (vcat [ppr _e])
Nothing
pushCoercionIntoLambda
:: InScopeSet -> Var -> CoreExpr -> Coercion -> Maybe (Var, CoreExpr)
pushCoercionIntoLambda in_scope x e co
-- This implements the Push rule from the paper on coercions
-- Compare with simplCast in Simplify
| ASSERT(not (isTyVar x) && not (isCoVar x)) True
, Pair s1s2 t1t2 <- coercionKind co
, Just (_s1,_s2) <- splitFunTy_maybe s1s2
, Just (t1,_t2) <- splitFunTy_maybe t1t2
= let [co1, co2] = decomposeCo 2 co
-- Should we optimize the coercions here?
-- Otherwise they might not match too well
x' = x `setIdType` t1
in_scope' = in_scope `extendInScopeSet` x'
subst = extendIdSubst (mkEmptySubst in_scope')
x
(mkCast (Var x') co1)
in Just (x', subst_expr subst e `mkCast` co2)
| otherwise
= pprTrace "exprIsLambda_maybe: Unexpected lambda in case" (ppr (Lam x e))
Nothing
|
green-haskell/ghc
|
compiler/coreSyn/CoreSubst.hs
|
bsd-3-clause
| 58,634 | 1 | 22 | 14,950 | 11,358 | 5,971 | 5,387 | 664 | 13 |
{-# LANGUAGE OverloadedStrings #-}
module Document.Tests.Parser where
-- Modules
import Document.Tests.Suite as S
import Test.UnitTest
import Logic.Theories.FunctionTheory
import UnitB.Expr
import UnitB.QuasiQuote
-- Library
import Control.Lens
import Control.Monad.State
import Data.Map
test_case :: TestCase
test_case = test_cases
"Parser"
[ poCase "test0: verify m0" case0 result0
, poCase "test1: verify m1" case1 result1
, poCase "test2: verify m2" case2 result2
, aCase "test3: spontaneous events" case3 result3
]
path0 :: FilePath
path0 = [path|Tests/parser/main.tex|]
case0 :: IO POResult
case0 = verify path0 0
result0 :: String
result0 = unlines
[ " o m0/INIT/INV/m0:inv0"
, " o m0/INIT/INV/m0:inv1"
, " o m0/input/INV/m0:inv0"
, " o m0/input/INV/m0:inv1"
, " o m0/input/SAF/m0:saf0"
, "passed 5 / 5"
]
case1 :: IO POResult
case1 = verify path0 1
result1 :: String
result1 = unlines
[ " o m1/INIT/INV/m1:inv0"
, " o m1/INIT/INV/m1:inv1"
, " o m1/INIT/INV/m1:inv2"
, " o m1/INIT/INV/m1:inv3"
, " o m1/INV/WD"
, " o m1/LIVE/m1:prog2/ensure/TR/choose/EN"
, " o m1/LIVE/m1:prog2/ensure/TR/choose/NEG"
, " o m1/LIVE/m1:prog4/ensure/SAF/WD/lhs"
, " o m1/LIVE/m1:prog4/ensure/TR/WD"
, " o m1/LIVE/m1:prog4/ensure/TR/parse/EN"
, " o m1/LIVE/m1:prog4/ensure/TR/parse/NEG"
, " o m1/LIVE/m1:prog5/ensure/SAF/WD/lhs"
, " o m1/LIVE/m1:prog5/ensure/TR/WD"
, " o m1/LIVE/m1:prog5/ensure/TR/fail/EN"
, " o m1/LIVE/m1:prog5/ensure/TR/fail/NEG"
, " o m1/choose/INV/m1:inv0"
, " o m1/choose/INV/m1:inv1"
, " o m1/choose/INV/m1:inv2"
, " o m1/choose/INV/m1:inv3"
, " o m1/choose/SAF/LIVE/m1:prog2/ensure"
, " o m1/choose/SAF/LIVE/m1:prog4/ensure"
, " o m1/choose/SAF/LIVE/m1:prog5/ensure"
, " o m1/choose/SCH"
, " o m1/choose/SCH/vv"
, " o m1/fail/INV/m1:inv0"
, " o m1/fail/INV/m1:inv1"
, " o m1/fail/INV/m1:inv2"
, " o m1/fail/INV/m1:inv3"
, " o m1/fail/SAF/LIVE/m1:prog2/ensure"
, " o m1/fail/SAF/LIVE/m1:prog4/ensure"
, " o m1/fail/SAF/LIVE/m1:prog5/ensure"
, " o m1/fail/WD/C_SCH"
, " o m1/input/INV/m1:inv0"
, " o m1/input/INV/m1:inv1"
, " o m1/input/INV/m1:inv2"
, " o m1/input/INV/m1:inv3"
, " o m1/input/SAF/LIVE/m1:prog2/ensure"
, " o m1/input/SAF/LIVE/m1:prog4/ensure"
, " o m1/input/SAF/LIVE/m1:prog5/ensure"
, " o m1/m1:prog0/LIVE/trading/lhs"
, " o m1/m1:prog0/LIVE/trading/rhs"
, " o m1/m1:prog1/LIVE/transitivity/lhs"
, " o m1/m1:prog1/LIVE/transitivity/mhs/0/1"
, " o m1/m1:prog1/LIVE/transitivity/rhs"
, " o m1/m1:prog3/LIVE/disjunction/lhs"
, " o m1/m1:prog3/LIVE/disjunction/rhs"
, " o m1/m1:prog4/PROG/WD/lhs"
, " o m1/m1:prog5/PROG/WD/lhs"
, " o m1/parse/INV/m1:inv0"
, " o m1/parse/INV/m1:inv1"
, " o m1/parse/INV/m1:inv2"
, " o m1/parse/INV/m1:inv3"
, " o m1/parse/SAF/LIVE/m1:prog2/ensure"
, " o m1/parse/SAF/LIVE/m1:prog4/ensure"
, " o m1/parse/SAF/LIVE/m1:prog5/ensure"
, " o m1/parse/WD/ACT/m1:act0"
, " o m1/parse/WD/C_SCH"
, "passed 57 / 57"
]
case2 :: IO POResult
case2 = verify path0 2
result2 :: String
result2 = unlines
[ "passed 0 / 0"
]
case3 :: IO (Either [Error] (EventRef Expr))
case3 = runEitherT $ do
r <- EitherT $ parse_machine path0 2
S.lookup (Right "input",Right "input") $ all_refs' r
result3 :: Either [Error] EventRef'
result3 = Right $ eventRef "input" "input" &~ do
let fs = make_type (z3Sort "FS" "FS" 0) []
file = symbol_table [z3Var "file" fs]
c = ctxWith [function_theory] $ do
[carrier| FS |]
[var| file : FS |]
[var| in : \Int \pfun FS |]
[var| v : \Int |]
acts = fromList
[ ("m0:act0",c [act| in := in \1| (v+1) \fun file |])
, ("m0:act1",c [act| v := v + 1 |]) ]
old %= execState (do
params .= file
actions .= acts
)
abs_actions .= acts
new %= execState (do
params .= file
actions .= acts
)
|
literate-unitb/literate-unitb
|
src/Document/Tests/Parser.hs
|
mit
| 4,347 | 0 | 14 | 1,219 | 706 | 407 | 299 | -1 | -1 |
module Base.Light
( Light(..)
, lightContributions
) where
import Math.Vector
import Math.Ray
import Base.Material
import Base.Intersection
import Control.Lens
data Attenuation = Atten Double Double Double
data Light =
Directional Color Unit3
| Point Color Vector3 Attenuation
| Spot Color Vector3 Attenuation Unit3 Double Double
calculateDenominator :: Vector3 -> Attenuation -> Vector3 -> Double
calculateDenominator p (Atten kc kl kq) v =
let d = len $ v - p in 1 / (kc + kl * d + kq * d * d)
intensity :: Light -> Vector3 -> Color
intensity (Directional i _) _ = i
intensity (Point i p k) v = calculateDenominator p k v *& i
intensity (Spot i p k d g a) v = let q = calculateDenominator p k v in
let dl = d &. unit (v - p) in
if dl > cos g
then q *& dl ** a *& i
else vector3 0 0 0
direction :: Light -> Vector3 -> Unit3
direction (Directional _ d) _ = d
direction (Point _ p _) v = unit $ p - v
direction (Spot _ p _ _ _ _) v = unit $ p - v
diffuse' :: Color -> Unit3 -> Unit3 -> Color -> Color
diffuse' k n l i = k * ((n &. l) *& i)
specular' :: Color -> Unit3 -> Unit3 -> Color -> Double -> Color
specular' k v r i n = k * (((v &. r) ** n) *& i)
lightContribution :: Intersection -> Vector3 -> Light -> Color
lightContribution i v l = let ity = intensity l v in
let dir = direction l v in let n = i ^. normal in
let d = diffuse' (i ^. matrl . diffuse) n dir ity in
let refl = reflect' dir n in
let spec = i ^. matrl . specular in
let specFall = i ^. matrl . specularFallOff in
let s = specular' spec (unit v) refl ity specFall in d + s
lightContributions :: Intersection -> Vector3 -> Color -> [Light] -> Color
lightContributions i v a ls = foldr contrib base ls
where contrib l c = c + lightContribution i v l
base = a * i ^. matrl . ambient
|
burz/Rayzer
|
Base/Light.hs
|
mit
| 1,842 | 0 | 25 | 470 | 831 | 421 | 410 | -1 | -1 |
{-# LANGUAGE FlexibleInstances, UndecidableInstances, OverlappingInstances #-}
module Language.Meta.C.Show where
import Language.Meta.C.Literal
import Language.Meta.C.AST
import Language.Meta.SExpr
import Data.List (intercalate)
parens :: String -> String
parens x = "(" ++ x ++ ")"
angles :: String -> String
angles x = "[" ++ x ++ "]"
braces :: String -> String
braces x = "{" ++ x ++ "}"
spaces :: String -> String
spaces x = " " ++ x ++ " "
-- comma separated list
csl :: [String] -> String
csl xs = intercalate ", " xs
-- newline separated list
nsl, nnsl :: [String] -> String
nsl xs = intercalate eol xs
nnsl xs = intercalate (eol ++ eol) xs
eol :: String
eol = "\n"
indent :: Int -> String
indent 0 = ""
indent 1 = " "
indent level = indent 1 ++ indent (level - 1)
flatten :: NonEmptyList a -> [a]
flatten (NonEmptyList (x, xs)) = (x:xs)
class ShowC a where
showC :: Int -> a -> String
instance (ShowC a) => (Show a) where
show x = showC 0 x
instance Show Negligible where
show negligible = case negligible of
NegligibleMacro macro -> macro
NegligibleComment comment -> comment
instance Show Id where
show (Id str) = str
instance Show Literal where
show literal = case literal of
LiteralInt integer -> show integer
LiteralChar char -> show char
LiteralFloat float -> show float
LiteralString string -> show string
LiteralEnumConst enum_const -> show enum_const
instance Show EnumerationConstant where
show (EnumerationConstant str) = str
instance Show CharConstant where
show char = case char of
CharConstant s -> "'" ++ s ++ "'"
WideCharConstant s -> "L'" ++ s ++ "'"
instance Show StringLiteral where
show string = case string of
StringLiteral s -> "\"" ++ show s ++ "\""
WideStringLiteral s -> "L\"" ++ show s ++ "\""
instance Show IntegerConstant where
show integer_constant = case integer_constant of
DecimalConstant s suffixes -> s ++ concatMap show suffixes
OctalConstant s suffixes -> "0" ++ s ++ concatMap show suffixes
HexadecimalConstant s suffixes -> "0x" ++ s ++ concatMap show suffixes
instance Show IntegerSuffix where
show suffix = case suffix of
UnsignedSuffix -> "U"
LongSuffix -> "L"
LongLongSuffix -> "LL"
instance Show FloatingConstant where
show float = case float of
DecimalFloatingConstant fractional_constant exponent_part suffix -> show fractional_constant ++ maybe "" show exponent_part ++ maybe "" show suffix
HexadecimalFloatingConstant fractional_constant exponent_part suffix -> "0x" ++ show fractional_constant ++ show exponent_part ++ maybe "" show suffix
instance Show FractionalConstant where
show fractional = case fractional of
FractionalConstant mantissa floating ->
maybe "" id mantissa ++ "." ++ floating
MantissaConstant mantissa ->
mantissa ++ "."
-- TODO: DRY
instance Show HexadecimalFractionalConstant where
show fractional = case fractional of
HexadecimalFractionalConstant mantissa floating ->
maybe "" id mantissa ++ "." ++ floating
HexadecimalMantissaConstant mantissa ->
mantissa ++ "."
instance Show ExponentPart where
show exponent_part = case exponent_part of
ExponentPart sign exponent ->
"e" ++ maybe "" show sign ++ exponent
-- DRY
instance Show BinaryExponentPart where
show exponent_part = case exponent_part of
BinaryExponentPart sign exponent ->
"p" ++ maybe "" show sign ++ exponent
instance Show FloatingConstantSign where
show sign = case sign of
FloatingConstantSignPlus -> "+"
FloatingConstantSignMinus -> "-"
instance Show FloatingSuffix where
show suffix = case suffix of
FloatSuffix -> "f"
LongDoubleSuffix -> "l"
instance Show StructOrUnion where
show struct_or_union = case struct_or_union of
Struct -> "struct"
Union -> "union"
instance Show AssignmentOperator where
show assignment_operator = case assignment_operator of
Assign -> "="
AssignMult -> "*="
AssignDiv -> "/="
AssignMod -> "%="
AssignPlus -> "+="
AssignMinus -> "-="
AssignLShift -> "<<="
AssignRShift -> ">>="
AssignAnd -> "&="
AssignXor -> "^="
AssignOr -> "|="
instance Show BinaryOperator where
show binary_operator = case binary_operator of
LogicalOr -> "||"
LogicalAnd -> "&&"
BitOr -> "|"
BitXor -> "^"
BitAnd -> "&"
EqualityNotEqual -> "!="
EqualityEqual -> "=="
RelationalEGT -> ">="
RelationalELT -> "<="
RelationalGT -> ">"
RelationalLT -> "<"
ShiftRight -> ">>"
ShiftLeft -> "<<"
BinaryPlus -> "+"
BinaryMinus -> "-"
Mod -> "%"
Div -> "/"
Mult -> "*"
instance Show UnaryOperator where
show unary_operator = case unary_operator of
UnaryReference -> "&"
UnaryDereference -> "*"
UnaryPlus -> "+"
UnaryMinus -> "-"
UnaryInverse -> "~"
UnaryNot -> "!"
|
ykst/MetaC
|
Language/Meta/C/Show.hs
|
mit
| 5,217 | 1 | 12 | 1,412 | 1,448 | 719 | 729 | 141 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
module Prelude.String.Type (
String,
) where
import Prelude (
String,
)
|
scott-fleischman/cafeteria-prelude
|
src/Prelude/String/Type.hs
|
mit
| 121 | 0 | 5 | 29 | 23 | 16 | 7 | 5 | 0 |
-- Monoid is a type class.
-- It is for types whose values can be combined together with a binary operation.
class Monoid m where
mempty :: m
mappend :: m -> m -> m
mconcat :: [m] -> m
mconcat = foldr mappend mempty
-- Only concrete types can be made monoids.
-- This is different from Functors and Applicatives which require their
-- parameters to be type constructors.
-- mempty is the identity value of the monoid; it is a polymorphic constant.
-- mappend is the binary function. (the “append” in the name is misleading).
-- mconcat takes a list of monoid values and reduces them into a single value.
-- (again calling this “concat” is fucking misleading).
-- Monoid laws
-- mempty `mappend` x = x            (analogous to 1 * x = x)
-- x `mappend` mempty = x
-- (x `mappend` y) `mappend` z = x `mappend` (y `mappend` z)
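-- Illustrative sanity check (added; not part of the original notes), using
-- the list monoid defined just below:
-- ghci> mempty `mappend` [1,2,3]
-- [1,2,3]
-- ghci> ([1] `mappend` [2]) `mappend` [3] == [1] `mappend` ([2] `mappend` [3])
-- True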
instance Monoid [a] where
mempty = []
mappend = (++)
-- [a] : because Monoid requires a concrete type.
-- ghci> mconcat [[2],[3],[3,4,5]]
-- [2,3,3,4,5]
newtype Product a = Product { getProduct :: a }
deriving (Eq, Ord, Read, Show, Bounded)
instance Num a => Monoid (Product a) where
mempty = Product 1
Product x `mappend` Product y = Product (x*y)
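-- Illustrative usage (added): wrap the numbers, combine, then unwrap.
-- ghci> getProduct (Product 3 `mappend` Product 9)
-- 27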
newtype Any = Any { getAny :: Bool }
deriving (Eq, Ord, Read, Show, Bounded)
instance Monoid Any where
mempty = Any False
Any x `mappend` Any y = Any (x || y)
newtype All = All { getAll :: Bool }
deriving (Eq, Ord, Read, Show, Bounded)
instance Monoid All where
mempty = All True
All x `mappend` All y = All (x && y)
instance Monoid Ordering where
mempty = EQ
LT `mappend` _ = LT
EQ `mappend` y = y
GT `mappend` _ = GT
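-- Illustrative example (added; not in the original notes): the Ordering
-- monoid chains comparison criteria, so later criteria only matter when the
-- earlier ones return EQ.
lengthCompare :: String -> String -> Ordering
lengthCompare x y = (length x `compare` length y) `mappend` (x `compare` y)
-- ghci> lengthCompare "zen" "ants"  -- LT, the shorter string wins
-- ghci> lengthCompare "zen" "ant"   -- GT, equal lengths fall back to alphabetical order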
instance Monoid a => Monoid (Maybe a) where
mempty = Nothing
Nothing `mappend` m = m
    m `mappend` Nothing = m
Just m1 `mappend` Just m2 = Just (m1 `mappend` m2)
newtype First a = First { getFirst :: Maybe a }
deriving (Eq, Ord, Read, Show)
instance Monoid (First a) where
mempty = First Nothing
    First (Just x) `mappend` _ = First (Just x)
First Nothing `mappend` x = x
|
v0lkan/learning-haskell
|
monoids.hs
|
mit
| 2,049 | 1 | 9 | 495 | 608 | 333 | 275 | -1 | -1 |
{-# LANGUAGE ForeignFunctionInterface, JavaScriptFFI, DeriveDataTypeable,
UnboxedTuples, GHCForeignImportPrim, UnliftedFFITypes,
MagicHash
#-}
module JavaScript.Web.MessageEvent ( MessageEvent
, getData
, MessageEventData(..)
) where
import GHCJS.Types
import GHC.Exts
import Data.Typeable
import JavaScript.Web.MessageEvent.Internal
import JavaScript.Web.Blob.Internal (Blob, SomeBlob(..))
import JavaScript.TypedArray.ArrayBuffer.Internal (ArrayBuffer, SomeArrayBuffer(..))
import Data.JSString.Internal.Type (JSString(..))
data MessageEventData = StringData JSString
| BlobData Blob
| ArrayBufferData ArrayBuffer
deriving (Typeable)
getData :: MessageEvent -> MessageEventData
getData me = case js_getData me of
(# 1#, r #) -> StringData (JSString r)
(# 2#, r #) -> BlobData (SomeBlob r)
(# 3#, r #) -> ArrayBufferData (SomeArrayBuffer r)
{-# INLINE getData #-}
-- -----------------------------------------------------------------------------
foreign import javascript unsafe
"$r2 = $1.data;\
\$r1 = typeof $r2 === 'string' ? 1 : ($r2 instanceof ArrayBuffer ? 3 : 2)"
js_getData :: MessageEvent -> (# Int#, JSVal #)
|
ghcjs/ghcjs-base
|
JavaScript/Web/MessageEvent.hs
|
mit
| 1,386 | 4 | 11 | 392 | 236 | 139 | 97 | 25 | 3 |
import Test.Hspec (it, shouldBe, hspec)
import LuciansLusciousLasagna (elapsedTimeInMinutes, expectedMinutesInOven, preparationTimeInMinutes)
main :: IO ()
main = hspec $ do
it "expectedMinutesInOven" $ do
expectedMinutesInOven `shouldBe` 40
it "preparationTimeInMinutes" $
preparationTimeInMinutes 5 `shouldBe` 10
it "elapsedTimeInMinutes" $ do
elapsedTimeInMinutes 3 20 `shouldBe` 26
|
exercism/xhaskell
|
exercises/concept/lucians-luscious-lasagna/test/Tests.hs
|
mit
| 437 | 0 | 12 | 93 | 112 | 58 | 54 | 10 | 1 |
{--
- Problem 28
Sorting a list of lists according to length of sublists
a) We suppose that a list contains elements that are lists themselves. The objective is to sort the elements
of this list according to their length. E.g. short lists first, longer lists later, or vice versa.
Example:
* (lsort '((a b c) (d e) (f g h) (d e) (i j k l) (m n) (o)))
((O) (D E) (D E) (M N) (A B C) (F G H) (I J K L))
Example in Haskell:
Prelude>lsort ["abc","de","fgh","de","ijkl","mn","o"]
["o","de","de","mn","abc","fgh","ijkl"]
b) Again, we suppose that a list contains elements that are lists themselves. But this time the objective is
to sort the elements of this list according to their length frequency; i.e., in the default, where sorting is
done ascendingly, lists with rare lengths are placed first, others with a more frequent length come later.
Example:
* (lfsort '((a b c) (d e) (f g h) (d e) (i j k l) (m n) (o)))
((i j k l) (o) (a b c) (f g h) (d e) (d e) (m n))
Example in Haskell:
lfsort ["abc", "de", "fgh", "de", "ijkl", "mn", "o"]
["ijkl","o","abc","fgh","de","de","mn"]
--}
import Data.List
import Data.Function
lsort :: [[a]] -> [[a]]
lsort = sortBy (compare `on` length)
lfsort :: [[a]] -> [[a]]
lfsort xs = sortBy (compare `on` lenFrequency) xs
where lenFrequency x = length $ filter (== length x) $ map length xs
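-- A minimal alternative sketch (added; not part of the original solution):
-- precompute the length frequencies once instead of re-scanning the whole
-- list for every element.
lfsort' :: [[a]] -> [[a]]
lfsort' xs = sortBy (compare `on` freq) xs
  where counts = map (\g -> (head g, length g)) . group . sort $ map length xs
        freq x = maybe 0 id $ lookup (length x) counts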
|
sighingnow/Functional-99-Problems
|
Haskell/28.hs
|
mit
| 1,427 | 0 | 11 | 344 | 126 | 71 | 55 | 7 | 1 |
-- | A generic formula used for various logics, most notably propositional logic
-- and first-order logic (Sphinx.FOL module). The structure mostly follows
-- Harrison (2009), however, binary connectives ('and', 'or', ...) are
-- aggregated into a BinOp type.
--
-- Reference:
-- John Harrison, Handbook of Practical Logic and Automated Reasoning.
-- Cambridge University Press, 2009.
module Faun.Formula where
import System.Random
import qualified Data.Map as Map
import Data.Map (Map)
import qualified Data.Set as Set
import Data.Set (Set)
import Data.List (nub)
import Data.Monoid ((<>))
import Faun.Symbols
import qualified Faun.Text as FT
import qualified Data.Text as T
import Faun.BinT
import Faun.QuanT
import Faun.PrettyPrint
-- | A formula with generic atoms. Propositional logic can easily be described
-- with Formula String, and first-order logic is defined in module Faun.FOL as
-- Formula (Predicate t).
data Formula a =
-- | Generic atoms.
Atom a
-- | The unary negation type.
| Not (Formula a)
-- | Binary connectives.
| BinOp BinT (Formula a) (Formula a)
  -- | Quantifiers apply to a string (following Harrison 2009).
  | Quantifier QuanT T.Text (Formula a) -- Following Harrison here, but it might be smarter to put Quantifiers in FOL only.
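-- Illustrative example (added; not part of the original module): the
-- propositional formula "p and not q" written as a Formula over String atoms.
exampleFormula :: Formula String
exampleFormula = BinOp And (Atom "p") (Not (Atom "q"))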
instance Eq a => Eq (Formula a) where
Atom a0 == Atom a1 = a0 == a1
Not x0 == Not x1 = x0 == x1
BinOp b0 x0 y0 == BinOp b1 x1 y1 =
b0 == b1 && x0 == x1 && y0 == y1
Quantifier q0 v0 x0 == Quantifier q1 v1 x1 =
q0 == q1 && v0 == v1 && x0 == x1
_ == _ = False
instance Ord a => Ord (Formula a) where
Atom a0 `compare` Atom a1 = a0 `compare` a1
Atom _ `compare` _ = GT
_ `compare` Atom _ = LT
Not f0 `compare` Not f1 = f0 `compare` f1
Not _ `compare` _ = GT
_ `compare` Not _ = LT
BinOp b0 f00 f01 `compare` BinOp b1 f10 f11 =
(b0 `compare` b1) <> (f00 `compare` f10) <> (f01 `compare` f11)
BinOp{} `compare` Quantifier{} = GT
Quantifier{} `compare` BinOp{} = LT
Quantifier q0 v0 f0 `compare` Quantifier q1 v1 f1 =
(q0 `compare` q1) <> (v0 `compare` v1) <> (f1 `compare` f0)
-- | Prints the formula given a set of symbols ('Sphinx.Symbols.Symbols').
-- This function is built to support printing in symbolic, LaTeX, and ASCII
-- formats.
prettyPrintFm :: (PrettyPrint a) => Symbols -> Formula a -> T.Text
prettyPrintFm s = FT.rmQuotes . buildStr (0 :: Int)
where
-- For negation and Quantifiers, add spaces after words but not symbols:
notSpace = if T.toLower (symNot s) == "not" then " " else ""
qualSpace = if T.toLower (symForall s) == "forall" then " " else ""
suffixNot = symNot s == "'"
-- Format prefixes:
showNot b pr sym p =
FT.surrIf b $ T.concat $ if suffixNot then [txt, sym] else [sym, notSpace, txt]
where txt = buildStr (pr + 1) p
-- Format infix operators:
showInfix b pr sym p q =
FT.surrIf b $ T.concat [buildStr (pr + 1) p, " ", sym, " ", buildStr pr q]
-- Recursive function to build the string:
buildStr pr fm = case fm of
Atom a -> prettyPrint s a
Not x -> showNot (pr > 12) 11 (symNot s) x
BinOp And x y -> showInfix (pr > 10) 10 (symAnd s) x y
BinOp Or x y -> showInfix (pr > 8) 8 (symOr s) x y
BinOp Implies x y -> showInfix (pr > 6) 6 (symImplies s) x y
BinOp Xor x y -> showInfix (pr > 4) 4 (symXor s) x y
BinOp Iff x y -> showInfix (pr > 2) 2 (symIff s) x y
Quantifier ForAll v x -> T.concat [symForall s, qualSpace, v, " ", buildStr pr x]
Quantifier Exists v x -> T.concat [symExists s, qualSpace, v, " ", buildStr pr x]
Quantifier Unique v x -> T.concat [symExists s, "!", qualSpace, v, " ", buildStr pr x]
-- | Count the number of atoms (Top & Bottom are considered atoms).
numAtoms :: Formula a -> Int
numAtoms f = case f of
Not x -> numAtoms x
BinOp _ x y -> numAtoms x + numAtoms y
Quantifier _ _ x -> numAtoms x
_ -> 1
-- | Gathers all atoms in the formula.
atoms :: (Ord a) => Formula a -> Set a
atoms = gat Set.empty
where
gat s fm = case fm of
Atom z -> Set.insert z s
Not x -> Set.union (atoms x) s
BinOp _ x y -> Set.unions [atoms x, atoms y, s]
Quantifier _ _ x -> Set.union (atoms x) s
-- | Gathers all atoms in the formula in a list for atoms that do not support
-- the Ord type class.
atomsLs :: (Eq a) => Formula a -> [a]
atomsLs = nub . gat
where
gat = gat' []
gat' l fm = case fm of
Atom z -> z : l
Not x -> l ++ gat x
BinOp _ x y -> l ++ gat x ++ gat y
Quantifier _ _ x -> l ++ gat x
-- | Returns true if the formula has quantifiers
hasQuan :: Formula a -> Bool
hasQuan f = case f of
Not x -> hasQuan x
BinOp _ x y -> hasQuan x || hasQuan y
Quantifier{} -> True
_ -> False
-- | Gathers the variables inside some type of quantifier.
quanVars :: QuanT -> Formula a -> Set T.Text
quanVars q = gat'
where
gat' = gat Set.empty
gat s f' = case f' of
Not x -> Set.union s (gat' x)
BinOp _ x y -> Set.unions [s, gat' x, gat' y]
Quantifier q' v x ->
if q == q' then Set.union (Set.insert v s) (gat' x)
else Set.union s (gat' x)
_ -> Set.empty
-- | Returns existentially quantified variables.
exiquanVars :: Formula a -> Set T.Text
exiquanVars = quanVars Exists
-- | Returns universally quantifier variables.
uniquanVars :: Formula a -> Set T.Text
uniquanVars = quanVars ForAll
-- | Randomly assigns all element of the set to either True or False with equal
-- probability. It's a fair ass.
randomFairAss :: (Ord a) => StdGen -> Set a -> Map a Bool
randomFairAss g s = Map.fromList $ zip (Set.toList s) rs
where rs = take (Set.size s) $ randoms g :: [Bool]
-- | Gathers and assigns all atoms to a boolean given a seed value.
randomFairAssF :: (Ord a) => StdGen -> Formula a -> Map a Bool
randomFairAssF g f = randomFairAss g $ atoms f
-- | Removes implications, equivalences, and exclusive disjunctions.
coreOp :: Formula a -> Formula a
coreOp f = case f of
Not x -> Not $ coreOp x
BinOp And x y -> BinOp And (coreOp x) (coreOp y)
BinOp Or x y -> BinOp Or (coreOp x) (coreOp y)
BinOp Xor x y -> BinOp Or (BinOp And (coreOp x) (Not $ coreOp y)) (BinOp And (Not $ coreOp x) (coreOp y))
BinOp Implies x y -> BinOp Or (Not $ coreOp x) (coreOp y)
BinOp Iff x y -> BinOp Or (BinOp And (coreOp x) (coreOp y)) (BinOp And (Not $ coreOp x) (Not $ coreOp y))
Quantifier q v x -> Quantifier q v (coreOp x)
_ -> f
-- | Normal form.
nnf :: Formula a -> Formula a
nnf f = case coreOp f of
BinOp And x y -> BinOp And (nnf x) (nnf y)
BinOp Or x y -> BinOp Or (nnf x) (nnf y)
Not (Not x) -> nnf x
Not (BinOp And x y) -> BinOp Or (nnf (Not x)) (nnf (Not y))
Not (BinOp Or x y) -> BinOp And (nnf (Not x)) (nnf (Not y))
_ -> f
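-- Illustrative behaviour (added): nnf pushes negation inward, e.g.
-- nnf (Not (BinOp And (Atom "p") (Atom "q")))
-- == BinOp Or (Not (Atom "p")) (Not (Atom "q"))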
|
PhDP/Sphinx-AI
|
Faun/Formula.hs
|
mit
| 7,024 | 0 | 14 | 1,934 | 2,632 | 1,331 | 1,301 | 125 | 13 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Language.Inference.Semantics where
-- Reference: Generalizing Hindley-Milner Type Inference Algorithms (2002)
import Language.Inference.Syntax
import Data.Foldable (foldrM)
import Data.Maybe
import Data.Monoid
import Control.Monad.State
import qualified Data.Set as Set
import qualified Data.Map as Map
data TyMono = TyBool |
TyNat |
TyArr TyMono TyMono |
TyVar Integer
deriving (Eq, Show)
data TyPoly = TyPoly (Set.Set Integer) TyMono
type Context = Map.Map String TyPoly
data SemanticError = UndefinedVariable String |
UnificationFailure TyMono TyMono
deriving (Eq)
instance Show SemanticError where
show (UndefinedVariable var) = "undefined variable: " ++ var
show (UnificationFailure x y) = "cannot unify: " ++ (show x) ++ " and " ++ (show y)
fresh :: (Monad m) => StateT Integer m Integer
fresh = modify (+1) >> get
freshMono :: (Monad m) => StateT Integer m TyMono
freshMono = fresh >>= (\x -> return $ TyVar x)
-- Substitution
newtype Substitution = Substitution (Map.Map Integer TyMono) deriving Monoid
singleton :: Integer -> TyMono -> Substitution
singleton key val = Substitution $ Map.singleton key val
remove :: Set.Set Integer -> Substitution -> Substitution
remove s (Substitution substitution) = Substitution $ foldr Map.delete substitution s
apply :: Substitution -> Integer -> Maybe TyMono
apply (Substitution substitution) i = Map.lookup i substitution
substitute :: Substitution -> TyMono -> TyMono
substitute _ TyBool = TyBool
substitute _ TyNat = TyNat
substitute s (TyArr a b) = TyArr (substitute s a) (substitute s b)
substitute s (TyVar i) = fromMaybe (TyVar i) (apply s i)
substitutePoly :: Substitution -> TyPoly -> TyPoly
substitutePoly substitution (TyPoly bindings monotype) = TyPoly bindings monoSub where
monoSub = substitute (remove bindings substitution) monotype
substituteCtx :: Substitution -> Context -> Context
substituteCtx x = Map.map $ substitutePoly x
-- Generalization and instantiation
generalize :: Context -> TyMono -> TyPoly
generalize context monotype = TyPoly free monotype where
free = Set.difference (freeVarsMono monotype) (freeVarsCtx context)
instantiate :: (Monad m) => TyPoly -> StateT Integer m TyMono
instantiate (TyPoly bindings monotype) = do
freshSubstitution <- freshVars bindings
return $ substitute freshSubstitution monotype
where
freshVars = foldrM build mempty where
build key substitution = do
new <- fresh
return $ singleton key (TyVar new) <> substitution
freeVarsMono :: TyMono -> Set.Set Integer
freeVarsMono TyBool = Set.empty
freeVarsMono TyNat = Set.empty
freeVarsMono (TyVar i) = Set.singleton i
freeVarsMono (TyArr x y) = Set.union (freeVarsMono x) (freeVarsMono y)
freeVars :: TyPoly -> Set.Set Integer
freeVars (TyPoly bindings mt) = Set.difference (freeVarsMono mt) bindings
freeVarsCtx :: Context -> Set.Set Integer
freeVarsCtx ctx = Set.unions . map freeVars . Map.elems $ ctx
-- Unification
unify :: TyMono -> TyMono -> Either SemanticError Substitution
unify TyBool TyBool = Right mempty
unify TyNat TyNat = Right mempty
unify (TyVar x) (TyVar y) | x == y = Right mempty
unify (TyVar var) x | Set.notMember var (freeVarsMono x) = Right $ singleton var x
unify x (TyVar var) | Set.notMember var (freeVarsMono x) = Right $ singleton var x
unify (TyArr a b) (TyArr x y) = do
s <- unify a x
t <- unify (substitute s b) (substitute s y)
return $ t <> s
unify x y = Left $ UnificationFailure x y
-- Algorithm W
infer :: Term -> Either SemanticError TyMono
infer term = (evalStateT (inferWithState Map.empty term) 0) >>= return . snd
inferWithState :: Context -> Term -> StateT Integer (Either SemanticError) (Substitution, TyMono)
inferWithState _ TmTrue = return $ (mempty, TyBool)
inferWithState _ TmFalse = return $ (mempty, TyBool)
inferWithState _ TmZero = return $ (mempty, TyNat)
inferWithState context (TmVar i) = do
typoly <- lift typescheme
tymono <- instantiate typoly
return $ (mempty, tymono)
where
typescheme = case Map.lookup i context of
Just polytype -> Right polytype
Nothing -> Left $ UndefinedVariable i
inferWithState context (TmAbs var body) = do
new <- freshMono
let newPoly = TyPoly Set.empty new
(sub, ty) <- inferWithState (Map.insert var newPoly context) body
return $ (sub, TyArr (substitute sub new) ty)
inferWithState context (TmApp f x) = do
(s1, t1) <- inferWithState context f
(s2, t2) <- inferWithState (substituteCtx s1 context) x
new <- freshMono
s3 <- lift $ unify (substitute s2 t1) (TyArr t2 new)
return (s3 <> s2 <> s1, substitute s3 new)
inferWithState context (TmIf b t f) = do
(s1, t1) <- inferWithState context b
s2 <- lift $ unify t1 TyBool
(s3, t3) <- inferWithState (substituteCtx (s2 <> s1) context) t
(s4, t4) <- inferWithState (substituteCtx (s3 <> s2 <> s1) context) f
s5 <- lift $ unify (substitute s4 t3) t4
return (s5 <> s4 <> s3 <> s2 <> s1, (substitute s5 t4))
inferWithState context (TmSucc n) = do
(s1, t1) <- inferWithState context n
s2 <- lift $ unify t1 TyNat
return (s2 <> s1, substitute s2 t1)
inferWithState context (TmRec base ind) = do
(s1, t1) <- inferWithState context base
(s2, t2) <- inferWithState (substituteCtx s1 context) ind
s3 <- lift $ unify t2 (TyArr (substitute s2 t1) (substitute s2 t1))
return (s3 <> s2 <> s1, TyArr TyNat (substitute (s3 <> s2) t1))
inferWithState context (TmLet var arg body) = do
(s1, t1) <- inferWithState context arg
let newContext = substituteCtx s1 context
let genContext = Map.insert var (generalize newContext t1) newContext
(s2, t2) <- inferWithState genContext body
return (s2 <> s1, t2)
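-- Illustrative usage (added; not part of the original module): inferring the
-- identity function \x. x yields an arrow between one fresh type variable and
-- itself, e.g.
-- infer (TmAbs "x" (TmVar "x")) ==> Right (TyArr (TyVar 1) (TyVar 1))
-- (the exact variable index depends on the fresh-name counter).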
|
robertclancy/tapl
|
inference/lib/Language/Inference/Semantics.hs
|
gpl-2.0
| 6,105 | 0 | 15 | 1,445 | 2,206 | 1,102 | 1,104 | 120 | 2 |
-- $Id: GenUtil.hs,v 1.27 2004/07/12 23:31:34 john Exp $
-- arch-tag: 835e46b7-8ffd-40a0-aaf9-326b7e347760
-- Copyright (c) 2002 John Meacham ([email protected])
--
-- Permission is hereby granted, free of charge, to any person obtaining a
-- copy of this software and associated documentation files (the
-- "Software"), to deal in the Software without restriction, including
-- without limitation the rights to use, copy, modify, merge, publish,
-- distribute, sublicense, and/or sell copies of the Software, and to
-- permit persons to whom the Software is furnished to do so, subject to
-- the following conditions:
--
-- The above copyright notice and this permission notice shall be included
-- in all copies or substantial portions of the Software.
--
-- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-- OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-- MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-- IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-- CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-- TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-- SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
----------------------------------------
-- | This is a collection of random useful utility functions written in pure
-- Haskell 98. In general, it tries to conform to the naming scheme put forth
-- by the haskell prelude and fill in the obvious omissions, as well as provide
-- useful routines in general. To ensure maximum portability, no instances are
-- exported so it may be added to any project without conflicts.
----------------------------------------
module Ginsu.GenUtil(
-- * Functions
-- ** Error reporting
putErr,putErrLn,putErrDie,
-- ** Simple deconstruction
fromLeft,fromRight,fsts,snds,splitEither,rights,lefts,
-- ** System routines
exitSuccess, System.exitFailure, epoch, lookupEnv,endOfTime,
-- ** Random routines
repMaybe,
liftT2, liftT3, liftT4,
snub, snubFst, sortFst, groupFst, foldl',
fmapLeft,fmapRight,isDisjoint,isConjoint,
-- ** Monad routines
repeatM, repeatM_, replicateM, replicateM_, maybeToMonad,
toMonadM, ioM, ioMp, foldlM, foldlM_, foldl1M, foldl1M_,
-- ** Text Routines
-- *** Quoting
shellQuote, simpleQuote, simpleUnquote,
-- *** Random
concatInter,
powerSet,
indentLines,
buildTableLL,
buildTableRL,
randomPermute,
randomPermuteIO,
trimBlankLines,
paragraph,
paragraphBreak,
expandTabs,
chunk,
chunkText,
rtup,
triple,
fromEither,
mapFst,
mapSnd,
mapFsts,
mapSnds,
tr,
readHex,
overlaps,
showDuration,
getArgContents,
readM,
readsM,
-- * Classes
UniqueProducer(..)
) where
import Char(isAlphaNum, isSpace, toLower, ord)
import List(group,sort)
import List(intersperse, sortBy, groupBy)
import Monad
import qualified IO
import qualified System
import Random(StdGen, newStdGen, randomR)
import Time
{-# SPECIALIZE snub :: [String] -> [String] #-}
{-# SPECIALIZE snub :: [Int] -> [Int] #-}
-- | sorted nub of list, much more efficient than nub, but doesn't preserve ordering.
snub :: Ord a => [a] -> [a]
snub = map head . group . sort
-- | sorted nub of list of tuples, based solely on the first element of each tuple.
snubFst :: Ord a => [(a,b)] -> [(a,b)]
snubFst = map head . groupBy (\(x,_) (y,_) -> x == y) . sortBy (\(x,_) (y,_) -> compare x y)
-- | sort list of tuples, based on first element of each tuple.
sortFst :: Ord a => [(a,b)] -> [(a,b)]
sortFst = sortBy (\(x,_) (y,_) -> compare x y)
-- | group list of tuples, based only on equality of the first element of each tuple.
groupFst :: Eq a => [(a,b)] -> [[(a,b)]]
groupFst = groupBy (\(x,_) (y,_) -> x == y)
-- | write string to standard error
putErr :: String -> IO ()
putErr = IO.hPutStr IO.stderr
-- | write string and newline to standard error
putErrLn :: String -> IO ()
putErrLn s = putErr (s ++ "\n")
-- | write string and newline to standard error,
-- then exit program with failure.
putErrDie :: String -> IO a
putErrDie s = putErrLn s >> System.exitFailure
-- | exit program successfully. 'exitFailure' is
-- also exported from System.
exitSuccess :: IO a
exitSuccess = System.exitWith System.ExitSuccess
{-# INLINE fromRight #-}
fromRight :: Either a b -> b
fromRight (Right x) = x
fromRight _ = error "fromRight"
{-# INLINE fromLeft #-}
fromLeft :: Either a b -> a
fromLeft (Left x) = x
fromLeft _ = error "fromLeft"
-- | recursively apply function to value until it returns Nothing
repMaybe :: (a -> Maybe a) -> a -> a
repMaybe f e = case f e of
Just e' -> repMaybe f e'
Nothing -> e
{-# INLINE liftT2 #-}
{-# INLINE liftT3 #-}
{-# INLINE liftT4 #-}
liftT4 (f1,f2,f3,f4) (v1,v2,v3,v4) = (f1 v1, f2 v2, f3 v3, f4 v4)
liftT3 (f,g,h) (x,y,z) = (f x, g y, h z)
-- | apply functions to values inside a tuple. 'liftT3' and 'liftT4' also exist.
liftT2 :: (a -> b, c -> d) -> (a,c) -> (b,d)
liftT2 (f,g) (x,y) = (f x, g y)
-- | class for monads which can generate
-- unique values.
class Monad m => UniqueProducer m where
-- | produce a new unique value
newUniq :: m Int
-- peekUniq :: m Int
-- modifyUniq :: (Int -> Int) -> m ()
-- newUniq = do
-- v <- peekUniq
-- modifyUniq (+1)
-- return v
rtup a b = (b,a)
triple a b c = (a,b,c)
-- | the standard unix epoch
epoch :: ClockTime
epoch = toClockTime $ CalendarTime { ctYear = 1970, ctMonth = January, ctDay = 0, ctHour = 0, ctMin = 0, ctSec = 0, ctTZ = 0, ctPicosec = 0, ctWDay = undefined, ctYDay = undefined, ctTZName = undefined, ctIsDST = undefined}
-- | an arbitrary time in the future
endOfTime :: ClockTime
endOfTime = toClockTime $ CalendarTime { ctYear = 2020, ctMonth = January, ctDay = 0, ctHour = 0, ctMin = 0, ctSec = 0, ctTZ = 0, ctPicosec = 0, ctWDay = undefined, ctYDay = undefined, ctTZName = undefined, ctIsDST = undefined}
{-# INLINE fsts #-}
-- | take the fst of every element of a list
fsts :: [(a,b)] -> [a]
fsts = map fst
{-# INLINE snds #-}
-- | take the snd of every element of a list
snds :: [(a,b)] -> [b]
snds = map snd
{-# INLINE repeatM #-}
{-# SPECIALIZE repeatM :: IO a -> IO [a] #-}
repeatM :: Monad m => m a -> m [a]
repeatM x = sequence $ repeat x
{-# INLINE repeatM_ #-}
{-# SPECIALIZE repeatM_ :: IO a -> IO () #-}
repeatM_ :: Monad m => m a -> m ()
repeatM_ x = sequence_ $ repeat x
{-# INLINE replicateM #-}
{-# SPECIALIZE replicateM :: Int -> IO a -> IO [a] #-}
replicateM :: Monad m => Int -> m a -> m [a]
replicateM n x = sequence $ replicate n x
{-# INLINE replicateM_ #-}
{-# SPECIALIZE replicateM_ :: Int -> IO a -> IO () #-}
replicateM_ :: Monad m => Int -> m a -> m ()
replicateM_ n x = sequence_ $ replicate n x
{-# SPECIALIZE maybeToMonad :: Maybe a -> IO a #-}
-- | convert a maybe to an arbitrary failable monad
maybeToMonad :: Monad m => Maybe a -> m a
maybeToMonad (Just x) = return x
maybeToMonad Nothing = fail "Nothing"
toMonadM :: Monad m => m (Maybe a) -> m a
toMonadM action = join $ liftM maybeToMonad action
foldlM :: Monad m => (a -> b -> m a) -> a -> [b] -> m a
foldlM f v (x:xs) = (f v x) >>= \a -> foldlM f a xs
foldlM _ v [] = return v
foldl1M :: Monad m => (a -> a -> m a) -> [a] -> m a
foldl1M f (x:xs) = foldlM f x xs
foldl1M _ _ = error "foldl1M"
foldlM_ :: Monad m => (a -> b -> m a) -> a -> [b] -> m ()
foldlM_ f v xs = foldlM f v xs >> return ()
foldl1M_ ::Monad m => (a -> a -> m a) -> [a] -> m ()
foldl1M_ f xs = foldl1M f xs >> return ()
-- | partition a list of eithers.
splitEither :: [Either a b] -> ([a],[b])
splitEither (r:rs) = case splitEither rs of
(xs,ys) -> case r of
Left x -> (x:xs,ys)
Right y -> (xs,y:ys)
splitEither [] = ([],[])
fromEither :: Either a a -> a
fromEither (Left x) = x
fromEither (Right x) = x
{-# INLINE mapFst #-}
{-# INLINE mapSnd #-}
mapFst f (x,y) = (f x, y)
mapSnd g (x,y) = ( x,g y)
{-# INLINE mapFsts #-}
{-# INLINE mapSnds #-}
mapFsts f xs = [(f x, y) | (x,y) <- xs]
mapSnds g xs = [(x, g y) | (x,y) <- xs]
{-# INLINE rights #-}
-- | take just the rights
rights :: [Either a b] -> [b]
rights xs = [x | Right x <- xs]
{-# INLINE lefts #-}
-- | take just the lefts
lefts :: [Either a b] -> [a]
lefts xs = [x | Left x <- xs]
ioM :: Monad m => IO a -> IO (m a)
ioM action = catch (fmap return action) (\e -> return (fail (show e)))
ioMp :: MonadPlus m => IO a -> IO (m a)
ioMp action = catch (fmap return action) (\_ -> return mzero)
-- | reformat a string to not be wider than a given width, breaking it up
-- between words.
paragraph :: Int -> String -> String
paragraph maxn xs = drop 1 (f maxn (words xs)) where
f n (x:xs) | lx < n = (' ':x) ++ f (n - lx) xs where
lx = length x + 1
f _ (x:xs) = '\n': (x ++ f (maxn - length x) xs)
f _ [] = "\n"
chunk :: Int -> [a] -> [[a]]
chunk mw s | length s < mw = [s]
chunk mw s = case splitAt mw s of (a,b) -> a : chunk mw b
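-- Illustrative example (added): chunk 3 "abcdefgh" == ["abc","def","gh"]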
chunkText :: Int -> String -> String
chunkText mw s = concatMap (unlines . chunk mw) $ lines s
{-
paragraphBreak :: Int -> String -> String
paragraphBreak maxn xs = unlines (map ( unlines . map (unlines . chunk maxn) . lines . f maxn ) $ lines xs) where
f _ "" = ""
f n xs | length ss > 0 = if length ss + r rs > n then '\n':f maxn rs else ss where
(ss,rs) = span isSpace xs
f n xs = ns ++ f (n - length ns) rs where
(ns,rs) = span (not . isSpace) xs
r xs = length $ fst $ span (not . isSpace) xs
-}
paragraphBreak :: Int -> String -> String
paragraphBreak maxn xs = unlines $ (map f) $ lines xs where
f s | length s <= maxn = s
f s | isSpace (head b) = a ++ "\n" ++ f (dropWhile isSpace b)
| all (not . isSpace) a = a ++ "\n" ++ f b
| otherwise = reverse (dropWhile isSpace sa) ++ "\n" ++ f (reverse ea ++ b) where
(ea, sa) = span (not . isSpace) $ reverse a
(a,b) = splitAt maxn s
expandTabs' :: Int -> Int -> String -> String
expandTabs' 0 _ s = filter (/= '\t') s
expandTabs' sz off ('\t':s) = replicate len ' ' ++ expandTabs' sz (off + len) s where
len = (sz - (off `mod` sz))
expandTabs' sz _ ('\n':s) = '\n': expandTabs' sz 0 s
expandTabs' sz off (c:cs) = c: expandTabs' sz (off + 1) cs
expandTabs' _ _ "" = ""
-- | expand tabs into spaces in a string
expandTabs s = expandTabs' 8 0 s
tr :: String -> String -> String -> String
tr as bs s = map (f as bs) s where
f (a:_) (b:_) c | a == c = b
f (_:as) (_:bs) c = f as bs c
f [] [] c = c
f _ _ _ = error "invalid tr"
-- | quote strings 'rc' style. single quotes protect any characters between
-- them; to get an actual single quote, double it up. Inverse of 'simpleUnquote'.
simpleQuote :: [String] -> String
simpleQuote ss = unwords (map f ss) where
f s | any isBad s = "'" ++ dquote s ++ "'"
f s = s
dquote s = concatMap (\c -> if c == '\'' then "''" else [c]) s
isBad c = isSpace c || c == '\''
-- | inverse of 'simpleQuote'
simpleUnquote :: String -> [String]
simpleUnquote s = f (dropWhile isSpace s) where
f [] = []
f ('\'':xs) = case quote' "" xs of (x,y) -> x:f (dropWhile isSpace y)
f xs = case span (not . isSpace) xs of (x,y) -> x:f (dropWhile isSpace y)
quote' a ('\'':'\'':xs) = quote' ('\'':a) xs
quote' a ('\'':xs) = (reverse a, xs)
quote' a (x:xs) = quote' (x:a) xs
quote' a [] = (reverse a, "")
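-- Illustrative round trip (added):
-- simpleQuote ["foo bar", "it's"] == "'foo bar' 'it''s'"
-- simpleUnquote "'foo bar' 'it''s'" == ["foo bar", "it's"]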
-- | quote a set of strings as would be appropriate to pass them as
-- arguments to a 'sh' style shell
shellQuote :: [String] -> String
shellQuote ss = unwords (map f ss) where
f s | any (not . isGood) s = "'" ++ dquote s ++ "'"
f s = s
dquote s = concatMap (\c -> if c == '\'' then "'\\''" else [c]) s
isGood c = isAlphaNum c || c `elem` "@/."
-- | looks up an environment variable and returns it in a 'MonadPlus' rather
-- than raising an exception if the variable is not set.
lookupEnv :: MonadPlus m => String -> IO (m String)
lookupEnv s = catch (fmap return $ System.getEnv s) (\e -> if IO.isDoesNotExistError e then return mzero else ioError e)
{-# SPECIALIZE fmapLeft :: (a -> c) -> [(Either a b)] -> [(Either c b)] #-}
fmapLeft :: Functor f => (a -> c) -> f (Either a b) -> f (Either c b)
fmapLeft fn = fmap f where
f (Left x) = Left (fn x)
f (Right x) = Right x
{-# SPECIALIZE fmapRight :: (b -> c) -> [(Either a b)] -> [(Either a c)] #-}
fmapRight :: Functor f => (b -> c) -> f (Either a b) -> f (Either a c)
fmapRight fn = fmap f where
f (Left x) = Left x
f (Right x) = Right (fn x)
{-# SPECIALIZE isDisjoint :: [String] -> [String] -> Bool #-}
{-# SPECIALIZE isConjoint :: [String] -> [String] -> Bool #-}
{-# SPECIALIZE isDisjoint :: [Int] -> [Int] -> Bool #-}
{-# SPECIALIZE isConjoint :: [Int] -> [Int] -> Bool #-}
-- | set operations on lists. (slow!)
isDisjoint, isConjoint :: Eq a => [a] -> [a] -> Bool
isConjoint xs ys = or [x == y | x <- xs, y <- ys]
isDisjoint xs ys = not (isConjoint xs ys)
-- | 'concat' composed with 'List.intersperse'.
concatInter :: String -> [String] -> String
concatInter x = concat . (intersperse x)
-- | place spaces before each line in string.
indentLines :: Int -> String -> String
indentLines n s = unlines $ map (replicate n ' ' ++)$ lines s
-- | trim blank lines at beginning and end of string
trimBlankLines :: String -> String
trimBlankLines cs = unlines $ reverse (tb $ reverse (tb (lines cs))) where
tb = dropWhile (all isSpace)
buildTableRL :: [(String,String)] -> [String]
buildTableRL ps = map f ps where
f (x,"") = x
f (x,y) = replicate (bs - length x) ' ' ++ x ++ replicate 4 ' ' ++ y
bs = maximum (map (length . fst) [ p | p@(_,_:_) <- ps ])
buildTableLL :: [(String,String)] -> [String]
buildTableLL ps = map f ps where
f (x,y) = x ++ replicate (bs - length x) ' ' ++ replicate 4 ' ' ++ y
bs = maximum (map (length . fst) ps)
{-# INLINE foldl' #-}
-- | strict version of 'foldl'
foldl' :: (a -> b -> a) -> a -> [b] -> a
foldl' _ a [] = a
foldl' f a (x:xs) = (foldl' f $! f a x) xs
-- | randomly permute a list, using the standard random number generator.
randomPermuteIO :: [a] -> IO [a]
randomPermuteIO xs = newStdGen >>= \g -> return (randomPermute g xs)
-- | randomly permute a list given a RNG
randomPermute :: StdGen -> [a] -> [a]
randomPermute _ [] = []
randomPermute gen xs = (head tl) : randomPermute gen' (hd ++ tail tl)
where (idx, gen') = randomR (0,length xs - 1) gen
(hd, tl) = splitAt idx xs
-- | compute the power set of a list
powerSet :: [a] -> [[a]]
powerSet [] = [[]]
powerSet (x:xs) = xss /\/ map (x:) xss
where xss = powerSet xs
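-- Illustrative example (added): powerSet [1,2] == [[],[1],[2],[1,2]]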
-- | interleave two lists lazily, alternating elements from them. This can be used instead of concatenation to avoid space leaks in certain situations.
(/\/) :: [a] -> [a] -> [a]
[] /\/ ys = ys
(x:xs) /\/ ys = x : (ys /\/ xs)
readHexChar a | a >= '0' && a <= '9' = return $ ord a - ord '0'
readHexChar a | z >= 'a' && z <= 'f' = return $ 10 + ord z - ord 'a' where z = toLower a
readHexChar x = fail $ "not hex char: " ++ [x]
readHex :: Monad m => String -> m Int
readHex [] = fail "empty string"
readHex cs = mapM readHexChar cs >>= \cs' -> return (rh $ reverse cs') where
rh (c:cs) = c + 16 * (rh cs)
rh [] = 0
{-# SPECIALIZE overlaps :: (Int,Int) -> (Int,Int) -> Bool #-}
-- | determine if two closed intervals overlap at all.
overlaps :: Ord a => (a,a) -> (a,a) -> Bool
(a,_) `overlaps` (_,y) | y < a = False
(_,b) `overlaps` (x,_) | b < x = False
_ `overlaps` _ = True
-- | translate a number of seconds to a string representing the duration expressed.
showDuration :: Integral a => a -> String
showDuration x = st "d" dayI ++ st "h" hourI ++ st "m" minI ++ show secI ++ "s" where
(dayI, hourI) = divMod hourI' 24
(hourI', minI) = divMod minI' 60
(minI',secI) = divMod x 60
st _ 0 = ""
st c n = show n ++ c
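-- Illustrative example (added): showDuration 3661 == "1h1m1s"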
-- | behave like while(<>) in perl, go through the argument list, reading the
-- concatenation of each file name mentioned, or stdin if '-' is among them. If no
-- arguments are given, read stdin.
getArgContents = do
as <- System.getArgs
let f "-" = getContents
f fn = readFile fn
cs <- mapM f as
if null as then getContents else return $ concat cs
readM :: (Monad m, Read a) => String -> m a
readM cs = case [x | (x,t) <- reads cs, ("","") <- lex t] of
[x] -> return x
[] -> fail "readM: no parse"
_ -> fail "readM: ambiguous parse"
readsM :: (Monad m, Read a) => String -> m (a,String)
readsM cs = case readsPrec 0 cs of
[(x,s)] -> return (x,s)
_ -> fail "cannot readsM"
|
opqdonut/riot
|
Ginsu/GenUtil.hs
|
gpl-2.0
| 16,722 | 0 | 15 | 3,989 | 5,771 | 3,083 | 2,688 | 299 | 6 |
{-# LANGUAGE DeriveDataTypeable #-}
{- |
Module : ./OWL2/ProfilesAndSublogics.hs
Copyright : (c) Felix Gabriel Mance
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
OWL2 Profiles (EL, QL and RL) + OWL2 complexity analysis
References : <http://www.w3.org/TR/owl2-profiles/>
-}
module OWL2.ProfilesAndSublogics where
import OWL2.MS
import OWL2.Profiles
import OWL2.Sublogic
import OWL2.Sign
import OWL2.Morphism
import Data.Data
data ProfSub = ProfSub
{ profiles :: Profiles
, sublogic :: OWLSub
} deriving (Show, Eq, Ord, Typeable, Data)
allProfSubs :: [[ProfSub]]
allProfSubs = map (map (`ProfSub` slBottom)) allProfiles
++ map (map (ProfSub topProfile)) allSublogics
bottomS :: ProfSub
bottomS = ProfSub topProfile slBottom
topS :: ProfSub
topS = ProfSub bottomProfile slTop
maxS :: ProfSub -> ProfSub -> ProfSub
maxS ps1 ps2 = ProfSub (andProfileList [profiles ps1, profiles ps2])
(slMax (sublogic ps1) (sublogic ps2))
nameS :: ProfSub -> String
nameS ps = printProfile (profiles ps) ++ "-" ++ slName (sublogic ps)
psAxiom :: Axiom -> ProfSub
psAxiom ax = ProfSub (axiom ax) (slAxiom ax)
sSig :: Sign -> ProfSub
sSig s = bottomS {sublogic = slSig s}
sMorph :: OWLMorphism -> ProfSub
sMorph m = bottomS {sublogic = slMor m}
prSign :: ProfSub -> Sign -> Sign
prSign s = prSig (sublogic s)
prMorph :: ProfSub -> OWLMorphism -> OWLMorphism
prMorph s a = a
{ osource = prSign s $ osource a
, otarget = prSign s $ otarget a }
prOntDoc :: ProfSub -> OntologyDocument -> OntologyDocument
prOntDoc ps = prODoc (sublogic ps)
profilesAndSublogic :: OntologyDocument -> ProfSub
profilesAndSublogic odoc = ProfSub (ontologyProfiles odoc) (slODoc odoc)
|
gnn/Hets
|
OWL2/ProfilesAndSublogics.hs
|
gpl-2.0
| 1,792 | 0 | 10 | 324 | 516 | 272 | 244 | 40 | 1 |
module Grammar where
import DataTypesEtc
grammar :: Grammar
-- the potato grammar
grammar nt = case nt of
Program -> [[prog, FuncName, ProgBody]]
ProgBody -> [[semi, Rep0 [Line], stop, dot]]
Line -> [[Decl]
,[Assign]
,[FuncCall]
,[Incr]
,[When]
,[While]
,[Task]]
Decl -> [[suppose, Opt [global], Type, Idf, Alt [ofK, lengthK, Expr] [Opt [is, Expr]], dot]]
Assign -> [[Idf, is, Expr, dot]]
FuncCall -> [[FuncName, lPar, Rep0 [Expr, Opt [comma]], rPar, Opt [dot]]
,[FuncName, lPar, Rep0 [Expr, Opt [comma]], rPar]]
Incr -> [[inc, Idf, dot]]
When -> [[when, Expr, doK, Body, Opt [otherwiseK, doK, Body]]]
While -> [[while, Expr, doK, Body]]
Task -> [[task, FuncName, takes, Args, gives, Type, after, Body]]
Args -> [[Rep0[Arg]]]
Arg -> [[Type, Idf, Alt [comma] [andK]]]
Body -> [[semi, Rep0 [Line], Alt [stop, dot] [give, VIA, dot]]]
Expr -> [[VIA, Op, Expr]
,[lPar, Expr, rPar, Op, Expr]
,[lPar, Expr, rPar]
,[VIA]]
VIA -> [[Value]
,[FuncCall]
,[Idf]
,[Array]]
Op -> [[plus]
,[minus]
,[times]
,[DividedBy]
,[equals]
,[is]
,[NotEqual]
,[GreaterThan]
,[GreaterThanEq]
,[SmallerThan]
,[SmallerThanEq]
,[andK]
,[orK]]
NotEqual -> [[is, notK, equal, to]]
DividedBy -> [[divided, by]]
GreaterThan -> [[is, greater, than]]
GreaterThanEq -> [[is, greater, than, orK, equal, to]]
SmallerThan -> [[is, smaller, than]]
SmallerThanEq -> [[is, smaller, than, orK, equal, to]]
FuncName -> [[funcName]]
Type -> [[TypeBool]
,[TypeInt]
,[TypeChar]
,[TypeArray]
,[TypeNothing]]
Idf -> [[idf, Opt [lBracket, Expr, rBracket]]]
Value -> [[Boolean]
,[Integer]
,[Character]]
Array -> [[lBracket, Rep0 [ArrayVal], rBracket]]
ArrayVal -> [[VIA, Opt [comma]]]
TypeArray -> [[lBracket, Type, rBracket]]
Boolean -> [[Alt [TrueK] [FalseK]]]
TrueK -> [[trueK]]
FalseK -> [[falseK]]
TypeBool -> [[typeBool]]
Integer -> [[int]]
TypeInt -> [[typeInt]]
Character -> [[char]]
TypeChar -> [[typeChar]]
TypeNothing -> [[nothing]]
-- shorthand names can be handy, such as:
lPar = Symbol "("
rPar = Symbol ")"
lBracket = Symbol "["
rBracket = Symbol "]"
bool = SyntCat Boolean
trueK = SyntCat TrueK
falseK = SyntCat FalseK
int = SyntCat Integer
char = SyntCat Character
idf = SyntCat Idf
funcName = SyntCat FuncName
typeBool = Keyword "boolean"
typeInt = Keyword "integer"
typeChar = Keyword "character"
greater = Keyword "greater"
orK = Keyword "or"
than = Keyword "than"
equal = Keyword "equal"
smaller = Keyword "smaller"
equals = Keyword "equals"
inc = Keyword "increment"
plus = Keyword "plus"
minus = Keyword "minus"
times = Keyword "times"
divided = Keyword "divided"
by = Keyword "by"
suppose = Keyword "suppose"
after = Keyword "after"
is = Keyword "is"
task = Keyword "task"
global = Keyword "global"
takes = Keyword "takes"
comma = Keyword ","
andK = Keyword "and"
gives = Keyword "gives"
dot = Keyword "."
to = Keyword "to"
while = Keyword "while"
doK = Keyword "do"
comment = Keyword "btw"
when = Keyword "when"
otherwiseK = Keyword "otherwise"
nothing = Keyword "nothing"
give = Keyword "give"
stop = Keyword "stop"
semi = Keyword ":"
prog = Keyword "program"
ofK = Keyword "of"
lengthK = Keyword "length"
notK = Keyword "not"
|
Ertruby/PPFinalProject
|
src/Grammar.hs
|
gpl-2.0
| 4,773 | 0 | 13 | 2,149 | 1,448 | 840 | 608 | 123 | 38 |
module Blink where
import Control.Monad (forever)
import System.Hardware.Arduino
import System.Environment
pauseTime :: Int
pauseTime = 500
-- prepare the led
prepareLed :: Pin -> Arduino ()
prepareLed = flip setPinMode OUTPUT
-- make the led blink
blink :: Pin -> Arduino ()
blink l = do digitalWrite l True
delay pauseTime
digitalWrite l False
delay pauseTime
-- Blink the led connected to port 13 on the Arduino UNO board.
run :: String -> IO ()
run device = withArduino False device $ do
let led = digital 13
prepareLed led
forever $ blink led
-- *Blink> run "/dev/ttyACM0"
-- C-c C-chArduino: Caught Ctrl-C, quitting..
-- run from the cli
main :: IO ()
main =
do (devicePath:_) <- getArgs
run devicePath
|
ardumont/harduino-lab
|
src/Blink.hs
|
gpl-2.0
| 810 | 0 | 11 | 222 | 209 | 104 | 105 | 22 | 1 |
{-# LANGUAGE TemplateHaskell, DeriveDataTypeable #-}
{-# LANGUAGE NoMonomorphismRestriction, DeriveDataTypeable #-}
module TAC.Data where
import Autolib.Reader
import Autolib.ToDoc
import Autolib.FiniteMap
import Autolib.Util.Zufall
type Program = [ Statement ]
data Statement
= Constant Int Int
| Add Int Int Int
| Mul Int Int Int
deriving ( Eq, Ord )
$(derives [makeReader, makeToDoc] [''Statement])
some :: Int -> IO Program
some l = sequence $ replicate l $ do
action <- eins [ some_constant , some_operation ]
action
some_constant = do
i <- eins [ 0 .. 3 ]
c <- eins [ 0 .. 1 ]
return $ Constant i c
some_operation = do
i <- eins [ 0 .. 3 ]
j <- eins [ 0 .. 3 ]
k <- eins [ 0 .. 3 ]
op <- eins [ Add, Mul ]
return $ op i j k
change_program [] = return [ Constant 0 1 ]
change_program p = do
i <- randomRIO ( 0, length p - 1 )
let ( pre , this : post ) = splitAt i p
that <- change this
return $ pre ++ that : post
change s = case s of
Constant i c -> do
j <- randomRIO ( 0, i+1 )
d <- randomRIO ( 0, c+1 )
return $ Constant j d
Add i j k -> change_operation i j k
Mul i j k -> change_operation i j k
change_operation i j k = do
ii <- randomRIO ( max 0 $ i-1, i+1 )
jj <- randomRIO ( max 0 $ j-1, j+1 )
kk <- randomRIO ( max 0 $ k-1, k+1 )
op <- eins [ Mul, Add ]
return $ op ii jj kk
-- | costs on the Smallnums 1 model
cost :: Statement -> Int
cost s = case s of
Constant i c -> patch i + patch c + 1
Add i j k -> 4 + sum [ patch i, patch j, patch k ]
Mul i j k -> 4 + sum [ patch i, patch j, patch k ]
patch i = max 1 $ 2*i-1
-- | the value that is left in x0 finally
value :: Program -> Integer
value stmts =
let fm = foldl execute emptyFM stmts
in access fm 0
access fm i = case lookupFM fm i of
Just x -> x
Nothing -> 0
execute state action = case action of
Constant i c -> addToFM state i $ fromIntegral c
Add i j k -> operation state (+) i j k
Mul i j k -> operation state (*) i j k
operation state op i j k =
let x = access state j
y = access state k
in addToFM state i $ op x y
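-- Illustrative example (added): for the program
-- [ Constant 1 2, Constant 2 3, Mul 0 1 2 ]
-- execution stores 2 in register 1 and 3 in register 2, then writes
-- 2 * 3 = 6 into register 0, so 'value' returns 6.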
-- local variables:
-- mode: haskell
-- end:
|
Erdwolf/autotool-bonn
|
src/TAC/Data.hs
|
gpl-2.0
| 2,252 | 7 | 15 | 685 | 1,001 | 494 | 507 | 68 | 3 |
{-# LANGUAGE CPP #-}
{-
Copyright (C) 2009 John MacFarlane <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- Functions for translating between Page structures and raw
- text strings. The strings may begin with a metadata block,
- which looks like this (it is valid YAML):
-
- > ---
- > title: Custom Title
- > format: markdown+lhs
- > toc: yes
- > categories: foo bar baz
- > ...
-
- This would tell gitit to use "Custom Title" as the displayed
- page title (instead of the page name), to interpret the page
- text as markdown with literate haskell, to include a table of
- contents, and to include the page in the categories foo, bar,
- and baz.
-
- The metadata block may be omitted entirely, and any particular line
- may be omitted. The categories in the @categories@ field should be
- separated by spaces. Commas will be treated as spaces.
-
- Metadata value fields may be continued on the next line, as long as
- it is nonblank and starts with a space character.
-
- Unrecognized metadata fields are simply ignored.
-}
module Network.Gitit.Page ( stringToPage
, pageToString
, readCategories
)
where
import Network.Gitit.Types
import Network.Gitit.Util (trim, splitCategories, parsePageType)
import Text.ParserCombinators.Parsec
import Data.Char (toLower)
import Data.List (intercalate)
import Data.Maybe (fromMaybe)
import Data.ByteString.UTF8 (toString)
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as BC
import System.IO (withFile, Handle, IOMode(..))
import qualified Control.Exception as E
import System.IO.Error (isEOFError)
#if MIN_VERSION_base(4,5,0)
#else
import Codec.Binary.UTF8.String (encodeString)
#endif
parseMetadata :: String -> ([(String, String)], String)
parseMetadata raw =
case parse pMetadataBlock "" raw of
Left _ -> ([], raw)
Right (ls, rest) -> (ls, rest)
pMetadataBlock :: GenParser Char st ([(String, String)], String)
pMetadataBlock = try $ do
_ <- string "---"
_ <- pBlankline
ls <- manyTill pMetadataLine pMetaEnd
skipMany pBlankline
rest <- getInput
return (ls, rest)
pMetaEnd :: GenParser Char st Char
pMetaEnd = try $ do
string "..." <|> string "---"
pBlankline
pBlankline :: GenParser Char st Char
pBlankline = try $ many (oneOf " \t") >> newline
pMetadataLine :: GenParser Char st (String, String)
pMetadataLine = try $ do
first <- letter
rest <- many (letter <|> digit <|> oneOf "-_")
let ident = first:rest
skipMany (oneOf " \t")
_ <- char ':'
rawval <- many $ noneOf "\n\r"
<|> (try $ newline >> notFollowedBy pBlankline >>
skipMany1 (oneOf " \t") >> return ' ')
_ <- newline
return (ident, trim rawval)
-- | Read a string (the contents of a page file) and produce a Page
-- object, using defaults except when overridden by metadata.
stringToPage :: Config -> String -> String -> Page
stringToPage conf pagename raw =
let (ls, rest) = parseMetadata raw
page' = Page { pageName = pagename
, pageFormat = defaultPageType conf
, pageLHS = defaultLHS conf
, pageTOC = tableOfContents conf
, pageTitle = pagename
, pageCategories = []
, pageText = filter (/= '\r') rest
, pageMeta = ls }
in foldr adjustPage page' ls
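-- Illustrative example (added): given the raw text
-- "---\ntitle: My Page\ncategories: foo bar\n...\n\nHello"
-- parseMetadata yields ([("title","My Page"),("categories","foo bar")], "Hello"),
-- and stringToPage then layers those fields over the configured defaults.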
adjustPage :: (String, String) -> Page -> Page
adjustPage ("title", val) page' = page' { pageTitle = val }
adjustPage ("format", val) page' = page' { pageFormat = pt, pageLHS = lhs }
where (pt, lhs) = parsePageType val
adjustPage ("toc", val) page' = page' {
pageTOC = map toLower val `elem` ["yes","true"] }
adjustPage ("categories", val) page' =
page' { pageCategories = splitCategories val ++ pageCategories page' }
adjustPage (_, _) page' = page'
-- | Write a string (the contents of a page file) corresponding to
-- a Page object, using explicit metadata only when needed.
pageToString :: Config -> Page -> String
pageToString conf page' =
let pagename = pageName page'
pagetitle = pageTitle page'
pageformat = pageFormat page'
pagelhs = pageLHS page'
pagetoc = pageTOC page'
pagecats = pageCategories page'
metadata = filter
(\(k, _) -> not (k `elem`
["title", "format", "toc", "categories"]))
(pageMeta page')
metadata' = (if pagename /= pagetitle
then "title: " ++ pagetitle ++ "\n"
else "") ++
(if pageformat /= defaultPageType conf ||
pagelhs /= defaultLHS conf
then "format: " ++
map toLower (show pageformat) ++
if pagelhs then "+lhs\n" else "\n"
else "") ++
(if pagetoc /= tableOfContents conf
then "toc: " ++
(if pagetoc then "yes" else "no") ++ "\n"
else "") ++
(if not (null pagecats)
then "categories: " ++ intercalate ", " pagecats ++ "\n"
else "") ++
(unlines (map (\(k, v) -> k ++ ": " ++ v) metadata))
in (if null metadata' then "" else "---\n" ++ metadata' ++ "...\n\n")
++ pageText page'
-- | Read categories from metadata strictly.
readCategories :: FilePath -> IO [String]
readCategories f =
#if MIN_VERSION_base(4,5,0)
withFile f ReadMode $ \h ->
#else
withFile (encodeString f) ReadMode $ \h ->
#endif
E.catch (do fl <- B.hGetLine h
if dashline fl
then do -- get rest of metadata
rest <- hGetLinesTill h dotOrDashline
let (md,_) = parseMetadata $ unlines $ "---":rest
return $ splitCategories $ fromMaybe ""
$ lookup "categories" md
else return [])
(\e -> if isEOFError e then return [] else E.throwIO e)
dashline :: B.ByteString -> Bool
dashline x =
case BC.unpack x of
('-':'-':'-':xs) | all (==' ') xs -> True
_ -> False
dotOrDashline :: B.ByteString -> Bool
dotOrDashline x =
case BC.unpack x of
('-':'-':'-':xs) | all (==' ') xs -> True
('.':'.':'.':xs) | all (==' ') xs -> True
_ -> False
hGetLinesTill :: Handle -> (B.ByteString -> Bool) -> IO [String]
hGetLinesTill h end = do
next <- B.hGetLine h
if end next
then return [toString next]
else do
rest <- hGetLinesTill h end
return (toString next:rest)
|
bergmannf/gitit
|
src/Network/Gitit/Page.hs
|
gpl-2.0
| 7,427 | 0 | 19 | 2,199 | 1,694 | 901 | 793 | 131 | 8 |
{-# LANGUAGE ViewPatterns #-}
{- | The events found in the \"PART VOCALS\", \"HARM1\", \"HARM2\", and
\"HARM3\" tracks. -}
module Data.Rhythm.RockBand.Lex.Vocals where
import Data.Rhythm.RockBand.Common
import qualified Sound.MIDI.Message.Channel.Voice as V
import qualified Data.Rhythm.MIDI as MIDI
import Data.Rhythm.Event
import Data.Rhythm.Time
import Data.Rhythm.Parser
import qualified Numeric.NonNegative.Class as NN
import Data.Char (toLower)
import qualified Sound.MIDI.File.Event as E
import qualified Sound.MIDI.File.Event.Meta as M
data Point
= LyricShift
| Mood Mood
| Lyric String
-- | A playable percussion note.
| Percussion
-- | A nonplayable percussion note, which just triggers the sound sample.
| PercussionSound
| PercussionAnimation PercussionType Bool
deriving (Eq, Ord, Show, Read)
data Length
-- | General phrase marker (RB3) or Player 1 phrases (pre-RB3).
= Phrase
-- | Pre-RB3, used for 2nd player phrases in Tug of War.
| Phrase2
| Overdrive
| RangeShift
-- | Pitches from 36 to 84 are valid.
| Note V.Pitch
deriving (Eq, Ord, Show)
instance Long Length where
match (Note _) (Note _) = True
match x y = x == y
type T = Event Length Point
data PercussionType
= Tambourine
| Cowbell
| Clap
deriving (Eq, Ord, Show, Read, Enum, Bounded)
parse :: (NN.C a) => Parser (MIDI.T a) (T a)
parse = get >>= \x -> case x of
Length len n@(MIDI.Note _ p _) -> case V.fromPitch p of
0 -> return $ Length len RangeShift
1 -> return $ Point LyricShift
i | 36 <= i && i <= 84 -> return $ Length len $ Note p
96 -> return $ Point Percussion
97 -> return $ Point PercussionSound
105 -> return $ Length len Phrase
106 -> return $ Length len Phrase2
116 -> return $ Length len Overdrive
_ -> unrecognized n
Point (E.MetaEvent (M.Lyric str)) -> return $ Point $ Lyric str
Point (E.MetaEvent (M.TextEvent str)) -> case str of
(readPercAnim -> Just evt) -> return $ Point evt
(readMood -> Just m ) -> return $ Point $ Mood m
_ -> warn w >> return (Point $ Lyric str) where
w = "Unrecognized text treated as lyric: " ++ show str
Point p -> unrecognized p
unparse :: T Beats -> MIDI.T Beats
unparse (Point p) = case p of
LyricShift -> blip $ V.toPitch 1
Mood m -> Point . E.MetaEvent . M.TextEvent $ showMood m
Lyric str -> Point . E.MetaEvent $ M.Lyric str
Percussion -> blip $ V.toPitch 96
PercussionSound -> blip $ V.toPitch 97
PercussionAnimation t b -> Point . E.MetaEvent . M.TextEvent $ showPercAnim t b
unparse (Length len l) = Length len $ standardNote $ case l of
Overdrive -> V.toPitch 116
Phrase -> V.toPitch 105
Phrase2 -> V.toPitch 106
RangeShift -> V.toPitch 0
Note p -> p
readPercAnim :: String -> Maybe Point
readPercAnim str = case str of
"[tambourine_start]" -> f Tambourine True
"[tambourine_end]" -> f Tambourine False
"[cowbell_start]" -> f Cowbell True
"[cowbell_end]" -> f Cowbell False
"[clap_start]" -> f Clap True
"[clap_end]" -> f Clap False
_ -> Nothing
where f typ b = Just $ PercussionAnimation typ b
showPercAnim :: PercussionType -> Bool -> String
showPercAnim typ b =
"[" ++ map toLower (show typ) ++ if b then "_start]" else "_end]"
|
mtolly/rhythm
|
src/Data/Rhythm/RockBand/Lex/Vocals.hs
|
gpl-3.0
| 3,237 | 0 | 18 | 697 | 1,099 | 565 | 534 | 82 | 14 |
{-# LANGUAGE ExistentialQuantification #-}
module Scheme
( symbol
,readExpr
,eval
,extractValue
,trapError
) where
import Text.ParserCombinators.Parsec hiding (spaces)
import System.Environment
import Control.Monad
import Control.Monad.Except
import Numeric
-- Data Types --{{{--
data LispError = NumArgs Integer [LispVal]
| TypeMismatch String LispVal
| Parser ParseError
| BadSpecialForm String LispVal
| NotFunction String String
| UnboundVar String String
| Default String
data LispVal = Atom String
| List [LispVal]
| DottedList [LispVal] LispVal
| Number Integer
| String String
| Bool Bool
-- Unpacker Generic type allows all unpackers in a list for mapping
data Unpacker = forall a. Eq a => AnyUnpacker (LispVal -> ThrowsError a)
-- --}}}--
-- Parsers --{{{--
symbol :: Parser Char
symbol = oneOf "!$%&|*+-/:<=>?@^_~"
readExpr :: String -> ThrowsError LispVal
readExpr input = case parse parseExpr "lisp" input of
Left err -> throwError $ Parser err
Right val -> return val
spaces :: Parser ()
spaces = skipMany1 space
escapeChars :: Parser Char
escapeChars = do char '\\'
x <- oneOf "\\\"nrt"
return $ case x of
'\\' -> x
'"' -> x
'n' -> '\n'
'r' -> '\r'
't' -> '\t'
parseBool :: Parser LispVal
parseBool = do
char '#'
(char 't' >> return (Bool True)) <|> (char 'f' >> return (Bool False))
parseAtom :: Parser LispVal
parseAtom = do
first <- letter <|> symbol
rest <- many (letter <|> digit <|> symbol)
let atom = first:rest
return $ Atom atom
parseString :: Parser LispVal
parseString = do
char '"'
x <- many $ escapeChars <|> noneOf "\""
char '"'
return $ String x
parseNumber :: Parser LispVal
parseNumber = parseDecimal1 <|> parseDecimal2 <|> parseHex <|> parseOct <|> parseBin
parseDecimal1 :: Parser LispVal
parseDecimal1 = many1 digit >>= (return . Number . read)
parseDecimal2 :: Parser LispVal
parseDecimal2 = do try $ string "#d"
x <- many1 digit
(return . Number . read) x
parseHex :: Parser LispVal
parseHex = do try $ string "#x"
              x <- many1 hexDigit
              return $ Number (hex2dig x)
parseOct :: Parser LispVal
parseOct = do try $ string "#o"
x <- many1 octDigit
return $ Number (oct2dig x)
parseBin :: Parser LispVal
parseBin = do try $ string "#b"
x <- many1 (oneOf "10")
return $ Number (bin2dig x)
parseExpr :: Parser LispVal
parseExpr = parseAtom
<|> parseString
<|> parseNumber
<|> parseBool
<|> parseQuoted
<|> do char '('
x <- try parseList <|> parseDottedList
char ')'
return x
parseList :: Parser LispVal
parseList = liftM List $ sepBy parseExpr spaces
parseDottedList :: Parser LispVal
parseDottedList = do
head <- endBy parseExpr spaces
tail <- char '.' >> spaces >> parseExpr
return $ DottedList head tail
parseQuoted :: Parser LispVal
parseQuoted = do
char '\''
x <- parseExpr
return $ List [Atom "quote", x]
-- --}}}--
-- Helper Functions --{{{--
oct2dig x = fst $ readOct x !! 0
hex2dig x = fst $ readHex x !! 0
bin2dig = bin2dig' 0
bin2dig' digint "" = digint
bin2dig' digint (x:xs) =
let old = 2 * digint + (if x == '0' then 0 else 1) in
bin2dig' old xs
unwordList :: [LispVal] -> String
unwordList = unwords . map showVal
showVal :: LispVal -> String
showVal (String contents) = "\"" ++ contents ++ "\""
showVal (Atom name) = name
showVal (Number contents) = show contents
showVal (Bool True) = "#t"
showVal (Bool False) = "#f"
showVal (List contents) = "(" ++ unwordList contents ++ ")"
showVal (DottedList head tail) = "(" ++ unwordList head ++ " . " ++ showVal tail ++ ")"
showError :: LispError -> String
showError (UnboundVar message varname) = message ++ ": " ++ varname
showError (BadSpecialForm message form) = message ++ ": " ++ show form
showError (NotFunction message func) = message ++ ": " ++ show func
showError (NumArgs expected found) = "Expected " ++ show expected
++ " args; Found Values: " ++ unwordList found
showError (TypeMismatch expected found) = "Invalid Type: expected " ++ expected
++ "; Found " ++ show found
showError (Parser parseErr) = "Parse Error at " ++ show parseErr
instance Show LispVal where show = showVal
instance Show LispError where show = showError
type ThrowsError = Either LispError
trapError action = catchError action (return . show)
extractValue :: ThrowsError a -> a
extractValue (Right val) = val
-- --}}}--
-- Lisp Functions --{{{--
eval :: LispVal -> ThrowsError LispVal
eval val@(String _) = return val
eval val@(Number _) = return val
eval val@(Bool _) = return val
eval (List [Atom "quote", val]) = return val
eval (List [Atom "if", pred, conseq, alt]) =
do result <- eval pred
case result of
Bool False -> eval alt
otherwise -> eval conseq
eval (List (Atom func : args)) = mapM eval args >>= apply func
eval badForm = throwError $ BadSpecialForm "Unrecognized special form" badForm
apply :: String -> [LispVal] -> ThrowsError LispVal
apply func args = maybe (throwError $ NotFunction "Unrecognized primitive function args" func)
($ args)
(lookup func primitives)
car :: [LispVal] -> ThrowsError LispVal
car [List (x : xs)] = return x
car [DottedList (x : xs) _] = return x
car [badArg] = throwError $ TypeMismatch "pair" badArg
car badArgList = throwError $ NumArgs 1 badArgList
cdr :: [LispVal] -> ThrowsError LispVal
cdr [List (x : xs)] = return $ List xs
cdr [DottedList [_] x] = return x
cdr [DottedList (_ : xs) x] = return $ DottedList xs x
cdr [badArg] = throwError $ TypeMismatch "pair" badArg
cdr badArgList = throwError $ NumArgs 1 badArgList
cons :: [LispVal] -> ThrowsError LispVal
cons [x1, List[]] = return $ List [x1]
cons [x, List xs] = return $ List $ x : xs
cons [x, DottedList xs xlast] = return $ DottedList (x : xs) xlast
cons [x1, x2] = return $ DottedList [x1] x2
cons badArgList = throwError $ NumArgs 2 badArgList
eqv :: [LispVal] -> ThrowsError LispVal
eqv [(Bool arg1), (Bool arg2)] = return $ Bool $ arg1 == arg2
eqv [(Number arg1), (Number arg2)] = return $ Bool $ arg1 == arg2
eqv [(String arg1), (String arg2)] = return $ Bool $ arg1 == arg2
eqv [(Atom arg1), (Atom arg2)] = return $ Bool $ arg1 == arg2
eqv [(DottedList xs x), (DottedList ys y)] = eqv [List $ xs ++ [x], List$ ys ++ [y]]
eqv [(List arg1), (List arg2)] = return $ Bool $ (length arg1 == length arg2) &&
(all eqvPair $ zip arg1 arg2)
where eqvPair (x1, x2) = case eqv [x1, x2] of
Left err -> False
Right (Bool val) -> val
eqv [_, _] = return $ Bool False
eqv badArgList = throwError $ NumArgs 2 badArgList
equal :: [LispVal] -> ThrowsError LispVal
equal [(List arg1), (List arg2)] = return $ Bool $ (length arg1 == length arg2) &&
(all equalPair $ zip arg1 arg2)
where equalPair (x1, x2) = case equal [x1, x2] of
Left err -> False
Right (Bool val) -> val
equal [arg1, arg2] = do
primitiveEquals <- liftM or $ mapM (unpackEquals arg1 arg2)
[AnyUnpacker unpackNum, AnyUnpacker unpackStr, AnyUnpacker unpackBool]
eqvEquals <- eqv [arg1, arg2]
return $ Bool $ (primitiveEquals || let (Bool x) = eqvEquals in x)
equal badArgList = throwError $ NumArgs 2 badArgList
-- --}}}--
-- Primitives and Binops --{{{--
primitives :: [(String, [LispVal] -> ThrowsError LispVal)]
primitives = [("+", numericBinop (+)),
("-", numericBinop (-)),
("*", numericBinop (*)),
("/", numericBinop div),
("mod", numericBinop mod),
("quotient", numericBinop quot),
("remainder", numericBinop rem),
("=", numBoolBinop (==)),
("<", numBoolBinop (<)),
(">", numBoolBinop (>)),
("/=", numBoolBinop (/=)),
(">=", numBoolBinop (>=)),
("<=", numBoolBinop (<=)),
("&&", boolBoolBinop (&&)),
("||", boolBoolBinop (||)),
("string=?", strBoolBinop (==)),
("string<?", strBoolBinop (<)),
("string>?", strBoolBinop (>)),
("string<=?", strBoolBinop (<=)),
("string>=?", strBoolBinop (>=)),
("car", car),
("cdr", cdr),
("cons", cons),
("eq?", eqv),
("eqv?", eqv),
("equal?", equal)]
boolBinop :: (LispVal -> ThrowsError a) -> (a -> a -> Bool) -> [LispVal] -> ThrowsError LispVal
boolBinop unpacker op args = if length args /= 2
then throwError $ NumArgs 2 args
else do left <- unpacker $ args !! 0
right <- unpacker $ args !! 1
return $ Bool $ left `op` right
numBoolBinop = boolBinop unpackNum
strBoolBinop = boolBinop unpackStr
boolBoolBinop = boolBinop unpackBool
numericBinop :: (Integer -> Integer -> Integer) -> [LispVal] -> ThrowsError LispVal
numericBinop op [] = throwError $ NumArgs 2 []
numericBinop op singleVal@[_] = throwError $ NumArgs 2 singleVal
numericBinop op params = mapM unpackNum params >>= return . Number . foldl1 op
-- --}}}--
-- Unpackers --{{{--
unpackEquals :: LispVal -> LispVal -> Unpacker -> ThrowsError Bool
unpackEquals arg1 arg2 (AnyUnpacker unpacker) =
do unpacked1 <- unpacker arg1
unpacked2 <- unpacker arg2
return $ unpacked1 == unpacked2
`catchError` (const $ return False)
unpackNum :: LispVal -> ThrowsError Integer
unpackNum (Number n) = return n
unpackNum (String n) = let parsed = reads n in
if null parsed
then throwError $ TypeMismatch "number" $ String n
else return $ fst $ parsed !! 0
unpackNum (List [n]) = unpackNum n
unpackNum notNum = throwError $ TypeMismatch "number" notNum
unpackStr :: LispVal -> ThrowsError String
unpackStr (String s) = return s
unpackStr (Number s) = return $ show s
unpackStr (Bool s) = return $ show s
unpackStr notString = throwError $ TypeMismatch "string" notString
unpackBool :: LispVal -> ThrowsError Bool
unpackBool (Bool b) = return b
unpackBool notBool = throwError $ TypeMismatch "boolean" notBool
-- --}}}--
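-- A hedged end-to-end sketch (not part of the original module): parse a
-- string with 'readExpr', evaluate it, and render either the value or the
-- error message.  For example, evalString "(+ 1 2 3)" yields "6".
evalString :: String -> String
evalString input = extractValue $ trapError (fmap show (readExpr input >>= eval))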
|
jdcannon/hascheme
|
src/Scheme.hs
|
gpl-3.0
| 11,590 | 0 | 15 | 3,893 | 3,811 | 1,947 | 1,864 | 253 | 5 |
module Main (main) where
import DailyProgrammer
|
d-strickland/dailyprogrammer
|
haskell/main/Main.hs
|
gpl-3.0
| 48 | 0 | 4 | 6 | 12 | 8 | 4 | 2 | 0 |
{-# LANGUAGE DeriveDataTypeable, MultiParamTypeClasses, FlexibleInstances, FlexibleContexts, UndecidableInstances, TemplateHaskell, PatternGuards #-}
module Action (Action (..), resolveAction) where
import BasicTypes
import CommonTypes
import Object
import Data.Data
import Game
import Data.Accessor
import Control.Monad (liftM, when)
import Util (whenM)
import qualified Data.Map as Map
data Action =
Idle
| WalkTo Coord
| Attack { attackTarget :: ObjRef, attackWeapon :: ObjRef }
| Quit
deriving (Eq, Show)
--resolveAction :: SpecificObject -> Initiative -> Action -> PureGame (Maybe String)
resolveAction o i a =
gsGlobal references_ >>= \refs ->
if not $ Map.member (ref o) refs
then return Nothing
else case a of
Idle -> return Nothing
WalkTo c -> do pos <- mapPosition (ref o)
tryWalkTo o (fst pos, c)
Attack target weapon ->
do wo <- dereferObj weapon
case wo of
RangedWeapon { rangedWeaponPrototype_ = prot } -> do
mGlobal (references ^: Map.adjust (health ^: subtract (rangedWeaponDamage prot)) target)
mpr "Bang!"
whenM ((<0) `liftM` health_ `liftM` dereferObj target)
$ removeActor target
return Nothing
|
arirahikkala/straylight-divergence
|
src/Action.hs
|
gpl-3.0
| 1,393 | 0 | 26 | 437 | 345 | 183 | 162 | 34 | 4 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.IAM.CreateRole
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a new role for your AWS account. For more information about
-- roles, go to
-- <http://docs.aws.amazon.com/IAM/latest/UserGuide/WorkingWithRoles.html Working with Roles>.
-- For information about limitations on role names and the number of roles
-- you can create, go to
-- <http://docs.aws.amazon.com/IAM/latest/UserGuide/LimitationsOnEntities.html Limitations on IAM Entities>
-- in the /IAM User Guide/.
--
-- The policy in the following example grants permission to an EC2 instance
-- to assume the role.
--
-- /See:/ <http://docs.aws.amazon.com/IAM/latest/APIReference/API_CreateRole.html AWS API Reference> for CreateRole.
module Network.AWS.IAM.CreateRole
(
-- * Creating a Request
createRole
, CreateRole
-- * Request Lenses
, crPath
, crRoleName
, crAssumeRolePolicyDocument
-- * Destructuring the Response
, createRoleResponse
, CreateRoleResponse
-- * Response Lenses
, crrsResponseStatus
, crrsRole
) where
import Network.AWS.IAM.Types
import Network.AWS.IAM.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'createRole' smart constructor.
data CreateRole = CreateRole'
{ _crPath :: !(Maybe Text)
, _crRoleName :: !Text
, _crAssumeRolePolicyDocument :: !Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'CreateRole' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'crPath'
--
-- * 'crRoleName'
--
-- * 'crAssumeRolePolicyDocument'
createRole
:: Text -- ^ 'crRoleName'
-> Text -- ^ 'crAssumeRolePolicyDocument'
-> CreateRole
createRole pRoleName_ pAssumeRolePolicyDocument_ =
CreateRole'
{ _crPath = Nothing
, _crRoleName = pRoleName_
, _crAssumeRolePolicyDocument = pAssumeRolePolicyDocument_
}
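-- A hedged usage sketch (illustrative only; the document below is a
-- placeholder, not a complete trust policy):
--
-- > createRole "example-role" "{\"Version\": \"2012-10-17\", ...}"
--
-- 'crPath' is optional and can still be set afterwards through its lens.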
-- | The path to the role. For more information about paths, see
-- <http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html IAM Identifiers>
-- in the /Using IAM/ guide.
--
-- This parameter is optional. If it is not included, it defaults to a
-- slash (\/).
crPath :: Lens' CreateRole (Maybe Text)
crPath = lens _crPath (\ s a -> s{_crPath = a});
-- | The name of the role to create.
crRoleName :: Lens' CreateRole Text
crRoleName = lens _crRoleName (\ s a -> s{_crRoleName = a});
-- | The policy that grants an entity permission to assume the role.
crAssumeRolePolicyDocument :: Lens' CreateRole Text
crAssumeRolePolicyDocument = lens _crAssumeRolePolicyDocument (\ s a -> s{_crAssumeRolePolicyDocument = a});
instance AWSRequest CreateRole where
type Rs CreateRole = CreateRoleResponse
request = postQuery iAM
response
= receiveXMLWrapper "CreateRoleResult"
(\ s h x ->
CreateRoleResponse' <$>
(pure (fromEnum s)) <*> (x .@ "Role"))
instance ToHeaders CreateRole where
toHeaders = const mempty
instance ToPath CreateRole where
toPath = const "/"
instance ToQuery CreateRole where
toQuery CreateRole'{..}
= mconcat
["Action" =: ("CreateRole" :: ByteString),
"Version" =: ("2010-05-08" :: ByteString),
"Path" =: _crPath, "RoleName" =: _crRoleName,
"AssumeRolePolicyDocument" =:
_crAssumeRolePolicyDocument]
-- | Contains the response to a successful CreateRole request.
--
-- /See:/ 'createRoleResponse' smart constructor.
data CreateRoleResponse = CreateRoleResponse'
{ _crrsResponseStatus :: !Int
, _crrsRole :: !Role
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'CreateRoleResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'crrsResponseStatus'
--
-- * 'crrsRole'
createRoleResponse
:: Int -- ^ 'crrsResponseStatus'
-> Role -- ^ 'crrsRole'
-> CreateRoleResponse
createRoleResponse pResponseStatus_ pRole_ =
CreateRoleResponse'
{ _crrsResponseStatus = pResponseStatus_
, _crrsRole = pRole_
}
-- | The response status code.
crrsResponseStatus :: Lens' CreateRoleResponse Int
crrsResponseStatus = lens _crrsResponseStatus (\ s a -> s{_crrsResponseStatus = a});
-- | Information about the role.
crrsRole :: Lens' CreateRoleResponse Role
crrsRole = lens _crrsRole (\ s a -> s{_crrsRole = a});
|
olorin/amazonka
|
amazonka-iam/gen/Network/AWS/IAM/CreateRole.hs
|
mpl-2.0
| 5,237 | 0 | 14 | 1,125 | 709 | 430 | 279 | 90 | 1 |
module Handler.MarkdownTutorial where
import Import
getMarkdownTutorialR :: Handler Html
getMarkdownTutorialR = defaultLayout $ do
setTitle "Markdown Tutorial | Snowdrift.coop"
$(widgetFile "markdown")
|
Happy0/snowdrift
|
Handler/MarkdownTutorial.hs
|
agpl-3.0
| 214 | 0 | 10 | 33 | 42 | 21 | 21 | 6 | 1 |
-- | The Commands module contains a representation of a single Hexwax expandIO-USB command as documented in its datasheet, which you can find here: <http://www.firmwarefactory.com/Docs/expandIO-USB%20HW148.pdf>
module Commands
(
HWCmd, HWMsg, CmdId, Payload,
hwCmd,
cSETSERIAL,
cGETFWID,
cINTERRUPT,
cGETANALOG,
cGETREG,
cSETREG,
cGETBIT,
cSETBIT,
cGETPORT,
cSETPORT,
cGETPORTBIT,
cSETPORTBIT,
cEXEI2C,
cWAIT,
cSCANMATRIX,
cCAMULTIPLEX,
cMPXDATA,
cSTREAM,
cCCMULTIPLEX,
cEXESPI,
cEXEUNIO,
cERROR,
cNOP
)
where
import Data.ByteString as B hiding (putStrLn, find)
import Data.Word (Word16, Word8)
import Data.List (find)
import Prelude as P
import Text.Printf as T (printf)
-- | This holds a single Hexwax command as documented in the
-- datasheet.
type CmdId = Word8
type Payload = [Word8]
data HWCmd = HWCmd CmdId Payload
-- | This type is just a list of commands; used to build a single
-- pipelined request for better efficiency or for a specific reason
-- e.g. localised delay routines. The FS (full-speed) device can
-- accept up to 16 commands in a single request.
type HWMsg = [HWCmd]
cmdLenMax :: Int
cmdLenMax = 16
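-- A hedged illustration of a pipelined request (see the 'HWMsg' comment
-- above); the payload bytes are placeholder values, not taken from the
-- datasheet.
samplePipeline :: HWMsg
samplePipeline = [ hwCmd cGETFWID [0x00, 0x00, 0x00]
                 , hwCmd cGETPORT [0x01, 0x00, 0x00]
                 ]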
-- | The show implementation naively assumes that the payload length
-- is correct for the type of command. It attempts to also render a
-- meaningful display for the 'cGETFWID' command by interpreting the
-- payload as indicated in the datasheet with respect to revision
-- number and device identification.
instance Show HWCmd where
show (HWCmd id bytes)
| id == cGETFWID =
printf "FirmwareId: %s rev %04d"
(device $ bytes!!0) (revision bytes)
    | otherwise =
      printf "%s %s" (asCmdLabel id) (show bytes)
where
device 0x14 = "14K50"
device 0x25 = "18F2455"
device x = "dev? " ++ (show x)
revision b = ((b!!1) * 8) + (b!!2)
hwCmd :: Word8 -> [Word8] -> HWCmd
hwCmd id bytes
| P.length bytes > 3 = error "Too much data"
| otherwise =
case matchId id of
Nothing -> error $ "Unknown command id " ++ (show id)
Just (v,_) -> HWCmd id $ [v] ++ bytes
asCmdId :: String -> Word8
asCmdId name = case matchName name of
Nothing -> error "Invalid command code"
Just (v,_) -> v
asCmdLabel :: Word8 -> String
asCmdLabel cmd = case matchId cmd of
Nothing -> "*???*"
Just (_,label) -> label
matchName name = find (\(_,s) -> s == name) cmdSet
matchId id = find (\(c,_) -> c == id) cmdSet
-- | document these later, much later!
cSETSERIAL = 0x93 ::Word8
cGETFWID = 0x94 ::Word8
cINTERRUPT = 0x95 ::Word8
cGETANALOG = 0x96 ::Word8
cGETREG = 0x98 ::Word8
cSETREG = 0x99 ::Word8
cGETBIT = 0x9A ::Word8
cSETBIT = 0x9B ::Word8
cGETPORT = 0x9C ::Word8
cSETPORT = 0x9D ::Word8
cGETPORTBIT = 0x9E ::Word8
cSETPORTBIT = 0x9F ::Word8
cEXEI2C = 0xA0 ::Word8
cWAIT = 0xA9 ::Word8
cSCANMATRIX = 0xAA ::Word8
cCAMULTIPLEX = 0xAB ::Word8
cMPXDATA = 0xAC ::Word8
cSTREAM = 0xAD ::Word8
cCCMULTIPLEX = 0xAE ::Word8
cEXESPI = 0xAF ::Word8
cEXEUNIO = 0xB0 ::Word8
cERROR = 0xFF ::Word8
cNOP = 0x00 ::Word8
cmdSet :: [(Word8, String)]
cmdSet =
[(cSETSERIAL , "SETSERIAL")
,(cGETFWID , "GETFWID")
,(cINTERRUPT , "INTERRUPT")
,(cGETANALOG , "GETANALOG")
,(cGETREG , "GETREG")
,(cSETREG , "SETREG")
,(cGETBIT , "GETBIT")
,(cSETBIT , "SETBIT")
,(cGETPORT , "GETPORT")
,(cSETPORT , "SETPORT")
,(cGETPORTBIT , "GETPORTBIT")
,(cSETPORTBIT , "SETPORTBIT")
,(cEXEI2C , "EXEI2C")
,(cWAIT , "WAIT")
,(cSCANMATRIX , "SCANMATRIX")
,(cCAMULTIPLEX , "CAMULTIPLEX")
,(cMPXDATA , "MPXDATA")
,(cSTREAM , "STREAM")
,(cCCMULTIPLEX , "CCMULTIPLEX")
,(cEXESPI , "EXESPI")
,(cEXEUNIO , "EXEUNIO")
,(cERROR , "ERROR")
,(cNOP , "NOP")
]
asPort :: Word8 -> String
asPort 0x01 = "PORTA"
asPort 0x02 = "PORTB"
asPort 0x03 = "PORTC"
asPort 0x04 = "PORTD"
asPort 0x05 = "PORTE"
asPort _ = "*ERROR*"
-- some sample command instances for console testing
fw1 = HWCmd cGETFWID [0x14, 0x00, 0x01]
fw2 = HWCmd cGETFWID [0x25, 0x00, 0x0a]
-- GETPORT PORTA=0x01 succ. byte2 = actual value on return
gp1 = HWCmd cGETPORT [0x01, 0x00, 0x55]
sp1 = HWCmd cSETPORT [0x01, 0x00, 0x55]
sp2 = HWCmd cSETPORT [0x02, 0x3e, 0x55]
|
emacstheviking/hexwax-usb
|
Commands.hs
|
lgpl-3.0
| 4,683 | 0 | 12 | 1,357 | 1,187 | 700 | 487 | 127 | 2 |
module Git.Repository (
-- The 'Repository' type
Repository,
-- Locating repositories
findRepository, gitDir, loadObject, commitParent, walkAncestors,
peelTo
) where
import Control.Exception
import System.IO
import System.FilePath
import System.Directory
import Data.ByteString.Internal
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
import Codec.Compression.Zlib
import Data.Attoparsec.ByteString hiding (take)
import Data.Word
import Git.Hash
import Git.Object
import Git.Object.Blob
import Git.Object.Commit (commitHash, commitParents, commitTree)
import Git.Object.Tag
import Git.Object.Tree
import Git.Parser
data Repository = Repository {
gitDir :: FilePath, workingDir:: FilePath
} deriving (Eq, Show)
thisRepo :: Repository
thisRepo = Repository ".git" ""
-- Starting at a path, find the repository root (directory with a .git dir).
findRepository :: FilePath -> IO (Maybe Repository)
findRepository path = do
let gitDir = path </> ".git"
exist <- doesDirectoryExist gitDir
if exist
then return $ Just $ Repository gitDir path
else if path == "/"
then return Nothing
else findRepository (takeDirectory path)
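-- A hedged usage sketch (not part of the original module): search upward
-- from the current working directory and print the git dir if one is found.
printRepoRoot :: IO ()
printRepoRoot = do
    cwd <- getCurrentDirectory
    repo <- findRepository cwd
    putStrLn $ maybe "not inside a git repository" gitDir repo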
objectStore :: Repository -> FilePath
objectStore repo = (gitDir repo) ++ "/objects"
objectPath :: Repository -> Hash -> FilePath
objectPath repo hash = (objectStore repo) ++ "/" ++ x ++ "/" ++ y where
x = take 2 $ show hash
y = drop 2 $ show hash
loadObject :: Repository -> Hash -> IO Object
loadObject repo hash = do
handle <- openFile (objectPath repo hash) ReadMode
hSetBinaryMode handle True
contents <- L.hGetContents handle
decompData <- return $ S.concat $ L.toChunks $ decompress contents
object <- return $ parseOnly objectParser decompData
case object of
Right a -> return a
otherwise -> error "couldn't parse object"
peelTo :: Type -> Object -> IO (Maybe Hash)
-- Commits can be peeled to itself or the tree.
peelTo TCommit (Commit o) = return $ Just $ commitHash o
peelTo TTree (Commit o) = return $ Just $ commitTree o
commitParent :: Int -> Object -> IO (Maybe Hash)
commitParent n (Commit c)
| n < 0 || n > (length $ commitParents c) = return Nothing
| otherwise = return $ Just $ commitParents c !! (n - 1)
commitParent _ _ = return Nothing
walkAncestors :: Repository -> Int -> Object -> IO (Maybe Hash)
walkAncestors repo n (Commit c)
| n == 0 = return $ Just $ commitHash c
| null $ commitParents c = return Nothing
| otherwise = loadObject repo (head $ commitParents c) >>= walkAncestors repo (n - 1)
walkAncestors _ _ _ = return Nothing
toCommit :: Object -> IO Object
toCommit obj@(Commit commit) = return obj
toCommit _ = error "Not a commit"
dumpObjectAtPath :: FilePath -> IO ()
dumpObjectAtPath path = do
putStrLn path
handle <- openFile ("test/" ++ path) ReadMode
hSetBinaryMode handle True
contents <- L.hGetContents handle
decompData <- return $ S.concat $ L.toChunks $ decompress contents
object <- return $ parseOnly objectParser decompData
case object of
Right a -> putStrLn (show a)
otherwise -> error "couldn't parse object"
test :: IO ()
test = do
dir <- getDirectoryContents "test"
mapM dumpObjectAtPath $ drop 2 dir
return ()
|
wereHamster/yag
|
Git/Repository.hs
|
unlicense
| 3,342 | 0 | 12 | 716 | 1,095 | 548 | 547 | 84 | 3 |
-----------------------------------------------------------------------------
--
-- Module : Parse.Pretty
-- Copyright : (c) DICOM Grid Inc. 2013
-- License : MIT
--
-- Maintainer : Phillip Freeman <[email protected]>
-- Stability : experimental
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
module Parse.Pretty (
renderDeclarationSourceFile
) where
import Parse.Types
import Text.PrettyPrint
renderDeclarationSourceFile :: [DeclarationElement] -> String
renderDeclarationSourceFile = render . declarationSourceFile
declarationSourceFile :: [DeclarationElement] -> Doc
declarationSourceFile = vcat . map declarationElement
exported :: Exported -> Doc
exported _ = text "export"
renderMaybe :: (a -> Doc) -> Maybe a -> Doc
renderMaybe f = maybe empty f
stringLiteral :: String -> Doc
stringLiteral = doubleQuotes . text
declarationElement :: DeclarationElement -> Doc
declarationElement (InterfaceDeclaration _ e i) =
renderMaybe exported e
<+> interface i
declarationElement (ImportDeclaration e name entityName) =
renderMaybe exported e
<+> text "import"
<+> text name
<+> char '='
<+> renderEntityName entityName
declarationElement (ExportDeclaration name) =
exported Exported
<+> text "="
<+> text name
declarationElement (ExternalImportDeclaration e name imp) =
renderMaybe exported e
<+> text "import"
<+> text name
<+> char '='
<+> text "require"
<+> stringLiteral imp
declarationElement (AmbientDeclaration _ e a) =
renderMaybe exported e
<+> text "declare"
<+> renderAmbientDeclaration a
renderAmbientDeclaration :: Ambient -> Doc
renderAmbientDeclaration (AmbientVariableDeclaration _ name ty) =
text "var"
<+> text name
<+> renderMaybe typeAnnotation ty
<+> semi
renderAmbientDeclaration (AmbientFunctionDeclaration _ name plrt) =
text "function"
<+> text name
<+> parameterListAndReturnType plrt
<+> semi
renderAmbientDeclaration (AmbientClassDeclaration _ name ps exts imps els) =
text "class"
<+> text name
<+> renderMaybe typeParameters ps
<+> renderMaybe extendsClause exts
<+> renderMaybe implementsClause imps
<+> braces (sepEndBy semi (renderAmbientClassBodyElement . snd ) els)
renderAmbientDeclaration (AmbientInterfaceDeclaration i) = interface i
renderAmbientDeclaration (AmbientEnumDeclaration _ name members) =
text "enum" <+> text name <+> braces (sepEndBy comma enumMember members)
where
enumMember (name, val) = text name <+> renderMaybe (\n -> char '=' <+> integer n) val
renderAmbientDeclaration (AmbientModuleDeclaration _ name ds) =
text "module"
<+> sepBy dot text name
<+> braces (vcat (map renderAmbientDeclaration ds))
renderAmbientDeclaration (AmbientExternalModuleDeclaration _ name es) =
text "module"
<+> stringLiteral name
<+> braces (vcat (map renderAmbientExternalModuleElement es))
renderAmbientExternalModuleElement :: AmbientExternalModuleElement -> Doc
renderAmbientExternalModuleElement (AmbientModuleElement a) = renderAmbientDeclaration a
renderAmbientExternalModuleElement (ExportAssignment name) =
text "export"
<+> char '='
<+> text name
<+> semi
renderAmbientExternalModuleElement (AmbientModuleExternalImportDeclaration e name imp) =
renderMaybe exported e
<+> text "import"
<+> text name
<+> char '='
<+> text "require"
<+> stringLiteral imp
renderAmbientClassBodyElement :: AmbientClassBodyElement -> Doc
renderAmbientClassBodyElement (AmbientConstructorDeclaration ps) =
text "constructor"
<+> parameterList ps
<+> semi
renderAmbientClassBodyElement (AmbientMemberDeclaration p s prop (Left ty)) =
renderMaybe publicOrPrivate p
<+> renderMaybe static s
<+> propertyName prop
<+> renderMaybe typeAnnotation ty
renderAmbientClassBodyElement (AmbientMemberDeclaration p s prop (Right ps)) =
renderMaybe publicOrPrivate p
<+> renderMaybe static s
<+> propertyName prop
<+> parameterListAndReturnType ps
renderAmbientClassBodyElement (AmbientIndexSignature i) = renderIndexSignature i
renderIndexSignature :: IndexSignature -> Doc
renderIndexSignature (IndexSignature s sn ty) =
text s
<+> colon
<+> stringOrNumber sn
<+> typeAnnotation ty
dot :: Doc
dot = char '.'
sepEndBy :: Doc -> (a -> Doc) -> [a] -> Doc
sepEndBy s f as = hsep $ map (\e -> f e <+> s) as
renderEntityName :: EntityName -> Doc
renderEntityName (EntityName Nothing e) = text e
renderEntityName (EntityName (Just (ModuleName es)) e) = hcat (punctuate dot (map text es)) <> text e
interface :: Interface -> Doc
interface (Interface _ name ps exts ty) =
text "interface"
<+> text name
<+> renderMaybe typeParameters ps
<+> renderMaybe extendsClause exts
<+> objectType ty
extendsClause :: [TypeRef] -> Doc
extendsClause rs = text "extends" <+> classOrInterfaceTypeList rs
implementsClause :: [TypeRef] -> Doc
implementsClause rs = text "implements" <+> classOrInterfaceTypeList rs
sepBy :: Doc -> (a -> Doc) -> [a] -> Doc
sepBy s f as = hsep $ punctuate s (map f as)
commaSep :: (a -> Doc) -> [a] -> Doc
commaSep = sepBy comma
classOrInterfaceTypeList :: [TypeRef] -> Doc
classOrInterfaceTypeList = commaSep typeRef
objectType :: TypeBody -> Doc
objectType = braces . typeBody
typeBody :: TypeBody -> Doc
typeBody (TypeBody ms) = hcat . map (\(_, m) -> typeMember m <+> semi) $ ms
typeMember :: TypeMember -> Doc
typeMember (MethodSignature name opt plrt) =
propertyName name
<+> renderMaybe optional opt
<+> parameterListAndReturnType plrt
typeMember (PropertySignature name opt ty) =
propertyName name
<+> renderMaybe optional opt
<+> renderMaybe typeAnnotation ty
typeMember (CallSignature plrt) = parameterListAndReturnType plrt
typeMember (ConstructSignature tyArgs pl ty) =
text "new"
<+> renderMaybe typeParameters tyArgs
<+> parens (parameterList pl)
<+> renderMaybe typeAnnotation ty
typeMember (TypeIndexSignature i) = renderIndexSignature i
propertyName :: String -> Doc
propertyName = text
typeAnnotation :: Type -> Doc
typeAnnotation t = colon <+> _type t
parameterListAndReturnType :: ParameterListAndReturnType -> Doc
parameterListAndReturnType (ParameterListAndReturnType ps pl ty) =
renderMaybe typeParameters ps
<+> parens (parameterList pl)
<+> renderMaybe typeAnnotation ty
parameterList :: [Parameter] -> Doc
parameterList = commaSep parameter
optional :: Optional -> Doc
optional _ = char '?'
parameter :: Parameter -> Doc
parameter (RequiredOrOptionalParameter pop name opt ty) =
renderMaybe publicOrPrivate pop
<+> text name
<+> renderMaybe optional opt
<+> renderMaybe typeAnnotation ty
parameter (RestParameter name ty) =
text "..."
<+> text name
<+> renderMaybe typeAnnotation ty
static :: Static -> Doc
static _ = text "static"
publicOrPrivate :: PublicOrPrivate -> Doc
publicOrPrivate Public = text "public"
publicOrPrivate Private = text "private"
stringOrNumber :: StringOrNumber -> Doc
stringOrNumber String = text "string"
stringOrNumber Number = text "number"
typeParameters :: [TypeParameter] -> Doc
typeParameters ps = char '<' <+> commaSep typeParameter ps <+> char '>'
typeParameter :: TypeParameter -> Doc
typeParameter (TypeParameter name ext) =
text name
<+> renderMaybe (\t -> text "extends" <+> _type t) ext
_type :: Type -> Doc
_type (ArrayType t) = _type t <+> text "[]"
_type (Predefined p) = predefinedType p
_type (TypeReference r) = typeRef r
_type (ObjectType o) = objectType o
_type (FunctionType ps pl ret) =
renderMaybe typeParameters ps
<+> parens (parameterList pl)
<+> text "=>"
<+> _type ret
_type (ConstructorType ps pl ret) =
text "new"
<+> renderMaybe typeParameters ps
<+> parens (parameterList pl)
<+> text "=>"
<+> _type ret
typeRef :: TypeRef -> Doc
typeRef (TypeRef n as) =
typeName n
<+> renderMaybe typeArguments as
predefinedType :: PredefinedType -> Doc
predefinedType AnyType = text "any"
predefinedType NumberType = text "number"
predefinedType BooleanType = text "boolean"
predefinedType StringType = text "string"
predefinedType VoidType = text "void"
typeName :: TypeName -> Doc
typeName (TypeName Nothing t) = text t
typeName (TypeName (Just (ModuleName ts)) t) = sepBy dot text ts <+> text t
typeArguments :: [Type] -> Doc
typeArguments ts = char '<' <+> commaSep _type ts <+> char '>'
|
uProxy/uproxy-idl-compiler
|
Parse/Pretty.hs
|
apache-2.0
| 8,312 | 0 | 12 | 1,351 | 2,522 | 1,229 | 1,293 | 217 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module CouchDB.DBChallenge where
import qualified Data.ByteString.Char8 as C
import Data.Conduit
import Data.Conduit.List as CL
import Data.Maybe (fromJust, listToMaybe)
import Database.CouchDB.Conduit
import Database.CouchDB.Conduit.Explicit
import Database.CouchDB.Conduit.View
import CouchDB.DBCommon
import qualified Model.Challenge as Challenge
import qualified Model.UUID as UUID
create :: Challenge.Challenge -> IO Challenge.Challenge
create challenge = do
uuid <- fmap UUID.toByteString' UUID.nextRandom
runCouch conn $ do
let rev = ""
revision <- couchPut dbName uuid rev [] challenge
return challenge { Challenge.uuid = UUID.fromByteStringSafe' uuid
, Challenge.revision = Just revision }
type ChallengeUUID = UUID.UUID
type ContractUUID = UUID.UUID
type TokenValue = C.ByteString
findChallenge :: ChallengeUUID -> ContractUUID -> IO (Maybe Challenge.Challenge)
findChallenge challengeUUID contractUUID = do
tokens <- runCouch conn $
couchView_ dbName "challenge" "listChallengesWithContractUUID"
[("key", Just $ encodeKeys $ fmap UUID.toByteString' [challengeUUID, contractUUID])] $
rowValue =$= toType =$ CL.consume
return $ listToMaybe tokens
setAnswered :: Challenge.Challenge -> IO Challenge.Challenge
setAnswered challenge = runCouch conn $ do
let uuid = UUID.toByteString' $ fromJust $ Challenge.uuid challenge
rev = fromJust $ Challenge.revision challenge
challenge' = challenge { Challenge.wasAnswered = True }
revision <- couchPut dbName uuid rev [] challenge'
return challenge' { Challenge.revision = Just revision }
|
alexandrelucchesi/pfec
|
server-common/src/CouchDB/DBChallenge.hs
|
apache-2.0
| 1,949 | 0 | 17 | 503 | 454 | 242 | 212 | 39 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Spark.Core.Internal.RowUtils(
-- jsonToCell,
checkCell,
rowArray,
rowCell,
cellFromProto,
cellWithTypeFromProto,
cellWithTypeToProto
) where
-- import Data.Aeson
import Data.Text(Text)
import Data.Maybe(catMaybes, listToMaybe)
import Formatting
import qualified Data.Vector as V
import Control.Monad.Except
import Spark.Core.Internal.TypesStructures
import Spark.Core.Internal.RowStructures
import Spark.Core.Internal.Utilities
import Spark.Core.Try
import Spark.Core.Internal.ProtoUtils
import qualified Proto.Karps.Proto.Row as P
type TryCell = Either Text Cell
instance FromProto P.CellWithType (Cell, DataType) where
fromProto cwt = tryEither $ cellWithTypeFromProto cwt
cellFromProto :: DataType -> P.Cell -> TryCell
cellFromProto (NullableType _) (P.Cell Nothing) = pure Empty
cellFromProto (StrictType sdt) (P.Cell Nothing) =
throwError $ sformat ("cellFromProto: nothing given on a strict type: "%sh%", got null") sdt
cellFromProto dt (P.Cell (Just ce)) = x where
sdt = case dt of
StrictType sdt' -> sdt'
NullableType sdt' -> sdt'
x = case (sdt, ce) of
(IntType, P.Cell'IntValue i) -> pure $ IntElement (fromIntegral i)
(DoubleType, P.Cell'DoubleValue d) -> pure $ DoubleElement d
(StringType, P.Cell'StringValue s) -> pure $ StringElement s
(BoolType, P.Cell'BoolValue b) -> pure $ BoolElement b
(ArrayType dt', P.Cell'ArrayValue (P.ArrayCell l)) ->
RowArray . V.fromList <$> sequence (cellFromProto dt' <$> l)
(Struct (StructType v), P.Cell'StructValue (P.Row l)) ->
if length l /= V.length v
then throwError $ sformat ("cellFromProto: struct: got "%sh%" values but structure has "%sh%" elements") (length l) (length v)
else RowElement . Row <$> sequence (f <$> l') where
f (StructField _ dt', v') = cellFromProto dt' v'
l' = V.zip v (V.fromList l)
_ -> throwError $ sformat ("cellFromProto: mismatch "%sh) (sdt, ce)
cellWithTypeFromProto :: P.CellWithType -> Either Text (Cell, DataType)
cellWithTypeFromProto (P.CellWithType (Just c) (Just pdt)) = do
dt <- case fromProto pdt of
Right x -> Right x
Left s -> Left (show' s) -- TODO: this is bad.
cell <- cellFromProto dt c
return (cell, dt)
cellWithTypeFromProto cwt =
throwError $ sformat ("cellWithTypeFromProto: missing data in "%sh) cwt
cellWithTypeToProto :: DataType -> Cell -> Either Text P.CellWithType
cellWithTypeToProto dt c = do
_ <- checkCell dt c
return $ P.CellWithType (Just (toProto c)) (Just (toProto dt))
{-| Given a datatype, ensures that the cell has the corresponding type.
-}
checkCell :: DataType -> Cell -> Either Text Cell
checkCell dt c = case _checkCell dt c of
Nothing -> pure c
Just txt -> throwError txt
{-| Convenience constructor for an array of cells.
-}
rowArray :: [Cell] -> Cell
rowArray = RowArray . V.fromList
rowCell :: [Cell] -> Cell
rowCell = RowElement . Row . V.fromList
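-- A hedged sketch (illustrative, not part of the original module): check a
-- two-element integer array cell against the matching strict array type.
exampleArrayCheck :: Either Text Cell
exampleArrayCheck =
  checkCell (StrictType (ArrayType (StrictType IntType)))
            (rowArray [IntElement 1, IntElement 2])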
-- Returns an error message if something wrong is found
_checkCell :: DataType -> Cell -> Maybe Text
_checkCell dt c = case (dt, c) of
(NullableType _, Empty) -> Nothing
(StrictType _, Empty) ->
pure $ sformat ("Expected a strict value of type "%sh%" but no value") dt
(StrictType sdt, x) -> _checkCell' sdt x
(NullableType sdt, x) -> _checkCell' sdt x
-- Returns an error message if something wrong is found
_checkCell' :: StrictDataType -> Cell -> Maybe Text
_checkCell' sdt c = case (sdt, c) of
(_, Empty) ->
pure $ sformat ("Expected a strict value of type "%sh%" but no value") sdt
(IntType, IntElement _) -> Nothing
(DoubleType, DoubleElement _) -> Nothing
(StringType, StringElement _) -> Nothing
(BoolType, BoolElement _) -> Nothing
(Struct s, RowElement (Row l)) -> _checkCell' (Struct s) (RowArray l)
(Struct (StructType fields), RowArray cells') ->
if V.length fields == V.length cells'
then
let types = V.toList $ structFieldType <$> fields
res = uncurry _checkCell <$> (types `zip` V.toList cells')
in listToMaybe (catMaybes res)
else
pure $ sformat ("Struct "%sh%" has "%sh%" fields, asked to be matched with "%sh%" cells") sdt (V.length fields) (V.length cells')
(ArrayType dt, RowArray cells') ->
let res = uncurry _checkCell <$> (repeat dt `zip` V.toList cells')
in listToMaybe (catMaybes res)
(_, _) ->
pure $ sformat ("Type "%sh%" is incompatible with cell content "%sh) sdt c
|
tjhunter/karps
|
haskell/src/Spark/Core/Internal/RowUtils.hs
|
apache-2.0
| 4,563 | 0 | 17 | 877 | 1,547 | 804 | 743 | 95 | 10 |
{-# LANGUAGE ExtendedDefaultRules #-}
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Monad
import Control.Monad.Trans.Class
import Control.Monad.Trans.Maybe
import Data.Maybe
import qualified Data.Text as T
import Filesystem.Path.CurrentOS hiding (FilePath, (</>))
import Options.Applicative hiding (command)
import qualified Options.Applicative as A
import Prelude hiding (FilePath)
import Shelly
default (T.Text)
type PackageName = T.Text
type ModuleName = T.Text
data OpenHaddock = ListHs FilePath PackageName
| OpenHs FilePath PackageName
| OpenModule FilePath ModuleName
deriving (Show, Eq)
openHaddock :: OpenHaddock -> Sh ()
openHaddock (ListHs cabal package) = do
echo $ "searching for haddocks for '" <> package <> "'"
void $ hcPkg cabal "list" [package]
openHaddock (OpenHs cabal package) = do
echo $ "opening haddocks for '" <> package <> "'"
maybe (return ()) open_ =<< findPackageIndex cabal package
openHaddock (OpenModule cabal modName) = do
echo $ "opening haddocks for the package containing '" <> modName <> "'"
maybe (return ()) open_ =<< findModuleIndex cabal modName
findPackageIndex :: FilePath -> PackageName -> Sh (Maybe FilePath)
findPackageIndex cabal package = runMaybeT $
whenExists =<< MaybeT ( parseHaddockHtml
<$> hcPkg cabal "field" [package, "haddock-html"])
findModuleIndex :: FilePath -> ModuleName -> Sh (Maybe FilePath)
findModuleIndex cabal modName =
maybe (return Nothing) (findPackageIndex cabal)
=<< listToMaybe
. filter (/= "(no packages)")
. map T.strip
. filter (T.isPrefixOf " ")
. T.lines
<$> hcPkg cabal "find-module" [modName]
open_ :: FilePath -> Sh ()
open_ = command_ "open" [] . pure . toTextIgnore
hcPkg :: FilePath -> T.Text -> [T.Text] -> Sh T.Text
hcPkg cabal c = command1 cabal [] "sandbox" . ("hc-pkg" :) . (c :)
parseHaddockHtml :: T.Text -> Maybe FilePath
parseHaddockHtml =
fmap ((</> "index.html") . fromText . T.strip . snd . T.breakOn " ")
. listToMaybe
. T.lines
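-- The hc-pkg output this expects looks roughly like (path is illustrative):
--
-- > haddock-html: /path/to/.cabal-sandbox/share/doc/pkg-1.0/html
--
-- i.e. everything after the first space on the first line, with
-- "index.html" appended.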
whenExists :: FilePath -> MaybeT Sh FilePath
whenExists fn = do
exists <- lift $ test_f fn
if exists
then return fn
else fail $ "File not found: " ++ encodeString fn
main :: IO ()
main = shelly . verbosely . openHaddock =<< execParser opt
opt' :: Parser OpenHaddock
opt' = subparser $ A.command "list" (info (helper <*> list)
$ briefDesc
<> progDesc "List haddocks for a package."
<> header "open-haddock list -- list haddocks for a package.")
<> A.command "open" (info (helper <*> open)
$ briefDesc
<> progDesc "Open haddocks for a package."
<> header "open-haddock open -- open haddocks for a package.")
<> A.command "module" (info (helper <*> mod_)
$ briefDesc
<> progDesc "Open haddocks for a module's package."
<> header "open-haddock module -- open haddocks for a\
\ module's package.")
where
nameArg = argument (T.pack <$> str)
(help "The name of the package to show haddocks for.")
cabalOpt = option (decodeString <$> str)
( short 'c' <> long "cabal" <> metavar "CABAL"
<> value "cabal"
<> help "The cabal executable name.\
\ Defaults to 'cabal'.")
list = ListHs <$> cabalOpt <*> nameArg
open = OpenHs <$> cabalOpt <*> nameArg
mod_ = OpenModule <$> cabalOpt <*> nameArg
opt :: ParserInfo OpenHaddock
opt = info (helper <*> opt')
( fullDesc
<> progDesc "Utilities for opening haddocks."
<> header "open-haddock -- utilities for opening local haddocks.")
|
erochest/open-haddock
|
Main.hs
|
apache-2.0
| 4,373 | 0 | 15 | 1,571 | 1,040 | 536 | 504 | 87 | 2 |
-- |
-- Module : Text.Megaparsec.Error.Builder
-- Copyright : © 2015–2017 Megaparsec contributors
-- License : FreeBSD
--
-- Maintainer : Mark Karpov <[email protected]>
-- Stability : experimental
-- Portability : portable
--
-- A set of helpers that should make construction of 'ParseError's more
-- concise. This is primarily useful in test suites and for debugging, you
-- most certainly don't need it for normal usage.
--
-- @since 6.0.0
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Text.Megaparsec.Error.Builder
( -- * Top-level helpers
err
, errFancy
-- * Error position
, posI
, posN
-- * Error components
, utok
, utoks
, ulabel
, ueof
, etok
, etoks
, elabel
, eeof
, fancy
-- * Data types
, ET
, EF )
where
import Data.Data (Data)
import Data.List.NonEmpty (NonEmpty (..))
import Data.Proxy
import Data.Semigroup
import Data.Set (Set)
import Data.Typeable (Typeable)
import GHC.Generics
import Text.Megaparsec.Error
import Text.Megaparsec.Pos
import Text.Megaparsec.Stream
import qualified Data.List.NonEmpty as NE
import qualified Data.Set as E
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative
#endif
----------------------------------------------------------------------------
-- Data types
-- | Auxiliary type for construction of trivial parse errors.
data ET t = ET (Maybe (ErrorItem t)) (Set (ErrorItem t))
deriving (Eq, Ord, Data, Typeable, Generic)
instance Ord t => Semigroup (ET t) where
ET us0 ps0 <> ET us1 ps1 = ET (n us0 us1) (E.union ps0 ps1)
where
n Nothing Nothing = Nothing
n (Just x) Nothing = Just x
n Nothing (Just y) = Just y
n (Just x) (Just y) = Just (max x y)
instance Ord t => Monoid (ET t) where
mempty = ET Nothing E.empty
mappend = (<>)
-- | Auxiliary type for construction of fancy parse errors.
data EF e = EF (Set (ErrorFancy e))
deriving (Eq, Ord, Data, Typeable, Generic)
instance Ord e => Semigroup (EF e) where
EF xs0 <> EF xs1 = EF (E.union xs0 xs1)
instance Ord e => Monoid (EF e) where
mempty = EF E.empty
mappend = (<>)
----------------------------------------------------------------------------
-- Top-level helpers
-- | Assemble a 'ParseError' from source position and @'ET' t@ value. To
-- create source position, two helpers are available: 'posI' and 'posN'.
-- @'ET' t@ is a monoid and can be built from primitives provided by this
-- module, see below.
err
:: NonEmpty SourcePos -- ^ 'ParseError' position
-> ET t -- ^ Error components
-> ParseError t e -- ^ Resulting 'ParseError'
err pos (ET us ps) = TrivialError pos us ps
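-- A hedged usage sketch (illustrative, aimed at test suites): since @'ET' t@
-- is a 'Monoid', components are combined with ('<>').  For a 'Char' token
-- type this builds a trivial error at the initial position reporting an
-- unexpected \'x\' where \'y\' or end of input was expected:
--
-- > err posI (utok 'x' <> etok 'y' <> eeof) :: ParseError Char ()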
-- | Much like 'err', but constructs a “fancy” 'ParseError'.
errFancy
:: NonEmpty SourcePos -- ^ 'ParseError' position
-> EF e -- ^ Error components
-> ParseError t e -- ^ Resulting 'ParseError'
errFancy pos (EF xs) = FancyError pos xs
----------------------------------------------------------------------------
-- Error position
-- | Initial source position with empty file name.
posI :: NonEmpty SourcePos
posI = initialPos "" :| []
-- | @'posN' n s@ returns source position achieved by applying 'advanceN'
-- method corresponding to the type of stream @s@.
posN :: forall s. Stream s
=> Int
-> s
-> NonEmpty SourcePos
posN n s =
case takeN_ n s of
Nothing -> posI
Just (ts, _) ->
advanceN (Proxy :: Proxy s) defaultTabWidth (initialPos "") ts :| []
----------------------------------------------------------------------------
-- Error components
-- | Construct an “unexpected token” error component.
utok :: Ord t => t -> ET t
utok = unexp . Tokens . nes
-- | Construct an “unexpected tokens” error component. Empty string produces
-- 'EndOfInput'.
utoks :: Ord t => [t] -> ET t
utoks = unexp . canonicalizeTokens
-- | Construct an “unexpected label” error component. Do not use with empty
-- strings (for empty strings it's bottom).
ulabel :: Ord t => String -> ET t
ulabel = unexp . Label . NE.fromList
-- | Construct an “unexpected end of input” error component.
ueof :: Ord t => ET t
ueof = unexp EndOfInput
-- | Construct an “expected token” error component.
etok :: Ord t => t -> ET t
etok = expe . Tokens . nes
-- | Construct an “expected tokens” error component. Empty string produces
-- 'EndOfInput'.
etoks :: Ord t => [t] -> ET t
etoks = expe . canonicalizeTokens
-- | Construct an “expected label” error component. Do not use with empty
-- strings.
elabel :: Ord t => String -> ET t
elabel = expe . Label . NE.fromList
-- | Construct an “expected end of input” error component.
eeof :: Ord t => ET t
eeof = expe EndOfInput
-- | Construct a custom error component.
fancy :: ErrorFancy e -> EF e
fancy = EF . E.singleton
----------------------------------------------------------------------------
-- Helpers
-- | Construct appropriate 'ErrorItem' representation for given token
-- stream. Empty string produces 'EndOfInput'.
canonicalizeTokens :: [t] -> ErrorItem t
canonicalizeTokens ts =
case NE.nonEmpty ts of
Nothing -> EndOfInput
Just xs -> Tokens xs
-- | Lift an unexpected item into 'ET'.
unexp :: ErrorItem t -> ET t
unexp u = ET (pure u) E.empty
-- | Lift an expected item into 'ET'.
expe :: ErrorItem t -> ET t
expe p = ET Nothing (E.singleton p)
-- | Make a singleton non-empty list from a value.
nes :: a -> NonEmpty a
nes x = x :| []
|
recursion-ninja/megaparsec
|
Text/Megaparsec/Error/Builder.hs
|
bsd-2-clause
| 5,555 | 0 | 11 | 1,151 | 1,232 | 667 | 565 | 102 | 2 |
{-# LANGUAGE LambdaCase #-}
-- |a way to print expressions
module Language.Pureli.Printer where
import qualified Data.Map as M
import Language.Pureli.AST
class Printer a where
printer :: a -> String
---------------
-- Instances
---------------
instance Printer Module where
printer = showModule
instance Printer ModuleDef where
printer = showModuleDef
instance Printer Atom where
printer = showAtom 0
instance Printer Expr where
printer = showExpr 0
instance Printer Closure where
printer = showClosure 0
instance Printer Fun where
printer = showFun 0
instance Printer a => Printer (WithMD a) where
printer = showWithMD 0
instance (Show a, Printer b) => Printer (M.Map a b) where
printer =
(\list -> "[" ++ unlines list ++ "]")
. fmap (\(x,y) -> "(" ++ show x ++ " -> " ++ printer y ++ " )")
. M.toList
---------------
-- Functions
---------------
getIndent :: Int -> String
getIndent indent = concat $ replicate indent " "
-- |convert an atom to a string.
showAtom :: Int -> Atom -> String
showAtom indent atom = getIndent indent ++ case atom of
Nil -> "nil"
Integer x -> show x
Real x -> show x
String x -> show x
Bool True -> "#t"
Bool False -> "#f"
Symbol x -> x
Keyword x -> ':':x
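-- A few hedged examples of the rendering above (illustrative only):
--
-- > showAtom 0 (Integer 42) == "42"
-- > showAtom 0 (Keyword "name") == ":name"
-- > showAtom 0 (Bool True) == "#t"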
-- |convert an expression to a string.
showExpr :: Int -> Expr -> String
showExpr indent expr = getIndent indent ++ case expr of
QUOTE x@(WithMD _ (LIST _)) -> printer x
LIST xs -> "(" ++ showListElements xs ++ ")"
QUOTE x -> "'" ++ printer x
ATOM a -> printer a
PROCEDURE x -> printer x
ENVEXPR _ x -> "~" ++ printer x
STOREENV x -> "(;storenv " ++ printer x ++ ")"
IOResult x -> "<IO: " ++ printer x ++ ">"
-- |convert a closure to a string.
showClosure :: Int -> Closure -> String
showClosure indent (Closure _ x) = getIndent indent ++ printer x
-- |convert a function to a string.
showFun :: Int -> Fun -> String
showFun indent (Fun (FunArgsList arg) body) = getIndent indent ++ "(lambda " ++ arg ++ " " ++ printer body ++ ")"
showFun indent (Fun (FunArgs args mn) body) = getIndent indent ++ "(lambda (" ++ showListStrings (args ++ [maybe "" ('&':) mn]) ++ ") " ++ printer body ++ ")"
-- |convert a type with metadata to a string.
showWithMD :: Printer a => Int -> WithMD a -> String
showWithMD indent (WithMD _ x) = getIndent indent ++ printer x
-- |strings separated by space.
showListStrings :: [String] -> String
showListStrings [] = ""
showListStrings [x] = x
showListStrings (x:y:xs) = x ++ " " ++ showListStrings (y:xs)
-- |showable types separated by space.
showListElements :: Printer a => [a] -> String
showListElements = showListStrings . fmap printer
-- |convert a define to a string.
showDefine :: Name -> (Name, WithMD Expr) -> String
showDefine kind (name, WithMD _ (LIST [WithMD _ (ATOM (Symbol "lambda")), WithMD _ (LIST args), body])) =
unlines ["(" ++ kind ++ " " ++ name ++ " [" ++ showListElements args ++ "]"
," " ++ showExpr 1 (stripMD body) ++ ")"]
showDefine kind (name, WithMD _ expr) =
unlines ["(" ++ kind ++ " " ++ name
,showExpr 1 expr ++ ")"]
-- |convert a module definition to a string.
showModuleDef :: ModuleDef -> String
showModuleDef modu = unlines
["(module " ++ show (modName modu) ++ ")"
,""
,""
,defs modDefs "define"]
where defs f k = unlines $ map (showDefine k) (f modu)
-- |convert a module to a string.
showModule :: Module -> String
showModule modu = unlines
["(module " ++ show (getModName modu) ++ ")"
,""
,""
,defs (M.toList . getModEnv) "define"]
where defs f k = unlines $ fmap (showDefine k) (f modu)
-- |convert a data module to a string.
showdataModule :: Module -> String
showdataModule m =
unlines $ fmap ($m)
[getModFile
,getModName
,show . fmap showdataModule . getModImports
,printer . getModExports
,printer . getModEnv]
|
soupi/pureli
|
src/Language/Pureli/Printer.hs
|
bsd-3-clause
| 3,881 | 0 | 15 | 869 | 1,378 | 699 | 679 | 89 | 8 |
{-# LANGUAGE OverloadedStrings #-}
module CNC.HCodeTests where
import CNC.FanucMacro
import CNC.HCode
import CNC.AwePrelude
import Prelude(Num(..), Fractional(..), Floating(..), Int, ($), id, putStrLn, (++), show)
import Control.Monad(mapM_)
fcode_prog1 = FOps
[ FLabel (UserLabel "start")
, FAssign (FCell 101) (F_Add (F_Read $ FCell 101) (F_Int 42))
, FFrame [FInstrI 'G' 1, FInstrE 'X' (F_Read $ FCell 101), FInstrE 'Y' (F_Read $ FCell 100), FInstrE 'Z' (F_Int 20)]
, FAssign (FCell 100) (F_Sub (F_Read $ FCell 100) (F_Int 5))
, FIf (F_Gt (F_Read (FCell 100)) (F_Int 0))
(UserLabel "start")
, FLabel (UserLabel "end")
, FFrame [FInstrI 'M' 100]
, FOps [FFrame [FInstrI 'M' 30]] ""
] "a test program"
hcode_prog1 :: HCode ()
hcode_prog1 = do
label "start"
var101 <- sysVar 5101
var100 <- sysVar 5102
var101 #= (var101 + 42)
frame [g 1, z (var101), y (var100), z 20]
var100 #= (var100 - 5)
gIf (var100 > 0)
(goto "start")
m 30 # "stop operation"
label "end"
hcode_prog2 :: HCode ()
hcode_prog2 = do
let safe = 20
step = 15
frame [g 100, z safe]
speed <- newVar 15 # "feed speed"
speed2 <- newVar 2 # "rotation speed"
cur_x <- newVar 5.0 # "current x"
cur_y <- newVar 5.0
count <- newVar (0 :: Int)
while (cur_x < 100) $ do
gwhile (cur_y < 200) $ do
count #= count + 1
frame [g 100, f (speed), s (speed2), x $ cur_x, y $ cur_y] # "fast move"
frame [g 101, z 0]
frame [g 100, z 20] # "drilling down"
cur_x #= cur_x + step
count #= count + fix cur_x # "just to test a round op"
cur_y #= cur_y + step
hcode_poligon :: HCode ()
hcode_poligon = do
cx <- newVar 100 # "center x"
cy <- newVar 50 # "center y"
rad <- newVar 20 # "radius"
vertices <- newVar 6
depth <- newVar 10 # "drill depth"
instr <- newVar 1 # "instrument to use"
lengths <- sysTable "_INSTR_LEN"
angle <- newVar 0
ver <- newVar 0
step <- newVarE $ 360 / vertices
while (ver < vertices + 1) $ do
frame [g 0, x $ cx + rad * cos angle, y $ cy + rad * sin angle]
frame [g 1, z $ depth + lengths instr]
frame [g 0, z 0]
lengths instr #= lengths instr - 0.01 # "compensate instrument wearing"
angle #= angle + step
hcode_if_loops :: HCode ()
hcode_if_loops = do
mx <- newVar 10 # "max x"
my <- newVar 20 # "max y"
comment "center"
cx <- newVar 1 # "cur x"
cy <- newVar 1 # "cur y"
label "x-loop"
gIf (cx <= mx) $ do
x cx
label "y-loop"
gIf (cy <= my) $ do
y cy
cy #= cy + 1
goto "y-loop"
cx #= cx + 1
goto "x-loop"
m 30
hcode_for = do
mx <- newVar 10
for 1 (<= mx) (+ 0.5) $ \i -> do
x i
gIf (i == 5) $ break
y 10
y 0
tst <- newVar (42 :: Int)
m 30
hcode_gwhile_break = do
k <- newVar (1 :: Int)
comment "loop starts"
gwhile (k < 100) $ do
inner_loop k # "inner loop comment"
m 30
where inner_loop k = do
x $ fi k
gIf (k > 10) $ break
y $ fi k
hcode_samples = [ (hcode_prog1, "Example1"),
(hcode_prog2, "Example2"),
(hcode_if_loops, "if_loops - test for compound operators in if branches"),
(hcode_for, "for loop"),
(hcode_poligon, "Poligon drawer"),
(hcode_gwhile_break, "gwhile with break") ]
generator_tests = do
putStrLn "***** FanucMacro example:"
putFOps (LabelPrinter show show) fcode_prog1
mapM_ gen_sample hcode_samples
where gen_sample (hcode, descr) = do
putStrLn $ "***** " ++ descr ++ ":"
putHCode hcode
putStrLn $ "***** " ++ descr ++ " Output finished\n"
main = generator_tests
|
akamaus/gcodec
|
test/CNC/HCodeTests.hs
|
bsd-3-clause
| 3,630 | 0 | 17 | 1,002 | 1,577 | 758 | 819 | 119 | 1 |
{-# LANGUAGE PatternGuards #-}
module Idris.Docs (pprintDocs, getDocs, pprintConstDocs, FunDoc(..), Docs (..)) where
import Idris.AbsSyntax
import Idris.AbsSyntaxTree
import Idris.Delaborate
import Idris.Core.TT
import Idris.Core.Evaluate
import Idris.Docstrings (Docstring, emptyDocstring, noDocs, nullDocstring, renderDocstring, DocTerm, renderDocTerm)
import Util.Pretty
import Data.Maybe
import Data.List
import qualified Data.Text as T
-- TODO: Only include names with public/abstract accessibility
--
-- Issue #1573 on the Issue tracker.
-- https://github.com/idris-lang/Idris-dev/issues/1573
data FunDoc = FD Name (Docstring DocTerm)
[(Name, PTerm, Plicity, Maybe (Docstring DocTerm))] -- args: name, ty, implicit, docs
PTerm -- function type
(Maybe Fixity)
data Docs = FunDoc FunDoc
| DataDoc FunDoc -- type constructor docs
[FunDoc] -- data constructor docs
| ClassDoc Name (Docstring DocTerm)-- class docs
[FunDoc] -- method docs
[(Name, Maybe (Docstring DocTerm))] -- parameters and their docstrings
[PTerm] -- instances
[PTerm] -- superclasses
showDoc ist d
| nullDocstring d = empty
| otherwise = text " -- " <>
renderDocstring (renderDocTerm (pprintDelab ist) (normaliseAll (tt_ctxt ist) [])) d
pprintFD :: IState -> FunDoc -> Doc OutputAnnotation
pprintFD ist (FD n doc args ty f)
= nest 4 (prettyName True (ppopt_impl ppo) [] n <+> colon <+>
pprintPTerm ppo [] [ n | (n@(UN n'),_,_,_) <- args
, not (T.isPrefixOf (T.pack "__") n') ] infixes ty <$>
renderDocstring (renderDocTerm (pprintDelab ist) (normaliseAll (tt_ctxt ist) [])) doc <$>
maybe empty (\f -> text (show f) <> line) f <>
let argshow = showArgs args [] in
if not (null argshow)
then nest 4 $ text "Arguments:" <$> vsep argshow
else empty)
where ppo = ppOptionIst ist
infixes = idris_infixes ist
showArgs ((n, ty, Exp {}, Just d):args) bnd
= bindingOf n False <+> colon <+>
pprintPTerm ppo bnd [] infixes ty <>
showDoc ist d <> line
:
showArgs args ((n, False):bnd)
showArgs ((n, ty, Constraint {}, Just d):args) bnd
= text "Class constraint" <+>
pprintPTerm ppo bnd [] infixes ty <> showDoc ist d <> line
:
showArgs args ((n, True):bnd)
showArgs ((n, ty, Imp {}, Just d):args) bnd
= text "(implicit)" <+>
bindingOf n True <+> colon <+>
pprintPTerm ppo bnd [] infixes ty <>
showDoc ist d <> line
:
showArgs args ((n, True):bnd)
showArgs ((n, _, _, _):args) bnd = showArgs args ((n, True):bnd)
showArgs [] _ = []
pprintDocs :: IState -> Docs -> Doc OutputAnnotation
pprintDocs ist (FunDoc d) = pprintFD ist d
pprintDocs ist (DataDoc t args)
= text "Data type" <+> pprintFD ist t <$>
if null args then text "No constructors."
else nest 4 (text "Constructors:" <> line <>
vsep (map (pprintFD ist) args))
pprintDocs ist (ClassDoc n doc meths params instances superclasses)
= nest 4 (text "Type class" <+> prettyName True (ppopt_impl ppo) [] n <>
if nullDocstring doc
then empty
else line <> renderDocstring (renderDocTerm (pprintDelab ist) (normaliseAll (tt_ctxt ist) [])) doc)
<> line <$>
nest 4 (text "Parameters:" <$> prettyParameters)
<> line <$>
nest 4 (text "Methods:" <$>
vsep (map (pprintFD ist) meths))
<$>
nest 4 (text "Instances:" <$>
vsep (if null instances' then [text "<no instances>"]
else map dumpInstance instances'))
<>
(if null subclasses then empty
else line <$> nest 4 (text "Subclasses:" <$>
vsep (map (dumpInstance . prettifySubclasses) subclasses)))
<>
(if null superclasses then empty
else line <$> nest 4 (text "Default superclass instances:" <$>
vsep (map dumpInstance superclasses)))
where
params' = zip pNames (repeat False)
pNames = map fst params
ppo = ppOptionIst ist
infixes = idris_infixes ist
dumpInstance :: PTerm -> Doc OutputAnnotation
dumpInstance = pprintPTerm ppo params' [] infixes
prettifySubclasses (PPi (Constraint _ _) _ tm _) = prettifySubclasses tm
prettifySubclasses (PPi plcity nm t1 t2) = PPi plcity (safeHead nm pNames) (prettifySubclasses t1) (prettifySubclasses t2)
prettifySubclasses (PApp fc ref args) = PApp fc ref $ updateArgs pNames args
prettifySubclasses tm = tm
safeHead _ (y:_) = y
safeHead x [] = x
updateArgs (p:ps) ((PExp prty opts _ ref):as) = (PExp prty opts p (updateRef p ref)) : updateArgs ps as
updateArgs ps (a:as) = a : updateArgs ps as
updateArgs _ _ = []
updateRef nm (PRef fc _) = PRef fc nm
updateRef _ pt = pt
isSubclass (PPi (Constraint _ _) _ (PApp _ _ args) (PApp _ (PRef _ nm) args')) = nm == n && map getTm args == map getTm args'
isSubclass (PPi _ _ _ pt) = isSubclass pt
isSubclass _ = False
(subclasses, instances') = partition isSubclass instances
prettyParameters = if any (isJust . snd) params
then vsep (map (\(nm,md) -> prettyName True False params' nm <+> maybe empty (showDoc ist) md) params)
else hsep (punctuate comma (map (prettyName True False params' . fst) params))
getDocs :: Name -> Idris Docs
getDocs n
= do i <- getIState
case lookupCtxt n (idris_classes i) of
[ci] -> docClass n ci
_ -> case lookupCtxt n (idris_datatypes i) of
[ti] -> docData n ti
_ -> do fd <- docFun n
return (FunDoc fd)
docData :: Name -> TypeInfo -> Idris Docs
docData n ti
= do tdoc <- docFun n
cdocs <- mapM docFun (con_names ti)
return (DataDoc tdoc cdocs)
docClass :: Name -> ClassInfo -> Idris Docs
docClass n ci
= do i <- getIState
let docStrings = listToMaybe $ lookupCtxt n $ idris_docstrings i
docstr = maybe emptyDocstring fst docStrings
params = map (\pn -> (pn, docStrings >>= (lookup pn . snd))) (class_params ci)
instances = map (delabTy i) (class_instances ci)
superclasses = catMaybes $ map getDInst (class_default_superclasses ci)
mdocs <- mapM (docFun . fst) (class_methods ci)
return $ ClassDoc n docstr mdocs params instances superclasses
where
getDInst (PInstance _ _ _ _ _ t _ _) = Just t
getDInst _ = Nothing
docFun :: Name -> Idris FunDoc
docFun n
= do i <- getIState
let (docstr, argDocs) = case lookupCtxt n (idris_docstrings i) of
[d] -> d
_ -> noDocs
let ty = delabTy i n
let args = getPArgNames ty argDocs
let infixes = idris_infixes i
let fixdecls = filter (\(Fix _ x) -> x == funName n) infixes
let f = case fixdecls of
[] -> Nothing
(Fix x _:_) -> Just x
return (FD n docstr args ty f)
where funName :: Name -> String
funName (UN n) = str n
funName (NS n _) = funName n
getPArgNames :: PTerm -> [(Name, Docstring DocTerm)] -> [(Name, PTerm, Plicity, Maybe (Docstring DocTerm))]
getPArgNames (PPi plicity name ty body) ds =
(name, ty, plicity, lookup name ds) : getPArgNames body ds
getPArgNames _ _ = []
pprintConstDocs :: IState -> Const -> String -> Doc OutputAnnotation
pprintConstDocs ist c str = text "Primitive" <+> text (if constIsType c then "type" else "value") <+>
pprintPTerm (ppOptionIst ist) [] [] [] (PConstant c) <+> colon <+>
pprintPTerm (ppOptionIst ist) [] [] [] (t c) <>
nest 4 (line <> text str)
where t (Fl _) = PConstant $ AType ATFloat
t (BI _) = PConstant $ AType (ATInt ITBig)
t (Str _) = PConstant StrType
t (Ch c) = PConstant $ AType (ATInt ITChar)
t _ = PType
| andyarvanitis/Idris-dev | src/Idris/Docs.hs | bsd-3-clause | 8,927 | 0 | 23 | 3,208 | 3,037 | 1,532 | 1,505 | 171 | 16 |
{-# LANGUAGE ExistentialQuantification
, FlexibleContexts
#-}
module Unpacker where
import Control.Monad.Except
import Definition
data Unpacker m = forall a. Eq a => AnyUnpacker (LispVal -> m a)
unpackEquals :: MonadError LispError m => LispVal -> LispVal -> Unpacker m -> m Bool
unpackEquals arg1 arg2 (AnyUnpacker unpacker) =
  do unpacked1 <- unpacker arg1
     unpacked2 <- unpacker arg2
     return $ unpacked1 == unpacked2
  `catchError` const (return False)
unpackNum :: MonadError LispError m => LispVal -> m SchemeNumber
unpackNum (LNumber n) = return n
unpackNum (LString n) =
let parsed = reads n
in if null parsed
then throwError $ TypeMismatch "number" $ LString n
else return . SInt . fst . head $ parsed
unpackNum notNum = throwError $ TypeMismatch "number" notNum
unpackStr :: MonadError LispError m => (String -> String) -> LispVal -> m String
unpackStr f (LString s) = return $ f s
unpackStr f (LNumber n) = return . f $ show n
unpackStr f (LBool b) = return . f $ show b
unpackStr f (LChar c) = return . f $ show c
unpackStr _ notString = throwError $ TypeMismatch "string" notString
unpackBool :: MonadError LispError m => LispVal -> m Bool
unpackBool (LBool b) = return b
unpackBool notBool = throwError $ TypeMismatch "boolean" notBool
unpackBoolCoerce :: LispVal -> Bool
unpackBoolCoerce (LBool False) = False
unpackBoolCoerce _ = True
| comraq/scheme-interpreter | src/Unpacker.hs | bsd-3-clause | 1,427 | 0 | 11 | 306 | 507 | 248 | 259 | 32 | 2 |
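The Unpacker sample above relies on the existential AnyUnpacker wrapper so that unpackEquals can coerce two LispVals through the same unpacker and compare the results. A minimal usage sketch follows (an editor's illustration, not part of the dataset sample): the helper name looseEquals is hypothetical, and it assumes the SchemeNumber type from the sample's Definition module has an Eq instance; everything else reuses the definitions shown above.

-- Hypothetical helper (not in the repository): two values are "loosely equal"
-- when at least one unpacker maps both of them to equal Haskell values.
-- Assumes SchemeNumber has an Eq instance; String and Bool already do.
looseEquals :: MonadError LispError m => LispVal -> LispVal -> m Bool
looseEquals v1 v2 = fmap or $ mapM (unpackEquals v1 v2)
  [ AnyUnpacker unpackNum        -- compare as numbers
  , AnyUnpacker (unpackStr id)   -- compare as strings
  , AnyUnpacker unpackBool       -- compare as booleans
  ]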