Dataset columns (name, dtype, observed range):

    code                 stringlengths   5 .. 1.03M
    repo_name            stringlengths   5 .. 90
    path                 stringlengths   4 .. 158
    license              stringclasses   15 values
    size                 int64           5 .. 1.03M
    n_ast_errors         int64           0 .. 53.9k
    ast_max_depth        int64           2 .. 4.17k
    n_whitespaces        int64           0 .. 365k
    n_ast_nodes          int64           3 .. 317k
    n_ast_terminals      int64           1 .. 171k
    n_ast_nonterminals   int64           1 .. 146k
    loc                  int64           -1 .. 37.3k
    cycloplexity         int64           -1 .. 1.31k
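Each record below carries these thirteen fields in the order listed above. As a quick orientation only (not part of the dataset itself), one way a single row could be modelled in Haskell, the language of every code sample in this dump, is sketched below; the field names come from the schema, while the record name CodeRow and the concrete Haskell types are illustrative assumptions.

-- Hypothetical row type mirroring the column listing above; the record
-- name and the concrete Haskell types are assumptions for illustration.
data CodeRow = CodeRow
  { code             :: String   -- full source text, 5 .. 1.03M characters
  , repoName         :: String   -- e.g. "RossMeikleham/Project-Euler-Haskell"
  , path             :: FilePath -- file path inside the repository
  , license          :: String   -- one of 15 license identifiers (mit, gpl-2.0, ...)
  , size             :: Int      -- file size (int64 in the schema)
  , nAstErrors       :: Int
  , astMaxDepth      :: Int
  , nWhitespaces     :: Int
  , nAstNodes        :: Int
  , nAstTerminals    :: Int
  , nAstNonterminals :: Int
  , loc              :: Int      -- -1 appears to mark a missing value
  , cycloplexity     :: Int      -- -1 appears to mark a missing value
  } deriving (Show)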
-- Problem 23
--
-- A perfect number is a number for which the sum of its proper divisors is
-- exactly equal to the number. For example, the sum of the proper divisors
-- of 28 would be 1 + 2 + 4 + 7 + 14 = 28, which means that 28 is a perfect
-- number.
--
-- A number n is called deficient if the sum of its proper divisors is less
-- than n and it is called abundant if this sum exceeds n.
--
-- As 12 is the smallest abundant number, 1 + 2 + 3 + 4 + 6 = 16, the
-- smallest number that can be written as the sum of two abundant numbers
-- is 24. By mathematical analysis, it can be shown that all integers
-- greater than 28123 can be written as the sum of two abundant numbers.
-- However, this upper limit cannot be reduced any further by analysis even
-- though it is known that the greatest number that cannot be expressed as
-- the sum of two abundant numbers is less than this limit.
--
-- Find the sum of all the positive integers which cannot be written as the
-- sum of two abundant numbers.

import Data.List
import Data.Array.ST
import Control.Monad.ST
import Control.Monad
import Data.Array.Unboxed

euler23 = sum [x | (x, True) <- (assocs . obtainNonAbundantSums) $ abundantNos maxNo]

maxNo = 28123

obtainNonAbundantSums :: [Int] -> UArray Int Bool
obtainNonAbundantSums abNos = runSTUArray $ do
    arr <- newArray (1, maxNo) True
    forM_ abNos $ \m -> do
        let xs = takeWhile (\a -> m + a <= maxNo) $ dropWhile (< m) abNos
        forM_ xs $ \n -> writeArray arr (m + n) False
    return arr

abundantNos n = filter (\n -> sumProperDivisors n > n) [1..n]

sumProperDivisors n
    | n == 1    = 0
    | otherwise = sum factors - n
  where factors = concatMap (\(x,y) -> if x /= y then [x,y] else [x]) $ factorPairs n

factorPairs :: Int -> [(Int, Int)]
factorPairs x = [ (y, x `div` y)
                | y <- [1..truncate (sqrt (fromIntegral x))], x `mod` y == 0]
RossMeikleham/Project-Euler-Haskell
23.hs
mit
1,889
0
20
422
433
239
194
21
2
-- | -- Module : Jeopardy.Controller -- Description : MVC for our game -- Copyright : (c) Jonatan H Sundqvist, year -- License : MIT -- Maintainer : Jonatan H Sundqvist -- Stability : experimental|stable -- Portability : POSIX (not sure) -- -- Created date year -- TODO | - -- - -- SPEC | - -- - module Jeopardy.Controller where --------------------------------------------------------------------------------------------------- -- We'll need these --------------------------------------------------------------------------------------------------- import Graphics.UI.Gtk -- import Graphics.Rendering.Cairo (liftIO, fill) -- import Data.IORef -- import Data.Complex -- import Data.Maybe (listToMaybe, maybe) import Data.List (findIndex) import Text.Printf import qualified Southpaw.Picasso.Palette as Palette import qualified Southpaw.Interactive.Application as App import Jeopardy.Graphics -- TODO: Better name import Jeopardy.Core -- TODO: Better name import Jeopardy.Curator -- import Tiler -- --------------------------------------------------------------------------------------------------- -- Types --------------------------------------------------------------------------------------------------- -- | -- TODO: Input state (?) -- data App = App { _window :: Window, _canvas :: DrawingArea, _size :: (Int, Int), _state :: IORef AppState } -- data AppState = AppState { _game :: Game, _selected :: Maybe Int, _path :: [Complex Double] } -- --------------------------------------------------------------------------------------------------- -- Data --------------------------------------------------------------------------------------------------- fps = 30 :: Int --------------------------------------------------------------------------------------------------- -- Functions --------------------------------------------------------------------------------------------------- -- Events ----------------------------------------------------------------------------------------- -- | -- ondelete :: IO Bool ondelete = do -- TODO: Uhmmm... what? liftIO $ do mainQuit putStrLn "Goodbye!" return False -- | onmousemotion stateref = do (mx, my) <- eventCoordinates liftIO $ do let cursor = mx:+my -- let radius = 18 -- appstate@(AppState { _path=path }) <- readIORef stateref writeIORef stateref $ appstate { _selected=findIndex (within radius cursor) path } return False where within r p = (< r) . realPart . abs . subtract p -- | ondraw appstate = do -- renderGame $ _game appstate -- Testing an unrelated tiling function renderPathWithJoints Palette.chartreuse Palette.darkviolet 18 5 $ _path appstate assuming (_selected appstate) $ \ sel -> do renderCircle 22 $ _path appstate !! sel Palette.choose Palette.limegreen fill renderCircle 22 $ _path appstate !! opposite sel Palette.choose Palette.limegreen fill where assuming (Just a) f = f a assuming _ _ = return () opposite n = (n + 3) `mod` (length $ _path appstate) -- | onanimate :: DrawingArea -> IORef AppState -> IO Bool onanimate canvas stateref = do widgetQueueDraw canvas return True --------------------------------------------------------------------------------------------------- -- | createApp :: IO (App.App) createApp = App.createWindowWithCanvas 650 650 $ AppState { _game=createGame, _selected=Nothing, _path=[ (200:+200)+(70:+0)*(cos θ:+sin θ) | θ <- [0, τ/6..(τ*5/6)] ] } --------------------------------------------------------------------------------------------------- -- | -- TODO: Rename, move (?) 
mainGTK :: IO () mainGTK = do (App.App { App._window=window, App._canvas=canvas, App._size=size, App._state=stateref }) <- createApp timeoutAdd (onanimate canvas stateref) (1000 `div` fps) >> return () -- Events canvas `on` draw $ (liftIO $ readIORef stateref) >>= ondraw canvas `on` motionNotifyEvent $ onmousemotion stateref -- canvas `on` buttonPressEvent $ onbuttonpress worldref -- canvas `on` buttonReleaseEvent $ onbuttonreleased worldref -- window `on` configureEvent $ onresize window worldref window `on` deleteEvent $ ondelete -- window `on` keyPressEvent $ onkeypress worldref mainGUI
SwiftsNamesake/Leopardy
src/Jeopardy/Controller.hs
mit
4,429
4
15
825
884
485
399
59
2
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts  #-}
module Nix.Common (
    module ClassyPrelude,
    module Data.Text,
    module Data.HashMap.Strict,
    module Data.Fix,
    module Filesystem.Path.CurrentOS,
    module Data.Sequence,
    module GHC.Generics,
    module Control.DeepSeq,
    module Control.Monad.Identity,
    module Control.Monad.Except,
    Extract(..), ShowIO(..), Record,
    pathToText, mapToRecord, substring
  ) where

import ClassyPrelude hiding (FilePath, asList, assert, bracket, filterM, find,
                             forM_, ioError, mapM_, maximum, maximumBy, minimum,
                             readFile, replicateM, sequence_, stripPrefix, try,
                             writeFile, ($>), (</>))
import Control.DeepSeq (NFData (..))
import Control.Monad.Identity (Identity (..))
import Control.Monad.Except hiding (foldM)
import Control.Monad.State.Strict (MonadState(..), StateT(..))
import Data.Fix
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as H
import Data.Sequence (Seq)
import Data.Text (Text)
import qualified Data.Text as T
import Filesystem.Path.CurrentOS hiding (concat, empty, null, (<.>))
import GHC.Generics
import qualified Data.Map as M

-- | Convert a FilePath into Text.
pathToText :: FilePath -> Text
pathToText pth = case toText pth of
  Left p  -> p
  Right p -> p

-- | The opposite of 'pure'; classes whose internal values can be
-- extracted purely.
class Extract m where
  extract :: m a -> a

instance Extract Identity where
  extract (Identity x) = x

-- | For things whose string representation needs to be computed with
-- potential side-effects.
class ShowIO t where
  showIO :: t -> IO Text

instance ShowIO t => ShowIO (IO t) where
  showIO action = action >>= showIO

instance (ShowIO a, Traversable t, Show (t Text)) => ShowIO (t (IO a)) where
  showIO vals = do
    innerReps <- mapM showIO vals
    return $ tshow innerReps

instance (Show a, ShowIO b) => ShowIO (Either a b) where
  showIO (Left a)  = return $ tshow a
  showIO (Right b) = showIO b

instance (Show a, ShowIO b) => ShowIO (ExceptT a IO b) where
  showIO action = showIO =<< runExceptT action

type Record = HashMap Text

mapToRecord :: Map Text t -> Record t
mapToRecord = H.fromList . M.toList

substring :: Integral i => i -> i -> Text -> Text
substring s l str = do
  let (start, len) = (fromIntegral s, fromIntegral l)
  T.take len $ T.drop start str
adnelson/nix-eval
src/Nix/Common.hs
mit
2,866
0
11
905
782
447
335
65
2
module Main where

import Test.Framework
import Test.Framework.Providers.HUnit
import Test.HUnit
import System.IO (stdout)
import System.Log.Logger
import System.Log.Handler (setFormatter)
import System.Log.Handler.Simple (fileHandler)
import System.Log.Formatter (simpleLogFormatter)

import EDDA.Schema.ParserTest (parserTests)
import EDDA.Schema.CommodityV2Test (commodityV2Tests)
import EDDA.Schema.CommodityV3Test (commodityV3Tests)
import EDDA.Schema.OutfittingV1Test (outfittingV1Tests)
import EDDA.Schema.OutfittingV2Test (outfittingV2Tests)
import EDDA.Schema.ShipyardV1Test (shipyardV1Tests)
import EDDA.Schema.ShipyardV2Test (shipyardV2Tests)

tests = hUnitTestToTests $ TestList (parserTests ++ commodityV2Tests ++ commodityV3Tests ++
                                     outfittingV1Tests ++ outfittingV2Tests ++
                                     shipyardV1Tests ++ shipyardV2Tests)

setRootLogger :: IO ()
setRootLogger = do
    handler <- fileHandler "edda-test.log" DEBUG >>= \lh ->
        return $ setFormatter lh (simpleLogFormatter "$prio:$loggername:$time - $msg")
    updateGlobalLogger rootLoggerName (addHandler handler)
    updateGlobalLogger rootLoggerName (setLevel INFO)

main = do
    setRootLogger
    defaultMain tests
troydm/edda
test/Spec.hs
mit
1,268
0
13
221
280
158
122
25
1
{-# LANGUAGE OverloadedStrings, ExistentialQuantification, BangPatterns #-} -- | CouchDB View Query options. -- -- For details see -- <http://wiki.apache.org/couchdb/HTTP_view_API#Querying_Options>. Note, -- because all options must be a proper URL encoded JSON, construction of -- complex parameters can be very tedious. To simplify this, use 'mkQuery'. module Database.CouchDB.Conduit.View.Query ( -- * Creating Query CouchQP(..), mkQuery, -- * Parameter helpers qpUnit, qpNull ) where import qualified Data.ByteString as B import qualified Data.HashMap.Strict as MS import qualified Data.Aeson as A import Data.String.Conversions (cs, (<>)) import qualified Data.List as L import qualified Network.HTTP.Types as HT -- | CouchDB Query options primitives. data CouchQP = forall a . A.ToJSON a => QPComplex B.ByteString a -- ^ Complex view query parameter. -- -- > couchQP [QPComplex "param" (["a", "b"] :: [String])] -- > [("param", Just "[\"a\",\"b\"]")] -- > ...?param=["a","b"] -- > -- > couchQP [QPComplex "key" (("a", 1) :: (String, Int))] -- > [("key", Just "[\"a\",0]")] -- > ...?param=["a",0] -- -- It't just convert lazy 'BL.ByteString' from 'A.encode' to strict -- 'B.ByteString'. For more efficient use specific functions. | QPBS B.ByteString B.ByteString -- ^ Quoted 'B.ByteString' query parameter. -- -- > ...?param="value" | QPInt B.ByteString Int -- ^ 'Int' query parameter. -- -- > ...?param=100 | QPBool B.ByteString Bool -- ^ 'Bool' query parameter. -- -- > ...?param=true | QPDescending -- ^ Reverse rows output. -- -- > ...?descending=true | QPLimit Int -- ^ Limit rows. Use @Zero (0)@ to omit. -- -- > ...?limit=5 | QPSkip Int -- ^ Skip rows. Use @Zero (0)@ to omit. -- -- > ...?skip=10 | QPStale Bool -- ^ Stale view. On @True@ sets @stale@ parameter to @ok@, else -- sets it to @update_after@. -- -- > ...?stale=ok -- > ...?stale=update_after | forall a . A.ToJSON a => QPKey a -- ^ @key@ query parameter. -- -- > ...?key=... | forall a . A.ToJSON a => QPStartKey a -- ^ Row key to start with. Becomes @endkey@ if @descending@ turned on. -- See 'couchQuery'. -- -- > ...?startkey=... -- > ...?descending=true?endkey=... | forall a . A.ToJSON a => QPEndKey a -- ^ Row key to start with. Becomes @startkey@ if @descending@ -- turned on. See 'couchQuery'. -- -- > ...?endkey=... -- > ...?descending=true?startkey=... | forall a . A.ToJSON a => QPKeys a -- ^ Row key to start with. Use only with 'couchView' and -- 'couchView_'. For large sets of @keys@ use 'couchViewPost' and -- 'couchViewPost_' -- -- > ...?keys=... | QPGroup -- ^ Turn on grouping. -- -- > ...?group=true | QPGroupLevel Int -- ^ Set grouping level. Use @Zero (0)@ to omit. -- -- > ...?group_level=2 | QPReduce Bool -- ^ Control reduce. -- -- > ...?reduce=true -- > ...?reduce=false | QPIncludeDocs -- ^ Turn on inclusion docs in view results. -- -- > ...?include_docs=true | QPInclusiveEnd -- ^ Turn off inclusion @endkey@ in view results. -- -- > ...?inclusive_end=false | QPUpdateSeq -- ^ Response includes an update_seq value indicating which sequence -- id of the database the view reflects -- -- > ...?update_seq=true | QPStartKeyDocId B.ByteString -- ^ Document id to start with. -- -- > ...?startkey_docid=... | QPEndKeyDocId B.ByteString -- ^ Document id to end with. -- -- > ...?endkey_docid=... -- | Make CouchDB query options. mkQuery :: [CouchQP] -- ^ Query options. -> HT.Query mkQuery qs = concatMap parseqp qs where parseqp (QPComplex n v) = [(n, Just $ cs . 
A.encode $ v)] parseqp (QPBS n v) = [(n, Just $ "\"" <> v <> "\"")] parseqp (QPInt n v) = [(n, Just $ cs . show $ v)] parseqp (QPBool n True) = [(n, Just "true")] parseqp (QPBool n False) = [(n, Just "false")] parseqp QPDescending = boolqp "descending" True parseqp (QPLimit v) = intZeroQp "limit" v parseqp (QPSkip v) = intZeroQp "skip" v parseqp (QPStale True) = [("stale", Just "ok")] parseqp (QPStale False) = [("stale", Just "update_after")] parseqp (QPKey v) = parseqp $ QPComplex "key" v parseqp (QPKeys v) = parseqp $ QPComplex "keys" v parseqp (QPStartKey v) = parseqp $ QPComplex (descDep "startkey" "endkey") v parseqp (QPEndKey v) = parseqp $ QPComplex (descDep "endkey" "startkey") v parseqp QPGroup = boolqp "group" True parseqp (QPGroupLevel v) = intZeroQp "group_level" v parseqp (QPReduce v) = boolqp "reduce" v parseqp QPIncludeDocs = boolqp "include_docs" True parseqp QPInclusiveEnd = boolqp "inclusive_end" False parseqp QPUpdateSeq = boolqp "update_seq" True parseqp (QPStartKeyDocId v) = parseqp $ QPComplex "startkey_docid" v parseqp (QPEndKeyDocId v) = parseqp $ QPComplex "endkey_docid" v -- | Boolean boolqp n v = parseqp $ QPBool n v -- | Ommitable int intZeroQp _ 0 = [] intZeroQp n v = parseqp $ QPInt n v -- | Descending dependent param descDep a b = if isDesc then b else a !isDesc = case L.find isDesc' qs of Nothing -> False _ -> True isDesc' QPDescending = True isDesc' _ = False -- | Returns empty 'MS.HashMap'. Aeson will convert -- this to @\{\}@ (JSON unit). This useful for @startkey@ and @endkey@. -- -- > couchQuery [QPStartKey (1, 0), QPEndKey (1, {})] qpUnit :: MS.HashMap B.ByteString Bool qpUnit = MS.empty -- | Simply return 'A.Null'. qpNull :: A.Value qpNull = A.Null
nikita-volkov/ez-couch
src/Database/CouchDB/Conduit/View/Query.hs
mit
6,313
0
11
1,980
1,064
613
451
75
26
data Price = Price Integer deriving (Eq, Show)

data Size = Size Integer deriving (Eq, Show)

data Manufacturer = Mini | Mazda | Tata deriving (Eq, Show)

data Airline = PapuAir | CatapultsR'Us | TakeYourChancesUnited deriving (Eq, Show)

data Vehicle = Car Manufacturer Price
             | Plane Airline Size
             deriving (Eq, Show)

myCar    = Car Mini (Price 14000)
urCar    = Car Mazda (Price 20000)
clownCar = Car Tata (Price 7000)
doge     = Plane PapuAir (Size 9001)

-- 1 - :type myCar :: Vehicle

-- 2
isCar :: Vehicle -> Bool
isCar (Car _ _) = True
isCar _         = False

isPlane :: Vehicle -> Bool
isPlane (Plane _ _) = True
isPlane _           = False

areCars :: [Vehicle] -> [Bool]
areCars = map isCar

-- 3
getManu :: Vehicle -> Manufacturer
getManu (Car m _) = m

-- 4 getManu doge => bottom
ashnikel/haskellbook
ch11/ch11.6_ex.hs
mit
849
0
7
236
300
162
138
31
1
module Resource where
--
type Money  = Int
type Food   = Int
type Energy = Int
--
data Good = Cattle | Fish | Grain
          | Wood | Coal | Clay | Iron | Hides
          | Loan | Coin
          deriving (Show, Eq)
--
-- ##### ##### ##### ##### ##### #####
--
--
data AdvGood = Meat | SmokedFish | Bread
             | Charcoal | Coke | Brick | Steel | Leather
             deriving (Show, Eq)
--
class PowerGen a where
    canGen   :: a -> Bool
    genPower :: a -> Energy

instance PowerGen Good where
    canGen Wood = True
    canGen Coal = True
    canGen _    = False
    genPower Wood = 1
    genPower Coal = 3
    genPower _    = 0

instance PowerGen AdvGood where
    canGen Charcoal = True
    canGen _        = False
    genPower Charcoal = 10
    genPower _        = 0
--
-- ##### ##### ##### ##### ##### #####
--
--
class Shippable a where
    ship :: a -> Money

instance Shippable Good where
    ship Cattle = 3
    ship Fish   = 1
    ship Grain  = 1
    ship Wood   = 1
    ship Coal   = 3
    ship Clay   = 1
    ship Iron   = 2
    ship Hides  = 2
    ship Loan   = 4
    ship Coin   = 1

instance Shippable AdvGood where
    ship Meat       = 2
    ship SmokedFish = 2
    ship Bread      = 3
    ship Charcoal   = 2
    ship Coke       = 5
    ship Brick      = 2
    ship Steel      = 8
    ship Leather    = 4
--
-- ##### ##### ##### ##### ##### #####
--
--
class Eatable a where
    isEatable :: a -> Bool
    toFood    :: a -> Food

instance Eatable Good where
    isEatable Fish = True
    isEatable Coin = True
    isEatable _    = False
    toFood Fish = 1
    toFood Coin = 1
    toFood _    = 0

instance Eatable AdvGood where
    isEatable Meat       = True
    isEatable SmokedFish = True
    isEatable Bread      = True
    isEatable _          = False
    toFood Meat       = 3
    toFood SmokedFish = 2
    toFood Bread      = 2
    toFood _          = 0
--
jaiyalas/creepy-waffle
LeHavre/src/LeHavre/Resource.hs
mit
2,033
0
7
827
578
312
266
80
0
module Sllar.Server ( start , stop ) where import Sllar.Config import qualified Sllar.Package.Import (publish) import qualified Sllar.Package.Export import Paths_sllar_server -- System import Control.Concurrent import Control.Monad (forever) import Data.Maybe import Network import System.Directory (removeFile) import System.IO import System.Process (system) import System.Posix.Process (getProcessID) import System.Posix.Daemonize (daemonize) data Request = Request { rtype :: RequestType , path :: String , options :: [(String, String)] } deriving (Show) data Response = Response { body, restype :: String } data RequestType = GET | POST deriving (Show, Read) -- -- Starting a web-server, receiving and routing requests -- start :: IO () start = daemonize $ withSocketsDo $ do -- getting data from server's config config' <- config let port' = port $ fromMaybe (Config 5000) config' sock <- listenOn $ PortNumber (fromInteger port') _ <- forkIO writePid forever $ do (handle, _, _) <- accept sock forkIO $ do request <- fmap (parseRequest . lines) (hGetContents handle) response <- router request hPutStr handle $ template response hFlush handle hClose handle -- -- Killing sllar-server process by PID -- stop :: IO () stop = do tmpFolder <- getDataFileName "tmp/" let tmpFilePath = tmpFolder ++ "sllar-server.pid" pid <- readFile tmpFilePath _ <- system $ "kill " ++ pid removeFile tmpFilePath putStrLn "sllar-server was stopped" -- -- Routing request to a specific content -- Input: incoming request -- Output: content for a specific route -- router :: Request -> IO Response router request = do let Request rtype' path' options' = request (html, json, text) = ("text/html", "application/json", "text/plain") (bodyIO, respType) = case (rtype', path') of (GET, "/packages") -> (Sllar.Package.Export.allJson, json) (POST, "/publish") -> (Sllar.Package.Import.publish options', text) _ -> (getDataFileName "html/index.html" >>= readFile, html) body' <- bodyIO return (Response body' respType) -- -- Wrapping content to a http request headers -- Input: data for response -- Output: final response -- template :: Response -> String template Response { body = b, restype = t } = "HTTP/1.0 200 OK\r\n" ++ "Content-type:" ++ t ++ ";charset=utf-8\r\n" ++ "Content-Length: " ++ show (length b) ++ "\r\n\r\n" ++ b ++ "\r\n" -- -- Parsing incoming request -- Input: raw string with request headers -- Output: parsed request details -- parseRequest :: [String] -> Request parseRequest lns = Request { rtype=read t, path=p, options=parseRequestHelper(tail lns, []) } where [t, p, _] = words (head lns) -- -- Getting request headers (options) -- parseRequestHelper :: ([String], [(String, String)]) -> [(String, String)] parseRequestHelper ([], accum) = accum parseRequestHelper (l:rest, accum) | length (words l) < 2 = accum | otherwise = parseRequestHelper(rest, accum ++ [(init . head . words $ l, unwords . tail . words $ l)] ) -- -- Memoizing Process PID, recording it to a pid file -- writePid :: IO () writePid = do pid <- getProcessID tmpFolder <- getDataFileName "tmp/" let pidfile = tmpFolder ++ "sllar-server.pid" pidStr = show pid writeFile pidfile pidStr
grsmv/sllar
src/Sllar/Server.hs
mit
3,553
0
17
871
1,004
546
458
79
3
{-# LANGUAGE DeriveDataTypeable, ScopedTypeVariables, OverloadedStrings #-} module Main (main) where import Data.Typeable import Data.Text () import Control.Monad import LIO import LIO.DCLabel import Hails.Data.Hson import Hails.Database import Hails.PolicyModule import Hails.PolicyModule.DSL import LIO.TCB (ioTCB) import LIO.Privs.TCB (mintTCB) import LIO.DCLabel.Privs.TCB (allPrivTCB) import System.Posix.Env (setEnv) data UsersPolicyModule = UsersPolicyModuleTCB DCPriv deriving Typeable instance PolicyModule UsersPolicyModule where initPolicyModule priv = do setPolicy priv $ do database $ do readers ==> anybody writers ==> anybody admins ==> this collection "users" $ do access $ do readers ==> anybody writers ==> anybody clearance $ do secrecy ==> this integrity ==> anybody document $ \_ -> do readers ==> anybody writers ==> anybody field "name" $ searchable field "password" $ labeled $ \doc -> do let user = "name" `at` doc :: String readers ==> this \/ user writers ==> this \/ user return $ UsersPolicyModuleTCB priv where this = privDesc priv withUsersPolicyModule :: DBAction a -> DC a withUsersPolicyModule act = withPolicyModule (\(_ :: UsersPolicyModule) -> act) -- | Create databse config file mkDBConfFile :: IO () mkDBConfFile = do writeFile dbConfFile (unlines [show pm]) setEnv "DATABASE_CONFIG_FILE" dbConfFile False where pm :: (String, String) pm = (mkName (UsersPolicyModuleTCB undefined), "users_db") dbConfFile = "/tmp/hails_example_database.conf" mkName x = let tp = typeRepTyCon $ typeOf x in tyConPackage tp ++ ":" ++ tyConModule tp ++ "." ++ tyConName tp main :: IO () main = do mkDBConfFile withUser "alice" app1 withUser "bob" (app2 False) withUser "bob" (app2 True) withUser "alice" (app2 True) where withUser :: String -> (String -> DCPriv -> DC ()) -> IO () withUser u act = putStrLn . show =<< (paranoidDC $ do let prin = toComponent u setClearanceP allPrivTCB (dcLabel prin dcTrue) act u $ mintTCB prin) app1 :: String -> DCPriv -> DC () app1 usr priv = do let p = toBsonValue ("w00tw00t" :: String) withUsersPolicyModule $ do let doc :: HsonDocument doc = [ "name" -: usr, "password" -: needPolicy p] insertP_ priv "users" doc app2 :: Bool -> String -> DCPriv -> DC () app2 readPass _ priv = do ldocs <- withUsersPolicyModule $ do cur <-findP priv $ select [] "users" getAll [] cur -- forM_ ldocs $ \ldoc -> do doc <- unlabelP priv ldoc putStrLn' $ "name = " ++ ("name" `at` doc) when readPass $ do lpass <- getPolicyLabeled ("password" `at` doc) pass <- unlabelP priv lpass putStrLn' $ "password = " ++ show pass where getAll acc cur = do mldoc <- nextP priv cur case mldoc of Nothing -> return acc Just ldoc -> getAll (ldoc:acc) cur putStrLn' :: String -> DC () putStrLn' m = ioTCB $ putStrLn m
scslab/hails
examples/simpleDBExample.hs
mit
3,217
0
25
909
1,046
510
536
93
2
main = print "hello world"
patbeagan1/AcerArchDev
haskell/helloworld.hs
gpl-2.0
27
0
5
5
9
4
5
1
1
{- Module to parse the output from k Such output is constructed into named Cells and their contents, or Strings. See KOutput All formatting info (spaces, newlines, etc) with respect to the contents of cells is preserved, but the cells themselves do not preserve formatting. E.g.: <mycell> </mycell> === < mycell > </ mycell> -} module ParseKOutput where import ByteStringUtils import Text.Parsec import Text.Parsec.ByteString import Control.Applicative ((<$>)) import qualified Data.ByteString.Char8 as B type Name = ByteString type CellStack = [ByteString] data KOutput = Cell Name [KOutput] | String Name ByteString deriving (Show, Read, Eq) -- A string parser that has a stack of cell names for state (currenty unused), and outputs a KOutput type KOutputParser = Parsec ByteString CellStack KOutput -- Get rid of the maude header and stuff header :: Parsec ByteString CellStack () header = manyTill anyChar (try (lookAhead beginCell)) >> return () -- Open the file, and parse it. Return a list of parses (e.g. if there are multiple cells at the top level) parseKOutFile :: FilePath -> IO [KOutput] parseKOutFile fp = do input <- B.readFile fp case runParser parseTop [] "" input of Left err -> error $ show err Right res -> return res parseTop :: Parsec ByteString CellStack [KOutput] parseTop = header >> many (try parseKOutput) -- Start parsing parseKOutput :: KOutputParser parseKOutput = spaces >> parseCell parseCell :: KOutputParser parseCell = do name <- beginCell contents <- manyTill parseInternals (try (endCell name)) return $ Cell name (combineStrings contents) -- Parse the internals of a cell. First attempt to do a nested cell, then try an underlying string parseInternals :: KOutputParser parseInternals = try parseCell <|> parseString -- Parse the underlying string content of a cell, i.e. what is there that isn't a new inner cell. parseString :: KOutputParser parseString = peek >>= \name -> (String name . pack <$> many1 (noneOf "<")) <|> (char '<' >> parseString >>= \k -> case k of String _ s -> return (String name (cons '<' s))) beginCell :: Parsec ByteString CellStack ByteString beginCell = do char '<' >> spaces name <- pack <$> many1 alphaNum push name spaces >> char '>' return name endCell :: Name -> Parsec ByteString CellStack () endCell s = do spaces char '<' >> spaces >> char '/' >> spaces string (unpack s) >> spaces char '>' pop return () -- Combine strings when they were split due to a literal '<' -- When a '<' is parsed as part of the text (as opposed to a cell), everything before and everything after -- it will be in seperate Strings. combineStrings :: [KOutput] -> [KOutput] combineStrings (String n s1 : String _ s2 : ks) | B.head s2 == '<' = combineStrings (String n (s1 `append` s2) : ks) combineStrings (x:xs) = x : combineStrings xs combineStrings [] = [] -- Stack-based operations on the state. push :: ByteString -> Parsec ByteString [ByteString] () push s = modifyState (\l -> s : l) pop :: Parsec ByteString [ByteString] () pop = modifyState $ \s -> (drop 1 s) peek :: Parsec ByteString [ByteString] ByteString peek = head <$> getState -- Test the parser testParser :: String -> IO () testParser s = case runParser parseTop [] "" (pack s) of Left err -> error $ show err Right cs -> print cs
bakineggs/k-framework
tools/OutputFilter/ParseKOutput.hs
gpl-2.0
3,726
0
18
1,029
904
459
445
59
2
{-# LANGUAGE OverloadedStrings #-}
module Carbon.Data.User where

import Data.Aeson ((.=), ToJSON(..), object)
import Data.Function (on)

import Carbon.Data.Alias
import Carbon.Data.Id
import Carbon.Data.Hash
import Carbon.Data.Salt

data User = User {
    userId       :: Id
  , username     :: String
  , userhash     :: Hash
  , usersalt     :: Salt
  , userCreation :: Timestamp
  , lastLogin    :: Timestamp
  , isAdmin      :: Bool
  , profile      :: Maybe Id
  , session      :: Maybe SessionKey
  } deriving (Show)

instance Eq User where
  (==) = (==) `on` userId

instance Ord User where
  compare = compare `on` userId

instance ToJSON User where
  toJSON u = object [
      "id"           .= userId u
    , "username"     .= username u
    , "userCreation" .= userCreation u
    , "lastLogin"    .= lastLogin u
    , "isAdmin"      .= isAdmin u
    , "profile"      .= profile u
    ]
runjak/carbon-adf
Carbon/Data/User.hs
gpl-3.0
899
0
9
253
261
155
106
31
0
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- Derived from AWS service descriptions, licensed under Apache 2.0. -- | -- Module : Network.AWS.Kinesis.Types.Product -- Copyright : (c) 2013-2015 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- module Network.AWS.Kinesis.Types.Product where import Network.AWS.Kinesis.Types.Sum import Network.AWS.Prelude -- | The range of possible hash key values for the shard, which is a set of -- ordered contiguous positive integers. -- -- /See:/ 'hashKeyRange' smart constructor. data HashKeyRange = HashKeyRange' { _hkrStartingHashKey :: !Text , _hkrEndingHashKey :: !Text } deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'HashKeyRange' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'hkrStartingHashKey' -- -- * 'hkrEndingHashKey' hashKeyRange :: Text -- ^ 'hkrStartingHashKey' -> Text -- ^ 'hkrEndingHashKey' -> HashKeyRange hashKeyRange pStartingHashKey_ pEndingHashKey_ = HashKeyRange' { _hkrStartingHashKey = pStartingHashKey_ , _hkrEndingHashKey = pEndingHashKey_ } -- | The starting hash key of the hash key range. hkrStartingHashKey :: Lens' HashKeyRange Text hkrStartingHashKey = lens _hkrStartingHashKey (\ s a -> s{_hkrStartingHashKey = a}); -- | The ending hash key of the hash key range. hkrEndingHashKey :: Lens' HashKeyRange Text hkrEndingHashKey = lens _hkrEndingHashKey (\ s a -> s{_hkrEndingHashKey = a}); instance FromJSON HashKeyRange where parseJSON = withObject "HashKeyRange" (\ x -> HashKeyRange' <$> (x .: "StartingHashKey") <*> (x .: "EndingHashKey")) -- | Represents the output for 'PutRecords'. -- -- /See:/ 'putRecordsRequestEntry' smart constructor. data PutRecordsRequestEntry = PutRecordsRequestEntry' { _prreExplicitHashKey :: !(Maybe Text) , _prreData :: !Base64 , _prrePartitionKey :: !Text } deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'PutRecordsRequestEntry' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'prreExplicitHashKey' -- -- * 'prreData' -- -- * 'prrePartitionKey' putRecordsRequestEntry :: ByteString -- ^ 'prreData' -> Text -- ^ 'prrePartitionKey' -> PutRecordsRequestEntry putRecordsRequestEntry pData_ pPartitionKey_ = PutRecordsRequestEntry' { _prreExplicitHashKey = Nothing , _prreData = _Base64 # pData_ , _prrePartitionKey = pPartitionKey_ } -- | The hash value used to determine explicitly the shard that the data -- record is assigned to by overriding the partition key hash. prreExplicitHashKey :: Lens' PutRecordsRequestEntry (Maybe Text) prreExplicitHashKey = lens _prreExplicitHashKey (\ s a -> s{_prreExplicitHashKey = a}); -- | The data blob to put into the record, which is base64-encoded when the -- blob is serialized. When the data blob (the payload before -- base64-encoding) is added to the partition key size, the total size must -- not exceed the maximum record size (1 MB). -- -- /Note:/ This 'Lens' automatically encodes and decodes Base64 data, -- despite what the AWS documentation might say. -- The underlying isomorphism will encode to Base64 representation during -- serialisation, and decode from Base64 representation during deserialisation. 
-- This 'Lens' accepts and returns only raw unencoded data. prreData :: Lens' PutRecordsRequestEntry ByteString prreData = lens _prreData (\ s a -> s{_prreData = a}) . _Base64; -- | Determines which shard in the stream the data record is assigned to. -- Partition keys are Unicode strings with a maximum length limit of 256 -- characters for each key. Amazon Kinesis uses the partition key as input -- to a hash function that maps the partition key and associated data to a -- specific shard. Specifically, an MD5 hash function is used to map -- partition keys to 128-bit integer values and to map associated data -- records to shards. As a result of this hashing mechanism, all data -- records with the same partition key map to the same shard within the -- stream. prrePartitionKey :: Lens' PutRecordsRequestEntry Text prrePartitionKey = lens _prrePartitionKey (\ s a -> s{_prrePartitionKey = a}); instance ToJSON PutRecordsRequestEntry where toJSON PutRecordsRequestEntry'{..} = object (catMaybes [("ExplicitHashKey" .=) <$> _prreExplicitHashKey, Just ("Data" .= _prreData), Just ("PartitionKey" .= _prrePartitionKey)]) -- | Represents the result of an individual record from a 'PutRecords' -- request. A record that is successfully added to your Amazon Kinesis -- stream includes SequenceNumber and ShardId in the result. A record that -- fails to be added to your Amazon Kinesis stream includes ErrorCode and -- ErrorMessage in the result. -- -- /See:/ 'putRecordsResultEntry' smart constructor. data PutRecordsResultEntry = PutRecordsResultEntry' { _prreSequenceNumber :: !(Maybe Text) , _prreErrorCode :: !(Maybe Text) , _prreErrorMessage :: !(Maybe Text) , _prreShardId :: !(Maybe Text) } deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'PutRecordsResultEntry' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'prreSequenceNumber' -- -- * 'prreErrorCode' -- -- * 'prreErrorMessage' -- -- * 'prreShardId' putRecordsResultEntry :: PutRecordsResultEntry putRecordsResultEntry = PutRecordsResultEntry' { _prreSequenceNumber = Nothing , _prreErrorCode = Nothing , _prreErrorMessage = Nothing , _prreShardId = Nothing } -- | The sequence number for an individual record result. prreSequenceNumber :: Lens' PutRecordsResultEntry (Maybe Text) prreSequenceNumber = lens _prreSequenceNumber (\ s a -> s{_prreSequenceNumber = a}); -- | The error code for an individual record result. 'ErrorCodes' can be -- either 'ProvisionedThroughputExceededException' or 'InternalFailure'. prreErrorCode :: Lens' PutRecordsResultEntry (Maybe Text) prreErrorCode = lens _prreErrorCode (\ s a -> s{_prreErrorCode = a}); -- | The error message for an individual record result. An 'ErrorCode' value -- of 'ProvisionedThroughputExceededException' has an error message that -- includes the account ID, stream name, and shard ID. An 'ErrorCode' value -- of 'InternalFailure' has the error message -- '\"Internal Service Failure\"'. prreErrorMessage :: Lens' PutRecordsResultEntry (Maybe Text) prreErrorMessage = lens _prreErrorMessage (\ s a -> s{_prreErrorMessage = a}); -- | The shard ID for an individual record result. prreShardId :: Lens' PutRecordsResultEntry (Maybe Text) prreShardId = lens _prreShardId (\ s a -> s{_prreShardId = a}); instance FromJSON PutRecordsResultEntry where parseJSON = withObject "PutRecordsResultEntry" (\ x -> PutRecordsResultEntry' <$> (x .:? "SequenceNumber") <*> (x .:? "ErrorCode") <*> (x .:? "ErrorMessage") <*> (x .:? 
"ShardId")) -- | The unit of data of the Amazon Kinesis stream, which is composed of a -- sequence number, a partition key, and a data blob. -- -- /See:/ 'record' smart constructor. data Record = Record' { _rApproximateArrivalTimestamp :: !(Maybe POSIX) , _rSequenceNumber :: !Text , _rData :: !Base64 , _rPartitionKey :: !Text } deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'Record' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'rApproximateArrivalTimestamp' -- -- * 'rSequenceNumber' -- -- * 'rData' -- -- * 'rPartitionKey' record :: Text -- ^ 'rSequenceNumber' -> ByteString -- ^ 'rData' -> Text -- ^ 'rPartitionKey' -> Record record pSequenceNumber_ pData_ pPartitionKey_ = Record' { _rApproximateArrivalTimestamp = Nothing , _rSequenceNumber = pSequenceNumber_ , _rData = _Base64 # pData_ , _rPartitionKey = pPartitionKey_ } -- | The approximate time that the record was inserted into the stream. rApproximateArrivalTimestamp :: Lens' Record (Maybe UTCTime) rApproximateArrivalTimestamp = lens _rApproximateArrivalTimestamp (\ s a -> s{_rApproximateArrivalTimestamp = a}) . mapping _Time; -- | The unique identifier of the record in the stream. rSequenceNumber :: Lens' Record Text rSequenceNumber = lens _rSequenceNumber (\ s a -> s{_rSequenceNumber = a}); -- | The data blob. The data in the blob is both opaque and immutable to the -- Amazon Kinesis service, which does not inspect, interpret, or change the -- data in the blob in any way. When the data blob (the payload before -- base64-encoding) is added to the partition key size, the total size must -- not exceed the maximum record size (1 MB). -- -- /Note:/ This 'Lens' automatically encodes and decodes Base64 data, -- despite what the AWS documentation might say. -- The underlying isomorphism will encode to Base64 representation during -- serialisation, and decode from Base64 representation during deserialisation. -- This 'Lens' accepts and returns only raw unencoded data. rData :: Lens' Record ByteString rData = lens _rData (\ s a -> s{_rData = a}) . _Base64; -- | Identifies which shard in the stream the data record is assigned to. rPartitionKey :: Lens' Record Text rPartitionKey = lens _rPartitionKey (\ s a -> s{_rPartitionKey = a}); instance FromJSON Record where parseJSON = withObject "Record" (\ x -> Record' <$> (x .:? "ApproximateArrivalTimestamp") <*> (x .: "SequenceNumber") <*> (x .: "Data") <*> (x .: "PartitionKey")) -- | The range of possible sequence numbers for the shard. -- -- /See:/ 'sequenceNumberRange' smart constructor. data SequenceNumberRange = SequenceNumberRange' { _snrEndingSequenceNumber :: !(Maybe Text) , _snrStartingSequenceNumber :: !Text } deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'SequenceNumberRange' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'snrEndingSequenceNumber' -- -- * 'snrStartingSequenceNumber' sequenceNumberRange :: Text -- ^ 'snrStartingSequenceNumber' -> SequenceNumberRange sequenceNumberRange pStartingSequenceNumber_ = SequenceNumberRange' { _snrEndingSequenceNumber = Nothing , _snrStartingSequenceNumber = pStartingSequenceNumber_ } -- | The ending sequence number for the range. Shards that are in the OPEN -- state have an ending sequence number of 'null'. 
snrEndingSequenceNumber :: Lens' SequenceNumberRange (Maybe Text) snrEndingSequenceNumber = lens _snrEndingSequenceNumber (\ s a -> s{_snrEndingSequenceNumber = a}); -- | The starting sequence number for the range. snrStartingSequenceNumber :: Lens' SequenceNumberRange Text snrStartingSequenceNumber = lens _snrStartingSequenceNumber (\ s a -> s{_snrStartingSequenceNumber = a}); instance FromJSON SequenceNumberRange where parseJSON = withObject "SequenceNumberRange" (\ x -> SequenceNumberRange' <$> (x .:? "EndingSequenceNumber") <*> (x .: "StartingSequenceNumber")) -- | A uniquely identified group of data records in an Amazon Kinesis stream. -- -- /See:/ 'shard' smart constructor. data Shard = Shard' { _sAdjacentParentShardId :: !(Maybe Text) , _sParentShardId :: !(Maybe Text) , _sShardId :: !Text , _sHashKeyRange :: !HashKeyRange , _sSequenceNumberRange :: !SequenceNumberRange } deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'Shard' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'sAdjacentParentShardId' -- -- * 'sParentShardId' -- -- * 'sShardId' -- -- * 'sHashKeyRange' -- -- * 'sSequenceNumberRange' shard :: Text -- ^ 'sShardId' -> HashKeyRange -- ^ 'sHashKeyRange' -> SequenceNumberRange -- ^ 'sSequenceNumberRange' -> Shard shard pShardId_ pHashKeyRange_ pSequenceNumberRange_ = Shard' { _sAdjacentParentShardId = Nothing , _sParentShardId = Nothing , _sShardId = pShardId_ , _sHashKeyRange = pHashKeyRange_ , _sSequenceNumberRange = pSequenceNumberRange_ } -- | The shard Id of the shard adjacent to the shard\'s parent. sAdjacentParentShardId :: Lens' Shard (Maybe Text) sAdjacentParentShardId = lens _sAdjacentParentShardId (\ s a -> s{_sAdjacentParentShardId = a}); -- | The shard Id of the shard\'s parent. sParentShardId :: Lens' Shard (Maybe Text) sParentShardId = lens _sParentShardId (\ s a -> s{_sParentShardId = a}); -- | The unique identifier of the shard within the Amazon Kinesis stream. sShardId :: Lens' Shard Text sShardId = lens _sShardId (\ s a -> s{_sShardId = a}); -- | The range of possible hash key values for the shard, which is a set of -- ordered contiguous positive integers. sHashKeyRange :: Lens' Shard HashKeyRange sHashKeyRange = lens _sHashKeyRange (\ s a -> s{_sHashKeyRange = a}); -- | The range of possible sequence numbers for the shard. sSequenceNumberRange :: Lens' Shard SequenceNumberRange sSequenceNumberRange = lens _sSequenceNumberRange (\ s a -> s{_sSequenceNumberRange = a}); instance FromJSON Shard where parseJSON = withObject "Shard" (\ x -> Shard' <$> (x .:? "AdjacentParentShardId") <*> (x .:? "ParentShardId") <*> (x .: "ShardId") <*> (x .: "HashKeyRange") <*> (x .: "SequenceNumberRange")) -- | Represents the output for 'DescribeStream'. -- -- /See:/ 'streamDescription' smart constructor. data StreamDescription = StreamDescription' { _sdStreamName :: !Text , _sdStreamARN :: !Text , _sdStreamStatus :: !StreamStatus , _sdShards :: ![Shard] , _sdHasMoreShards :: !Bool } deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'StreamDescription' with the minimum fields required to make a request. 
-- -- Use one of the following lenses to modify other fields as desired: -- -- * 'sdStreamName' -- -- * 'sdStreamARN' -- -- * 'sdStreamStatus' -- -- * 'sdShards' -- -- * 'sdHasMoreShards' streamDescription :: Text -- ^ 'sdStreamName' -> Text -- ^ 'sdStreamARN' -> StreamStatus -- ^ 'sdStreamStatus' -> Bool -- ^ 'sdHasMoreShards' -> StreamDescription streamDescription pStreamName_ pStreamARN_ pStreamStatus_ pHasMoreShards_ = StreamDescription' { _sdStreamName = pStreamName_ , _sdStreamARN = pStreamARN_ , _sdStreamStatus = pStreamStatus_ , _sdShards = mempty , _sdHasMoreShards = pHasMoreShards_ } -- | The name of the stream being described. sdStreamName :: Lens' StreamDescription Text sdStreamName = lens _sdStreamName (\ s a -> s{_sdStreamName = a}); -- | The Amazon Resource Name (ARN) for the stream being described. sdStreamARN :: Lens' StreamDescription Text sdStreamARN = lens _sdStreamARN (\ s a -> s{_sdStreamARN = a}); -- | The current status of the stream being described. -- -- The stream status is one of the following states: -- -- - 'CREATING' - The stream is being created. Amazon Kinesis immediately -- returns and sets 'StreamStatus' to 'CREATING'. -- - 'DELETING' - The stream is being deleted. The specified stream is in -- the 'DELETING' state until Amazon Kinesis completes the deletion. -- - 'ACTIVE' - The stream exists and is ready for read and write -- operations or deletion. You should perform read and write operations -- only on an 'ACTIVE' stream. -- - 'UPDATING' - Shards in the stream are being merged or split. Read -- and write operations continue to work while the stream is in the -- 'UPDATING' state. sdStreamStatus :: Lens' StreamDescription StreamStatus sdStreamStatus = lens _sdStreamStatus (\ s a -> s{_sdStreamStatus = a}); -- | The shards that comprise the stream. sdShards :: Lens' StreamDescription [Shard] sdShards = lens _sdShards (\ s a -> s{_sdShards = a}) . _Coerce; -- | If set to 'true', more shards in the stream are available to describe. sdHasMoreShards :: Lens' StreamDescription Bool sdHasMoreShards = lens _sdHasMoreShards (\ s a -> s{_sdHasMoreShards = a}); instance FromJSON StreamDescription where parseJSON = withObject "StreamDescription" (\ x -> StreamDescription' <$> (x .: "StreamName") <*> (x .: "StreamARN") <*> (x .: "StreamStatus") <*> (x .:? "Shards" .!= mempty) <*> (x .: "HasMoreShards")) -- | Metadata assigned to the stream, consisting of a key-value pair. -- -- /See:/ 'tag' smart constructor. data Tag = Tag' { _tagValue :: !(Maybe Text) , _tagKey :: !Text } deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'Tag' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'tagValue' -- -- * 'tagKey' tag :: Text -- ^ 'tagKey' -> Tag tag pKey_ = Tag' { _tagValue = Nothing , _tagKey = pKey_ } -- | An optional string, typically used to describe or define the tag. -- Maximum length: 256 characters. Valid characters: Unicode letters, -- digits, white space, _ . \/ = + - % \' tagValue :: Lens' Tag (Maybe Text) tagValue = lens _tagValue (\ s a -> s{_tagValue = a}); -- | A unique identifier for the tag. Maximum length: 128 characters. Valid -- characters: Unicode letters, digits, white space, _ . \/ = + - % \' tagKey :: Lens' Tag Text tagKey = lens _tagKey (\ s a -> s{_tagKey = a}); instance FromJSON Tag where parseJSON = withObject "Tag" (\ x -> Tag' <$> (x .:? "Value") <*> (x .: "Key"))
olorin/amazonka
amazonka-kinesis/gen/Network/AWS/Kinesis/Types/Product.hs
mpl-2.0
18,664
0
15
4,004
2,801
1,656
1,145
294
1
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-matches #-} -- Derived from AWS service descriptions, licensed under Apache 2.0. -- | -- Module : Network.AWS.IAM.UpdateGroup -- Copyright : (c) 2013-2015 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Updates the name and\/or the path of the specified group. -- -- You should understand the implications of changing a group\'s path or -- name. For more information, see -- <http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_WorkingWithGroupsAndUsers.html Renaming Users and Groups> -- in the /IAM User Guide/. -- -- To change a group name the requester must have appropriate permissions -- on both the source object and the target object. For example, to change -- Managers to MGRs, the entity making the request must have permission on -- Managers and MGRs, or must have permission on all (*). For more -- information about permissions, see -- <http://docs.aws.amazon.com/IAM/latest/UserGuide/PermissionsAndPolicies.html Permissions and Policies>. -- -- /See:/ <http://docs.aws.amazon.com/IAM/latest/APIReference/API_UpdateGroup.html AWS API Reference> for UpdateGroup. module Network.AWS.IAM.UpdateGroup ( -- * Creating a Request updateGroup , UpdateGroup -- * Request Lenses , ugNewGroupName , ugNewPath , ugGroupName -- * Destructuring the Response , updateGroupResponse , UpdateGroupResponse ) where import Network.AWS.IAM.Types import Network.AWS.IAM.Types.Product import Network.AWS.Prelude import Network.AWS.Request import Network.AWS.Response -- | /See:/ 'updateGroup' smart constructor. data UpdateGroup = UpdateGroup' { _ugNewGroupName :: !(Maybe Text) , _ugNewPath :: !(Maybe Text) , _ugGroupName :: !Text } deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'UpdateGroup' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'ugNewGroupName' -- -- * 'ugNewPath' -- -- * 'ugGroupName' updateGroup :: Text -- ^ 'ugGroupName' -> UpdateGroup updateGroup pGroupName_ = UpdateGroup' { _ugNewGroupName = Nothing , _ugNewPath = Nothing , _ugGroupName = pGroupName_ } -- | New name for the group. Only include this if changing the group\'s name. ugNewGroupName :: Lens' UpdateGroup (Maybe Text) ugNewGroupName = lens _ugNewGroupName (\ s a -> s{_ugNewGroupName = a}); -- | New path for the group. Only include this if changing the group\'s path. ugNewPath :: Lens' UpdateGroup (Maybe Text) ugNewPath = lens _ugNewPath (\ s a -> s{_ugNewPath = a}); -- | Name of the group to update. If you\'re changing the name of the group, -- this is the original name. 
ugGroupName :: Lens' UpdateGroup Text ugGroupName = lens _ugGroupName (\ s a -> s{_ugGroupName = a}); instance AWSRequest UpdateGroup where type Rs UpdateGroup = UpdateGroupResponse request = postQuery iAM response = receiveNull UpdateGroupResponse' instance ToHeaders UpdateGroup where toHeaders = const mempty instance ToPath UpdateGroup where toPath = const "/" instance ToQuery UpdateGroup where toQuery UpdateGroup'{..} = mconcat ["Action" =: ("UpdateGroup" :: ByteString), "Version" =: ("2010-05-08" :: ByteString), "NewGroupName" =: _ugNewGroupName, "NewPath" =: _ugNewPath, "GroupName" =: _ugGroupName] -- | /See:/ 'updateGroupResponse' smart constructor. data UpdateGroupResponse = UpdateGroupResponse' deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'UpdateGroupResponse' with the minimum fields required to make a request. -- updateGroupResponse :: UpdateGroupResponse updateGroupResponse = UpdateGroupResponse'
olorin/amazonka
amazonka-iam/gen/Network/AWS/IAM/UpdateGroup.hs
mpl-2.0
4,270
0
11
855
533
325
208
68
1
{-
  Author: Rohit Jha
  File: PE005.hs
  July 7, 2013

  Problem 5:
  2520 is the smallest number that can be divided by each of the numbers
  from 1 to 10 without any remainder.
  What is the smallest positive number that is evenly divisible by all of
  the numbers from 1 to 20?
-}

main = putStrLn $ show $ foldr1 lcm [3,5,7,11,13,16,17,18,19]

{-
  232792560

  real  0m0.002s
  user  0m0.000s
  sys   0m0.000s
-}
rohitjha/ProjectEuler
Haskell/PE005.hs
unlicense
394
0
7
77
48
29
19
1
1
module TicTacToe where {-# LANGUAGE FlexibleInstances #-} import Data.Either import Safe import Data.Maybe import Data.List -- |Slots in the board can either be filled with Naughts or Crosses data Symbol = X | O deriving (Show, Eq) -- |Empty slots are referred to by their location on the board type Piece = Either Int Symbol -- |A set of three pieces is used to represent rows, columns, and diagonals type Three = [Piece] -- how do I constrain this to a length of three? -- |The game board is made up of three rows of three pieces each. data Board = Board Three Three Three deriving (Eq) --Thank you to http://projects.haskell.org/operational/examples/TicTacToe.hs.html for the show function instance Show Board where show board = unlines . surround "+---+---+---+" . map (concat . surround "|". map showSquare) $ (rows board) where surround x xs = [x] ++ intersperse x xs ++ [x] showSquare = either (\n -> " " ++ show n ++ " ") (\n -> color n) -- | \ESC[ for ANSI escape esc :: Int -> String esc i = concat ["\ESC[", show i, "m"] -- | Black=30, Red=31, Green=32, Yellow=33, Blue=34, Magenta=35, Cyan=36, White=37 color :: Symbol -> String color s | s == X = esc 32++" "++show s++" "++esc 0 | otherwise = esc 31++" "++show s++" "++esc 0 -- |Convenience function for constructing an empty board emptyBoard :: Board emptyBoard = Board [Left 1, Left 2, Left 3] [Left 4, Left 5, Left 6] [Left 7, Left 8, Left 9] -- |Given either a row, column, or diagonal, it checks whether it is entirely filled with naughts or crosses full :: Three -> Bool full ts@[a,b,c] = noLefts && allEqual where noLefts = foldl (\acc curr -> acc && (isRight curr)) True ts allEqual = a == b && b == c -- |Given a game board, check whether the game is over because someone won won :: Board -> Bool won b = foldl (\acc curr -> acc || (full curr)) False ((rows b) ++ (cols b) ++ (diags b)) -- |Given a game board, check whether the game is over due to a draw draw :: Board -> Bool draw b = length (possibleMoves b) == 0 -- |Message to display to the user about the results of the game winner :: Board -> String winner b = if length winnerType > 0 then head winnerType else "It was a draw!" where allConfigs = ((rows b) ++ (cols b) ++ (diags b)) winnerType = [if a == (Right O) then "The computer wins!" else "You win!" 
| curr@[a,b,c] <- allConfigs, full curr] -- |Extract rows from game board rows :: Board -> [Three] rows (Board x y z) = [x, y, z] -- |Extract columns from game board cols :: Board -> [Three] cols (Board [a, b, c] [d, e, f] [g, h, i]) = [[a, d, g], [b, e, h], [c, f, i]] -- |Extract diagonals from game board diags :: Board -> [Three] diags (Board [a, _, c] [_, e, _] [g, _, i]) = [[a, e, i], [c, e, g]] -- |List of places where a piece can be placed possibleMoves :: Board -> [Piece] possibleMoves board = filter isLeft (boardToList board) -- |Helper function to convert a board into a list of values boardToList :: Board -> [Piece] boardToList (Board x y z) = x ++ y ++ z -- |Helper function to convert a list of values into a board listToBoard :: [Piece] -> Board listToBoard [a,b,c,d,e,f,g,h,i] = Board [a,b,c] [d,e,f] [g,h,i] -- |Function to update the game board with a new value at a specified point findAndReplace :: Board -> Piece -> Piece -> Board findAndReplace b p1 p2 = listToBoard [if x==p1 then p2 else x | x <- bl] where bl = boardToList b -- |Check if O's can immediately win, and if so, do it winningOMove :: Board -> Maybe Board winningOMove b = headMay [findAndReplace b p (Right O) | p <- (possibleMoves b), won (findAndReplace b p (Right O))] -- |Check if X's can immediately win, and if so, block it blockXWin :: Board -> Maybe Board blockXWin b = headMay [findAndReplace b p (Right O) | p <- (possibleMoves b), won (findAndReplace b p (Right X))] -- |Check whether a board has been forked isFork :: Board -> Bool isFork b = 2 == length [findAndReplace b p (Right O) | p <- (possibleMoves b), won (findAndReplace b p (Right O))] -- |Check if O's can make a fork, and if so, do it forkO :: Board -> Maybe Board forkO b = headMay [findAndReplace b p (Right O) | p <- (possibleMoves b), isFork (findAndReplace b p (Right O))] -- |Check if X's can make a fork, and if so, block it blockXFork :: Board -> Maybe Board blockXFork b = headMay [findAndReplace b p (Right O) | p <- (possibleMoves b), isFork (findAndReplace b p (Right X))] -- |Decision tree for AI that will go down the list to make its move makeOMove :: Board -> Board makeOMove board@(Board x@[a, b, c] y@[d, e, f] z@[g, h, i]) | isJust (winningOMove board) = fromJust (winningOMove board) | isJust (blockXWin board) = fromJust (blockXWin board) | isJust (forkO board) = fromJust (forkO board) | isJust (blockXFork board) = fromJust (blockXFork board) | elem e (possibleMoves board) = findAndReplace board e (Right O) | otherwise = if length (possibleMoves board) > 0 then findAndReplace board (head (possibleMoves board)) (Right O) else board --This should not happen
2016rshah/Tic-Hack-Toe
src/TicTacToe.hs
apache-2.0
5,056
22
13
1,042
1,829
974
855
-1
-1
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE Rank2Types #-} -- Copyright 2014 (c) Diego Souza <[email protected]> -- -- Licensed under the Apache License, Version 2.0 (the "License"); -- you may not use this file except in compliance with the License. -- You may obtain a copy of the License at -- -- http://www.apache.org/licenses/LICENSE-2.0 -- -- Unless required by applicable law or agreed to in writing, software -- distributed under the License is distributed on an "AS IS" BASIS, -- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -- See the License for the specific language governing permissions and -- limitations under the License. module Leela.Data.LQL.Read ( Source (..) , parseLQL , parseLQL1 , parseUsing , loads , chkloads ) where import Data.Word import qualified Data.Vector as V import Data.Monoid (mconcat) import Control.Monad import qualified Data.Sequence as S import Leela.Data.LQL import Leela.Data.Time import qualified Data.ByteString as B import Leela.Data.Types import Leela.Data.Funclib import Control.Applicative import Leela.Data.Pipeline import qualified Data.ByteString.UTF8 as U import qualified Data.ByteString.Lazy as L import Control.Monad.Identity import Data.Attoparsec.ByteString as A import Data.Attoparsec.ByteString.Char8 (decimal, signed, double) data Direction a = L a | R a | B a class Source a where bytestring :: a -> B.ByteString fromInt :: Num b => Int -> b fromInt = fromIntegral isSpace :: Parser Bool isSpace = liftM (== Just 0x20) peekWord8 hardspace :: Parser () hardspace = void $ word8 0x20 parseNode :: Parser (Kind, Node) parseNode = do (Kind kind) <- Kind <$> qstring 64 0x28 0x3a (Node name) <- Node <$> qstring 512 0x3a 0x29 when (L.null kind) (fail "kind must not be null") when (L.null name) (fail "name must not be null") return (Kind kind, Node name) parseTree :: Parser (Maybe User, Tree) parseTree = do name <- qstring 512 0x28 0x29 buildResult (L.break (== 0x3a) name) where buildResult (left, right) | L.null right = return (Nothing, Tree left) | otherwise = case (L.take 2 right) of "::" -> liftM2 (,) (Just <$> User <$> (validate left)) (Tree <$> (validate $ L.drop 2 right)) _ -> fail "parseTree: syntax error" validate s | 0x3a `L.elem` s = fail "parseTree: syntax error" | otherwise = return s parseLabel :: Parser Label parseLabel = Label <$> qstring 512 0x5b 0x5d parseAttr :: Parser Attr parseAttr = Attr <$> qstring 512 0x22 0x22 parseGUID :: Parser GUID parseGUID = guidFromBS <$> (A.take 36) parseGUIDOrStar :: Parser (Maybe GUID) parseGUIDOrStar = (Just <$> parseGUID) <|> ("*" *> return Nothing) parseGUIDAttr :: Parser (GUID, Attr) parseGUIDAttr = do g <- parseGUID hardspace k <- parseAttr return (g, k) parseMaybeGUID :: Parser (Maybe GUID) parseMaybeGUID = ("()" *> return Nothing) <|> (Just <$> parseGUID) parseNodeSelector :: Parser Bool parseNodeSelector = ("()" *> return False) <|> ("(++)" *> return True) qstring :: Int -> Word8 -> Word8 -> Parser L.ByteString qstring limit l r = word8 l >> anyWord8 >>= loop (limit - 1) [] where loop lim acc x | x == r = return (L.pack $ reverse acc) | lim < 0 = fail "name is too long" | x == 0x5c = do c <- anyWord8 anyWord8 >>= loop (lim - 1) (c : acc) | otherwise = anyWord8 >>= loop (lim - 1) (x : acc) newline :: Parser () newline = void ((word8 0x2c >> word8 0x20) <|> word8 0x0a) separator :: Parser () separator = void (word8 0x0a <|> word8 0x20) semicolon :: Parser () semicolon = void $ word8 0x3b parseLink :: Parser (Direction Label) parseLink = do 
mdir <- word8 0x2d <|> word8 0x3c l <- option (Label L.empty) parseLabel f <- case mdir of 0x2d -> (const B <$> (word8 0x2d)) <|> (const R <$> (word8 0x3e)) 0x3c -> (const L <$> (word8 0x2d)) _ -> fail "parseLink" return (f l) parseRLink :: Parser Label parseRLink = do dlink <- parseLink case dlink of R l -> return l _ -> fail "parseRLink: wrong direction" parseMakeCreate :: Parser (S.Seq Journal) parseMakeCreate = do k <- parseGUID peekWord8 >>= go k [] where go a g (Just 0x20) = do l <- hardspace >> parseLink b <- hardspace >> parseGUID peekWord8 >>= go b (asLink g a b l) go _ g _ = return (mconcat $ map (\(a, l, b) -> (S.fromList [PutLabel a l, PutLink a l b])) g) asLink acc a b (L l) = (b, l, a) : acc asLink acc a b (R l) = (a, l, b) : acc asLink acc a b (B l) = (a, l, b) : (b, l, a) : acc endBy :: Parser a -> Parser b -> Parser a endBy = liftM2 const safeHead :: [a] -> Parser a safeHead [] = fail "parsing error" safeHead xs = return $ head xs parseQuery :: Parser (Matcher, [(Bool, GUID -> Matcher)]) parseQuery = do a <- parseGUID ok <- isSpace if ok then do q <- parseQuery1 False [] f <- safeHead q return (snd f a, tail q) else return (ByNode a, []) parseQuery1 :: Bool -> [(Bool, GUID -> Matcher)] -> Parser [(Bool, GUID -> Matcher)] parseQuery1 nilOk acc = do ok <- isSpace if ok then do hardspace l <- parseRLink nilOk' <- hardspace >> parseNodeSelector parseQuery1 nilOk' ((nilOk, ByLabel l) : acc) else return (reverse acc) nan :: Double nan = 0/0 inf :: Double inf = 1/0 parseDouble :: Parser Double parseDouble = "nan" *> return nan <|> "inf" *> return inf <|> "-inf" *> return (negate inf) <|> signed double parseValue :: Parser Value parseValue = do mw <- peekWord8 case mw of Just 0x22 -> Text <$> qstring (64 * 1024) 0x22 0x22 Just 0x28 -> Int32 <$> ("(int32 " *> (signed decimal `endBy` word8 0x29)) <|> Int64 <$> ("(int64 " *> (signed decimal `endBy` word8 0x29)) <|> UInt32 <$> ("(uint32 " *> (decimal `endBy` word8 0x29)) <|> UInt64 <$> ("(uint64 " *> (decimal `endBy` word8 0x29)) <|> Double <$> ("(double " *> (parseDouble `endBy` word8 0x29)) <|> "(bool true)" *> return (Bool True) <|> "(bool false)" *> return (Bool False) _ -> Double <$> parseDouble parseTimePoint :: Parser Time parseTimePoint = do _ <- word8 0x5b r <- fromSeconds <$> double _ <- word8 0x5d return r parseTimeRange :: Parser TimeRange parseTimeRange = do _ <- word8 0x5b t0 <- fromSeconds <$> double _ <- word8 0x3a t1 <- fromSeconds <$> double _ <- word8 0x5d return (Range t0 t1) parseWithStmt :: Parser [Option] parseWithStmt = "with " *> (parseOption `sepBy` (string ", ")) where parseOption = "ttl:" *> (TTL <$> decimal) <|> "max_data_points:" *> (MaxDataPoints <$> decimal) <|> "data" *> (Data <$> qstring 64 0x3a 0x3d <*> qstring 512 0x22 0x22) parseStmtMake :: Using -> Parser LQL parseStmtMake u = do _ <- string "make " at <- peekWord8 case at of Just 0x28 -> AlterStmt <$> S.singleton <$> uncurry (PutNode (targetUser u) (uTree u)) <$> parseNode Just _ -> AlterStmt <$> parseMakeCreate _ -> fail "bad make statement" parseStmtPath :: Parser LQL parseStmtPath = "path " *> (PathStmt <$> parseQuery) parseStmtName :: Using -> Parser LQL parseStmtName u = "name " *> (NameStmt u <$> (S.singleton <$> parseGUID)) parseStmtGUID :: Using -> Parser LQL parseStmtGUID u = "guid " *> (GUIDStmt u <$> (S.singleton <$> parseNode)) parseStmtStat :: Parser LQL parseStmtStat = "stat" *> return StatStmt parseArithOper :: Parser (Double -> Double -> Double) parseArithOper = do c <- anyWord8 case c of 0x2b -> return (+) 0x2d -> return (-) 0x2a 
-> return (*) 0x2f -> return (/) _ -> fail "parseArithOp: bad operator" parseCmpOper :: Parser (Double -> Double -> Bool) parseCmpOper = do c1 <- anyWord8 case c1 of 0x3e -> (word8 0x3d >> return (>=)) <|> return (>) 0x3c -> (word8 0x3d >> return (<=)) <|> return (<) 0x3d -> word8 0x3d >> return (==) _ -> fail "parseCmpOper: bad operator" parseFilterSection :: Parser (Func () (V.Vector (t, Double)) (V.Vector (t, Double))) parseFilterSection = liftFilter <$> (parseLeft <|> parseRight) where parseLeft = do _ <- word8 0x28 f <- parseCmpOper v <- hardspace >> double _ <- word8 0x29 return ((`f` v) . snd) parseRight = do _ <- word8 0x28 v <- double f <- hardspace >> parseCmpOper _ <- word8 0x29 return ((v `f`) . snd) parseMapSection :: Parser (Func () (V.Vector (t, Double)) (V.Vector (t, Double))) parseMapSection = liftMapV <$> fmap <$> (parseLeft <|> parseRight) where parseLeft = do _ <- word8 0x28 f <- parseArithOper v <- hardspace >> double _ <- word8 0x29 return (`f` v) parseRight = do _ <- word8 0x28 v <- double f <- hardspace >> parseArithOper _ <- word8 0x29 return (v `f`) parseMean :: Parser (Func (Maybe (Double, Double)) (V.Vector (t, Double)) (t, Double)) parseMean = "mean" *> return (pairV mean) parseHMean :: Parser (Func (Maybe (Double, Double)) (V.Vector (t, Double)) (t, Double)) parseHMean = "hmean" *> return (pairV hmean) parseEwma :: Parser (Func (Maybe Double) (V.Vector (t, Double)) (t, Double)) parseEwma = "(ewma " *> go where go = do f <- (pairV . ewma) <$> double _ <- word8 0x29 return f parseCount :: Parser (Func Double (V.Vector (t, Double)) (t, Double)) parseCount = "count" *> return (pairV countV) parseMax :: Parser (Func (Maybe Double) (V.Vector (t, Double)) (t, Double)) parseMax = "max" *> return (pairV $ liftFoldV max id) parseSum :: Parser (Func (Maybe Double) (V.Vector (t, Double)) (t, Double)) parseSum = "(+)" *> return (pairV $ liftFoldV (+) id) parseMul :: Parser (Func (Maybe Double) (V.Vector (t, Double)) (t, Double)) parseMul = "(*)" *> return (pairV $ liftFoldV (*) id) parseMin :: Parser (Func (Maybe Double) (V.Vector (t, Double)) (t, Double)) parseMin = "min" *> return (pairV $ liftFoldV min id) parseSqrt :: Parser (Func () (V.Vector (t, Double)) (V.Vector (t, Double))) parseSqrt = "sqrt" *> return (pairVV $ liftMapV sqrt) parseLog :: Parser (Func () (V.Vector (t, Double)) (V.Vector (t, Double))) parseLog = "log " *> (pairVV <$> liftMapV <$> logBase <$> double) parseAbs :: Parser (Func () (V.Vector (t, Double)) (V.Vector (t, Double))) parseAbs = "abs" *> return (pairVV $ liftMapV abs) parseCeil :: Parser (Func () (V.Vector (t, Double)) (V.Vector (t, Double))) parseCeil = "ceil" *> return (pairVV $ liftMapV (fromInt . ceiling)) parseFloor :: Parser (Func () (V.Vector (t, Double)) (V.Vector (t, Double))) parseFloor = "floor" *> return (pairVV $ liftMapV (fromInt . floor)) parseRound :: Parser (Func () (V.Vector (t, Double)) (V.Vector (t, Double))) parseRound = "round" *> return (pairVV $ liftMapV (fromInt . round)) parseTruncate :: Parser (Func () (V.Vector (t, Double)) (V.Vector (t, Double))) parseTruncate = "truncate" *> return (pairVV $ liftMapV (fromInt . truncate)) mkFilter :: (forall s. s -> Env m s) -> Parser (Func s (V.Vector (t, Double)) (V.Vector (t, Double))) -> Parser (Pipeline m (V.Vector (t, Double))) mkFilter mkEnv runParser = do f <- runParser return (Filter (mkEnv $ dump f) f) parseRFun :: (forall s. s -> Env m s) -> (forall s. 
Func s (V.Vector (t, Double)) (t, Double) -> Func s (V.Vector (t, Double)) (V.Vector (t, Double))) -> Parser (Pipeline m (V.Vector (t, Double))) parseRFun mkEnv mkFun = mkFilter mkEnv (mkFun <$> parseMax) <|> mkFilter mkEnv (mkFun <$> parseMin) <|> mkFilter mkEnv (mkFun <$> parseEwma) <|> mkFilter mkEnv (mkFun <$> parseMean) <|> mkFilter mkEnv (mkFun <$> parseCount) <|> mkFilter mkEnv (mkFun <$> parseHMean) parseMFun :: (forall s. s -> Env m s) -> Parser (Pipeline m (V.Vector (t, Double))) parseMFun mkEnv = mkFilter mkEnv parseAbs <|> mkFilter mkEnv parseLog <|> mkFilter mkEnv parseCeil <|> mkFilter mkEnv parseSqrt <|> mkFilter mkEnv parseFloor <|> mkFilter mkEnv parseRound <|> mkFilter mkEnv parseTruncate parseMap :: (forall s. s -> Env m s) -> Parser (Pipeline m (V.Vector (t, Double))) parseMap mkEnv = "map " *> (sectionFilter <|> parseRFun mkEnv iter <|> parseMFun mkEnv) where sectionFilter = do f <- parseMapSection return (Filter (mkEnv $ dump f) f) parseWindow :: (forall s. s -> Env m s) -> Parser (Pipeline m (V.Vector (t, Double))) parseWindow mkEnv = "window " *> mkFilter mkEnv winFun where winFun = (window <$> decimal <*> (hardspace >> parseMax)) <|> window <$> decimal <*> (hardspace >> parseMin) <|> window <$> decimal <*> (hardspace >> parseMul) <|> window <$> decimal <*> (hardspace >> parseSum) <|> window <$> decimal <*> (hardspace >> parseEwma) <|> window <$> decimal <*> (hardspace >> parseMean) <|> window <$> decimal <*> (hardspace >> parseCount) <|> window <$> decimal <*> (hardspace >> parseHMean) parseTimeWindow :: (forall s. s -> Env m s) -> Parser (Pipeline m (V.Vector (Time, Double))) parseTimeWindow mkEnv = "time-window " *> mkFilter mkEnv winFun where parseTimeCmp = do t <- fromSeconds <$> double return (\(a, _) (b, _) -> diff a b >= t) winFun = (windowBy <$> parseTimeCmp <*> (hardspace >> parseMax)) <|> windowBy <$> parseTimeCmp <*> (hardspace >> parseMin) <|> windowBy <$> parseTimeCmp <*> (hardspace >> parseMul) <|> windowBy <$> parseTimeCmp <*> (hardspace >> parseSum) <|> windowBy <$> parseTimeCmp <*> (hardspace >> parseEwma) <|> windowBy <$> parseTimeCmp <*> (hardspace >> parseMean) <|> windowBy <$> parseTimeCmp <*> (hardspace >> parseCount) <|> windowBy <$> parseTimeCmp <*> (hardspace >> parseHMean) parseFilter :: (forall s. s -> Env m s) -> Parser (Pipeline m (V.Vector (t, Double))) parseFilter mkEnv = "filter " *> filterFun where filterFun = do f <- parseFilterSection return (Filter (mkEnv $ dump f) f) parseReduce :: (forall s. 
s -> Env m s) -> Parser (Pipeline m (V.Vector (t, Double))) parseReduce mkEnv = "reduce " *> (sectionFilter <|> parseRFun mkEnv (fmap V.singleton)) where sectionFilter = do f <- parseMul <|> parseSum return (Filter (mkEnv $ dump f) (fmap V.singleton f)) parseStmtKill :: Parser LQL parseStmtKill = "kill " *> doParse where doParse = do ma <- parseMaybeGUID dl <- hardspace >> parseLink mb <- hardspace >> parseMaybeGUID case (ma, mb, dl) of (Just a, Nothing, R l) -> return $ AlterStmt (S.singleton $ DelLink a l Nothing) (Just a, Just b, R l) -> return $ AlterStmt (S.singleton $ DelLink a l (Just b)) (Nothing, Just b, L l) -> return $ AlterStmt (S.singleton $ DelLink b l Nothing) (Just a, Just b, L l) -> return $ AlterStmt (S.singleton $ DelLink b l (Just a)) (Just a, Just b, B l) -> return $ AlterStmt (S.fromList [DelLink a l (Just b), DelLink b l (Just a)]) _ -> fail "invalid kill command" parsePipeline :: Parser [Pipeline Identity (V.Vector (Time, Double))] parsePipeline = do f <- parseMap mkEnv <|> parseFilter mkEnv <|> parseReduce mkEnv <|> parseWindow mkEnv <|> parseTimeWindow mkEnv fs <- option [] (" | " *> parsePipeline) return (f : fs) where mkEnv s = Env (return s) (const $ return ()) parseGrepAttr :: Parser Grep parseGrepAttr = do guid <- parseGUIDOrStar attr <- hardspace >> parseAttr next <- peekWord8 case next of Just 0x20 -> " []" *> return (GrepTAttr guid attr) _ -> return $ GrepKAttr guid attr parseGrepMake :: Parser Grep parseGrepMake = do next <- hardspace >> peekWord8 case next of Just 0x28 -> do kind <- Kind <$> qstring 512 0x22 0x22 node <- Node <$> qstring 512 0x22 0x22 return $ GrepMakeVertex kind node _ -> do guid <- parseGUIDOrStar label <- Label <$> qstring 512 0x22 0x22 guidB <- parseGUIDOrStar return $ GrepMakeLink guid label guidB parseGrepKill :: Parser Grep parseGrepKill = do guid <- parseGUIDOrStar next <- peekWord8 case next of Just 0x20 -> do re <- hardspace >> parseLabel guidB <- hardspace >> parseGUIDOrStar return $ GrepKillLink guid re guidB _ -> return $ GrepKillVertex guid parseGrepStmt :: Using -> Parser LQL parseGrepStmt u = "grep attr " *> (GrepStmt u <$> parseGrepAttr) <|> "grep make " *> (GrepStmt u <$> parseGrepMake) <|> "grep kill " *> (GrepStmt u <$> parseGrepKill) parseStmtAttr :: Parser LQL parseStmtAttr = "attr put " *> parsePutAttr <|> "attr last * " *> parseLastAttrAll <|> "attr last " *> parseLastAttrOn <|> "attr get " *> parseGetAttr <|> "attr del " *> parseDelAttr <|> "attr kls " *> parseListAttr KAttrListStmt <|> "attr tls " *> parseListAttr TAttrListStmt where parsePutAttr = do (g, k) <- parseGUIDAttr token <- hardspace >> peekWord8 case token of Just 0x5b -> parsePutTAttr g k _ -> parsePutKAttr g k parseLastAttrAll = do a <- parseAttr return (TAttrLastStmt Nothing a) parseLastAttrOn = do g <- parseGUID a <- hardspace >> parseAttr return (TAttrLastStmt (Just g) a) parsePutKAttr g k = do v <- parseValue w <- option [] (hardspace >> parseWithStmt) return (AlterStmt (S.singleton $ PutKAttr g k v w)) parsePutTAttr g k = do t <- parseTimePoint v <- hardspace >> parseValue w <- option [] (hardspace >> parseWithStmt) return (AlterStmt (S.singleton $ PutTAttr g k t v w)) parseGetAttr = do (g, a) <- parseGUIDAttr (TAttrGetStmt g a <$> (hardspace >> parseTimeRange) <*> (option [] (" | " *> parsePipeline))) <|> (KAttrGetStmt g a <$> (option [] (hardspace >> parsePipeline))) parseListAttr kind = do (g, Attr a) <- parseGUIDAttr return (kind g (Attr <$> glob a)) parseDelAttr = do (g, k) <- parseGUIDAttr AlterStmt <$> (S.singleton <$> ((DelTAttr g k <$> 
(hardspace >> parseTimeRange)) <|> (return (DelKAttr g k)))) parseStmt :: Using -> Parser LQL parseStmt u = do w <- peekWord8 case w of Just 0x6d -> parseStmtMake u Just 0x67 -> parseStmtGUID u <|> parseGrepStmt u Just 0x70 -> parseStmtPath Just 0x6e -> parseStmtName u Just 0x6b -> parseStmtKill Just 0x73 -> parseStmtStat Just 0x61 -> parseStmtAttr _ -> fail "bad statement" parseStmts :: Using -> Parser [LQL] parseStmts u = groupLQL <$> parseStmt u `sepBy1` newline parseUsing :: User -> Parser Using parseUsing user = do (asUser, tree) <- "using " *> parseTree return (Using user tree asUser) parseLQL :: User -> Parser [LQL] parseLQL u = parseUsing u `endBy` separator >>= parseLQL1 parseLQL1 :: Using -> Parser [LQL] parseLQL1 u = parseStmts u `endBy` semicolon loads :: (Source i) => Parser a -> i -> Either String a loads p = parseOnly p . bytestring chkloads :: (Source i) => Parser a -> [i] -> Either String a chkloads _ [] = Left "empty data" chkloads p (x:xs) = go (parse p (bytestring x)) xs where go r [] = eitherResult (feed r B.empty) go r (i:is) = go (feed r (bytestring i)) is instance Source B.ByteString where bytestring = id instance Source String where bytestring = U.fromString instance Source L.ByteString where bytestring = L.toStrict
locaweb/leela
src/warpdrive/src/Leela/Data/LQL/Read.hs
apache-2.0
20,363
0
31
5,543
7,727
3,925
3,802
477
8
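The Leela.Data.LQL.Read module above only exposes parser entry points, so a small driver may help show how they are meant to be used. The sketch below is not part of the repository: the user name and the LQL text are made-up examples chosen to exercise the "using" clause and the "stat" statement, and it assumes Leela.Data.Types exports the User constructor (the parser above builds User values, so it should).

{-# LANGUAGE OverloadedStrings #-}
module Main where

import Leela.Data.LQL.Read (loads, parseLQL)
import Leela.Data.Types (User (..))

main :: IO ()
main =
  -- 'loads' runs an attoparsec parser over anything with a Source instance
  -- (String, strict or lazy ByteString). Here it parses one "stat" statement
  -- under a "using (tree)" clause.
  case loads (parseLQL (User "someuser")) ("using (sometree) stat;" :: String) of
    Left err    -> putStrLn ("parse error: " ++ err)
    Right stmts -> putStrLn ("parsed " ++ show (length stmts) ++ " statement(s)")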
{-# LANGUAGE FlexibleContexts #-} module CLSParser( module Parser , pCLSSts ) where import Control.Applicative hiding ((<|>)) import Types import Parser import Data.Int import Text.Parsec.Prim((<|>),(<?>)) import qualified Text.Parsec.Prim as P import qualified Text.Parsec.Combinator as P import qualified Text.Parsec.Char as P -- dir/file.cl`kernel[-DT=int]<4096x4096>(0:w,0) pCLSSts :: P [CLSSt] pCLSSts = P.sepBy1 pCLSSt pDelim where pDelim = P.option () (pSymbol ";" >> return ()) pCLSSt :: P CLSSt pCLSSt = pLetSt <|> pCallSt where -- let x = ..., y = ... pLetSt = withPos $ \p -> do pKeyword "let" bs <- pBindings return $! CLSStLet p bs pCallSt = withPos $ \p -> do c <- pCLSCall -- bs <- P.option [] $ pKeyword "where" >> pBindings return $! CLSStCall p c -- bs pBindings :: P [(String,Init)] pBindings = P.sepBy1 (P.try pBinding) (pSymbol ",") where pBinding = do nm <- pIdentifier pSymbol "=" arg <- pKernelArg return (nm,arg) pCLSCall :: P CLSCall pCLSCall = withPos $ \p -> do pfx <- P.option "" (P.try pFilePathPrefix) knm <- pIdentifier bopts <- P.option "" (P.try pBuildOptions) (glb,loc) <- pRange P.char '(' as <- P.sepBy pKernelArg (pSymbol ",") P.char ')' return $! CLSCall { clscPos = p , clscPath = pfx , clscKernel = knm , clscBuildOpts = bopts , clscGlobal = glb , clscLocal = loc , clscArgs = as } -- dir/file.cl` pFilePathPrefix :: P FilePath pFilePathPrefix = pComponent where pComponent :: P FilePath pComponent = do let pPathChar :: P Char pPathChar = P.alphaNum <|> P.oneOf ".,[]+-_@" p <- P.many1 pPathChar ps <- pEnd <|> pMorePath return $ p ++ ps pEnd :: P FilePath pEnd = P.char '`' >> return "" pMorePath :: P FilePath pMorePath = do P.char '/' <|> P.char '\\' p <- pComponent return $ "/" ++ p -- [-DT=int -I includes -cl-opt-disable] pBuildOptions :: P String pBuildOptions = body where body = do pSymbol "[" str <- P.many (P.try pBuildOptEsc <|> pBuildOptChar) pSymbol "]" return str pBuildOptEsc = P.char '\\' >> (P.char ']' <|> P.char '\\') pBuildOptChar = P.noneOf "]" -- rename NDRange to Size or Dim? -- -- dir/file.cl`kernel<4096x4096>(0:w,0) -- ^^^^^^^^^^^ pRange :: P (NDRange,NDRange) pRange = do pSymbol "<" glb <- pNDRange loc <- P.option NDRNull $ do pSymbol "," pNDRange pSymbol ">" return (glb,loc) -- 4096x4096 or 4096 or pNDRange :: P NDRange pNDRange = p1D where p1D = do x <- pInt P.try (pSymbol "x" >> p2D x) <|> return (NDR1D x) p2D x = do y <- pInt P.try (pSymbol "x" >> p3D x y) <|> return (NDR2D x y) p3D x y = do z <- pInt return (NDR3D x y z) -- <val> (':' bsize? battrs)? -- -- 0:w -- img("boo.bmp"):r -- 15 -- 0x123 -- {1,2,3,4} pKernelArg :: P Init pKernelArg = body where body = withPos $ \p ->do a <- pScaArg P.option a $ do P.char ':' msz <- P.optionMaybe pSizeExpr let pFinishBuf1 = do acc <- pBufAcc bt <- pBufTrans return $! InitBuf p a msz acc bt pFinishBuf2 = do acc <- pBufAcc bt <- pBufTrans return $! 
InitBuf p a msz acc bt P.try pFinishBuf1 <|> pFinishBuf2 pSizeExpr :: P SizeExpr pSizeExpr = do pSymbol "[" e <- pSizeExprBody pSymbol "]" return e pBufAcc = body <?> "buffer access: 'rw', 'r', or 'w'" where body = P.try ((P.char 'r' >> P.char 'w') >> return BufAccRW) <|> P.try ((P.char 'w' >> P.char 'r') >> return BufAccRW) <|> (P.char 'r' >> return BufAccR) <|> (P.char 'w' >> return BufAccW) pBufTrans = body <?> "transfer method: 'c', 'm', or 's'" where body = (P.char 'c' >> return BufTransC) <|> (P.char 'm' >> return BufTransM) <|> (P.char 's' >> return BufTransS) <|> return BufTransC -- [8*gx] pSizeExprBody :: P SizeExpr pSizeExprBody = pSizeExprAddSub where pSizeExprAddSub = withPos $ \p -> do e1 <- pSizeExprMulDivMod P.option e1 $ P.try $ do cons <- pAddOp e2 <- pSizeExprAddSub return $! cons p e1 e2 pAddOp = P.try (pSymbol "+" >> return SizeAdd) <|> (pSymbol "-" >> return SizeSub) pSizeExprMulDivMod = withPos $ \p -> do e1 <- pPrimary P.option e1 $ P.try $ do cons <- pMulOp e2 <- pSizeExprMulDivMod return $! cons p e1 e2 pMulOp = P.try (pSymbol "*" >> return SizeMul) <|> P.try (pSymbol "/" >> return SizeDiv) <|> (pSymbol "%" >> return SizeMod) pPrimary = pIntSize <|> pDimSize <|> pGrp <|> pSizeRef pDimSize = pGlb <|> pLoc where pGlb = withPos $ \p -> do P.char 'g' >> ((P.char 'x' >> return (SizeGlobalX p)) <|> (P.char 'y' >> return (SizeGlobalY p)) <|> (P.char 'z' >> return (SizeGlobalZ p))) pLoc = withPos $ \p -> do P.char 'l' >> ((P.char 'x' >> return (SizeLocalX p)) <|> (P.char 'y' >> return (SizeLocalY p)) <|> (P.char 'z' >> return (SizeLocalZ p))) pSizeRef = withPos $ \p -> do id <- pIdentifier return $! SizeRef p id pIntSize = withPos $ \p -> SizeLit p <$> pScaled pInt64 pGrp = do pSymbol "(" e <- pSizeExprAddSub pSymbol ")" return e pScaled :: Num a => P a -> P a pScaled par = do z <- par let pK = P.oneOf "kK" >> return (1000*z) pM = P.oneOf "mM" >> return (1000*1000*z) pB = P.oneOf "bB" >> return (1000*1000*1000*z) P.option z (pK <|> pM <|> pB) pScaArg :: P Init pScaArg = pRecord <|> P.try pFloating <|> pIntegral <|> pReference where pRecord = withPos $ \p -> do pSymbol "{" as <- P.sepBy1 pScaArg (pSymbol ",") pSymbol "}" return $ InitRec p as pIntegral = withPos $ \p -> do f <- P.option id (pSymbol "-" >> return negate) (InitInt p . f) <$> pScaled pInt64 pFloating = withPos $ \p -> do f <- P.option id (pSymbol "-" >> return negate) (InitFlt p . f) <$> pScaled pDouble pReference = withPos $ \p -> do InitRef p <$> pIdentifier
trbauer/clr
src/CLSParser.hs
bsd-2-clause
7,005
0
21
2,604
2,332
1,144
1,188
188
1
{-# LANGUAGE BangPatterns #-} module Main where import Criterion.Main import PregenKeys import qualified Crypto.Hash.SHA1 as SHA1 import Crypto.PubKey.RSA as RSA import Crypto.PubKey.RSA.PKCS15 as PKCS15 import Crypto.PubKey.RSA.OAEP as OAEP import Crypto.PubKey.RSA.PSS as PSS import Crypto.PubKey.HashDescr import Crypto.PubKey.ECC.ECDSA as ECDSA import Crypto.Random import Control.DeepSeq import qualified Data.ByteString as B right (Right r) = r right (Left _) = error "left received" instance NFData Signature where rnf (Signature r s) = rnf r `seq` rnf s main = do rng <- cprgCreate `fmap` createEntropyPool :: IO SystemRNG let !bs = B.replicate 32 0 !encryptedMsgPKCS = (right . fst . PKCS15.encrypt rng rsaPublickey) bs !encryptedMsgOAEP = (right . fst . OAEP.encrypt rng oaepParams rsaPublickey) bs !signedMsgPKCS = (right . PKCS15.sign Nothing hashDescrSHA1 rsaPrivatekey) bs !signedMsgPSS = (right . fst . PSS.sign rng Nothing pssParams rsaPrivatekey) bs privateKeySlow = rsaPrivatekey { RSA.private_p = 0, RSA.private_q = 0 } !blinder = fst $ generateBlinder rng (RSA.public_n rsaPublickey) oaepParams = OAEP.defaultOAEPParams SHA1.hash pssParams = PSS.defaultPSSParamsSHA1 ecdsaSignatureP = fst $ ECDSA.sign rng ecdsaPrivatekeyP SHA1.hash bs ecdsaSignatureB = fst $ ECDSA.sign rng ecdsaPrivatekeyB SHA1.hash bs defaultMain [ bgroup "RSA" [ bgroup "PKCS15" [ bench "encryption" $ nf (right . fst . PKCS15.encrypt rng rsaPublickey) bs , bgroup "decryption" [ bench "slow" $ nf (right . PKCS15.decrypt Nothing privateKeySlow) encryptedMsgPKCS , bench "fast" $ nf (right . PKCS15.decrypt Nothing rsaPrivatekey) encryptedMsgPKCS , bench "slow+blinding" $ nf (right . PKCS15.decrypt (Just blinder) privateKeySlow) encryptedMsgPKCS , bench "fast+blinding" $ nf (right . PKCS15.decrypt (Just blinder) rsaPrivatekey) encryptedMsgPKCS ] , bgroup "signing" [ bench "slow" $ nf (right . PKCS15.sign Nothing hashDescrSHA1 privateKeySlow) bs , bench "fast" $ nf (right . PKCS15.sign Nothing hashDescrSHA1 rsaPrivatekey) bs , bench "slow+blinding" $ nf (right . PKCS15.sign (Just blinder) hashDescrSHA1 privateKeySlow) bs , bench "fast+blinding" $ nf (right . PKCS15.sign (Just blinder) hashDescrSHA1 rsaPrivatekey) bs ] , bench "verify" $ nf (PKCS15.verify hashDescrSHA1 rsaPublickey bs) signedMsgPKCS ] , bgroup "OAEP" [ bench "encryption" $ nf (right . fst . OAEP.encrypt rng oaepParams rsaPublickey) bs , bgroup "decryption" [ bench "slow" $ nf (right . OAEP.decrypt Nothing oaepParams privateKeySlow) encryptedMsgOAEP , bench "fast" $ nf (right . OAEP.decrypt Nothing oaepParams rsaPrivatekey) encryptedMsgOAEP , bench "slow+blinding" $ nf (right . OAEP.decrypt (Just blinder) oaepParams privateKeySlow) encryptedMsgOAEP , bench "fast+blinding" $ nf (right . OAEP.decrypt (Just blinder) oaepParams rsaPrivatekey) encryptedMsgOAEP ] ] , bgroup "PSS" [ bgroup "signing" [ bench "slow" $ nf (right . fst . PSS.sign rng Nothing pssParams privateKeySlow) bs , bench "fast" $ nf (right . fst . PSS.sign rng Nothing pssParams rsaPrivatekey) bs , bench "slow+blinding" $ nf (right . fst . PSS.sign rng (Just blinder) pssParams privateKeySlow) bs , bench "fast+blinding" $ nf (right . fst . PSS.sign rng (Just blinder) pssParams rsaPrivatekey) bs ] , bench "verify" $ nf (PSS.verify pssParams rsaPublickey bs) signedMsgPSS ] ] , bgroup "ECDSA" [ bgroup "secp160r1" [ bench "sign" $ nf (fst . 
ECDSA.sign rng ecdsaPrivatekeyP SHA1.hash) bs , bench "verify" $ nf (ECDSA.verify SHA1.hash ecdsaPublickeyP ecdsaSignatureP) bs ] , bgroup "sect163k1" [ bench "sign" $ nf (fst . ECDSA.sign rng ecdsaPrivatekeyB SHA1.hash) bs , bench "verify" $ nf (ECDSA.verify SHA1.hash ecdsaPublickeyB ecdsaSignatureB) bs ] ] ]
vincenthz/hs-crypto-pubkey
Benchs/Bench.hs
bsd-2-clause
4,585
0
21
1,407
1,325
667
658
67
1
import Shelly import Prelude hiding (FilePath) import Text.Shakespeare.Text (lt) import qualified Data.Text.Lazy as LT import Data.Text.Lazy (Text) import Control.Monad (forM_) import System.Console.CmdArgs import Data.Maybe (fromMaybe) #if __GLASGOW_HASKELL__ < 704 import Data.Monoid (Monoid, mappend) infixr 5 <> (<>) :: Monoid m => m -> m -> m (<>) = mappend #else import Data.Monoid ((<>)) #endif data CmdOptions = CmdOptions { clean :: Bool, fast :: Bool, install_extra :: [String] } deriving (Show,Data,Typeable) {- determine what repo you're in dynamically by parsing the output of git remote -v. we need to handle a few platform-specific formats. if you adjust this line to support your platform, please note it here and ensure your fix handles all cases present. Linux - git version 1.7.7.4. using git@ or https, with or without .git extension. Mac OSX - git version 1.7.5.4. using git@ or https, with or without .git extension. origin [email protected]:yesodweb/scripts.git (fetch) origin https://github.com:yesodweb/scripts.git (fetch) origin [email protected]:yesodweb/scripts (fetch) origin https://github.com:yesodweb/scripts (fetch) -} determine_repo :: ShIO Text determine_repo = do mpkg <- get_env "YESODPKG" case mpkg of Just pkg -> return pkg Nothing -> do repo <- fmap LT.strip $ run "sh" ["-c", "git remote -v | sed '/^origin.*\\/\\([^ ]*\\) *(fetch)$/!d; s//\\1/; s/\\.git$//'"] when (LT.null repo) $ errorExit [lt| unable to determine yesod package to install via `git remote -v`. if you're not using git or otherwise need to manually define the yesod package name, set the YESODPKG environment variable: YESODPKG=hamlet ./script/install |] return repo test_pkg :: FilePath -> Bool -> Text -> ShIO () test_pkg cabal test pkg = do when test $ do echo $ "testing " <> pkg run_ cabal ["configure","-ftest","-ftest_export","--enable-tests","--disable-optimization","--disable-library-profiling"] run_ cabal ["build"] run_ cabal ["test"] if pkg == "./mongoDB-haskell" then echo "skipping check for mongoDB-haskell" -- stupid Cabal warning else run_ cabal ["check"] install_packages :: Text -- ^ repo -> FilePath -- ^ cabal -> ([Text] -> ShIO ()) -- ^ cabal install -> CmdOptions -> Bool -- ^ test -> [Text] -> ShIO () install_packages repo cabal cabal_install opts test pkgs = do let documentation = if fast opts then ["--disable-documentation"] else [] when (clean opts) $ do echo "cleaning packages" forM_ pkgs $ \pkg -> do chdir (fromText pkg) $ run_ cabal ["clean"] echo $ "installing package dependencies for " <> repo cabal_install $ "--force-reinstalls":"--only-dependencies":pkgs echo $ "installing packages: " <> LT.intercalate " " pkgs let i = cabal_install $ "--force-reinstalls":"-ftest_export": documentation ++ ["--ghc-options=-Wall"] ++ pkgs catchany_sh i $ \_ -> errorExit [lt| installation failure! Please try a clean build with: ./script/install --clean If you are peforming a clean install and haven't mucked with the code, Please report this error to the mail list at http://groups.google.com/group/yesodweb or on the issue tracker at http://github.com/yesodweb/#{repo}/issues |] echo "all packages installed. Doing a cabal-src-install and testing." 
msrc_install <- which "cabal-src-install" let src_install = case msrc_install of Just _ -> True Nothing -> False echo [lt|cabal-src-install #{show src_install}|] forM_ (filter (LT.isPrefixOf "./") pkgs) $ \pkg -> do chdir (fromText pkg) $ do when src_install $ catchany_sh (run_ "cabal-src-install" ["--src-only"]) $ \_ -> do echo "failed while creating an sdist for cabal-src" exit 1 test_pkg cabal test pkg {- deprecated, but perhaps still useful? install_individual_pkgs() { local pkg for pkg; do [[ -d "./$pkg" ]] || continue echo "Installing $pkg..." ( cd "./$pkg" $clean && $CABAL clean if ! $CABAL configure --ghc-options='-Wall'; then $CABAL install --only-dependencies $CABAL configure --ghc-options='-Wall' fi test_pkg $pkg if $fast; then $CABAL install --disable-documentation else $CABAL install fi which cabal-src-install && cabal-src-install --src-only ) done } -} main :: IO () main = shelly $ verbosely $ do -- allow an env var to override cabal <- fmap (fromText . fromMaybe "cabal") $ get_env "CABAL" let cabal_install = command_ cabal ["install"] repo <- determine_repo echo [lt|Installing for repo #{repo}...|] -- set the pkgs array to those appropriate for that repo pkgs <- fmap LT.lines $ readfile "sources.txt" when (null pkgs) $ errorExit [lt|no packages to install for repository #{repo}.|] opts <- liftIO $ cmdArgs $ CmdOptions {clean=False, fast=False, install_extra=def&=args} -- allow individual packages to be passed on the commandline let install = install_packages repo cabal cabal_install opts let extra = map LT.pack $ install_extra opts unless (null extra) $ cabal_install extra run_ "git" ["submodule", "init"] run_ "git" ["submodule", "update"] -- persistent is handled specially if repo == "persistent" then do echo "installing packages for the tests suites" cabal_install ["--force-reinstalls","HUnit","QuickCheck","file-location","hspec >= 1.3 && < 1.4"] -- install all persistent packages without tests install False pkgs echo "now running persistent tests" chdir "./persistent-test" $ do -- persistent-test is the only persistent package with tests test_pkg cabal True "persistent-test" else do echo "installing packages for the tests suites" cabal_install ["--force-reinstalls","HUnit","QuickCheck","hspec >= 1.3 && < 1.4", "blaze-html >= 0.5 && < 0.6"] echo [lt|installing #{repo} packages|] install True pkgs echo "" echo "Success: all packages installed and tested."
yesodweb/scripts
install.hs
bsd-2-clause
6,211
1
19
1,408
1,206
609
597
-1
-1
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ConstraintKinds #-} module Web.Auth0.Management( module Web.Auth0.Types, searchUsers, getUser, deleteUser, blockUser, createEmailUser, createPhoneUser, updateUserFrom, setEmail, setPhone, setAppMetadata, linkProfile ) where import Web.Auth0.Types import Web.Auth0.Common import Control.Lens (view) import Data.Aeson import Data.Maybe (maybeToList) import Data.Monoid ((<>)) import Network.HTTP.Nano import qualified Data.ByteString.Char8 as B type Auth0M m r e = (HttpM m r e, HasAuth0 r) -- |Search users based on a lucene query into user profile fields searchUsers :: (Auth0M m r e, FromJSON a, FromJSON b) => Maybe Query -> m [Profile' a b] searchUsers mQuery = do let q = fmap (B.unpack . renderQueryUrlEncoded) mQuery path = "api/v2/users?search_engine=v2&per_page=100" ++ maybe "" ("&q=" ++) q searchPages path 0 searchPages :: (Auth0M m r e, FromJSON a, FromJSON b) => String -> Int -> m [Profile' a b] searchPages bpath page = do let path = bpath ++ "&page=" ++ show page profs <- httpJSON =<< a0Req GET path NoRequestData case profs of [] -> return profs _ -> do profs' <- searchPages bpath (page + 1) return $ profs ++ profs' -- |Get a user by ID getUser :: (Auth0M m r e, FromJSON a, FromJSON b) => String -> m (Profile' a b) getUser uid = httpJSON =<< a0Req GET ("api/v2/users/"++uid) NoRequestData -- |Delete a user deleteUser :: Auth0M m r e => String -> m () deleteUser uid = http' =<< a0Req DELETE ("api/v2/users/"++uid) NoRequestData -- |Create an email user createEmailUser :: (Auth0M m r e, HasAuth0 r, FromJSON a, FromJSON b) => NewEmailUser -> m (Profile' a b) createEmailUser dta = httpJSON =<< a0Req POST "api/v2/users" (mkJSONData dta) -- |Create a phone user createPhoneUser :: (Auth0M m r e, HasAuth0 r, FromJSON a, FromJSON b) => NewPhoneUser -> m (Profile' a b) createPhoneUser dta = httpJSON =<< a0Req POST "api/v2/users" (mkJSONData dta) -- | Copy as much data fields from given 'Profile' as possible and update -- user specified by provided ID with it. updateUserFrom :: (Auth0M m r e, HasAuth0 r) => String -- ^ ID of user to update -> Profile -- ^ Where to get data from -> m () updateUserFrom uid profile = do clientID <- view auth0ClientID let val = object $ m "blocked" (view profileBlocked profile) <> m "email_verified" (view (profileData . profileEmailVerified) profile) <> m "email" (view (profileData . profileEmail) profile) <> m "phone_number" (view (profileData . profilePhoneNumber) profile) <> m "phone_verified" (view (profileData . profilePhoneNumberVerified) profile) <> m "user_metadata" (view profileUserMeta profile) <> m "app_metadata" (view profileAppMeta profile) <> m "username" (view (profileData . 
profileUsername) profile) <> m "client_id" (Just clientID) m k v = maybeToList ((k .=) <$> v) http' =<< a0Req PATCH ("api/v2/users/" ++ uid) (mkJSONData val) -- Set the blocked flag for a user blockUser :: (Auth0M m r e, HasAuth0 r, FromJSON a, FromJSON b) => String -> Bool -> m (Profile' a b) blockUser userId b= httpJSON =<< a0Req PATCH ("api/v2/users/"++userId) j where j = mkJSONData $ object ["blocked" .= b] -- |Set the email address of a profile setEmail :: (Auth0M m r e, FromJSON a, FromJSON b) => String -> String -> m (Profile' a b) setEmail uid email = do let dta = mkJSONData $ object ["email" .= email, "verify_email" .= False, "email_verified" .= True] httpJSON =<< a0Req PATCH ("api/v2/users/"++uid) dta -- |Set the phone number of a profile setPhone :: (Auth0M m r e, FromJSON a, FromJSON b) => String -> String -> m (Profile' a b) setPhone uid phone = do let dta = mkJSONData $ object ["phone_number" .= phone, "verify_phone_number" .= False, "phone_verified" .= True] httpJSON =<< a0Req PATCH ("api/v2/users/"++uid) dta setAppMetadata :: (Auth0M m r e, ToJSON d, FromJSON a, FromJSON b) => String -> d -> m (Profile' a b) setAppMetadata uid metaData = do let dta = mkJSONData $ object [ "app_metadata" .= toJSON metaData ] httpJSON =<< a0Req PATCH ("api/v2/users/"++uid) dta linkProfile :: Auth0M m r e => String -- ^ Provider (Twitter, Google, etc.) -> String -- ^ Profile id to link with -> String -- ^ User ID to link -> m () linkProfile provider rootID subID = do let val = object [ "provider" .= provider , "user_id" .= subID ] http' =<< a0Req POST ("api/v2/users/" ++ rootID ++ "/identities") (mkJSONData val)
collegevine/auth0
src/Web/Auth0/Management.hs
bsd-3-clause
4,771
0
22
1,137
1,542
794
748
89
2
-- | Support for command-line completion at the REPL and in the prover module Idris.Completion (replCompletion, proverCompletion) where import Idris.Core.Evaluate (ctxtAlist) import Idris.Core.TT import Idris.AbsSyntaxTree import Idris.Help import Idris.Colours import Idris.ParseHelpers(opChars) import Control.Monad.State.Strict import Data.List import Data.Maybe import Data.Char(toLower) import System.Console.Haskeline import System.Console.ANSI (Color) fst3 :: (a, b, c) -> a fst3 (a, b, c) = a commands = concatMap fst3 (help ++ extraHelp) -- | A specification of the arguments that tactics can take data TacticArg = NameTArg -- ^ Names: n1, n2, n3, ... n | ExprTArg | AltsTArg -- | A list of available tactics and their argument requirements tacticArgs :: [(String, Maybe TacticArg)] tacticArgs = [ ("intro", Nothing) -- FIXME syntax for intro (fresh name) , ("refine", Just ExprTArg) , ("mrefine", Just ExprTArg) , ("rewrite", Just ExprTArg) , ("let", Nothing) -- FIXME syntax for let , ("focus", Just ExprTArg) , ("exact", Just ExprTArg) , ("equiv", Just ExprTArg) , ("applyTactic", Just ExprTArg) , ("byReflection", Just ExprTArg) , ("reflect", Just ExprTArg) , ("fill", Just ExprTArg) , ("try", Just AltsTArg) , ("induction", Just NameTArg) , (":t", Just ExprTArg) , (":type", Just ExprTArg) , (":e", Just ExprTArg) , (":eval", Just ExprTArg) ] ++ map (\x -> (x, Nothing)) [ "intros", "compute", "trivial", "search", "solve", "attack", "unify", "state", "term", "undo", "qed", "abandon", ":q" ] tactics = map fst tacticArgs -- | Convert a name into a string usable for completion. Filters out names -- that users probably don't want to see. nameString :: Name -> Maybe String nameString (UN nm) | not (tnull nm) && (thead nm == '@' || thead nm == '#') = Nothing nameString (UN n) = Just (str n) nameString (NS n _) = nameString n nameString _ = Nothing -- FIXME: Respect module imports -- | Get the user-visible names from the current interpreter state. names :: Idris [String] names = do i <- get let ctxt = tt_ctxt i return . nub $ mapMaybe (nameString . fst) (ctxtAlist ctxt) ++ -- Explicitly add primitive types, as these are special-cased in the parser ["Int", "Integer", "Float", "Char", "String", "Type", "Ptr", "Bits8", "Bits16", "Bits32", "Bits64", "Bits8x16", "Bits16x8", "Bits32x4", "Bits64x2"] metavars :: Idris [String] metavars = do i <- get return . map (show . nsroot) $ map fst (filter (\(_, (_,_,t)) -> not t) (idris_metavars i)) \\ primDefs modules :: Idris [String] modules = do i <- get return $ map show $ imported i completeWith :: [String] -> String -> [Completion] completeWith ns n = if uniqueExists then [simpleCompletion n] else map simpleCompletion prefixMatches where prefixMatches = filter (isPrefixOf n) ns uniqueExists = [n] == prefixMatches completeName :: [String] -> String -> Idris [Completion] completeName extra n = do ns <- names return $ completeWith (extra ++ ns) n completeExpr :: [String] -> CompletionFunc Idris completeExpr extra = completeWord Nothing (" \t(){}:" ++ opChars) (completeName extra) completeMetaVar :: CompletionFunc Idris completeMetaVar = completeWord Nothing (" \t(){}:" ++ opChars) completeM where completeM m = do mvs <- metavars return $ completeWith mvs m completeOption :: CompletionFunc Idris completeOption = completeWord Nothing " \t" completeOpt where completeOpt = return . 
completeWith [ "errorcontext" , "showimplicits" , "originalerrors" , "autosolve" , "nobanner" ] completeConsoleWidth :: CompletionFunc Idris completeConsoleWidth = completeWord Nothing " \t" completeW where completeW = return . completeWith ["auto", "infinite", "80", "120"] isWhitespace :: Char -> Bool isWhitespace = (flip elem) " \t\n" lookupInHelp :: String -> Maybe CmdArg lookupInHelp cmd = lookupInHelp' cmd help where lookupInHelp' cmd ((cmds, arg, _):xs) | elem cmd cmds = Just arg | otherwise = lookupInHelp' cmd xs lookupInHelp' cmd [] = Nothing completeColour :: CompletionFunc Idris completeColour (prev, next) = case words (reverse prev) of [c] | isCmd c -> do cmpls <- completeColourOpt next return (reverse $ c ++ " ", cmpls) [c, o] | o `elem` opts -> let correct = (c ++ " " ++ o) in return (reverse correct, [simpleCompletion ""]) | o `elem` colourTypes -> completeColourFormat (prev, next) | otherwise -> let cmpls = completeWith (opts ++ colourTypes) o in let sofar = (c ++ " ") in return (reverse sofar, cmpls) cmd@(c:o:_) | isCmd c && o `elem` colourTypes -> completeColourFormat (prev, next) _ -> noCompletion (prev, next) where completeColourOpt :: String -> Idris [Completion] completeColourOpt = return . completeWith (opts ++ colourTypes) opts = ["on", "off"] colourTypes = map (map toLower . reverse . drop 6 . reverse . show) $ enumFromTo (minBound::ColourType) maxBound isCmd ":colour" = True isCmd ":color" = True isCmd _ = False colours = map (map toLower . show) $ enumFromTo (minBound::Color) maxBound formats = ["vivid", "dull", "underline", "nounderline", "bold", "nobold", "italic", "noitalic"] completeColourFormat = let getCmpl = completeWith (colours ++ formats) in completeWord Nothing " \t" (return . getCmpl) -- | Get the completion function for a particular command completeCmd :: String -> CompletionFunc Idris completeCmd cmd (prev, next) = fromMaybe completeCmdName $ fmap completeArg $ lookupInHelp cmd where completeArg FileArg = completeFilename (prev, next) completeArg NameArg = completeExpr [] (prev, next) -- FIXME only complete one name completeArg OptionArg = completeOption (prev, next) completeArg ModuleArg = noCompletion (prev, next) -- FIXME do later completeArg NamespaceArg = noCompletion (prev, next) -- FIXME do later completeArg ExprArg = completeExpr [] (prev, next) completeArg MetaVarArg = completeMetaVar (prev, next) -- FIXME only complete one name completeArg ColourArg = completeColour (prev, next) completeArg NoArg = noCompletion (prev, next) completeArg ConsoleWidthArg = completeConsoleWidth (prev, next) completeCmdName = return $ ("", completeWith commands cmd) -- | Complete REPL commands and defined identifiers replCompletion :: CompletionFunc Idris replCompletion (prev, next) = case firstWord of ':':cmdName -> completeCmd (':':cmdName) (prev, next) _ -> completeExpr [] (prev, next) where firstWord = fst $ break isWhitespace $ dropWhile isWhitespace $ reverse prev completeTactic :: [String] -> String -> CompletionFunc Idris completeTactic as tac (prev, next) = fromMaybe completeTacName . fmap completeArg $ lookup tac tacticArgs where completeTacName = return $ ("", completeWith tactics tac) completeArg Nothing = noCompletion (prev, next) completeArg (Just NameTArg) = noCompletion (prev, next) -- this is for binding new names! 
completeArg (Just ExprTArg) = completeExpr as (prev, next) completeArg (Just AltsTArg) = noCompletion (prev, next) -- TODO -- | Complete tactics and their arguments proverCompletion :: [String] -- ^ The names of current local assumptions -> CompletionFunc Idris proverCompletion assumptions (prev, next) = completeTactic assumptions firstWord (prev, next) where firstWord = fst $ break isWhitespace $ dropWhile isWhitespace $ reverse prev
DanielWaterworth/Idris-dev
src/Idris/Completion.hs
bsd-3-clause
8,932
0
16
2,871
2,399
1,288
1,111
149
10
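completeWith is the small helper that the completion functions above are built on, but it is not exported, so the illustration below copies its three-line definition just to show its behaviour on concrete inputs. Only Completion and simpleCompletion from haskeline are assumed; everything else is defined in the snippet.

module CompleteWithSketch where

import Data.List (isPrefixOf)
import System.Console.Haskeline (Completion, simpleCompletion)

-- Same logic as completeWith in Idris.Completion: offer every prefix match,
-- unless the input is already the one and only match.
completeWith' :: [String] -> String -> [Completion]
completeWith' ns n = if uniqueExists
                        then [simpleCompletion n]
                        else map simpleCompletion prefixMatches
  where prefixMatches = filter (isPrefixOf n) ns
        uniqueExists  = [n] == prefixMatches

-- completeWith' [":type", ":total", ":t"] ":t"   offers all three commands,
-- completeWith' [":type", ":total", ":t"] ":ty"  offers only ":type",
-- completeWith' [":type"] ":type"                offers just the exact match.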
module Spire.Canonical.InitialEnv ( _Branches , initEnv ) where import Control.Applicative import Data.Monoid (mempty) import Unbound.LocallyNameless hiding ( Spine ) import Spire.Canonical.Types import Spire.Surface.PrettyPrinter import qualified Spire.Canonical.Builtins as B ---------------------------------------------------------------------- initEnv :: Env initEnv = [ def B.tt VTT VUnit , def B.true VTrue VBool , def B.false VFalse VBool , def B._Unit VUnit VType , def B._Bool VBool VType , def B._String VString VType , def B._Enum VEnum VType , def B._Tel VTel VType , def B._Type VType VType , def B.nil VNil VEnum , def B.cons (vEta2 VCons "x" "xs") (vArr VString (VEnum `vArr` VEnum)) , def B._Emp VEmp VTel , def B._Ext _Ext __Ext , def B._Desc (vEta VDesc "I" ) (VType `vArr` VType) , def B._Tag (vEta VTag "E") (VEnum `vArr` VType) , def B.elimUnit elimUnit _ElimUnit , def B.elimBool elimBool _ElimBool , def B.elimPair elimPair _ElimPair , def B.elimEq elimEq _ElimEq , def B.elimEnum elimEnum _ElimEnum , def B.elimTel elimTel _ElimTel , def B.elimDesc elimDesc _ElimDesc , def B._Branches _Branches __Branches , def B._case _case _Case , def B._Func _Func __Func , def B._Hyps _Hyps __Hyps , def B.prove prove _Prove , def B.ind ind _Ind , def B._Fix _Fix __Fix ] __Ext :: Type __Ext = vPi "A" VType $ vPi "B" (var "A" `vArr` VTel) $ VTel _Ext :: Value _Ext = vLam "A" $ vLam "B" $ VExt (var "A") (fbind "B" "a") _ElimUnit :: Type _ElimUnit = vPi "P" (VUnit `vArr` VType) $ vArr (vApp "P" VTT) $ vPi "u" VUnit $ vApp "P" (var "u") elimUnit :: Value elimUnit = vLam "P" $ vLam "ptt" $ vLam "u" $ vElimUnit "P" "ptt" "u" _ElimPair :: Type _ElimPair = vPi "A" VType $ vPi "B" (var "A" `vArr` VType) $ vPi "P" (VSg (var "A") (fbind "B" "a") `vArr` VType) $ vArr (vPi "a" (var "A") $ vPi "b" ("B" `vApp` var "a") $ ("P" `vApp` VPair (var "a") (var "b"))) $ vPi "ab" (VSg (var "A") (fbind "B" "a")) $ vApp "P" (var "ab") elimPair :: Value elimPair = vLam "A" $ vLam "B" $ vLam "P" $ vLam "ppair" $ vLam "ab" $ vElimPair "A" "B" "P" "ppair" "ab" _ElimBool :: Type _ElimBool = vPi "P" (VBool `vArr` VType) $ vArr (vApp "P" VTrue) $ vArr (vApp "P" VFalse) $ vPi "b" VBool $ vApp "P" (var "b") elimBool :: Value elimBool = vLam "P" $ vLam "pt" $ vLam "pf" $ vLam "b" $ vElimBool "P" "pt" "pf" "b" _ElimEq :: Type _ElimEq = vPi "A" VType $ vPi "x" (var "A") $ vPi "P" (vPi "y" (var "A") $ VEq (var "A") (var "x") (var "A") (var "y") `vArr` VType) $ vArr (vApp2 "P" (var "x") VRefl) $ vPi "y" (var "A") $ vPi "q" (VEq (var "A") (var "x") (var "A") (var "y")) $ vApp2 "P" (var "y") (var "q") elimEq :: Value elimEq = vLam "A" $ vLam "x" $ vLam "P" $ vLam "prefl" $ vLam "y" $ vLam "q" $ vElimEq "A" "x" "P" "prefl" "y" "q" _ElimEnum :: Type _ElimEnum = vPi "P" (VEnum `vArr` VType) $ vArr (vApp "P" VNil) $ vArr (vPi "x" VString $ vPi "xs" VEnum $ vArr (vApp "P" (var "xs")) $ vApp "P" (VCons (var "x") (var "xs"))) $ vPi "E" VEnum $ vApp "P" (var "E") elimEnum :: Value elimEnum = vLam "P" $ vLam "pnil" $ vLam "pcons" $ vLam "xs" $ vElimEnum "P" "pnil" "pcons" "xs" _ElimTel :: Type _ElimTel = vPi "P" (VTel `vArr` VType) $ vPi "pemp" ("P" `vApp` VEmp) $ vPi "pext" ( vPi "A" VType $ vPi "B" (var "A" `vArr` VTel) $ vArr (vPi "a" (var "A") ("P" `vApp` (vApp "B" (var "a")))) $ vApp "P" (VExt (var "A") (fbind "B" "a")) ) $ vPi "T" VTel $ vApp "P" (var "T") elimTel :: Value elimTel = vLam "P" $ vLam "pemp" $ vLam "pext" $ vLam "T" $ vElimTel "P" "pemp" "pext" "T" _ElimDesc :: Type _ElimDesc = vPi "I" VType $ vPi "P" (VDesc (var "I") `vArr` 
VType) $ vPi "pend" (vPi "i" (var "I") $ vApp "P" (VEnd (var "i"))) $ vPi "prec" (vPi "i" (var "I") $ vPi "D" (VDesc (var "I")) $ vApp "P" (var "D") `vArr` vApp "P" (VRec (var "i") (var "D"))) $ vPi "parg" ( vPi "A" VType $ vPi "B" (var "A" `vArr` VDesc (var "I")) $ vArr (vPi "a" (var "A") ("P" `vApp` (vApp "B" (var "a")))) $ vApp "P" (VArg (var "A") (fbind "B" "a")) ) $ vPi "D" (VDesc (var "I")) $ vApp "P" (var "D") elimDesc :: Value elimDesc = vLam "I" $ vLam "P" $ vLam "pend" $ vLam "prec" $ vLam "parg" $ vLam "D" $ vElimDesc "I" "P" "pend" "prec" "parg" "D" __Branches :: Type __Branches = vPi "E" VEnum $ vPi "P" (VTag (var "E") `vArr` VType) $ VType _Branches :: Value _Branches = vLam "E" $ vLam "P" $ vBranches "E" "P" _Case :: Type _Case = vPi "E" VEnum $ vPi "P" (VTag (var "E") `vArr` VType) $ vPi "cs" (vBranches "E" "P") $ vPi "t" (VTag (var "E")) $ vApp "P" (var "t") _case :: Value _case = vLam "E" $ vLam "P" $ vLam "cs" $ vLam "t" $ vCase "E" "P" "cs" "t" __Func :: Type __Func = vPi "I" VType $ vPi "D" (VDesc (var "I")) $ vPi "X" (var "I" `vArr` VType) $ vPi "i" (var "I") $ VType _Func :: Value _Func = vLam "I" $ vLam "D" $ vLam "X" $ vLam "i" $ vFunc "I" "D" "X" "i" __Hyps :: Type __Hyps = vPi "I" VType $ vPi "D" (VDesc (var "I")) $ vPi "X" (var "I" `vArr` VType) $ vPi "M" (vPi "i" (var "I") $ ("X" `vApp` var "i") `vArr` VType) $ vPi "i" (var "I") $ vPi "xs" (vFunc "I" "D" "X" "i") $ VType _Hyps :: Value _Hyps = vLam "I" $ vLam "D" $ vLam "X" $ vLam "M" $ vLam "i" $ vLam "xs" $ vHyps "I" "D" "X" "M" "i" "xs" _Prove :: Type _Prove = vPi "I" VType $ vPi "D" (VDesc (var "I")) $ vPi "X" (var "I" `vArr` VType) $ vPi "M" (vPi "i" (var "I") $ ("X" `vApp` var "i") `vArr` VType) $ vPi "m" (vPi "i" (var "I") $ vPi "x" ("X" `vApp` var "i") $ vApp2 "M" (var "i") (var "x")) $ vPi "i" (var "I") $ vPi "xs" (vFunc "I" "D" "X" "i") $ vHyps "I" "D" "X" "M" "i" "xs" prove :: Value prove = vLam "I" $ vLam "D" $ vLam "X" $ vLam "M" $ vLam "m" $ vLam "i" $ vLam "xs" $ vProve "I" "D" "X" "M" "m" "i" "xs" _Ind :: Type _Ind = vPi "l" VString $ vPi "P" VType $ vPi "I" VType $ vPi "D" (VDesc (var "I")) $ vPi "p" (var "P") $ vPi "M" (vPi "i" (var "I") $ (vFix "l" "P" "I" "D" "p" "i") `vArr` VType) $ vPi "m" (vPi "i" (var "I") $ vPi "xs" (rFunc (var "I") (s2n "D") (sbind "j" (vFix "l" "P" "I" "D" "p" "j")) (var "i")) $ vPi "ihs" (rHyps (var "I") (s2n "D") (sbind "j" (vFix "l" "P" "I" "D" "p" "j")) (fbind2 "M" "j" "x") (var "i") (var "xs")) $ vApp2 "M" (var "i") (VInit (var "xs"))) $ vPi "i" (var "I") $ vPi "x" (vFix "l" "P" "I" "D" "p" "i") $ vApp2 "M" (var "i") (var "x") ind :: Value ind = vLam "l" $ vLam "P" $ vLam "I" $ vLam "D" $ vLam "p" $ vLam "M" $ vLam "m" $ vLam "i" $ vLam "x" $ vInd "l" "P" "I" "D" "p" "M" "m" "i" "x" __Fix :: Type __Fix = vPi "l" VString $ vPi "P" VType $ vPi "I" VType $ vPi "D" (VDesc (var "I")) $ vPi "p" (var "P") $ vPi "i" (var "I") $ VType _Fix :: Value _Fix = vLam "l" $ vLam "P" $ vLam "I" $ vLam "D" $ vLam "p" $ vLam "i" $ vFix "l" "P" "I" "D" "p" "i" def :: String -> Value -> Type -> VDef def = VDef . s2n ----------------------------------------------------------------------
spire/spire
src/Spire/Canonical/InitialEnv.hs
bsd-3-clause
7,077
0
20
1,738
3,612
1,790
1,822
229
1
-- | Typing Module module SSTG.Core.Language.Typing ( module SSTG.Core.Language.Typing ) where import SSTG.Core.Language.Syntax -- | Typeable typeclass. class Typeable a where typeOf :: a -> Type -- | `Var` instance of `Typeable`. instance Typeable Var where typeOf (Var _ ty) = ty -- | `Lit` instance of `Typeable`. instance Typeable Lit where typeOf (MachChar _ ty) = ty typeOf (MachStr _ ty) = ty typeOf (MachInt _ ty) = ty typeOf (MachWord _ ty) = ty typeOf (MachFloat _ ty) = ty typeOf (MachDouble _ ty) = ty typeOf (MachLabel _ _ ty) = ty typeOf (MachNullAddr ty) = ty typeOf (BlankAddr) = Bottom typeOf (AddrLit _) = Bottom typeOf (LitEval pfun args) = foldl AppTy (typeOf pfun) (map typeOf args) -- | `Atom` instance of `Typeable`. instance Typeable Atom where typeOf (LitAtom lit) = typeOf lit typeOf (VarAtom var) = typeOf var -- | `PrimFun` instance of `Typeable`. instance Typeable PrimFun where typeOf (PrimFun _ ty) = ty -- | `DataCon` instance of `Typeable`. instance Typeable DataCon where typeOf (DataCon _ ty tys) = foldr FunTy ty tys -- | `Alt` instance of `Typeable`. instance Typeable Alt where typeOf (Alt _ expr) = typeOf expr -- | `Expr` instance of `Typeable`. instance Typeable Expr where typeOf (Atom atom) = typeOf atom typeOf (PrimApp pfun args) = foldl AppTy (typeOf pfun) (map typeOf args) typeOf (ConApp dcon args) = foldl AppTy (typeOf dcon) (map typeOf args) typeOf (FunApp fun args) = foldl AppTy (typeOf fun) (map typeOf args) typeOf (Let _ expr) = typeOf expr typeOf (Case _ _ (alt:_)) = typeOf alt typeOf _ = Bottom
AntonXue/SSTG
src/SSTG/Core/Language/Typing.hs
bsd-3-clause
1,672
0
10
385
591
301
290
36
0
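The instances above mostly fold FunTy and AppTy over field and argument types. The sketch below shows what that fold produces for a data constructor; it is not part of SSTG, it assumes SSTG.Core.Language.Syntax exports the DataCon and Type constructors used in the instance (Typing pattern-matches on them, so they exist), and it guesses that the first field of DataCon is its Name, which is left abstract here.

module TypeOfSketch where

import SSTG.Core.Language.Syntax
import SSTG.Core.Language.Typing (typeOf)

-- For a two-field data constructor, typeOf folds FunTy from the right,
-- giving the curried type  field1 -> field2 -> result.  Bottom stands in
-- for real types purely to keep the example self-contained.
dataConType :: Name -> Type
dataConType nm = typeOf (DataCon nm Bottom [Bottom, Bottom])
-- ==> FunTy Bottom (FunTy Bottom Bottom)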
module Main (main) where import Paths_eclogues_mock (getDataFileName) import Language.Haskell.HLint (hlint) import System.Exit (exitFailure, exitSuccess) paths :: [String] paths = [ "app" , "src" , "test" ] arguments :: IO [String] arguments = go <$> getDataFileName "HLint.hints" where go p = ("--hint=" ++ p) : paths main :: IO () main = do hints <- hlint =<< arguments if null hints then exitSuccess else exitFailure
futufeld/eclogues
eclogues-mock/test/HLint.hs
bsd-3-clause
454
0
9
98
147
83
64
16
2
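The spec above is a thin driver around hlint; the snippet below (not part of the test suite) spells out the argument list it builds, with the data-file path stubbed because Paths_eclogues_mock is generated by Cabal at build time.

module ArgsSketch where

main :: IO ()
main = do
    -- Stand-in for getDataFileName "HLint.hints"; the real path is decided
    -- at install time.
    let hintsPath = "/some/prefix/share/eclogues-mock/HLint.hints"
        arguments = ("--hint=" ++ hintsPath) : ["app", "src", "test"]
    -- hlint is then run over the app, src and test trees with that hints
    -- file, and the suite passes only when it reports no suggestions.
    print arguments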
{- -*- coding:utf-8 -*- -} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE TupleSections #-} module Main where import Lib import Dfm import Checks.DfmLocalizationProblemsInSql as E import System.IO (stdout, stderr, hPutStrLn) import System.Console.CmdArgs import System.Environment (getProgName) import Control.Monad (unless, forM_) import Text.Regex.TDFA import Data.Either (isLeft, isRight, rights) import System.Directory (doesFileExist, doesDirectoryExist) import Paths_delphi_lint (version) import Data.Version (showVersion) import Data.String (IsString, fromString) programVersion :: String programVersion = showVersion version -- | Command-line flags data Flags = Flags { source_path_1 :: FilePath, sources_paths :: [FilePath], ignore_path_pattern :: [String], ignore_binary_dfm :: Bool , ignore_unparseable_sql :: Bool , include_sql_in_report :: Bool , ignore_numeric_fields_ambiguous_size :: Bool } deriving (Data, Typeable) opts' :: IO Flags opts' = getProgName >>= \programName -> return $ Flags { source_path_1 = def &= argPos 0 &= typ "DIR_OR_FILE_1", sources_paths = def &= args &= typ "DIR_OR_FILE_2 [DIR_OR_FILE_3 [...]]", ignore_path_pattern = def &= typ "REGEX" &= help "Regex pattern to ignore. It is applied to each file name with full path", ignore_binary_dfm = def &= help "DFM's in binary format are not checked by this utility but a warning is displayed for every DFM in binary format. This option suppresses these warnings", ignore_unparseable_sql = def &= help "Do not report SQL's that we cannot parse to find potential problems", include_sql_in_report = def &= help "Include original SQL in report", ignore_numeric_fields_ambiguous_size = def &= help "Do not check ambiguous size of numeric fields" } &= program programName &= summary ("Lint for Delphi version " ++ programVersion) &= details [ "Checks Delphi sources for common errors." ,"Usage examples:" ,programName ++ " --ignore-path-pattern=\"[\\]enu[\\]\" --ignore-path-pattern=\"[\\]dc[\\]\" .\\Source" ,programName ++ " Unit1.dfm Unit2.dfm" ] instance IsString err => MonadFail (Either err) where fail = Left . fromString fileNameCompOpts = defaultCompOpt { -- Case insensitive because Delphi is for Windows and there file -- names are case insensitive. caseSensitive = False } fileNameExecOpts = defaultExecOpt -- | Compile regex for matching file name makeFileNameRegexM = makeRegexOptsM fileNameCompOpts fileNameExecOpts checkOptions :: Flags -> IO () checkOptions opts = do let badRegexps = filter (isLeft . snd) $ map (\x -> (x, makeFileNameRegexM x)) $ ignore_path_pattern opts unless (null badRegexps) $ let firstElem = head badRegexps errMsg = case snd firstElem of Left x -> x in error $ "Bad regex \"" ++ fst firstElem ++ "\" in --ignore-path-pattern: " ++ errMsg forM_ (source_path_1 opts : sources_paths opts) $ \path -> do b <- (||) <$> doesFileExist path <*> doesDirectoryExist path unless b $ hPutStrLn stderr $ "File or directory \"" ++ path ++ "\" does not exist" main :: IO () main = do opts'' <- cmdArgs =<< opts' checkOptions opts'' let ignorePath :: FilePath -> Bool ignorePath path = any (`matchTest` path) compiledRegexps where compiledRegexps = rights $ map makeFileNameRegexM $ ignore_path_pattern opts'' files <- fmap concat $ mapM (findFiles filterDfmFiles) $ source_path_1 opts'' : sources_paths opts'' mapM (\f -> (f,) <$> checkDfmFile opts'' f) (filter (not . ignorePath) files) >>= printResults return () -- | Checks the given dfm-file and returns a description of the errors that -- need to be fixed.
If an error occurs while processing the file, its text is returned instead. checkDfmFile :: Flags -> FilePath -> IO (Either String [String]) checkDfmFile opts file = do parsedDfm <- parseDfmFile file (ParseDfmOpts{ignoreBinaryDfm = ignore_binary_dfm opts}) --print parsedDfm return $ case parsedDfm of Left e -> Left $ show e Right (Just o) -> Right $ E.checkDfm (DfmCheckConfig { ignoreUnparseableSql = ignore_unparseable_sql opts , includeSqlInReport = include_sql_in_report opts , ignoreNumericFieldsAmbiguousSize = ignore_numeric_fields_ambiguous_size opts }) o _ -> Right [] printResults :: [(FilePath, Either String [String])] -> IO () printResults = mapM_ printOneResult where printOneResult (fileName, Left msg) = hPutStrLn stderr $ "Error while parsing file \"" ++ fileName ++ "\": " <> msg printOneResult (_, Right []) = return () printOneResult (fileName, Right problems) = putStrLn ("\n\nDFM-file \"" ++ fileName ++ "\" has issues:") >> mapM_ putStrLn problems
ki11men0w/delphi-lint
app/Main.hs
bsd-3-clause
5,822
0
16
1,812
1,158
609
549
110
3
module LeapYearKata.Day1Spec (spec) where import Test.Hspec import LeapYearKata.Day1 (isLeapYear) spec :: Spec spec = do it "is a leap year when divisible by 4" $ do isLeapYear 2016 it "is not a leap year when not divisible by 4" $ do not $ isLeapYear 2015 it "is not a leap year when divisible by 100" $ do not $ isLeapYear 1900 it "is a leap year when divisible by 400" $ do isLeapYear 2000
Alex-Diez/haskell-tdd-kata
old-katas/test/LeapYearKata/Day1Spec.hs
bsd-3-clause
500
0
11
177
108
50
58
13
1
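The four examples above pin down the usual Gregorian rule. LeapYearKata.Day1 itself is not shown here, so the following is only a sketch of an implementation that would satisfy this spec, not the kata author's solution.

module LeapYearSketch (isLeapYear) where

-- Standard Gregorian leap-year rule, matching the spec above.
isLeapYear :: Integral a => a -> Bool
isLeapYear year
    | year `mod` 400 == 0 = True   -- 2000 is a leap year
    | year `mod` 100 == 0 = False  -- 1900 is not
    | year `mod` 4   == 0 = True   -- 2016 is
    | otherwise           = False  -- 2015 is not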
-- Copyright : Daan Leijen (c) 1999, [email protected] -- HWT Group (c) 2003, [email protected] -- License : BSD-style module Opaleye.Internal.HaskellDB.Sql where ----------------------------------------------------------- -- * SQL data type ----------------------------------------------------------- type SqlTable = String newtype SqlColumn = SqlColumn String deriving Show -- | A valid SQL name for a parameter. type SqlName = String data SqlOrderNulls = SqlNullsFirst | SqlNullsLast deriving Show data SqlOrderDirection = SqlAsc | SqlDesc deriving Show data SqlOrder = SqlOrder { sqlOrderDirection :: SqlOrderDirection , sqlOrderNulls :: SqlOrderNulls } deriving Show -- | Expressions in SQL statements. data SqlExpr = ColumnSqlExpr SqlColumn | BinSqlExpr String SqlExpr SqlExpr | PrefixSqlExpr String SqlExpr | PostfixSqlExpr String SqlExpr | FunSqlExpr String [SqlExpr] | AggrFunSqlExpr String [SqlExpr] [(SqlExpr, SqlOrder)] -- ^ Aggregate functions separate from normal functions. | ConstSqlExpr String | CaseSqlExpr [(SqlExpr,SqlExpr)] SqlExpr | ListSqlExpr [SqlExpr] | ParamSqlExpr (Maybe SqlName) SqlExpr | PlaceHolderSqlExpr | ParensSqlExpr SqlExpr | CastSqlExpr String SqlExpr deriving Show -- | Data type for SQL UPDATE statements. data SqlUpdate = SqlUpdate SqlTable [(SqlColumn,SqlExpr)] [SqlExpr] -- | Data type for SQL DELETE statements. data SqlDelete = SqlDelete SqlTable [SqlExpr] --- | Data type for SQL INSERT statements. data SqlInsert = SqlInsert SqlTable [SqlColumn] [SqlExpr]
silkapp/haskell-opaleye
src/Opaleye/Internal/HaskellDB/Sql.hs
bsd-3-clause
1,806
0
8
482
274
171
103
28
0
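Since this module is pure AST with no smart constructors, a couple of literal values may help show how the pieces fit together. The example below is not from the opaleye sources; it only uses constructors declared above, and rendering these values to SQL text happens elsewhere in the library.

module SqlAstSketch where

import Opaleye.Internal.HaskellDB.Sql

-- The expression  "age" >= 18  as an AST value.
agePredicate :: SqlExpr
agePredicate =
    BinSqlExpr ">=" (ColumnSqlExpr (SqlColumn "age")) (ConstSqlExpr "18")

-- DELETE FROM users WHERE "age" >= 18, again purely as data.
deleteAdults :: SqlDelete
deleteAdults = SqlDelete "users" [agePredicate]

-- UPDATE users SET "is_adult" = TRUE WHERE "age" >= 18.
markAdults :: SqlUpdate
markAdults =
    SqlUpdate "users" [(SqlColumn "is_adult", ConstSqlExpr "TRUE")] [agePredicate]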
{- (c) The University of Glasgow 2006 (c) The GRASP/AQUA Project, Glasgow University, 1992-1998 -} {-# LANGUAGE CPP #-} module BuildTyCl ( buildDataCon, mkDataConUnivTyVarBinders, buildPatSyn, TcMethInfo, buildClass, mkNewTyConRhs, mkDataTyConRhs, newImplicitBinder, newTyConRepName ) where #include "HsVersions.h" import IfaceEnv import FamInstEnv( FamInstEnvs, mkNewTypeCoAxiom ) import TysWiredIn( isCTupleTyConName ) import TysPrim ( voidPrimTy ) import DataCon import PatSyn import Var import VarSet import BasicTypes import Name import MkId import Class import TyCon import Type import Id import TcType import SrcLoc( SrcSpan, noSrcSpan ) import DynFlags import TcRnMonad import UniqSupply import Util import Outputable mkDataTyConRhs :: [DataCon] -> AlgTyConRhs mkDataTyConRhs cons = DataTyCon { data_cons = cons, is_enum = not (null cons) && all is_enum_con cons -- See Note [Enumeration types] in TyCon } where is_enum_con con | (_univ_tvs, ex_tvs, eq_spec, theta, arg_tys, _res) <- dataConFullSig con = null ex_tvs && null eq_spec && null theta && null arg_tys mkNewTyConRhs :: Name -> TyCon -> DataCon -> TcRnIf m n AlgTyConRhs -- ^ Monadic because it makes a Name for the coercion TyCon -- We pass the Name of the parent TyCon, as well as the TyCon itself, -- because the latter is part of a knot, whereas the former is not. mkNewTyConRhs tycon_name tycon con = do { co_tycon_name <- newImplicitBinder tycon_name mkNewTyCoOcc ; let nt_ax = mkNewTypeCoAxiom co_tycon_name tycon etad_tvs etad_roles etad_rhs ; traceIf (text "mkNewTyConRhs" <+> ppr nt_ax) ; return (NewTyCon { data_con = con, nt_rhs = rhs_ty, nt_etad_rhs = (etad_tvs, etad_rhs), nt_co = nt_ax } ) } -- Coreview looks through newtypes with a Nothing -- for nt_co, or uses explicit coercions otherwise where tvs = tyConTyVars tycon roles = tyConRoles tycon inst_con_ty = piResultTys (dataConUserType con) (mkTyVarTys tvs) rhs_ty = ASSERT( isFunTy inst_con_ty ) funArgTy inst_con_ty -- Instantiate the data con with the -- type variables from the tycon -- NB: a newtype DataCon has a type that must look like -- forall tvs. <arg-ty> -> T tvs -- Note that we *can't* use dataConInstOrigArgTys here because -- the newtype arising from class Foo a => Bar a where {} -- has a single argument (Foo a) that is a *type class*, so -- dataConInstOrigArgTys returns []. 
etad_tvs :: [TyVar] -- Matched lazily, so that mkNewTypeCo can etad_roles :: [Role] -- return a TyCon without pulling on rhs_ty etad_rhs :: Type -- See Note [Tricky iface loop] in LoadIface (etad_tvs, etad_roles, etad_rhs) = eta_reduce (reverse tvs) (reverse roles) rhs_ty eta_reduce :: [TyVar] -- Reversed -> [Role] -- also reversed -> Type -- Rhs type -> ([TyVar], [Role], Type) -- Eta-reduced version -- (tyvars in normal order) eta_reduce (a:as) (_:rs) ty | Just (fun, arg) <- splitAppTy_maybe ty, Just tv <- getTyVar_maybe arg, tv == a, not (a `elemVarSet` tyCoVarsOfType fun) = eta_reduce as rs fun eta_reduce tvs rs ty = (reverse tvs, reverse rs, ty) ------------------------------------------------------ buildDataCon :: FamInstEnvs -> Name -> Bool -- Declared infix -> TyConRepName -> [HsSrcBang] -> Maybe [HsImplBang] -- See Note [Bangs on imported data constructors] in MkId -> [FieldLabel] -- Field labels -> [TyVarBinder] -- Universals -> [TyVarBinder] -- Existentials -> [EqSpec] -- Equality spec -> ThetaType -- Does not include the "stupid theta" -- or the GADT equalities -> [Type] -> Type -- Argument and result types -> TyCon -- Rep tycon -> TcRnIf m n DataCon -- A wrapper for DataCon.mkDataCon that -- a) makes the worker Id -- b) makes the wrapper Id if necessary, including -- allocating its unique (hence monadic) -- c) Sorts out the TyVarBinders. See mkDataConUnivTyBinders buildDataCon fam_envs src_name declared_infix prom_info src_bangs impl_bangs field_lbls univ_tvs ex_tvs eq_spec ctxt arg_tys res_ty rep_tycon = do { wrap_name <- newImplicitBinder src_name mkDataConWrapperOcc ; work_name <- newImplicitBinder src_name mkDataConWorkerOcc -- This last one takes the name of the data constructor in the source -- code, which (for Haskell source anyway) will be in the DataName name -- space, and puts it into the VarName name space ; traceIf (text "buildDataCon 1" <+> ppr src_name) ; us <- newUniqueSupply ; dflags <- getDynFlags ; let stupid_ctxt = mkDataConStupidTheta rep_tycon arg_tys univ_tvs data_con = mkDataCon src_name declared_infix prom_info src_bangs field_lbls univ_tvs ex_tvs eq_spec ctxt arg_tys res_ty NoRRI rep_tycon stupid_ctxt dc_wrk dc_rep dc_wrk = mkDataConWorkId work_name data_con dc_rep = initUs_ us (mkDataConRep dflags fam_envs wrap_name impl_bangs data_con) ; traceIf (text "buildDataCon 2" <+> ppr src_name) ; return data_con } -- The stupid context for a data constructor should be limited to -- the type variables mentioned in the arg_tys -- ToDo: Or functionally dependent on? -- This whole stupid theta thing is, well, stupid. 
mkDataConStupidTheta :: TyCon -> [Type] -> [TyVarBinder] -> [PredType] mkDataConStupidTheta tycon arg_tys univ_tvs | null stupid_theta = [] -- The common case | otherwise = filter in_arg_tys stupid_theta where tc_subst = zipTvSubst (tyConTyVars tycon) (mkTyVarTys (binderVars univ_tvs)) stupid_theta = substTheta tc_subst (tyConStupidTheta tycon) -- Start by instantiating the master copy of the -- stupid theta, taken from the TyCon arg_tyvars = tyCoVarsOfTypes arg_tys in_arg_tys pred = not $ isEmptyVarSet $ tyCoVarsOfType pred `intersectVarSet` arg_tyvars mkDataConUnivTyVarBinders :: [TyConBinder] -- From the TyCon -> [TyVarBinder] -- For the DataCon -- See Note [Building the TyBinders for a DataCon] mkDataConUnivTyVarBinders tc_bndrs = map mk_binder tc_bndrs where mk_binder (TvBndr tv tc_vis) = mkTyVarBinder vis tv where vis = case tc_vis of AnonTCB -> Specified NamedTCB Required -> Specified NamedTCB vis -> vis {- Note [Building the TyBinders for a DataCon] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ A DataCon needs to keep track of the visibility of its universals and existentials, so that visible type application can work properly. This is done by storing the universal and existential TyVarBinders. See Note [TyVarBinders in DataCons] in DataCon. During construction of a DataCon, we often start from the TyBinders of the parent TyCon. For example data Maybe a = Nothing | Just a The DataCons start from the TyBinders of the parent TyCon. But the ultimate TyBinders for the DataCon are *different* than those of the DataCon. Here is an example: data App a b = MkApp (a b) -- App :: forall {k}. (k->*) -> k -> * The TyCon has tyConTyVars = [ k:*, a:k->*, b:k] tyConTyBinders = [ Named (TvBndr (k :: *) Inferred), Anon (k->*), Anon k ] The TyBinders for App line up with App's kind, given above. But the DataCon MkApp has the type MkApp :: forall {k} (a:k->*) (b:k). a b -> App k a b That is, its TyBinders should be dataConUnivTyVarBinders = [ TvBndr (k:*) Inferred , TvBndr (a:k->*) Specified , TvBndr (b:k) Specified ] So we want to take the TyCon's TyBinders and the TyCon's TyVars and merge them, pulling - variable names from the TyVars - visibilities from the TyBinders - but changing Anon/Required to Specified The last part about Required->Specified comes from this: data T k (a:k) b = MkT (a b) Here k is Required in T's kind, but we don't have Required binders in the TyBinders for a term (see Note [No Required TyBinder in terms] in TyCoRep), so we change it to Specified when making MkT's TyBinders This merging operation is done by mkDataConUnivTyBinders. In contrast, the TyBinders passed to mkDataCon are the final TyBinders stored in the DataCon (mkDataCon does no further work). 
-} ------------------------------------------------------ buildPatSyn :: Name -> Bool -> (Id,Bool) -> Maybe (Id, Bool) -> ([TyVarBinder], ThetaType) -- ^ Univ and req -> ([TyVarBinder], ThetaType) -- ^ Ex and prov -> [Type] -- ^ Argument types -> Type -- ^ Result type -> [FieldLabel] -- ^ Field labels for -- a record pattern synonym -> PatSyn buildPatSyn src_name declared_infix matcher@(matcher_id,_) builder (univ_tvs, req_theta) (ex_tvs, prov_theta) arg_tys pat_ty field_labels = -- The assertion checks that the matcher is -- compatible with the pattern synonym ASSERT2((and [ univ_tvs `equalLength` univ_tvs1 , ex_tvs `equalLength` ex_tvs1 , pat_ty `eqType` substTy subst pat_ty1 , prov_theta `eqTypes` substTys subst prov_theta1 , req_theta `eqTypes` substTys subst req_theta1 , compareArgTys arg_tys (substTys subst arg_tys1) ]) , (vcat [ ppr univ_tvs <+> twiddle <+> ppr univ_tvs1 , ppr ex_tvs <+> twiddle <+> ppr ex_tvs1 , ppr pat_ty <+> twiddle <+> ppr pat_ty1 , ppr prov_theta <+> twiddle <+> ppr prov_theta1 , ppr req_theta <+> twiddle <+> ppr req_theta1 , ppr arg_tys <+> twiddle <+> ppr arg_tys1])) mkPatSyn src_name declared_infix (univ_tvs, req_theta) (ex_tvs, prov_theta) arg_tys pat_ty matcher builder field_labels where ((_:_:univ_tvs1), req_theta1, tau) = tcSplitSigmaTy $ idType matcher_id ([pat_ty1, cont_sigma, _], _) = tcSplitFunTys tau (ex_tvs1, prov_theta1, cont_tau) = tcSplitSigmaTy cont_sigma (arg_tys1, _) = (tcSplitFunTys cont_tau) twiddle = char '~' subst = zipTvSubst (univ_tvs1 ++ ex_tvs1) (mkTyVarTys (binderVars (univ_tvs ++ ex_tvs))) -- For a nullary pattern synonym we add a single void argument to the -- matcher to preserve laziness in the case of unlifted types. -- See #12746 compareArgTys :: [Type] -> [Type] -> Bool compareArgTys [] [x] = x `eqType` voidPrimTy compareArgTys arg_tys matcher_arg_tys = arg_tys `eqTypes` matcher_arg_tys ------------------------------------------------------ type TcMethInfo -- A temporary intermediate, to communicate -- between tcClassSigs and buildClass. = ( Name -- Name of the class op , Type -- Type of the class op , Maybe (DefMethSpec (SrcSpan, Type))) -- Nothing => no default method -- -- Just VanillaDM => There is an ordinary -- polymorphic default method -- -- Just (GenericDM (loc, ty)) => There is a generic default metho -- Here is its type, and the location -- of the type signature -- We need that location /only/ to attach it to the -- generic default method's Name; and we need /that/ -- only to give the right location of an ambiguity error -- for the generic default method, spat out by checkValidClass buildClass :: Name -- Name of the class/tycon (they have the same Name) -> [TyConBinder] -- Of the tycon -> [Role] -> ThetaType -> [FunDep TyVar] -- Functional dependencies -> [ClassATItem] -- Associated types -> [TcMethInfo] -- Method info -> ClassMinimalDef -- Minimal complete definition -> TcRnIf m n Class buildClass tycon_name binders roles sc_theta fds at_items sig_stuff mindef = fixM $ \ rec_clas -> -- Only name generation inside loop do { traceIf (text "buildClass") ; datacon_name <- newImplicitBinder tycon_name mkClassDataConOcc ; tc_rep_name <- newTyConRepName tycon_name ; op_items <- mapM (mk_op_item rec_clas) sig_stuff -- Build the selector id and default method id -- Make selectors for the superclasses ; sc_sel_names <- mapM (newImplicitBinder tycon_name . 
mkSuperDictSelOcc) (takeList sc_theta [fIRST_TAG..]) ; let sc_sel_ids = [ mkDictSelId sc_name rec_clas | sc_name <- sc_sel_names] -- We number off the Dict superclass selectors, 1, 2, 3 etc so that we -- can construct names for the selectors. Thus -- class (C a, C b) => D a b where ... -- gives superclass selectors -- D_sc1, D_sc2 -- (We used to call them D_C, but now we can have two different -- superclasses both called C!) ; let use_newtype = isSingleton arg_tys -- Use a newtype if the data constructor -- (a) has exactly one value field -- i.e. exactly one operation or superclass taken together -- (b) that value is of lifted type (which they always are, because -- we box equality superclasses) -- See note [Class newtypes and equality predicates] -- We treat the dictionary superclasses as ordinary arguments. -- That means that in the case of -- class C a => D a -- we don't get a newtype with no arguments! args = sc_sel_names ++ op_names op_tys = [ty | (_,ty,_) <- sig_stuff] op_names = [op | (op,_,_) <- sig_stuff] arg_tys = sc_theta ++ op_tys rec_tycon = classTyCon rec_clas univ_bndrs = mkDataConUnivTyVarBinders binders univ_tvs = binderVars univ_bndrs ; rep_nm <- newTyConRepName datacon_name ; dict_con <- buildDataCon (panic "buildClass: FamInstEnvs") datacon_name False -- Not declared infix rep_nm (map (const no_bang) args) (Just (map (const HsLazy) args)) [{- No fields -}] univ_bndrs [{- no existentials -}] [{- No GADT equalities -}] [{- No theta -}] arg_tys (mkTyConApp rec_tycon (mkTyVarTys univ_tvs)) rec_tycon ; rhs <- if use_newtype then mkNewTyConRhs tycon_name rec_tycon dict_con else if isCTupleTyConName tycon_name then return (TupleTyCon { data_con = dict_con , tup_sort = ConstraintTuple }) else return (mkDataTyConRhs [dict_con]) ; let { tycon = mkClassTyCon tycon_name binders roles rhs rec_clas tc_rep_name -- A class can be recursive, and in the case of newtypes -- this matters. For example -- class C a where { op :: C b => a -> b -> Int } -- Because C has only one operation, it is represented by -- a newtype, and it should be a *recursive* newtype. -- [If we don't make it a recursive newtype, we'll expand the -- newtype like a synonym, but that will lead to an infinite -- type] ; result = mkClass tycon_name univ_tvs fds sc_theta sc_sel_ids at_items op_items mindef tycon } ; traceIf (text "buildClass" <+> ppr tycon) ; return result } where no_bang = HsSrcBang Nothing NoSrcUnpack NoSrcStrict mk_op_item :: Class -> TcMethInfo -> TcRnIf n m ClassOpItem mk_op_item rec_clas (op_name, _, dm_spec) = do { dm_info <- mk_dm_info op_name dm_spec ; return (mkDictSelId op_name rec_clas, dm_info) } mk_dm_info :: Name -> Maybe (DefMethSpec (SrcSpan, Type)) -> TcRnIf n m (Maybe (Name, DefMethSpec Type)) mk_dm_info _ Nothing = return Nothing mk_dm_info op_name (Just VanillaDM) = do { dm_name <- newImplicitBinder op_name mkDefaultMethodOcc ; return (Just (dm_name, VanillaDM)) } mk_dm_info op_name (Just (GenericDM (loc, dm_ty))) = do { dm_name <- newImplicitBinderLoc op_name mkDefaultMethodOcc loc ; return (Just (dm_name, GenericDM dm_ty)) } {- Note [Class newtypes and equality predicates] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider class (a ~ F b) => C a b where op :: a -> b We cannot represent this by a newtype, even though it's not existential, because there are two value fields (the equality predicate and op. 
See Trac #2238 Moreover, class (a ~ F b) => C a b where {} Here we can't use a newtype either, even though there is only one field, because equality predicates are unboxed, and classes are boxed. -} newImplicitBinder :: Name -- Base name -> (OccName -> OccName) -- Occurrence name modifier -> TcRnIf m n Name -- Implicit name -- Called in BuildTyCl to allocate the implicit binders of type/class decls -- For source type/class decls, this is the first occurrence -- For iface ones, the LoadIface has already allocated a suitable name in the cache newImplicitBinder base_name mk_sys_occ = newImplicitBinderLoc base_name mk_sys_occ (nameSrcSpan base_name) newImplicitBinderLoc :: Name -- Base name -> (OccName -> OccName) -- Occurrence name modifier -> SrcSpan -> TcRnIf m n Name -- Implicit name -- Just the same, but lets you specify the SrcSpan newImplicitBinderLoc base_name mk_sys_occ loc | Just mod <- nameModule_maybe base_name = newGlobalBinder mod occ loc | otherwise -- When typechecking a [d| decl bracket |], -- TH generates types, classes etc with Internal names, -- so we follow suit for the implicit binders = do { uniq <- newUnique ; return (mkInternalName uniq occ loc) } where occ = mk_sys_occ (nameOccName base_name) -- | Make the 'TyConRepName' for this 'TyCon' newTyConRepName :: Name -> TcRnIf gbl lcl TyConRepName newTyConRepName tc_name | Just mod <- nameModule_maybe tc_name , (mod, occ) <- tyConRepModOcc mod (nameOccName tc_name) = newGlobalBinder mod occ noSrcSpan | otherwise = newImplicitBinder tc_name mkTyConRepOcc
mettekou/ghc
compiler/iface/BuildTyCl.hs
bsd-3-clause
20,512
0
19
7,057
2,985
1,627
1,358
251
5
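The trickiest part of the BuildTyCl chunk above is the eta-reduction in mkNewTyConRhs, which walks the binders and the representation type from the right. The toy model below is only a sketch: Ty and TyVar are simplified stand-ins invented here, not GHC's types, but the reversed-binder walk has the same shape as eta_reduce.

module EtaReduceSketch where

type TyVar = String

-- A drastically simplified stand-in for GHC's Type.
data Ty
  = TyVarTy TyVar   -- a type variable
  | AppTy Ty Ty     -- type application: fun `AppTy` arg
  | ConTy String    -- a type constructor
  deriving (Eq, Show)

freeVars :: Ty -> [TyVar]
freeVars (TyVarTy v) = [v]
freeVars (AppTy f a) = freeVars f ++ freeVars a
freeVars (ConTy _)   = []

-- Drop trailing binders while the rhs is an application whose argument is exactly
-- the last binder and that binder does not occur in the function part.
etaReduce :: [TyVar] -> Ty -> ([TyVar], Ty)
etaReduce tvs ty = go (reverse tvs) ty
  where
    go (a:as) (AppTy fun (TyVarTy v))
      | v == a, a `notElem` freeVars fun = go as fun
    go as t = (reverse as, t)

-- ghci> etaReduce ["a","b"] (AppTy (TyVarTy "a") (TyVarTy "b"))
-- (["a"],TyVarTy "a")
-- i.e. newtype N a b = MkN (a b) eta-reduces, so the coercion relates N a and a.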
-- | Utility functions {-# LANGUAGE OverloadedStrings #-} module NumberSix.Util ( sleep , forkIrc , (<>) , (==?) , toLower , breakWord , prettyList , removeNewlines , randomElement , parseJsonEither , readText , maxLineLength ) where -------------------------------------------------------------------------------- import Control.Arrow (second) import Control.Concurrent (forkIO, threadDelay) import Control.Monad.Reader (ask) import Control.Monad.Trans (liftIO) import Data.Aeson (FromJSON, json, parseJSON) import Data.Aeson.Types (parseEither) import Data.Attoparsec (parseOnly) import Data.ByteString (ByteString) import Data.Char (isSpace) import Data.Text (Text) import qualified Data.Text as T import System.Random (randomRIO) -------------------------------------------------------------------------------- import NumberSix.Irc import NumberSix.Message -------------------------------------------------------------------------------- -- | Sleep a while. sleep :: Double -- ^ Number of seconds to sleep -> Irc () -- ^ Result sleep x = liftIO $ threadDelay (round $ x * 1000000) -------------------------------------------------------------------------------- -- | 'forkIO' lifted to the Irc monad forkIrc :: Irc () -- ^ Action to execute in another thread -> Irc () -- ^ Returns immediately forkIrc irc = do _<- liftIO . forkIO . runIrc irc =<< ask return () -------------------------------------------------------------------------------- -- | Take a word from a string, returing the word and the remainder. breakWord :: Text -> (Text, Text) breakWord = second (T.drop 1) . T.break isSpace -------------------------------------------------------------------------------- -- | Show a list of strings in a pretty format prettyList :: [Text] -> Text prettyList [] = "none" prettyList (x : []) = x prettyList (x : y : []) = x <> " and " <> y prettyList (x : y : z : r) = x <> ", " <> prettyList (y : z : r) -------------------------------------------------------------------------------- -- | Replace newlines by spaces removeNewlines :: Text -> Text removeNewlines = T.map (\x -> if x `elem` "\r\n" then ' ' else x) -------------------------------------------------------------------------------- -- | Random element from a list randomElement :: [a] -> IO a randomElement ls = fmap (ls !!) $ randomRIO (0, length ls - 1) -------------------------------------------------------------------------------- -- | Parse JSON from a bytestring parseJsonEither :: FromJSON a => ByteString -> Either String a parseJsonEither bs = parseOnly json bs >>= parseEither parseJSON -------------------------------------------------------------------------------- -- | Read applied to a bytestring, lifted to maybe readText :: Read a => Text -> Maybe a readText t = case reads (T.unpack t) of [(x, "")] -> Just x _ -> Nothing -------------------------------------------------------------------------------- -- | To prevent flooding maxLineLength :: Int maxLineLength = 450
itkovian/number-six
src/NumberSix/Util.hs
bsd-3-clause
3,279
0
10
697
672
380
292
55
2
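A quick usage sketch for the pure helpers in NumberSix.Util above, assuming the module is importable and OverloadedStrings is enabled; the results noted in the comments follow directly from the definitions.

{-# LANGUAGE OverloadedStrings #-}

module UtilUsage where

import Data.Text (Text)
import NumberSix.Util (breakWord, prettyList, readText)

firstWordAndRest :: (Text, Text)
firstWordAndRest = breakWord "!tell remko hello"   -- ("!tell", "remko hello")

colours :: Text
colours = prettyList ["red", "green", "blue"]      -- "red, green and blue"

parsedPort :: Maybe Int
parsedPort = readText "6667"                       -- Just 6667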
{-| Description: SDL events subsystem. -} module Graphics.UI.SDL.Events ( module Queue , module Types ) where import Graphics.UI.SDL.Events.Queue as Queue import Graphics.UI.SDL.Events.Types as Types
abbradar/MySDL
src/Graphics/UI/SDL/Events.hs
bsd-3-clause
223
0
4
47
39
30
9
5
0
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}

module Configuration.Screen where

import           Lens.Simple ( (^.)
                             , makeLenses
                             )

import           Data.Yaml   ( (.!=)
                             , (.:?)
                             , FromJSON(..)
                             )
import qualified Data.Yaml   as Y

data ImprovizScreenConfig = ImprovizScreenConfig
  { _front :: Float
  , _back  :: Float
  } deriving Show

makeLenses ''ImprovizScreenConfig

defaultScreenConfig :: ImprovizScreenConfig
defaultScreenConfig = ImprovizScreenConfig { _front = 0.1, _back = 100 }

instance FromJSON ImprovizScreenConfig where
  parseJSON (Y.Object v) =
    ImprovizScreenConfig
      <$> v .:? "front" .!= (defaultScreenConfig ^. front)
      <*> v .:? "back" .!= (defaultScreenConfig ^. back)
  parseJSON _ = fail "Expected Object for ScreenConfig value"
rumblesan/improviz
src/Configuration/Screen.hs
bsd-3-clause
1,069
0
11
476
179
106
73
25
1
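A small sketch of how the FromJSON instance above behaves, assuming Configuration.Screen is importable and the yaml package's decodeEither' is available. The YAML document here is made up and deliberately omits "back" to show the default kicking in.

{-# LANGUAGE OverloadedStrings #-}

module ScreenConfigExample where

import qualified Data.Yaml as Y
import Lens.Simple ((^.))

import Configuration.Screen

main :: IO ()
main =
  case Y.decodeEither' "front: 0.25\n" :: Either Y.ParseException ImprovizScreenConfig of
    Left err  -> print err
    Right cfg -> do
      print (cfg ^. front)   -- 0.25
      print (cfg ^. back)    -- 100.0, filled in from defaultScreenConfig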
{-# language CPP #-} -- | = Name -- -- VK_EXT_display_surface_counter - instance extension -- -- == VK_EXT_display_surface_counter -- -- [__Name String__] -- @VK_EXT_display_surface_counter@ -- -- [__Extension Type__] -- Instance extension -- -- [__Registered Extension Number__] -- 91 -- -- [__Revision__] -- 1 -- -- [__Extension and Version Dependencies__] -- -- - Requires Vulkan 1.0 -- -- - Requires @VK_KHR_display@ -- -- [__Contact__] -- -- - James Jones -- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_EXT_display_surface_counter] @cubanismo%0A<<Here describe the issue or question you have about the VK_EXT_display_surface_counter extension>> > -- -- == Other Extension Metadata -- -- [__Last Modified Date__] -- 2016-12-13 -- -- [__IP Status__] -- No known IP claims. -- -- [__Contributors__] -- -- - Pierre Boudier, NVIDIA -- -- - James Jones, NVIDIA -- -- - Damien Leone, NVIDIA -- -- - Pierre-Loup Griffais, Valve -- -- - Daniel Vetter, Intel -- -- == Description -- -- This extension defines a vertical blanking period counter associated -- with display surfaces. It provides a mechanism to query support for such -- a counter from a 'Vulkan.Extensions.Handles.SurfaceKHR' object. -- -- == New Commands -- -- - 'getPhysicalDeviceSurfaceCapabilities2EXT' -- -- == New Structures -- -- - 'SurfaceCapabilities2EXT' -- -- == New Enums -- -- - 'SurfaceCounterFlagBitsEXT' -- -- == New Bitmasks -- -- - 'SurfaceCounterFlagsEXT' -- -- == New Enum Constants -- -- - 'EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME' -- -- - 'EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION' -- -- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType': -- -- - 'STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT' -- -- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT' -- -- == Version History -- -- - Revision 1, 2016-12-13 (James Jones) -- -- - Initial draft -- -- == See Also -- -- 'SurfaceCapabilities2EXT', 'SurfaceCounterFlagBitsEXT', -- 'SurfaceCounterFlagsEXT', 'getPhysicalDeviceSurfaceCapabilities2EXT' -- -- == Document Notes -- -- For more information, see the -- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_EXT_display_surface_counter Vulkan Specification> -- -- This page is a generated document. Fixes and changes should be made to -- the generator scripts, not directly. module Vulkan.Extensions.VK_EXT_display_surface_counter ( getPhysicalDeviceSurfaceCapabilities2EXT , pattern STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT , pattern SURFACE_COUNTER_VBLANK_EXT , SurfaceCapabilities2EXT(..) , SurfaceCounterFlagsEXT , SurfaceCounterFlagBitsEXT( SURFACE_COUNTER_VBLANK_BIT_EXT , .. ) , EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION , pattern EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION , EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME , pattern EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME , SurfaceKHR(..) , CompositeAlphaFlagBitsKHR(..) , CompositeAlphaFlagsKHR , SurfaceTransformFlagBitsKHR(..) 
, SurfaceTransformFlagsKHR ) where import Vulkan.Internal.Utils (enumReadPrec) import Vulkan.Internal.Utils (enumShowsPrec) import Vulkan.Internal.Utils (traceAroundEvent) import Control.Monad (unless) import Control.Monad.IO.Class (liftIO) import Foreign.Marshal.Alloc (allocaBytes) import GHC.Base (when) import GHC.IO (throwIO) import GHC.Ptr (nullFunPtr) import Foreign.Ptr (nullPtr) import Foreign.Ptr (plusPtr) import GHC.Show (showString) import Numeric (showHex) import Control.Monad.Trans.Class (lift) import Control.Monad.Trans.Cont (evalContT) import Vulkan.CStruct (FromCStruct) import Vulkan.CStruct (FromCStruct(..)) import Vulkan.CStruct (ToCStruct) import Vulkan.CStruct (ToCStruct(..)) import Vulkan.Zero (Zero) import Vulkan.Zero (Zero(..)) import Control.Monad.IO.Class (MonadIO) import Data.Bits (Bits) import Data.Bits (FiniteBits) import Data.String (IsString) import Data.Typeable (Typeable) import Foreign.Storable (Storable) import Foreign.Storable (Storable(peek)) import Foreign.Storable (Storable(poke)) import qualified Foreign.Storable (Storable(..)) import GHC.Generics (Generic) import GHC.IO.Exception (IOErrorType(..)) import GHC.IO.Exception (IOException(..)) import Foreign.Ptr (FunPtr) import Foreign.Ptr (Ptr) import GHC.Read (Read(readPrec)) import GHC.Show (Show(showsPrec)) import Data.Word (Word32) import Data.Kind (Type) import Control.Monad.Trans.Cont (ContT(..)) import Vulkan.Extensions.VK_KHR_surface (CompositeAlphaFlagsKHR) import Vulkan.Core10.FundamentalTypes (Extent2D) import Vulkan.Core10.FundamentalTypes (Flags) import Vulkan.Core10.Enums.ImageUsageFlagBits (ImageUsageFlags) import Vulkan.Dynamic (InstanceCmds(pVkGetPhysicalDeviceSurfaceCapabilities2EXT)) import Vulkan.Core10.Handles (PhysicalDevice) import Vulkan.Core10.Handles (PhysicalDevice(..)) import Vulkan.Core10.Handles (PhysicalDevice(PhysicalDevice)) import Vulkan.Core10.Handles (PhysicalDevice_T) import Vulkan.Core10.Enums.Result (Result) import Vulkan.Core10.Enums.Result (Result(..)) import Vulkan.Core10.Enums.StructureType (StructureType) import Vulkan.Extensions.Handles (SurfaceKHR) import Vulkan.Extensions.Handles (SurfaceKHR(..)) import Vulkan.Extensions.VK_KHR_surface (SurfaceTransformFlagBitsKHR) import Vulkan.Extensions.VK_KHR_surface (SurfaceTransformFlagsKHR) import Vulkan.Exception (VulkanException(..)) import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT)) import Vulkan.Core10.Enums.Result (Result(SUCCESS)) import Vulkan.Extensions.VK_KHR_surface (CompositeAlphaFlagBitsKHR(..)) import Vulkan.Extensions.VK_KHR_surface (CompositeAlphaFlagsKHR) import Vulkan.Extensions.Handles (SurfaceKHR(..)) import Vulkan.Extensions.VK_KHR_surface (SurfaceTransformFlagBitsKHR(..)) import Vulkan.Extensions.VK_KHR_surface (SurfaceTransformFlagsKHR) foreign import ccall #if !defined(SAFE_FOREIGN_CALLS) unsafe #endif "dynamic" mkVkGetPhysicalDeviceSurfaceCapabilities2EXT :: FunPtr (Ptr PhysicalDevice_T -> SurfaceKHR -> Ptr SurfaceCapabilities2EXT -> IO Result) -> Ptr PhysicalDevice_T -> SurfaceKHR -> Ptr SurfaceCapabilities2EXT -> IO Result -- | vkGetPhysicalDeviceSurfaceCapabilities2EXT - Query surface capabilities -- -- = Description -- -- 'getPhysicalDeviceSurfaceCapabilities2EXT' behaves similarly to -- 'Vulkan.Extensions.VK_KHR_surface.getPhysicalDeviceSurfaceCapabilitiesKHR', -- with the ability to return extended information by adding extending -- structures to the @pNext@ chain of its @pSurfaceCapabilities@ parameter. 
-- -- == Valid Usage -- -- - [[VUID-{refpage}-surface-06523]] @surface@ /must/ be a valid -- 'Vulkan.Extensions.Handles.SurfaceKHR' handle -- -- - [[VUID-{refpage}-surface-06211]] @surface@ /must/ be supported by -- @physicalDevice@, as reported by -- 'Vulkan.Extensions.VK_KHR_surface.getPhysicalDeviceSurfaceSupportKHR' -- or an equivalent platform-specific mechanism -- -- == Valid Usage (Implicit) -- -- - #VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-physicalDevice-parameter# -- @physicalDevice@ /must/ be a valid -- 'Vulkan.Core10.Handles.PhysicalDevice' handle -- -- - #VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-surface-parameter# -- @surface@ /must/ be a valid 'Vulkan.Extensions.Handles.SurfaceKHR' -- handle -- -- - #VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-pSurfaceCapabilities-parameter# -- @pSurfaceCapabilities@ /must/ be a valid pointer to a -- 'SurfaceCapabilities2EXT' structure -- -- - #VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-commonparent# Both -- of @physicalDevice@, and @surface@ /must/ have been created, -- allocated, or retrieved from the same -- 'Vulkan.Core10.Handles.Instance' -- -- == Return Codes -- -- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-successcodes Success>] -- -- - 'Vulkan.Core10.Enums.Result.SUCCESS' -- -- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-errorcodes Failure>] -- -- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_HOST_MEMORY' -- -- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_DEVICE_MEMORY' -- -- - 'Vulkan.Core10.Enums.Result.ERROR_SURFACE_LOST_KHR' -- -- = See Also -- -- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_EXT_display_surface_counter VK_EXT_display_surface_counter>, -- 'Vulkan.Core10.Handles.PhysicalDevice', 'SurfaceCapabilities2EXT', -- 'Vulkan.Extensions.Handles.SurfaceKHR' getPhysicalDeviceSurfaceCapabilities2EXT :: forall io . (MonadIO io) => -- | @physicalDevice@ is the physical device that will be associated with the -- swapchain to be created, as described for -- 'Vulkan.Extensions.VK_KHR_swapchain.createSwapchainKHR'. PhysicalDevice -> -- | @surface@ is the surface that will be associated with the swapchain. SurfaceKHR -> io (SurfaceCapabilities2EXT) getPhysicalDeviceSurfaceCapabilities2EXT physicalDevice surface = liftIO . 
evalContT $ do let vkGetPhysicalDeviceSurfaceCapabilities2EXTPtr = pVkGetPhysicalDeviceSurfaceCapabilities2EXT (case physicalDevice of PhysicalDevice{instanceCmds} -> instanceCmds) lift $ unless (vkGetPhysicalDeviceSurfaceCapabilities2EXTPtr /= nullFunPtr) $ throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkGetPhysicalDeviceSurfaceCapabilities2EXT is null" Nothing Nothing let vkGetPhysicalDeviceSurfaceCapabilities2EXT' = mkVkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXTPtr pPSurfaceCapabilities <- ContT (withZeroCStruct @SurfaceCapabilities2EXT) r <- lift $ traceAroundEvent "vkGetPhysicalDeviceSurfaceCapabilities2EXT" (vkGetPhysicalDeviceSurfaceCapabilities2EXT' (physicalDeviceHandle (physicalDevice)) (surface) (pPSurfaceCapabilities)) lift $ when (r < SUCCESS) (throwIO (VulkanException r)) pSurfaceCapabilities <- lift $ peekCStruct @SurfaceCapabilities2EXT pPSurfaceCapabilities pure $ (pSurfaceCapabilities) -- No documentation found for TopLevel "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT" pattern STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT = STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT -- No documentation found for TopLevel "VK_SURFACE_COUNTER_VBLANK_EXT" pattern SURFACE_COUNTER_VBLANK_EXT = SURFACE_COUNTER_VBLANK_BIT_EXT -- | VkSurfaceCapabilities2EXT - Structure describing capabilities of a -- surface -- -- == Valid Usage (Implicit) -- -- = See Also -- -- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_EXT_display_surface_counter VK_EXT_display_surface_counter>, -- 'Vulkan.Extensions.VK_KHR_surface.CompositeAlphaFlagsKHR', -- 'Vulkan.Core10.FundamentalTypes.Extent2D', -- 'Vulkan.Core10.Enums.ImageUsageFlagBits.ImageUsageFlags', -- 'Vulkan.Core10.Enums.StructureType.StructureType', -- 'SurfaceCounterFlagsEXT', -- 'Vulkan.Extensions.VK_KHR_surface.SurfaceTransformFlagBitsKHR', -- 'Vulkan.Extensions.VK_KHR_surface.SurfaceTransformFlagsKHR', -- 'getPhysicalDeviceSurfaceCapabilities2EXT' data SurfaceCapabilities2EXT = SurfaceCapabilities2EXT { -- | @minImageCount@ is the minimum number of images the specified device -- supports for a swapchain created for the surface, and will be at least -- one. minImageCount :: Word32 , -- | @maxImageCount@ is the maximum number of images the specified device -- supports for a swapchain created for the surface, and will be either 0, -- or greater than or equal to @minImageCount@. A value of 0 means that -- there is no limit on the number of images, though there /may/ be limits -- related to the total amount of memory used by presentable images. maxImageCount :: Word32 , -- | @currentExtent@ is the current width and height of the surface, or the -- special value (0xFFFFFFFF, 0xFFFFFFFF) indicating that the surface size -- will be determined by the extent of a swapchain targeting the surface. currentExtent :: Extent2D , -- | @minImageExtent@ contains the smallest valid swapchain extent for the -- surface on the specified device. The @width@ and @height@ of the extent -- will each be less than or equal to the corresponding @width@ and -- @height@ of @currentExtent@, unless @currentExtent@ has the special -- value described above. minImageExtent :: Extent2D , -- | @maxImageExtent@ contains the largest valid swapchain extent for the -- surface on the specified device. The @width@ and @height@ of the extent -- will each be greater than or equal to the corresponding @width@ and -- @height@ of @minImageExtent@. 
The @width@ and @height@ of the extent -- will each be greater than or equal to the corresponding @width@ and -- @height@ of @currentExtent@, unless @currentExtent@ has the special -- value described above. maxImageExtent :: Extent2D , -- | @maxImageArrayLayers@ is the maximum number of layers presentable images -- /can/ have for a swapchain created for this device and surface, and will -- be at least one. maxImageArrayLayers :: Word32 , -- | @supportedTransforms@ is a bitmask of -- 'Vulkan.Extensions.VK_KHR_surface.SurfaceTransformFlagBitsKHR' -- indicating the presentation transforms supported for the surface on the -- specified device. At least one bit will be set. supportedTransforms :: SurfaceTransformFlagsKHR , -- | @currentTransform@ is -- 'Vulkan.Extensions.VK_KHR_surface.SurfaceTransformFlagBitsKHR' value -- indicating the surface’s current transform relative to the presentation -- engine’s natural orientation. currentTransform :: SurfaceTransformFlagBitsKHR , -- | @supportedCompositeAlpha@ is a bitmask of -- 'Vulkan.Extensions.VK_KHR_surface.CompositeAlphaFlagBitsKHR', -- representing the alpha compositing modes supported by the presentation -- engine for the surface on the specified device, and at least one bit -- will be set. Opaque composition /can/ be achieved in any alpha -- compositing mode by either using an image format that has no alpha -- component, or by ensuring that all pixels in the presentable images have -- an alpha value of 1.0. supportedCompositeAlpha :: CompositeAlphaFlagsKHR , -- | @supportedUsageFlags@ is a bitmask of -- 'Vulkan.Core10.Enums.ImageUsageFlagBits.ImageUsageFlagBits' representing -- the ways the application /can/ use the presentable images of a swapchain -- created with 'Vulkan.Extensions.VK_KHR_surface.PresentModeKHR' set to -- 'Vulkan.Extensions.VK_KHR_surface.PRESENT_MODE_IMMEDIATE_KHR', -- 'Vulkan.Extensions.VK_KHR_surface.PRESENT_MODE_MAILBOX_KHR', -- 'Vulkan.Extensions.VK_KHR_surface.PRESENT_MODE_FIFO_KHR' or -- 'Vulkan.Extensions.VK_KHR_surface.PRESENT_MODE_FIFO_RELAXED_KHR' for the -- surface on the specified device. -- 'Vulkan.Core10.Enums.ImageUsageFlagBits.IMAGE_USAGE_COLOR_ATTACHMENT_BIT' -- /must/ be included in the set. Implementations /may/ support additional -- usages. supportedUsageFlags :: ImageUsageFlags , -- | @supportedSurfaceCounters@ is a bitmask of 'SurfaceCounterFlagBitsEXT' -- indicating the supported surface counter types. 
-- -- #VUID-VkSurfaceCapabilities2EXT-supportedSurfaceCounters-01246# -- @supportedSurfaceCounters@ /must/ not include -- 'SURFACE_COUNTER_VBLANK_BIT_EXT' unless the surface queried is a -- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#wsi-display-surfaces display surface> supportedSurfaceCounters :: SurfaceCounterFlagsEXT } deriving (Typeable) #if defined(GENERIC_INSTANCES) deriving instance Generic (SurfaceCapabilities2EXT) #endif deriving instance Show SurfaceCapabilities2EXT instance ToCStruct SurfaceCapabilities2EXT where withCStruct x f = allocaBytes 72 $ \p -> pokeCStruct p x (f p) pokeCStruct p SurfaceCapabilities2EXT{..} f = do poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT) poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr) poke ((p `plusPtr` 16 :: Ptr Word32)) (minImageCount) poke ((p `plusPtr` 20 :: Ptr Word32)) (maxImageCount) poke ((p `plusPtr` 24 :: Ptr Extent2D)) (currentExtent) poke ((p `plusPtr` 32 :: Ptr Extent2D)) (minImageExtent) poke ((p `plusPtr` 40 :: Ptr Extent2D)) (maxImageExtent) poke ((p `plusPtr` 48 :: Ptr Word32)) (maxImageArrayLayers) poke ((p `plusPtr` 52 :: Ptr SurfaceTransformFlagsKHR)) (supportedTransforms) poke ((p `plusPtr` 56 :: Ptr SurfaceTransformFlagBitsKHR)) (currentTransform) poke ((p `plusPtr` 60 :: Ptr CompositeAlphaFlagsKHR)) (supportedCompositeAlpha) poke ((p `plusPtr` 64 :: Ptr ImageUsageFlags)) (supportedUsageFlags) poke ((p `plusPtr` 68 :: Ptr SurfaceCounterFlagsEXT)) (supportedSurfaceCounters) f cStructSize = 72 cStructAlignment = 8 pokeZeroCStruct p f = do poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT) poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr) poke ((p `plusPtr` 16 :: Ptr Word32)) (zero) poke ((p `plusPtr` 20 :: Ptr Word32)) (zero) poke ((p `plusPtr` 24 :: Ptr Extent2D)) (zero) poke ((p `plusPtr` 32 :: Ptr Extent2D)) (zero) poke ((p `plusPtr` 40 :: Ptr Extent2D)) (zero) poke ((p `plusPtr` 48 :: Ptr Word32)) (zero) poke ((p `plusPtr` 52 :: Ptr SurfaceTransformFlagsKHR)) (zero) poke ((p `plusPtr` 56 :: Ptr SurfaceTransformFlagBitsKHR)) (zero) poke ((p `plusPtr` 60 :: Ptr CompositeAlphaFlagsKHR)) (zero) poke ((p `plusPtr` 64 :: Ptr ImageUsageFlags)) (zero) f instance FromCStruct SurfaceCapabilities2EXT where peekCStruct p = do minImageCount <- peek @Word32 ((p `plusPtr` 16 :: Ptr Word32)) maxImageCount <- peek @Word32 ((p `plusPtr` 20 :: Ptr Word32)) currentExtent <- peekCStruct @Extent2D ((p `plusPtr` 24 :: Ptr Extent2D)) minImageExtent <- peekCStruct @Extent2D ((p `plusPtr` 32 :: Ptr Extent2D)) maxImageExtent <- peekCStruct @Extent2D ((p `plusPtr` 40 :: Ptr Extent2D)) maxImageArrayLayers <- peek @Word32 ((p `plusPtr` 48 :: Ptr Word32)) supportedTransforms <- peek @SurfaceTransformFlagsKHR ((p `plusPtr` 52 :: Ptr SurfaceTransformFlagsKHR)) currentTransform <- peek @SurfaceTransformFlagBitsKHR ((p `plusPtr` 56 :: Ptr SurfaceTransformFlagBitsKHR)) supportedCompositeAlpha <- peek @CompositeAlphaFlagsKHR ((p `plusPtr` 60 :: Ptr CompositeAlphaFlagsKHR)) supportedUsageFlags <- peek @ImageUsageFlags ((p `plusPtr` 64 :: Ptr ImageUsageFlags)) supportedSurfaceCounters <- peek @SurfaceCounterFlagsEXT ((p `plusPtr` 68 :: Ptr SurfaceCounterFlagsEXT)) pure $ SurfaceCapabilities2EXT minImageCount maxImageCount currentExtent minImageExtent maxImageExtent maxImageArrayLayers supportedTransforms currentTransform supportedCompositeAlpha supportedUsageFlags supportedSurfaceCounters instance Storable SurfaceCapabilities2EXT where sizeOf ~_ = 72 
alignment ~_ = 8 peek = peekCStruct poke ptr poked = pokeCStruct ptr poked (pure ()) instance Zero SurfaceCapabilities2EXT where zero = SurfaceCapabilities2EXT zero zero zero zero zero zero zero zero zero zero zero type SurfaceCounterFlagsEXT = SurfaceCounterFlagBitsEXT -- | VkSurfaceCounterFlagBitsEXT - Surface-relative counter types -- -- = See Also -- -- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_EXT_display_surface_counter VK_EXT_display_surface_counter>, -- 'SurfaceCounterFlagsEXT', -- 'Vulkan.Extensions.VK_EXT_display_control.getSwapchainCounterEXT' newtype SurfaceCounterFlagBitsEXT = SurfaceCounterFlagBitsEXT Flags deriving newtype (Eq, Ord, Storable, Zero, Bits, FiniteBits) -- | 'SURFACE_COUNTER_VBLANK_BIT_EXT' specifies a counter incrementing once -- every time a vertical blanking period occurs on the display associated -- with the surface. pattern SURFACE_COUNTER_VBLANK_BIT_EXT = SurfaceCounterFlagBitsEXT 0x00000001 conNameSurfaceCounterFlagBitsEXT :: String conNameSurfaceCounterFlagBitsEXT = "SurfaceCounterFlagBitsEXT" enumPrefixSurfaceCounterFlagBitsEXT :: String enumPrefixSurfaceCounterFlagBitsEXT = "SURFACE_COUNTER_VBLANK_BIT_EXT" showTableSurfaceCounterFlagBitsEXT :: [(SurfaceCounterFlagBitsEXT, String)] showTableSurfaceCounterFlagBitsEXT = [(SURFACE_COUNTER_VBLANK_BIT_EXT, "")] instance Show SurfaceCounterFlagBitsEXT where showsPrec = enumShowsPrec enumPrefixSurfaceCounterFlagBitsEXT showTableSurfaceCounterFlagBitsEXT conNameSurfaceCounterFlagBitsEXT (\(SurfaceCounterFlagBitsEXT x) -> x) (\x -> showString "0x" . showHex x) instance Read SurfaceCounterFlagBitsEXT where readPrec = enumReadPrec enumPrefixSurfaceCounterFlagBitsEXT showTableSurfaceCounterFlagBitsEXT conNameSurfaceCounterFlagBitsEXT SurfaceCounterFlagBitsEXT type EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION = 1 -- No documentation found for TopLevel "VK_EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION" pattern EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION :: forall a . Integral a => a pattern EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION = 1 type EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME = "VK_EXT_display_surface_counter" -- No documentation found for TopLevel "VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME" pattern EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a pattern EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME = "VK_EXT_display_surface_counter"
expipiplus1/vulkan
src/Vulkan/Extensions/VK_EXT_display_surface_counter.hs
bsd-3-clause
23,142
1
17
4,512
3,119
1,878
1,241
-1
-1
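A hedged sketch of how the query above might be used with the vulkan package: given an already-created PhysicalDevice and SurfaceKHR (instance, device and surface setup are omitted, and VK_EXT_display_surface_counter must be available), test whether the vblank counter is supported. The function name is invented for this example.

module VblankSupport (surfaceHasVblankCounter) where

import Control.Monad.IO.Class (MonadIO)
import Data.Bits ((.&.), zeroBits)
import Vulkan.Core10.Handles (PhysicalDevice)
import Vulkan.Extensions.VK_EXT_display_surface_counter

-- True when the surface reports the vblank counter in supportedSurfaceCounters.
surfaceHasVblankCounter :: MonadIO io => PhysicalDevice -> SurfaceKHR -> io Bool
surfaceHasVblankCounter physicalDevice surface = do
  caps <- getPhysicalDeviceSurfaceCapabilities2EXT physicalDevice surface
  let SurfaceCapabilities2EXT{supportedSurfaceCounters = counters} = caps
  pure (counters .&. SURFACE_COUNTER_VBLANK_BIT_EXT /= zeroBits)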
--
-- Module      : TimeScheme
-- Copyright   : (c) Conrad Parker 2006
-- License     : BSD-style
-- Maintainer  : [email protected]
-- Stability   : experimental
-- Portability : portable

module Codec.Container.Ogg.TimeScheme (
  TimeScheme (..),
  guessTimeScheme,

  -- | Some standard time schemes
  npt,
  smpte24,
  smpte24drop,
  smpte25,
  smpte25drop,
  smpte30,
  smpte30drop,
  smpte50,
  smpte60,
  smpte60drop
) where

import Data.Char
import Data.Maybe
import Data.Ratio

------------------------------------------------------------
-- TimeScheme
--

data TimeScheme =
  TimeScheme {
    timeSchemeName :: String,
    timeSchemeRate :: Rational
  }

-- | Standard TimeSchemes
knownTimeSchemes :: [TimeScheme]
knownTimeSchemes = [npt, smpte24, smpte24drop, smpte25, smpte25drop,
                    smpte30, smpte30drop, smpte50, smpte60, smpte60drop]

-- | Guess the TimeScheme by rate
guessTimeScheme :: Rational -> Maybe TimeScheme
guessTimeScheme r = listToMaybe $ filter sameRate knownTimeSchemes
  where sameRate = \x -> timeSchemeRate x == r

------------------------------------------------------------
-- Read
--

instance Read TimeScheme where
  readsPrec _ = readsTimeScheme

readsTimeScheme :: ReadS TimeScheme
readsTimeScheme [] = []
readsTimeScheme str@(c:_)
  | isDigit c = [(npt, str)]
  | otherwise = [(scheme, tail rest) | scheme <- matches]
  where
    (tok, rest) = span (\x -> isAlphaNum x || x == '-') str
    matches = filter sameName knownTimeSchemes
    sameName = \x -> l (timeSchemeName x) == l tok
    l = map toLower

------------------------------------------------------------
-- Show
--

instance Show TimeScheme where
  show = timeSchemeName

------------------------------------------------------------
-- Known TimeSchemes
--

npt :: TimeScheme
npt = TimeScheme "npt" (1000%1)

smpte24 :: TimeScheme
smpte24 = TimeScheme "smpte-24" (24%1)

smpte24drop :: TimeScheme
smpte24drop = TimeScheme "smpte-24-drop" (24000%1001)

smpte25 :: TimeScheme
smpte25 = TimeScheme "smpte-25" (25%1)

smpte25drop :: TimeScheme
smpte25drop = TimeScheme "smpte-25-drop" (25000%1001)

smpte30 :: TimeScheme
smpte30 = TimeScheme "smpte-30" (30%1)

smpte30drop :: TimeScheme
smpte30drop = TimeScheme "smpte-30-drop" (30000%1001)

smpte50 :: TimeScheme
smpte50 = TimeScheme "smpte-50" (50%1)

smpte60 :: TimeScheme
smpte60 = TimeScheme "smpte-60" (60%1)

smpte60drop :: TimeScheme
smpte60drop = TimeScheme "smpte-60-drop" (60000%1001)
kfish/hogg
Codec/Container/Ogg/TimeScheme.hs
bsd-3-clause
2,475
0
12
427
615
351
264
59
1
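A short usage sketch for the TimeScheme functions above, assuming Codec.Container.Ogg.TimeScheme is importable; the expected values in the comments follow from the definitions.

module TimeSchemeExample where

import Data.Ratio ((%))

import Codec.Container.Ogg.TimeScheme

main :: IO ()
main = do
  -- Look schemes up by frame rate.
  print (guessTimeScheme (25 % 1))        -- Just smpte-25
  print (guessTimeScheme (24000 % 1001))  -- Just smpte-24-drop
  -- The Read instance consumes the scheme name up to a separator character.
  print (reads "smpte-30:00:01:25" :: [(TimeScheme, String)])  -- [(smpte-30,"00:01:25")]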
module ScrabbleScoreKata.Day10Spec (spec) where import Test.Hspec import ScrabbleScoreKata.Day10 (score) spec :: Spec spec = do it "is zero when given an empty input" $ do score "" `shouldBe` 0 it "is 1 when given lowercase 'a'" $ do score "a" `shouldBe` 1 it "is 1 when given uppercase 'A'" $ do score "A" `shouldBe` 1 it "is 4 when given 'f'" $ do score "f" `shouldBe` 4 it "is 2 when given the word 'at'" $ do score "at" `shouldBe` 2 it "is 12 when given the word 'zoo'" $ do score "zoo" `shouldBe` 12 it "is 6 when given the word 'street'" $ do score "street" `shouldBe` 6 it "is 22 when given the word 'quirky'" $ do score "quirky" `shouldBe` 22 it "is 41 when given the word 'OxyphenButazone'" $ do score "OxyphenButazone" `shouldBe` 41 it "scores only english-like letters" $ do score "pinata" `shouldBe` 8 score "piñata" `shouldBe` 7
Alex-Diez/haskell-tdd-kata
old-katas/test/ScrabbleScoreKata/Day10Spec.hs
bsd-3-clause
1,082
0
11
391
268
128
140
26
1
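The module under test (ScrabbleScoreKata.Day10) is not included above. The sketch below is one hypothetical implementation of score that satisfies every case in the spec, scoring only the ASCII letters so that "piñata" loses the ñ.

module ScrabbleScoreKata.Day10 (score) where

import Data.Char (toLower)

score :: String -> Int
score = sum . map letterScore

letterScore :: Char -> Int
letterScore c = case toLower c of
  x | x `elem` "aeioulnrst" -> 1
    | x `elem` "dg"         -> 2
    | x `elem` "bcmp"       -> 3
    | x `elem` "fhvwy"      -> 4
    | x == 'k'              -> 5
    | x `elem` "jx"         -> 8
    | x `elem` "qz"         -> 10
    | otherwise             -> 0   -- anything outside a-z scores nothing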
-- !!! Testing error checking in qualified names (type variables) -- No qualified type variables module TestQual2 where x :: A.a x = x
FranklinChen/Hugs
tests/static/qual2.hs
bsd-3-clause
138
1
4
27
18
12
6
-1
-1
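The file above is deliberately ill-formed: it exercises the checker's rejection of qualified type variables. For contrast, the accepted spelling simply drops the module prefix, as in this small sketch.

module Qual2Fixed where

-- A type variable is never qualified, so 'a' is written bare.
x :: a
x = x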
{-# LANGUAGE NoMonomorphismRestriction #-} module Watch where import Control.Concurrent import Control.Monad import Control.Monad.IO.Class import Data.Bits import Data.List import qualified Data.Map as M import System.Console.ANSI import System.Directory import System.FilePath import System.FilePath.Glob import System.OSX.FSEvents import System.Posix.Files import Text.Printf import Watch.Investigate import Watch.Logger import Watch.Matches import Watch.Options import Watch.Spew watch root opts = do setCurrentDirectory root parent <- canonicalizePath root reset <- newEmptyMVar let l = runLogger (wLevel opts) forever $ do l $ quiet $ putStr "(Re)building dependency list..." concerns <- concernMap opts let ms = matches concerns l $ deafening $ putChar '\n' >> showConcerns concerns l $ quiet $ putStrLn "done." stream <- eventStreamCreate [parent] 1.0 True True True (l . handleEv reset parent ms) l $ quiet $ putStrLn "Starting watch..." () <- takeMVar reset eventStreamDestroy stream handleEv reset root matches ev = do let rel = makeRelative root (eventPath ev) key = takeDirectory rel pat = takeFileName rel unless ("~" `isSuffixOf` pat) $ do if takeExtension pat == ".hs" then do loud $ showEvent False ev quiet $ putStrLn "Source file modified." liftIO $ putMVar reset () else do let toTouch = map snd . filter (\ (gl, _) -> gl `match` pat) $ M.findWithDefault [] key matches loud $ showEvent (null toTouch) ev quiet $ case toTouch of [] -> return () [x] -> putStrLn $ " forcing recompile for: " ++ x xs -> do putStrLn " forcing recompile for:" mapM_ (putStrLn . (" * " ++)) xs liftIO $ mapM_ touchFile toTouch showEvent t ev | itemRemoved ev = pid ev >> red "[removed] " >> dump ev | itemModified ev = pid ev >> yellow "[modified] " >> dump ev | itemRenamed ev = pid ev >> yellow "[renamed] " >> dump ev | itemCreated ev = pid ev >> green "[created] " >> dump ev | mtime ev = pid ev >> yellow "[metadata] " >> dump ev | otherwise = pid ev >> putStrLn ("unknown event for " ++ eventPath ev) where dump ev = if t then putStrLn (eventPath ev) else color White (eventPath ev ++ "\n") pid ev = color Black (printf "%#0x " (eventId ev)) red = color Red green = color Green yellow = color Yellow color r s = setSGR [SetColor Foreground Dull r] >> putStr s >> setSGR [Reset] itemCreated ev = eventFlagItemCreated .&. eventFlags ev == eventFlagItemCreated && not (itemRemoved ev) itemRemoved ev = eventFlagItemRemoved .&. eventFlags ev == eventFlagItemRemoved itemRenamed ev = eventFlagItemRenamed .&. eventFlags ev == eventFlagItemRenamed itemModified ev = eventFlagItemModified .&. eventFlags ev == eventFlagItemModified isFile ev = eventFlagItemIsFile .&. eventFlags ev == eventFlagItemIsFile mtime ev = eventFlagItemInodeMetaMod .&. eventFlags ev == eventFlagItemInodeMetaMod
pikajude/src-watch
src/Watch.hs
bsd-3-clause
3,331
0
22
987
1,022
488
534
80
4
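A toy sketch of the matching step inside handleEv above: given a table from directory to (glob, target) pairs, the kind of structure Watch.Matches presumably produces, work out which files to touch when a path changes. The table contents and file names here are made up.

module MatchSketch where

import qualified Data.Map as M
import System.FilePath (takeDirectory, takeFileName)
import System.FilePath.Glob (Pattern, compile, match)

type Matches = M.Map FilePath [(Pattern, FilePath)]

-- Hypothetical table: a change to any .css file under "static" forces a
-- recompile of src/Templates.hs.
table :: Matches
table = M.singleton "static" [(compile "*.css", "src/Templates.hs")]

-- Mirrors the lookup in handleEv: split the changed path into directory key and
-- file name, then keep the targets whose glob matches the file name.
targetsFor :: Matches -> FilePath -> [FilePath]
targetsFor ms rel =
  let key = takeDirectory rel
      pat = takeFileName rel
  in  map snd . filter (\(gl, _) -> gl `match` pat) $ M.findWithDefault [] key ms

-- ghci> targetsFor table "static/site.css"
-- ["src/Templates.hs"]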
------------------------------------------------------------------------------- -- PARSING FUNCTIONS ------------------------------------------------------------------------------- module Parsing where import Instances import Haskell import Language.Haskell.Syntax import Language.Haskell.Parser import Data.List import Data.Word import Data.Char import Data.Time import Data.Maybe import System.IO import Control.Monad (liftM) import Niz showConcept :: Concept -> String showConcept (lang,arity,freq,start,last,exp) = "(" ++ lang ++ "," ++ show arity ++ "," ++ show freq ++ "," ++ show start ++ "," ++ show last ++ "," ++ showExp exp ++ ")" -- get all unit concepts from the set of examples makeUnitConcepts :: UTCTime -> [IP] -> [Concept] makeUnitConcepts time ips = nub $ concatMap concepts' ips where concepts' (IP lang lhs rhs _) = [(lang,0,1,time,time,exp) | exp <- getSubExp lhs] ++ [(lang,0,1,time,time,exp) | exp <- getSubExp rhs] -- get all unary concepts (unary functions) from the set of examples makeUnaryConcepts :: UTCTime -> [IP] -> [Concept] makeUnaryConcepts time ips = nub $ concatMap concepts' ips where concepts' (IP lang lhs rhs _) = [(lang,1,1,time,time,f) | (HsApp f@(HsVar _) _) <- getSubExp lhs] ++ [(lang,1,1,time,time,f) | (HsApp f@(HsCon _) _) <- getSubExp lhs] ++ [(lang,1,1,time,time,f) | (HsApp f@(HsVar _) _) <- getSubExp rhs] ++ [(lang,1,1,time,time,f) | (HsApp f@(HsCon _) _) <- getSubExp rhs] -- get all binary concepts (infix functions) from the set of examples makeBinaryConcepts :: UTCTime -> [IP] -> [Concept] makeBinaryConcepts time ips = nub $ concatMap concepts' ips where concepts' (IP lang lhs rhs _) = [(lang,2,1,time,time,HsVar f) | (HsInfixApp _ (HsQVarOp f) _) <- getSubExp lhs] ++ [(lang,2,1,time,time,HsVar f) | (HsInfixApp _ (HsQConOp f) _) <- getSubExp lhs] ++ [(lang,2,1,time,time,HsVar f) | (HsInfixApp _ (HsQVarOp f) _) <- getSubExp rhs] ++ [(lang,2,1,time,time,HsVar f) | (HsInfixApp _ (HsQConOp f) _) <- getSubExp rhs] insertInConcepts :: Concept -> [Concept] -> [Concept] insertInConcepts c [] = [c] insertInConcepts c@(l,a,f,first',last',e) cs = let nf = f + sum [f' | (l',a',f',_,_,e') <- cs, l' == l, a' == a, e' == e] firsts = [f | (l',a',f',f,_,e') <- cs, l' == l, a' == a, e' == e] first = if null firsts then first' else head firsts c' = (l,a,nf,first,last',e) result = c' : [x | x@(l',a',f',_,_,e') <- cs, (l' /= l || a' /= a || e' /= e)] in result -- | Parse the IP file, return positive and negative examples parseConceptsFile :: FilePath -> IO [Concept] parseConceptsFile file = do text <- readFileSafe file utf8 let inputs = concatMap parseCInput . map (\(x:xs) -> init xs) . filter (\(x:xs) -> x == '(' && (take 1 $ reverse xs) == ")") . filter (not . null) . map strip $ lines text c' <- mapM parseConcept $ inputs let c = concat c' putStrLn $ "Concepts: " ++ show (length c) return c -- | Parse the IP file, return positive and negative examples parseTrainingFile :: FilePath -> IO ([IP],[IP]) parseTrainingFile file = do text <- readFileSafe file utf8 let inputs = concatMap parseInput . map (\(x:xs) -> init xs) . filter (\(x:xs) -> x == '(' && (take 1 $ reverse xs) == ")") . filter (not . null) . map strip $ lines text putStrLn $ show (length inputs) ++ " axioms read from IP file." p <- mapM parseIP $ inputs let tags = nub $ map getTag $ concat p if length tags > 1 then do putStrLn $ "Error in IP: multiple tags used." 
return ([],[]) else do let p' = concat p let pos = filter (\x -> val x >= 0) p' let neg = filter (\x -> val x < 0) p' putStrLn $ "pos: " putStrLn $ concat $ intersperse "," $ map (" " ++) $ map prettyIP pos putStrLn $ "neg: " putStrLn $ concat $ intersperse "," $ map (" " ++) $ map prettyIP neg return $ (pos,neg) parseConcept :: Concept' -> IO [Concept] parseConcept (lang,0,freq,first,last,str) = do let a' = parseModule $ "main = " ++ str if not (parseSuccess a') then return [] else do let a1' = (\(HsModule _ _ _ _ dec) -> dec) $ getModule a' let exps = [lhs | (HsPatBind _ _ (HsUnGuardedRhs lhs) _) <- a1'] if null exps then return [] else do let exp = head exps return [(lang,0,freq,first,last,normalExp False exp)] parseConcept (lang,arit,freq,first,last,str@(s:_)) | isLetter s = return [(lang,arit,freq,first,last,HsVar (UnQual (HsIdent str)))] parseConcept (lang,arit,freq,first,last,str) = return [(lang,arit,freq,first,last,HsVar (UnQual (HsSymbol str)))] parseIP :: IP' -> IO [IP] parseIP (name,a,b,c) = do let a' = parseModule $ "main = " ++ a let b' = parseModule $ "main = " ++ b if not (parseSuccess a') || not (parseSuccess b') then return [] else do let a1' = (\(HsModule _ _ _ _ dec) -> dec) $ getModule a' let b1' = (\(HsModule _ _ _ _ dec) -> dec) $ getModule b' if null a1' || null b1' then return [] else do let a1 = head a1' let b1 = head b1' --putStrLn $ show a1 --putStrLn $ show b1 let (p,e) = mergeHsDecl a1 b1 let result = [IP name (normalExp True p) (normalExp True e) c] return result -- parses a line from Agent file, 3*0 ->>_Arith 0 parseEq :: String -> IO [Axiom] parseEp "" = return [] parseEq s = do let index1 = findInfixIndex " ->>_" s if index1 == Nothing then do let index2 = findInfixIndex " ->_" s if index2 == Nothing then return [] else do let a = take (fromJust index2) s let rest = drop (fromJust index2 + 4) s let s = takeWhile (/= ' ') rest let b = dropWhile (== ' ') $ dropWhile (/= ' ') rest if null a || null b || null s then return [] else do let a' = parseModule $ "main = " ++ a let b' = parseModule $ "main = " ++ b if not (parseSuccess a') || not (parseSuccess b') then return [] else do let a1 = (\(HsModule _ _ _ _ dec) -> dec) $ getModule a' let b1 = (\(HsModule _ _ _ _ dec) -> dec) $ getModule b' let a1' = [e | (HsPatBind _ _ (HsUnGuardedRhs e) _) <- a1] let b1' = [e | (HsPatBind _ _ (HsUnGuardedRhs e) _) <- b1] let result = [SArrow s (normalExp False (head a1')) (normalExp False (head b1'))] --appendFile "temp.txt" (show result ++ "\n") return result else do let a = take (fromJust index1) s let rest = drop (fromJust index1 + 5) s let s = takeWhile (/= ' ') rest let b = dropWhile (== ' ') $ dropWhile (/= ' ') rest if null a || null b || null s then do return [] else do let a' = parseModule $ "main = " ++ a let b' = parseModule $ "main = " ++ b if not (parseSuccess a') || not (parseSuccess b') then return [] else do let a1 = (\(HsModule _ _ _ _ dec) -> dec) $ getModule a' let b1 = (\(HsModule _ _ _ _ dec) -> dec) $ getModule b' let a1' = [e | (HsPatBind _ _ (HsUnGuardedRhs e) _) <- a1] let b1' = [e | (HsPatBind _ _ (HsUnGuardedRhs e) _) <- b1] let result = [DArrow s (normalExp False (head a1')) (normalExp False (head b1'))] --appendFile "temp.txt" (show result ++ "\n") return result {- let a1' = [ e | e@(HsPatBind _ _ (HsUnGuardedRhs (HsApp (HsVar (UnQual n)) p)) _) <- a1] let result1 = [HsMatch (SrcLoc "" 0 0) n [expToPat p] (HsUnGuardedRhs rhs) [] | (HsPatBind _ _ (HsUnGuardedRhs (HsApp (HsVar (UnQual n)) p)) _) <- a1', (HsPatBind _ _ (HsUnGuardedRhs rhs) _) <- 
b1] let result2 = [HsPatBind (SrcLoc "" 0 0) (expToPat lhs) (HsUnGuardedRhs rhs) [] | (HsPatBind _ _ (HsUnGuardedRhs lhs) _) <- a1 \\ a1', (HsPatBind _ _ (HsUnGuardedRhs rhs) _) <- b1] return $ [HsFunBind [normalMatch x] | x <- result1] ++ result2 -} isHsPatBind (HsPatBind _ _ (HsUnGuardedRhs _) _) = True isHsPatBind _ = False parseCInput :: String -> [Concept'] parseCInput "" = [] parseCInput s | not (',' `elem` s) = [] parseCInput s = if not (null name) && (>=0) arity && (>=0) freq && not (null value) then [(name,arity,freq,first,last,value)] else [] where name = strip $ takeWhile (/= ',') s s1 = strip $ drop 1 $ dropWhile (/= ',') s arity' = strip $ takeWhile (/=',') s1 arity = if not (null arity') && all isDigit arity' then (read arity' :: Int) else -1 s2 = strip $ drop 1 $ dropWhile (/= ',') s1 freq' = strip $ takeWhile (/=',') s2 freq = if not (null freq') && all isDigit freq' then (read freq' :: Int) else -1 s3 = strip $ drop 1 $ dropWhile (/= ',') s2 first' = strip $ takeWhile (/=',') s3 first = read first' :: UTCTime s4 = strip $ drop 1 $ dropWhile (/= ',') s3 last' = strip $ takeWhile (/=',') s4 last = read last' :: UTCTime value = strip $ drop 1 $ dropWhile (/= ',') s4 parseInput :: String -> [IP'] parseInput "" = [] parseInput s | not (',' `elem` s) = [] parseInput s' = if not (null name) && not (null term1) && not (null term2) -- && (/=0) value then [(name,term1, term2, value)] else [] where name = strip $ takeWhile (/= ',') s' s = strip $ drop 1 $ dropWhile (/= ',') s' (a',b') = break (==',') $ reverse s b = reverse (if not (null b') then tail b' else b') a = reverse a' value = if null a then 0 else if head a == '-' then if all isDigit (tail a) then (read a :: Int) else 0 else if all isDigit a then (read a :: Int) else 0 (x',y') = if (take 1 $ reverse b) == "]" then let (p,q) = break (=='[') $ reverse b in if null q then ([],[]) else (p ++ [head q], tail q) else break (==',') $ reverse b term1 = reverse $ if not (null y') then tail y' else y' term2 = reverse x' -- | Read and parse an agent from a file parseAgent :: FilePath -> IO (Maybe Agent) parseAgent f = do text <- readFileSafe f utf8 let com = takeWhile (\x -> not (isPrefixOf "-}" (strip x))) $ filter (not . null . strip) $ lines text let rest = dropWhile (\x -> not (isPrefixOf "-}" x)) $ filter (not . null) $ map strip $ lines text if null rest then do putStrLn $ "Module not found. Failed to parse file " ++ f ++ "." return Nothing else do let (modLine:restLines) = rest if null restLines then do putStrLn $ "Error: Empty agent file " ++ f ++ "." putStrLn $ " Agent file must define width, depth, solution, filename." return Nothing else do let axiomLines = filter (\x -> findInfixIndex " ->>_" x /= Nothing || findInfixIndex " ->_" x /= Nothing) restLines let conceptLines = filter (\x -> findInfixIndex " ->>_" x == Nothing && findInfixIndex " ->_" x == Nothing) restLines axioms' <- mapM parseEq axiomLines let axioms = nub $ concat axioms' let width = getWidth axioms let depth = getDepth axioms let sol = getSolution axioms let inputs = concatMap parseCInput . map (\(x:xs) -> init xs) . filter (\(x:xs) -> x == '(' && (take 1 $ reverse xs) == ")") . filter (not . null) . map strip $ conceptLines concepts <- (liftM concat) $ mapM parseConcept $ inputs --concepts <- parseConceptsFile cfile if width == 0 || depth == 0 then do putStrLn $ "Error: Width and depth parameters must be greater than zero." 
return Nothing else do putStrLn $ "Parsed agent " putStrLn $ "Width = " ++ show width putStrLn $ "Depth = " ++ show depth return $ Just $ Agent (unlines com) (width, depth, sol) (axioms,concepts) --return Nothing parseSuccess :: ParseResult HsModule -> Bool parseSuccess (ParseOk m) = True parseSuccess _ = False getWidth :: [Axiom] -> Int getWidth axioms = let r = [x | (DArrow "Param" (HsVar (UnQual (HsIdent "Width"))) (HsLit (HsInt x))) <- axioms] in if null r then 0 else (fromIntegral $ head r) getDepth :: [Axiom] -> Int getDepth axioms = let r = [x | (DArrow "Param" (HsVar (UnQual (HsIdent "Depth"))) (HsLit (HsInt x))) <- axioms] in if null r then 0 else (fromIntegral $ head r) getSolution :: [Axiom] -> Int getSolution axioms = let r = [x | (DArrow "Param" (HsVar (UnQual (HsIdent "Solution"))) (HsLit (HsInt x))) <- axioms] in if null r then 0 else (fromIntegral $ head r) -- merge the two HsDecl, one as lhs and one as rhs mergeHsDecl :: HsDecl -> HsDecl -> (HsExp,HsExp) mergeHsDecl (HsPatBind _ _ (HsUnGuardedRhs (HsApp (HsVar (UnQual name)) pat)) _) (HsPatBind _ _ (HsUnGuardedRhs exp) _) -- = normalDec $ HsFunBind [HsMatch (SrcLoc "" 0 0) name [expToPat pat] (HsUnGuardedRhs exp) []] = ((HsApp (HsVar (UnQual name)) pat), exp) mergeHsDecl (HsPatBind _ _ (HsUnGuardedRhs lhs) _) (HsPatBind _ _ (HsUnGuardedRhs exp) _) -- = normalDec $ HsPatBind (SrcLoc "" 0 0) (expToPat lhs) (HsUnGuardedRhs exp) [] = (lhs, exp) getModule :: ParseResult HsModule -> HsModule getModule (ParseOk m) = m findInfixIndex :: (Eq a) => [a] -> [a] -> Maybe Int findInfixIndex needle haystack = (\x -> if null x then Nothing else Just (fst $ head x)) . dropWhile (\(_,x) -> not $ isPrefixOf needle x) $ zip [0..] (tails haystack) prettyIP :: IP -> String prettyIP (IP name p e v) = name ++ ": " ++ showExp p ++ " = " ++ showExp e ++ ", " ++ show v
abdulrahimnizamani/OccamStar
Parsing.hs
gpl-2.0
14,670
61
38
4,732
5,636
2,915
2,721
-1
-1
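Aside: the axiom/concept split in parseAgent above hinges on findInfixIndex locating the " ->>_" and " ->_" markers in each line. Below is a minimal, self-contained sketch of that helper, re-stated from the file so it runs on its own; the sample inputs are made up for illustration.

import Data.List (isPrefixOf, tails)

-- Index of the first occurrence of `needle` inside `haystack`, if any.
findInfixIndex :: Eq a => [a] -> [a] -> Maybe Int
findInfixIndex needle haystack =
    (\x -> if null x then Nothing else Just (fst (head x)))
  . dropWhile (\(_, s) -> not (needle `isPrefixOf` s))
  $ zip [0 ..] (tails haystack)

main :: IO ()
main = do
  print (findInfixIndex " ->>_" "f x ->>_nat g x")  -- Just 3, so this would count as an axiom line
  print (findInfixIndex " ->_"  "no arrow here")    -- Nothing, no rewrite marker present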
{- This source file is a part of the noisefunge programming environment. Copyright (C) 2015 Rev. Johnny Healey <[email protected]> This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. -} {-# LANGUAGE TemplateHaskell, FlexibleContexts #-} module Language.NoiseFunge.Befunge (BefungeCommand(..), OperatorParams(..), BefungeThread, PID, BefungeStats, beatIn, commIn, deltaOut, errOut, tick, startBefungeThread) where import Control.Applicative import Control.Concurrent hiding (yield) import Control.Concurrent.STM import Control.Lens import Control.Monad import Control.Monad.State import Control.Monad.Writer import Control.Monad.RWS import Data.Default import qualified Data.Map as M import Language.NoiseFunge.Beat import Language.NoiseFunge.Befunge.VM import Language.NoiseFunge.Befunge.Process import Language.NoiseFunge.Befunge.Operator type BefungeStats = VMStats ProcessStats tick :: Fungine () tick = do q <- use quote fs <- use fnStack let opFn = case (q, fs) of (True, _) -> quoteOp (_, Just _) -> fnStackOp _ -> runOp getOp >>= opFn >> move ticks += 1 yield data BefungeCommand = AddProcess ProgArray String String String (Maybe (MVar PID)) | KillProcess (PID -> Bool) (Maybe String) data BefungeThread = BefungeThread { _beatIn :: TMVar Beat, _commIn :: TChan BefungeCommand, _deltaOut :: TChan (Beat, [(PID, ProcessState, Delta)], [(PID, Maybe String)], [BefungeStats]), _errOut :: TChan String } $(makeLenses ''BefungeThread) data BefungeState = BefungeState { _bfsVM :: FungeVM, _bfsOps :: OpSet, _bfsBeats :: [Beat], _bfsLast :: Beat } $(makeLenses ''BefungeState) startBefungeThread :: Tempo -> OperatorParams -> IO (BefungeThread) startBefungeThread temp params = do bfth <- BefungeThread <$> newEmptyTMVarIO <*> newTChanIO <*> newTChanIO <*> newTChanIO vm <- newVM let bst = BefungeState vm operators (beats temp) def void . forkIO $ void $ runStateT (befungeRunner temp params bfth) bst return bfth flattenDeltas :: Deltas s -> [(PID, s, Delta)] flattenDeltas ds = trip <$> M.toList flat where maps = [M.singleton p (s, d) | (p, s, d) <- ds []] unionm (_, d1) (s, d2) = (s, d1 <> d2) flat = foldr (M.unionWith unionm) M.empty maps trip (a,(b,c)) = (a,b,c) befungeRunner :: Tempo -> OperatorParams -> BefungeThread -> StateT BefungeState IO () befungeRunner temp params bfth = forever $ readIn >>= handle where bin = bfth^.beatIn cin = bfth^.commIn dout = bfth^.deltaOut readIn = liftIO $ atomically $ (Right <$> takeTMVar bin) `orElse` (Left <$> readTChan cin) beat5 = let f = (temp ##) in f . f . f . f . f handle (Right btin) = do nextBeats <- bfsBeats %%= (span (beat5 btin /=)) forM_ nextBeats $ \bt -> do vm <- use bfsVM ops <- use bfsOps let (vm', ops', ds) = runRWS (advance bt vm) params ops dead = [(pid, msg) | (pid, msg, _) <- vm'^.deadProcesses] vmstats = fmap (^.processStats) <$> vm'^.vmStats liftIO . 
atomically $ writeTChan dout (bt, (flattenDeltas ds), dead, vmstats) bfsVM .= vm' bfsOps .= ops' bfsLast .= btin handle (Left (AddProcess arr name inbuf outbuf mv)) = do let p = befungeProgram arr inbuf outbuf newp <- bfsVM %%= (addProcess name p) let pid = newp^.procID case mv of Nothing -> return () Just mv' -> liftIO $ putMVar mv' pid handle (Left (KillProcess fn r)) = do let r' = ("Killed" ++) <$> (((": "++) <$> r) <|> Just "") fn' p = if fn (p^.procID) then kill r' p else p killall = fmap fn' zoom bfsVM $ do processQueue %= killall zoom (buffers.traverse) $ do readQueue %= killall writeQueue %= killall befungeProgram :: ProgArray -> String -> String -> FungeProgram befungeProgram arr inp out = program ps $ do tellMem forever tick where ps = makeProcessState arr inp out
wolfspyre/noisefunge
src/Language/NoiseFunge/Befunge.hs
gpl-3.0
5,049
3
25
1,523
1,426
755
671
108
5
module Network.Haskoin.Crypto.Base58.Tests (tests) where import Data.String (fromString) import Data.String.Conversions (cs) import Network.Haskoin.Crypto import Network.Haskoin.Test import Test.Framework import Test.Framework.Providers.QuickCheck2 import Test.QuickCheck tests :: [Test] tests = [ testGroup "Address and Base58" [ testProperty "decode58 . encode58 == id" $ forAll arbitraryBS $ \bs -> decodeBase58 (encodeBase58 bs) == Just bs , testProperty "decode58Chk . encode58Chk == id" $ forAll arbitraryBS $ \bs -> decodeBase58Check (encodeBase58Check bs) == Just bs , testProperty "base58ToAddr . addrToBase58 == id" $ forAll arbitraryAddress $ \a -> base58ToAddr (addrToBase58 a) == Just a , testProperty "Read/Show address" $ forAll arbitraryAddress $ \a -> read (show a) == a , testProperty "From string address" $ forAll arbitraryAddress $ \a -> fromString (cs $ addrToBase58 a) == a ] ]
xenog/haskoin
test/bitcoin/Network/Haskoin/Crypto/Base58/Tests.hs
unlicense
1,214
0
14
435
273
144
129
26
1
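Aside: a minimal sketch of the first round-trip property tested above, run on one concrete input instead of a QuickCheck generator. It assumes the same haskoin version as this row and, like the test, never names the intermediate Base58 type, only composing encodeBase58 with decodeBase58.

import qualified Data.ByteString.Char8 as B8
import Network.Haskoin.Crypto (decodeBase58, encodeBase58)

main :: IO ()
main = do
  let bs = B8.pack "hello world"
  -- decode58 . encode58 == id, checked on a single example
  print (decodeBase58 (encodeBase58 bs) == Just bs)  -- True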
----------------------------------------------------------------------------- -- | -- Module : Distribution.Simple.CCompiler -- Copyright : 2011, Dan Knapp -- -- Maintainer : [email protected] -- Portability : portable -- -- This simple package provides types and functions for interacting with -- C compilers. Currently it's just a type enumerating extant C-like -- languages, which we call dialects. {- Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Isaac Jones nor the names of other contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -} module Distribution.Simple.CCompiler ( CDialect(..), cSourceExtensions, cDialectFilenameExtension, filenameCDialect ) where import Data.Monoid ( Monoid(..) ) import System.FilePath ( takeExtension ) -- | Represents a dialect of C. The Monoid instance expresses backward -- compatibility, in the sense that 'mappend a b' is the least inclusive -- dialect which both 'a' and 'b' can be correctly interpreted as. data CDialect = C | ObjectiveC | CPlusPlus | ObjectiveCPlusPlus deriving (Eq, Show) instance Monoid CDialect where mempty = C mappend C anything = anything mappend ObjectiveC CPlusPlus = ObjectiveCPlusPlus mappend CPlusPlus ObjectiveC = ObjectiveCPlusPlus mappend _ ObjectiveCPlusPlus = ObjectiveCPlusPlus mappend ObjectiveC _ = ObjectiveC mappend CPlusPlus _ = CPlusPlus mappend ObjectiveCPlusPlus _ = ObjectiveCPlusPlus -- | A list of all file extensions which are recognized as possibly containing -- some dialect of C code. Note that this list is only for source files, -- not for header files. cSourceExtensions :: [String] cSourceExtensions = ["c", "i", "ii", "m", "mi", "mm", "M", "mii", "cc", "cp", "cxx", "cpp", "CPP", "c++", "C"] -- | Takes a dialect of C and whether code is intended to be passed through -- the preprocessor, and returns a filename extension for containing that -- code. 
cDialectFilenameExtension :: CDialect -> Bool -> String cDialectFilenameExtension C True = "c" cDialectFilenameExtension C False = "i" cDialectFilenameExtension ObjectiveC True = "m" cDialectFilenameExtension ObjectiveC False = "mi" cDialectFilenameExtension CPlusPlus True = "cpp" cDialectFilenameExtension CPlusPlus False = "ii" cDialectFilenameExtension ObjectiveCPlusPlus True = "mm" cDialectFilenameExtension ObjectiveCPlusPlus False = "mii" -- | Infers from a filename's extension the dialect of C which it contains, -- and whether it is intended to be passed through the preprocessor. filenameCDialect :: String -> Maybe (CDialect, Bool) filenameCDialect filename = do extension <- case takeExtension filename of '.':ext -> Just ext _ -> Nothing case extension of "c" -> return (C, True) "i" -> return (C, False) "ii" -> return (CPlusPlus, False) "m" -> return (ObjectiveC, True) "mi" -> return (ObjectiveC, False) "mm" -> return (ObjectiveCPlusPlus, True) "M" -> return (ObjectiveCPlusPlus, True) "mii" -> return (ObjectiveCPlusPlus, False) "cc" -> return (CPlusPlus, True) "cp" -> return (CPlusPlus, True) "cxx" -> return (CPlusPlus, True) "cpp" -> return (CPlusPlus, True) "CPP" -> return (CPlusPlus, True) "c++" -> return (CPlusPlus, True) "C" -> return (CPlusPlus, True) _ -> Nothing
plumlife/cabal
Cabal/Distribution/Simple/CCompiler.hs
bsd-3-clause
4,922
0
11
1,130
630
353
277
57
17
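Aside: a small usage sketch for the dialect API above, assuming a Cabal tree that still ships Distribution.Simple.CCompiler as in this row; the expected outputs in the comments follow directly from the definitions shown.

import Distribution.Simple.CCompiler
import Data.Monoid (mappend)

main :: IO ()
main = do
  -- mappend picks the least inclusive dialect that covers both arguments
  print (ObjectiveC `mappend` CPlusPlus)               -- ObjectiveCPlusPlus
  -- sources that still need the preprocessor get a different extension
  putStrLn (cDialectFilenameExtension CPlusPlus True)  -- "cpp"
  putStrLn (cDialectFilenameExtension CPlusPlus False) -- "ii"
  -- and the reverse mapping recovers (dialect, needs preprocessing)
  print (filenameCDialect "Foo.mm")                    -- Just (ObjectiveCPlusPlus,True)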
{-# LANGUAGE TypeOperators, BangPatterns, ScopedTypeVariables #-} -- --------------------------------------------------------------------------- -- | -- Module : Data.Vector.Algorithms.Intro -- Copyright : (c) 2008-2011 Dan Doel -- Maintainer : Dan Doel <[email protected]> -- Stability : Experimental -- Portability : Non-portable (type operators, bang patterns) -- -- This module implements various algorithms based on the introsort algorithm, -- originally described by David R. Musser in the paper /Introspective Sorting -- and Selection Algorithms/. It is also in widespread practical use, as the -- standard unstable sort used in the C++ Standard Template Library. -- -- Introsort is at its core a quicksort. The version implemented here has the -- following optimizations that make it perform better in practice: -- -- * Small segments of the array are left unsorted until a final insertion -- sort pass. This is faster than recursing all the way down to -- one-element arrays. -- -- * The pivot for segment [l,u) is chosen as the median of the elements at -- l, u-1 and (u+l)/2. This yields good behavior on mostly sorted (or -- reverse-sorted) arrays. -- -- * The algorithm tracks its recursion depth, and if it decides it is -- taking too long (depth greater than 2 * lg n), it switches to a heap -- sort to maintain O(n lg n) worst case behavior. (This is what makes the -- algorithm introsort). {-@ LIQUID "--real" @-} module Data.Vector.Algorithms.Intro ( -- * Sorting sort , sortBy , sortByBounds -- * Selecting , select , selectBy , selectByBounds -- * Partial sorting , partialSort , partialSortBy , partialSortByBounds , Comparison ) where import Prelude hiding (read, length) import Control.Monad import Control.Monad.Primitive import Data.Bits import Data.Vector.Generic.Mutable import Data.Vector.Algorithms.Common (Comparison, shiftRI) import qualified Data.Vector.Algorithms.Insertion as I import qualified Data.Vector.Algorithms.Optimal as O import qualified Data.Vector.Algorithms.Heap as H -- | Sorts an entire array using the default ordering. {-@ sort :: (PrimMonad m, MVector v e, Ord e) => {v: (v (PrimState m) e) | 0 < (vsize v)} -> m () @-} sort :: (PrimMonad m, MVector v e, Ord e) => v (PrimState m) e -> m () sort = sortBy compare {-# INLINABLE sort #-} -- | Sorts an entire array using a custom ordering. {-@ sortBy :: (PrimMonad m, MVector v e) => Comparison e -> {v: (v (PrimState m) e) | 0 < (vsize v)} -> m () @-} sortBy :: (PrimMonad m, MVector v e) => Comparison e -> v (PrimState m) e -> m () sortBy cmp a = sortByBounds cmp a 0 (length a) {-# INLINE sortBy #-} -- | Sorts a portion of an array [l,u) using a custom ordering {-@ sortByBounds :: (PrimMonad m, MVector v e) => Comparison e -> vec:(v (PrimState m) e) -> l:(OkIdx vec) -> u:{v:Nat | (InRngL v l (vsize vec))} -> m () @-} sortByBounds :: (PrimMonad m, MVector v e) => Comparison e -> v (PrimState m) e -> Int -> Int -> m () sortByBounds cmp a l u | len < 2 = return () | len == 2 = O.sort2ByOffset cmp a l | len == 3 = O.sort3ByOffset cmp a l | len == 4 = O.sort4ByOffset cmp a l | otherwise = introsort cmp a (ilg len) l u where len = u - l {-# INLINE sortByBounds #-} -- Internal version of the introsort loop which allows partial -- sort functions to call with a specified bound on iterations. 
{-@ introsort :: (PrimMonad m, MVector v e) => Comparison e -> vec:(v (PrimState m) e) -> Nat -> l:(OkIdx vec) -> u:{v:Nat | (InRng v l (vsize vec))} -> m () @-} introsort :: (PrimMonad m, MVector v e) => Comparison e -> v (PrimState m) e -> Int -> Int -> Int -> m () introsort cmp a i l u = sort i l u >> I.sortByBounds cmp a l u where sort 0 l u = H.sortByBounds cmp a l u {- LIQUID WITNESS -} sort (d :: Int) l u | len < threshold = return () | otherwise = do O.sort3ByIndex cmp a c l (u-1) -- sort the median into the lowest position p <- unsafeRead a l mid <- partitionBy cmp a p (l+1) u unsafeSwap a l (mid - 1) sort (d-1) mid u sort (d-1) l (mid - 1) where len = u - l c = (u + l) `shiftRI` 1 -- `div` 2 {-# INLINE introsort #-} -- | Moves the least k elements to the front of the array in -- no particular order. {-@ select :: (PrimMonad m, MVector v e, Ord e) => (NeVec v m e) -> Pos -> m () @-} select :: (PrimMonad m, MVector v e, Ord e) => v (PrimState m) e -> Int -> m () select = selectBy compare {-# INLINE select #-} -- | Moves the least k elements (as defined by the comparison) to -- the front of the array in no particular order. {-@ selectBy :: (PrimMonad m, MVector v e) => (Comparison e) -> (NeVec v m e) -> Pos -> m () @-} selectBy :: (PrimMonad m, MVector v e) => Comparison e -> v (PrimState m) e -> Int -> m () selectBy cmp a k = selectByBounds cmp a k 0 (length a) {-# INLINE selectBy #-} -- | Moves the least k elements in the interval [l,u) to the positions -- [l,k+l) in no particular order. {-@ selectByBounds :: (PrimMonad m, MVector v e) => Comparison e -> vec:(NeVec v m e) -> Pos -> l:(OkIdx vec) -> u:{v:Nat | (InRngL v l (vsize vec))} -> m () @-} selectByBounds :: (PrimMonad m, MVector v e) => Comparison e -> v (PrimState m) e -> Int -> Int -> Int -> m () selectByBounds cmp a k l u | m >= u = return () -- LIQUID: changed, possible bugfix! (or did I add a bug?) | otherwise = go (ilg len) l u where len = u - l m = l + k {- LIQUID WITNESS -} go (0 :: Int) l u = H.selectByBounds cmp a k l u go n l u = do O.sort3ByIndex cmp a c l (u-1) p <- unsafeRead a l mid <- partitionBy cmp a p (l+1) u unsafeSwap a l (mid - 1) if m > mid then go (n-1) mid u else if m < mid - 1 then go (n-1) l (mid - 1) else return () where c = (u + l) `shiftRI` 1 -- `div` 2 {-# INLINE selectByBounds #-} -- | Moves the least k elements to the front of the array, sorted. {-@ partialSort :: (PrimMonad m, MVector v e, Ord e) => vec:(NeVec v m e) -> Pos -> m () @-} partialSort :: (PrimMonad m, MVector v e, Ord e) => v (PrimState m) e -> Int -> m () partialSort = partialSortBy compare {-# INLINE partialSort #-} -- | Moves the least k elements (as defined by the comparison) to -- the front of the array, sorted. {-@ partialSortBy :: (PrimMonad m, MVector v e) => (Comparison e) -> vec:(NeVec v m e) -> Pos -> m () @-} partialSortBy :: (PrimMonad m, MVector v e) => Comparison e -> v (PrimState m) e -> Int -> m () partialSortBy cmp a k = partialSortByBounds cmp a k 0 (length a) {-# INLINE partialSortBy #-} -- | Moves the least k elements in the interval [l,u) to the positions -- [l,k+l), sorted. {-@ partialSortByBounds :: (PrimMonad m, MVector v e) => Comparison e -> vec:(NeVec v m e) -> k:Pos -> l:(OkIdx vec) -> u:{v:Nat | (InRngL v l (vsize vec))} -> m () @-} partialSortByBounds :: (PrimMonad m, MVector v e) => Comparison e -> v (PrimState m) e -> Int -> Int -> Int -> m () partialSortByBounds cmp a k l u | m0 >= u = return () -- LIQUID: changed, possible bugfix! (or did I add a bug?) 
| otherwise = go (ilg len) l u m0 where isort = introsort cmp a {-# INLINE [1] isort #-} len = u - l m0 = l + k {-@ Decrease go 1 3 @-} go 0 l n _ = H.partialSortByBounds cmp a k l u go n l u (m :: Int) | l == m = return () | otherwise = do O.sort3ByIndex cmp a c l (u-1) p <- unsafeRead a l mid <- partitionBy cmp a p (l+1) u unsafeSwap a l (mid - 1) case compare m mid of GT -> do isort (n-1) l (mid - 1) go (n-1) mid u m EQ -> isort (n-1) l m LT -> go n l (mid - 1) m where c = (u + l) `shiftRI` 1 -- `div` 2 {-# INLINE partialSortByBounds #-} {-@ partitionBy :: forall m v e. (PrimMonad m, MVector v e) => Comparison e -> vec:(v (PrimState m) e) -> e -> l:(OkIdx vec) -> u:{v:Nat | (InRng v l (vsize vec))} -> m {v:Int | (InRng v l u)} @-} partitionBy :: forall m v e. (PrimMonad m, MVector v e) => Comparison e -> v (PrimState m) e -> e -> Int -> Int -> m Int partitionBy cmp a p l u = partUp p l u (u-l) where -- 6.10 panics without the signatures for partUp and partDown, 6.12 and later -- versions don't need them {-@ Decrease partUp 4 @-} {-@ Decrease partDown 4 @-} partUp :: e -> Int -> Int -> Int -> m Int partUp p l u _ | l < u = do e <- unsafeRead a l case cmp e p of LT -> partUp p (l+1) u (u-l-1) _ -> partDown p l (u-1) (u-l-1) | otherwise = return l partDown :: e -> Int -> Int -> Int -> m Int partDown p l u _ | l < u = do e <- unsafeRead a u case cmp p e of LT -> partDown p l (u-1) (u-l-1) _ -> unsafeSwap a l u >> partUp p (l+1) u (u-l-1) | otherwise = return l {-# INLINE partitionBy #-} -- computes the number of recursive calls after which heapsort should -- be invoked given the lower and upper indices of the array to be sorted {-@ ilg :: Pos -> Nat @-} ilg :: Int -> Int ilg m = 2 * loop m 0 where {-@ loop :: n:Nat -> {v:Nat | ((n = 0) => (v > 0))} -> Nat @-} loop :: Int -> Int -> Int loop 0 !k = k - 1 loop n !k = loop (n `shiftRI` 1) (k+1) -- the size of array at which the introsort algorithm switches to insertion sort threshold :: Int threshold = 18 {-# INLINE threshold #-}
abakst/liquidhaskell
benchmarks/vector-algorithms-0.5.4.2/Data/Vector/Algorithms/Intro.hs
bsd-3-clause
10,160
0
17
3,142
2,401
1,247
1,154
134
4
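Aside: a minimal sketch of how the introsort entry points above are typically driven from immutable vectors, assuming the standard vector and vector-algorithms packages; the LiquidHaskell annotations in this copy do not change the run-time API.

import qualified Data.Vector.Unboxed as U
import qualified Data.Vector.Algorithms.Intro as Intro

main :: IO ()
main = do
  let xs = U.fromList [5, 1, 4, 2, 3 :: Int]
  -- run the in-place sort on a mutable copy, then freeze it again
  print (U.modify Intro.sort xs)                                -- [1,2,3,4,5]
  -- partialSort moves the least k elements to the front, sorted;
  -- the order of the remaining elements is unspecified
  print (U.take 3 (U.modify (\v -> Intro.partialSort v 3) xs))  -- [1,2,3]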
{-# OPTIONS_GHC -fno-implicit-prelude #-} ----------------------------------------------------------------------------- -- | -- Module : Data.Bool -- Copyright : (c) The University of Glasgow 2001 -- License : BSD-style (see the file libraries/base/LICENSE) -- -- Maintainer : [email protected] -- Stability : experimental -- Portability : portable -- -- The 'Bool' type and related functions. -- ----------------------------------------------------------------------------- module Data.Bool ( -- * Booleans Bool(..), -- ** Operations (&&), -- :: Bool -> Bool -> Bool (||), -- :: Bool -> Bool -> Bool not, -- :: Bool -> Bool otherwise, -- :: Bool ) where #ifdef __GLASGOW_HASKELL__ import GHC.Base #endif #ifdef __NHC__ import Prelude import Prelude ( Bool(..) , (&&) , (||) , not , otherwise ) #endif
alekar/hugs
packages/base/Data/Bool.hs
bsd-3-clause
867
0
6
173
92
72
20
7
0
module GuardsIn3 where format :: [a] -> [[a]] -> [[a]] format sep xs = if len xs < 2 then xs else (head xs ++ sep) : format sep (tail xs) {- fmt = format "\n" -} fmt | len "blll" < 2 = (\xs -> xs) | otherwise = (\xs -> ((head xs) ++ "\n") : (format "\n" (tail xs))) len x = length x {- map2 xs = map len xs -} map2 xs = (case (len, xs) of (f, []) -> [] (f, (x : xs)) -> (f x) : (map f xs)) fac45 = fac 45 fac 1 = 1 fac n = n * fac (n-1)
kmate/HaRe
old/testing/generativeFold/GuardsIn3.hs
bsd-3-clause
499
0
12
173
281
150
131
16
2
{-# OPTIONS_GHC -cpp #-} {-# OPTIONS -fglasgow-exts #-} {-# LANGUAGE MultiParamTypeClasses, OverlappingInstances, UndecidableInstances, FunctionalDependencies, NoMonomorphismRestriction #-} module ExceptM (module ExceptM, HasExcept(..)) where import MT import Control_Monad_Fix import Control.Monad (liftM) {- use newtype to avoid conflicts with class instances! -} newtype ExceptM l r = ExceptM {removeExcept :: Either l r} {- not used removeExcept :: ExceptM x a -> Either x a removeExcept = id mapEither f g = either (Left . f) (Right . g) seqEither x = either (fmap Left) (fmap Right) x fromEither f (Right x)= x fromEither f (Left x) = f x unLeft (Left x) = x unLeft _ = error "unLeft" -} unRight (ExceptM (Right x)) = x unRight _ = error "unRight" instance Functor (ExceptM x) where fmap = liftM instance Monad (ExceptM x) where return = ExceptM . Right ExceptM (Right x) >>= f = f x ExceptM (Left x) >>= f = ExceptM (Left x) ExceptM (Right _) >> m = m ExceptM (Left x) >> m = ExceptM (Left x) instance HasExcept (ExceptM x) x where raise = ExceptM . Left handle h (ExceptM (Left x)) = h x handle h (ExceptM (Right x)) = ExceptM (Right x) instance MonadFix (ExceptM x) where mfix f = let a = f (unRight a) in a instance HasBaseMonad (ExceptM x) (ExceptM x) where inBase = id
kmate/HaRe
old/tools/base/lib/Monads/ExceptM.hs
bsd-3-clause
1,488
0
12
431
382
196
186
26
1
{-# LANGUAGE TypeFamilies, ConstraintKinds, PatternSynonyms, RankNTypes #-} module T13752 where newtype Arrange = Arrange {getArrange :: [Int] -> [Int]} pattern Heh :: (c ~ ((~) Int)) => (forall a. c a => [a] -> [a]) -> Arrange -- pattern Heh :: (forall a. (Int ~ a) => [a] -> [a]) -> Arrange pattern Heh f <- Arrange f
shlevy/ghc
testsuite/tests/patsyn/should_compile/T13752.hs
bsd-3-clause
323
0
12
61
98
57
41
-1
-1
{-# LANGUAGE Trustworthy #-} {-# LANGUAGE BangPatterns, GeneralizedNewtypeDeriving, NoImplicitPrelude #-} module GHC.Event.Unique ( UniqueSource , Unique(..) , newSource , newUnique ) where import Data.Int (Int64) import GHC.Base import GHC.Conc.Sync (TVar, atomically, newTVarIO, readTVar, writeTVar) import GHC.Num (Num(..)) import GHC.Show (Show(..)) -- We used to use IORefs here, but Simon switched us to STM when we -- found that our use of atomicModifyIORef was subject to a severe RTS -- performance problem when used in a tight loop from multiple -- threads: http://hackage.haskell.org/trac/ghc/ticket/3838 -- -- There seems to be no performance cost to using a TVar instead. newtype UniqueSource = US (TVar Int64) newtype Unique = Unique { asInt64 :: Int64 } deriving (Eq, Ord, Num) instance Show Unique where show = show . asInt64 newSource :: IO UniqueSource newSource = US `fmap` newTVarIO 0 newUnique :: UniqueSource -> IO Unique newUnique (US ref) = atomically $ do u <- readTVar ref let !u' = u+1 writeTVar ref u' return $ Unique u' {-# INLINE newUnique #-}
beni55/haste-compiler
libraries/ghc-7.8/base/GHC/Event/Unique.hs
bsd-3-clause
1,124
0
11
216
254
146
108
27
1
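Aside: a tiny usage sketch for the unique-source API above. GHC.Event.Unique is an internal module, so this assumes the source from this row is compiled locally rather than imported from an installed base.

import GHC.Event.Unique (newSource, newUnique)

main :: IO ()
main = do
  src <- newSource
  u1  <- newUnique src
  u2  <- newUnique src
  print u1          -- 1, shown via asInt64
  print u2          -- 2
  print (u1 < u2)   -- True; Eq and Ord are derived from the underlying Int64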
---------------------------------------------------------------- -- -- | aartifact -- http://www.aartifact.org/ -- -- Contributors to this module: -- David House -- Andrei Lapets -- -- @src\/MapUsingRBTree.hs@ -- -- Red-black tree implementation of data structure for -- no-removal finite maps. -- ---------------------------------------------------------------- -- module MapUsingRBTree where --(Map, emp, app, ran, preImg, set, domSize,ranSize, def, mapRan, appInDom, inDom) import Set ((\/)) ---------------------------------------------------------------- -- | Implementation. type Map a b = Tree a b -- Define the red-black tree. data Color = R | Bl deriving Show data Tree a b = NULL | Bran Color (Tree a b) (a,b) (Tree a b) deriving Show emp :: Tree a b emp = NULL domSize :: Tree a b -> Integer domSize t = sz t where sz (Bran _ t _ t') = 1 + sz t + sz t' sz NULL = 0 ranSize :: Eq b => Tree a b -> Integer ranSize t = toInteger $ length $ u t where u (Bran _ t (_,y) t') = [y] \/ u t \/ u t' u NULL = [] inDom :: (Eq a, Ord a) => Tree a b -> a -> Bool inDom t x = isIndex x t where isIndex e NULL = False isIndex e (Bran _ a (e1,_) b) | e < e1 = isIndex e a | e == e1 = True | e > e1 = isIndex e b appInDom :: (Eq a, Ord a) => a -> Tree a b -> b appInDom x (Bran _ t (x',y) t') | x < x' = appInDom x t | x == x' = y | x > x' = appInDom x t' app :: (Eq a, Ord a) => a -> Tree a b -> Maybe b app x t = lookup x (getIndex' x t) where -- findIndexB helper function getIndex' :: Ord a => a -> Tree a b -> [(a,b)] getIndex' e NULL = [] getIndex' e (Bran _ a (e1,i) b) | e < e1 = getIndex' e a | e == e1 = [(e,i)] | e > e1 = getIndex' e b ran :: (Eq a, Ord a) => Tree a b -> [b] ran t = acc [] t where acc l NULL = l acc l (Bran _ t (_,y) t') = y:acc (acc l t) t' mapRan :: (Eq a, Ord a) => (a -> b -> b) -> Tree a b -> Tree a b mapRan f t = mp t where mp (Bran c t (x,y) t') = Bran c (mp t) (x,f x y) (mp t') mp _ = NULL preImg :: (Eq a, Eq b, Ord a) => b -> Tree a b -> [a] preImg y t = acc [] t where acc l NULL = l acc l (Bran _ t (x,y') t') = if y==y' then x:acc (acc l t) t' else acc (acc l t) t' list :: (Eq a, Eq b, Ord a) => Tree a b -> [(a,b)] list t = case t of NULL -> [] Bran _ t1 xy t2 -> xy:(list t1 ++ list t2) set :: (Eq a, Ord a) => a -> b -> Map a b -> Map a b set x y m = def x y (\_ _->y) m -- Balanced by a-inequalities. def :: (Eq a, Ord a) => a -> b -> (a -> b -> b) -> Tree a b -> Tree a b def e i f s = blackify (ins s) where ins NULL = Bran R NULL (e,i) NULL ins (Bran color a (e1,y) b) | e < e1 = bal color (ins a) (e1,y) b | e == e1 = Bran color a (e1,f e1 y) b | e > e1 = bal color a (e1,y) (ins b) blackify(Bran _ a (e2,y) b) = Bran Bl a (e2,y) b -- Balancing function. bal Bl (Bran R (Bran R a x b) y c) z d = Bran R (Bran Bl a x b) y (Bran Bl c z d) bal Bl (Bran R a x (Bran R b y c)) z d = Bran R (Bran Bl a x b) y (Bran Bl c z d) bal Bl a x (Bran R (Bran R b y c) z d) = Bran R (Bran Bl a x b) y (Bran Bl c z d) bal Bl a x (Bran R b y (Bran R c z d)) = Bran R (Bran Bl a x b) y (Bran Bl c z d) bal color a x b = Bran color a x b --eof
aartifact/aartifact-verifier
src/MapUsingRBTree.hs
mit
3,249
0
11
944
1,840
941
899
67
6
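Aside: a short usage sketch for the red-black-tree map API above. It assumes MapUsingRBTree.hs and the Set module it imports are both on the source path; since the export list is commented out, every definition is visible.

import MapUsingRBTree

main :: IO ()
main = do
  let m = set "b" 2 (set "a" 1 emp) :: Map String Int
  print (app "a" m)    -- Just 1
  print (app "z" m)    -- Nothing
  print (inDom m "b")  -- True (note the order: map first, key second)
  print (domSize m)    -- 2
  print (preImg 2 m)   -- ["b"]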
{-# LANGUAGE OverloadedStrings, TypeFamilies, RankNTypes, DataKinds, TypeOperators, FlexibleContexts #-} module Session where import Web.Spock import Control.Monad.Trans import Config import Data.HVect (HVect(..), ListContains(..)) import Data.IORef import Data.Monoid import Data.Text (Text) import qualified Data.Text as T import Data.Text.Encoding(decodeUtf8) import Database.Persist.Postgresql (SqlBackend) import Network.Wai(rawPathInfo) import Routes import Utils.Password (PasswordHash, Password(..)) import qualified Utils.Password as Pwd type SiteApp = SpockM SqlBackend SiteSession SiteState () type SiteAction ctx a = SpockActionCtx ctx SqlBackend SiteSession SiteState a type SiteAdminAction a = forall n xs . ListContains n IsAdmin xs => SiteAction (HVect xs) a newtype SiteSession = SiteSession { logon :: LogonStatus } data SiteState = SiteState { appConfig :: AppConfig } data LogonStatus = Guest | Admin deriving (Eq, Show) emptySession :: SiteSession emptySession = SiteSession Guest adminLogon :: Maybe RedirectTo -> PasswordHash -> Password -> SiteAction ctx () adminLogon redirectTo hash pwd = if Pwd.verifyPassword hash pwd then do modifySession $ \ session -> session { logon = Admin } redirect redTo else do modifySession $ \ session -> session { logon = Guest } redirect $ renderRoute loginR where redTo = case redirectTo of (Just (RedirectTo url)) -> url Nothing -> "/" logout :: SiteAction ctx () logout = do modifySession $ \ session -> session { logon = Guest } redirect "/" data IsAdmin = IsAdmin adminHook :: m ~ WebStateM con SiteSession st => ActionCtxT (HVect xs) m (HVect (IsAdmin ': xs)) adminHook = do url <- decodeUtf8 . rawPathInfo <$> request oldCtx <- getContext sess <- readSession case logon sess of Guest -> redirect $ renderRoute loginR `T.append` "?redirect=" `T.append` url Admin -> return (IsAdmin :&: oldCtx)
CarstenKoenig/MyWebSite
src/Session.hs
mit
1,977
0
13
386
594
327
267
56
3
----------------------------------------------------------------------------- -- Copyright : (c) Hanzhong Xu, Meng Meng 2016, -- License : MIT License -- -- Maintainer : [email protected] -- Stability : experimental -- Portability : portable ----------------------------------------------------------------------------- {-# LANGUAGE Arrows #-} module Main where import Control.Arrow import Control.Monad import Data.List import Data.SF.SFM import Data.SF.IOSFM data Op = Stop | Up | Down | Time deriving Eq data EleEvt = EvStop | EvUp | EvDown | PushBottom Int deriving Eq type Elevator = SFM IO Op (Maybe EleEvt) newElevator :: Int -> IO Elevator newElevator n = simpleSFM f (n,1,Stop) where f :: (Int, Int, Op) -> Op -> IO ((Int, Int, Op), Maybe EleEvt) f (n,x,s) op = do if op == Stop then return ((n,x,Stop), Just EvStop) else if op == Up then return ((n,x,Up), Nothing) else if op == Down then return ((n,x,Down), Nothing) else if s == Up then if x+1 <= n then return ((n,x+1,Up),Just EvUp) else return ((n,n,Stop),Just EvStop) else if s == Down then if x-1 >= 1 then return ((n,x-1,Down),Just EvDown) else return ((n,1,Stop),Just EvStop) else return ((n,x,s), Nothing) type Scheduler = SFM IO EleEvt (Maybe Op) pushBottom :: Int -> Int -> [Int] -> [Int] pushBottom n x xs = if x > 0 then (take (x - 1) xs) ++ [1] ++ (drop x xs) else (take (n - x - 1) xs) ++ [1] ++ (drop (n - x) xs) stop :: Op -> Int -> Int -> [Int] -> [Int] stop op n x xs = if op == Up then (take (x - 1) xs) ++ [0] ++ (drop x xs) else (take (n + x - 1) xs) ++ [0] ++ (drop (n + x) xs) check :: Op -> Int -> Int -> [Int] -> Op check Up n x xs = if xs!!(x-1) == 0 then Up else Stop check Down n x xs = if xs!!(n+x-1) == 0 then Down else Stop check Stop n x xs = scheduler x xs scheduler :: Int -> [Int] -> Op scheduler x xs = case findIndex (==1) xs of Just y -> if y < x then Down else if y > x then Up else Stop Nothing -> Stop newScheduler :: Int -> IO Scheduler newScheduler n = simpleSFM f (n,1,Stop,replicate (2*n) 0) where f :: (Int, Int, Op, [Int]) -> EleEvt -> IO ((Int, Int, Op, [Int]), Maybe Op) f (n,x,op,xs) e = do if e == EvStop then do putStrLn $ "stop at " ++ show x let op' = check op n x xs in return ((n,x,op',stop op n x xs), Just op') else if e == EvUp then do putStrLn $ "going up " ++ show x let op' = check op n x xs in return ((n,x,op',xs), Just op') else if e == EvDown then do putStrLn $ "going down " ++ show x let op' = check op n x xs in return ((n,x,op',xs), Just op') else case e of PushBottom t -> do { putStrLn $ "push bottom: " ++ (if t > 0 then "Up " ++ show x else "Down " ++ show (-x)) ; let xs' = pushBottom n t xs; op' = check op n x xs' in return ((n,x,op',xs'), Just op') } -- timer :: Double -> a -> IO [a] -- timer = simpleSrcM main = do return ()
PseudoPower/AFSM
examples/SFM/Elevator.hs
mit
3,125
0
23
917
1,430
785
645
67
8
{-# LANGUAGE ExistentialQuantification #-} module SiteArchiver.DataStore(DataStore, createDataStore, disconnectDataStore, saveResponse, createJob, loadResponse, getStartUrlForJob, getSite, allSites, insertSite, jobsForSite) where import Control.Monad import Database.HDBC import Database.HDBC.Sqlite3 import SiteArchiver.Types import Data.Time.Clock.POSIX import Data.Maybe import Network.URI import Data.ByteString.Char8(unpack, readInt) import qualified Data.ByteString as BS import qualified Crypto.Hash.MD5 as MD5 data DataStore = forall conn. IConnection conn => DataStore conn createDataStore :: FilePath -> IO DataStore createDataStore dbPath = do conn <- connectSqlite3 dbPath createTables conn return $ DataStore conn disconnectDataStore :: DataStore -> IO () disconnectDataStore (DataStore conn) = disconnect conn createTables :: (IConnection c) => c -> IO () createTables conn = do mapM_ (runRaw conn) [createSiteSql, createArchiveJobSql, createResponseSql, createResponseBodySql] commit conn getStartUrlForJob :: DataStore -> ArchiveJobId -> IO (Maybe String) getStartUrlForJob (DataStore conn) jobId = do startUrlResponse <- quickQuery' conn "SELECT StartUrl FROM Site s JOIN ArchiveJob b ON s.Id = b.SiteId WHERE b.Id = ?" [SqlInt32 $ fromIntegral jobId] return $ case startUrlResponse of [[SqlByteString s]] -> Just (unpack s) _ -> Nothing getSite :: DataStore -> Int -> IO (Maybe Site) getSite ds siteId = fmap (listToMaybe . filter matchesId) (allSites ds) where matchesId (Site s _ _) = s == siteId allSites :: DataStore -> IO [Site] allSites (DataStore conn) = do sites <- quickQuery' conn "SELECT Id, Name, StartUrl FROM Site" [] return $ map parseSite sites where parseSite [siteId, SqlByteString name, SqlByteString url] = Site (fromSql siteId) (unpack name) (fromJust $ parseAbsoluteURI $ unpack url) insertSite :: DataStore -> String -> URI -> IO () insertSite (DataStore conn) name startUrl = run conn "INSERT INTO Site (Name, StartUrl) VALUES (?, ?)" [SqlString name, SqlString $ show startUrl] >> commit conn jobsForSite :: DataStore -> Int -> IO [ArchiveJob] jobsForSite (DataStore conn) siteId = do jobs <- quickQuery' conn "SELECT Id, Time FROM ArchiveJob WHERE SiteId = ?" [SqlInt32 $ fromIntegral siteId] return $ map parseJob jobs where parseJob [idValue, timeValue] = ArchiveJob (fromSql idValue) (fromSql timeValue) siteId saveResponse :: DataStore -> SavedResponse -> ArchiveJobId -> POSIXTime -> URI -> IO () saveResponse (DataStore conn) (SavedResponse code reason headers body) jobId time url = do let hash = MD5.hash body existingBody <- getSavedBody conn hash -- Save body if it is not already saved case existingBody of Nothing -> void $ run conn "INSERT INTO ResponseBody (Hash, Body) VALUES (?, ?)" [SqlByteString hash, SqlByteString body] Just _ -> return () --save Response void $ run conn "INSERT INTO Response (Url, Time, Headers, Reason, Code, BodyHash, ArchiveJobId) VALUES (?,?,?,?,?,?,?)" [SqlString $ show url, SqlPOSIXTime time, SqlString $ serialiseHeaders headers, SqlString reason, SqlInt32 $ fromIntegral code, SqlByteString hash, SqlInt32 $ fromIntegral jobId] commit conn loadResponse :: DataStore -> URI -> ArchiveJobId -> IO (Maybe SavedResponse) loadResponse (DataStore conn) url jobId = do responseData <- quickQuery' conn "SELECT Headers, Reason, Code, Body FROM Response r JOIN ResponseBody b on r.BodyHash = b.Hash WHERE ArchiveJobId = ? AND Url = ?" 
[SqlInt32 $ fromIntegral jobId, SqlString $ show url] return $ case responseData of [[SqlByteString h, SqlByteString r, SqlByteString c, SqlByteString b]] -> parseSqlResponse h r c b _ -> Nothing where parseSqlResponse h r c b = do code <- fmap fst (readInt c) let headers = parseHeaders (unpack h) let reason = unpack r Just $ SavedResponse code reason headers b serialiseHeaders :: [(String, String)] -> String serialiseHeaders = unlines . map (\(k, v) -> k ++ ':':v) parseHeaders :: String -> [(String, String)] parseHeaders = map (splitAtElement ':') . lines -- splitAtElement 4 [2,3,4,5,6,4,3] = ([2,3], [5,6,4,3]) splitAtElement :: (Eq a) => a -> [a] -> ([a], [a]) splitAtElement _ [] = ([], []) splitAtElement m (x:xs) | x == m = ([],xs) | otherwise = let (xs', xs'') = splitAtElement m xs in (x:xs', xs'') createJob :: DataStore -> POSIXTime -> Int -> IO Int createJob (DataStore conn) time siteId = do void $ run conn "INSERT INTO ArchiveJob (Time, SiteId) VALUES (?,?)" [SqlPOSIXTime time, SqlInt32 $ fromIntegral siteId] [[SqlInt64 jobId]] <- quickQuery' conn "SELECT last_insert_rowid()" [] commit conn return $ fromIntegral jobId getSavedBody :: IConnection a => a -> BS.ByteString -> IO (Maybe BS.ByteString) getSavedBody conn hash = do existingBody <- quickQuery' conn "SELECT Body FROM ResponseBody WHERE Hash = ?" [SqlByteString hash] case existingBody of [[SqlByteString body]] -> return $ Just body _ -> return Nothing -- TABLE CREATION createSiteSql :: String createSiteSql = "CREATE TABLE IF NOT EXISTS Site( \ \Id INTEGER,\ \Name TEXT,\ \StartUrl TEXT,\ \PRIMARY KEY(Id)\ \)" createArchiveJobSql :: String createArchiveJobSql = "CREATE TABLE IF NOT EXISTS ArchiveJob( \ \Id INTEGER PRIMARY KEY,\ \Time INTEGER,\ \SiteId INTEGER,\ \FOREIGN KEY(SiteId) REFERENCES Site(Id)\ \)" createResponseSql :: String createResponseSql = "CREATE TABLE IF NOT EXISTS Response( \ \Url TEXT,\ \Time INTEGER,\ \Headers TEXT,\ \Reason TEXT,\ \Code TEXT,\ \BodyHash BLOB,\ \ArchiveJobId INTEGER,\ \FOREIGN KEY(ArchiveJobId) REFERENCES ArchiveJob(Id)\ \FOREIGN KEY(BodyHash) REFERENCES ResponseBody(Id)\ \)" createResponseBodySql :: String createResponseBodySql = "CREATE TABLE IF NOT EXISTS ResponseBody( \ \Hash BLOB,\ \Body BLOB,\ \PRIMARY KEY(Hash)\ \)"
iansullivan88/site-archiver
src/SiteArchiver/DataStore.hs
mit
5,958
0
14
1,093
1,656
838
818
96
2
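Aside: the headers stored by saveResponse and read back by loadResponse above use a simple line-per-header, split-at-first-colon encoding. The helpers are re-stated below so the example runs on its own, with illustrative sample headers.

-- Split a list at the first occurrence of an element, dropping that element.
splitAtElement :: Eq a => a -> [a] -> ([a], [a])
splitAtElement _ [] = ([], [])
splitAtElement m (x:xs)
  | x == m    = ([], xs)
  | otherwise = let (ys, zs) = splitAtElement m xs in (x : ys, zs)

serialiseHeaders :: [(String, String)] -> String
serialiseHeaders = unlines . map (\(k, v) -> k ++ ':' : v)

parseHeaders :: String -> [(String, String)]
parseHeaders = map (splitAtElement ':') . lines

main :: IO ()
main = do
  let hs = [("Content-Type", "text/html"), ("Server", "nginx")]
  putStr (serialiseHeaders hs)
  -- round-trips as long as header names contain no ':' and values no newline
  print (parseHeaders (serialiseHeaders hs) == hs)  -- True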
{-# LANGUAGE OverloadedStrings #-} module Y2018.M07.D09.Solution where import Data.Aeson import Data.Aeson.Encode.Pretty (encodePretty) import Data.ByteString.Lazy.Char8 (ByteString) import qualified Data.ByteString.Lazy.Char8 as BL import Data.Map (Map) import Data.Maybe (fromJust) -- Today we look at some JSON: exDir, mondoJSON :: FilePath exDir = "Y2018/M07/D09/" mondoJSON = "vp_new_280k_july5.json" {-- The structure of the JSON is ... Actually, I don't know what the structure of this JSON is. It's big, it's all in one line, and it's not what the structure is reported to be. What it's reported to be is this: * vp280kcomplete_ALL : {'Totals': blah_dict , 'Per_Article': glah_dict} * vp280kcomplete_ALL['Totals'] --> {all extracted entities and the document uid in which they are found along with the individual document frequencies} * vp280kcomplete_ALL['Totals']['entity_blah']['Total'] --> inverse doc frequency of entity = 'entity_blah' * vp280kcomplete_ALL['Totals']['entity_blah']['Article_Freq'] --> articles the entity occurs in and the frequency in given document * vp280kcomplete_ALL['Per_Article'] --> key into particular uid and the value is another dictionary of the entities in that article and the document frequency But you see from the first few characters: {"Total": {"": {"Total": 15076828, "Articles_Freq": {"269066": 1150, ... That the reported structure and the actual structure do not match. Today's Haskell problem. Prettify the above JSON input so we can start to see what the actual structure of the JSON is. --} toMappage :: ByteString -> Map String Value toMappage = fromJust . decode prettify :: FilePath -> IO () prettify file = BL.readFile file >>= putStrLn . take 300 . BL.unpack . encodePretty . toMappage {-- >>> prettify (exDir ++ mondoJSON) { "Per_Article": { "270382": { "": 1648 }, "275206": { "": 580 }, "263193": { "": 1140 }, "271392": { "": 1734 }, "268091": { "": 1752 }, "262673": --} {-- BONUS ----------------------------------------------------------------- So, we know some of the actual structure from the first few characters: "Total" -> "" -> { ("Total", int), ("Articles_Freq" -> {(String,int)}) } Is there more structures in the JSON? What is it? --} structures :: FilePath -> IO () structures json = undefined -- output your results as a hierarchy of structure {-- So answering prettify answered structures, we have the "Total" structure (by inspecting the raw JSON), and now we have one more key-value pairing: "Per_Article" -> Map String (Map String Integer) Verified with: >>> json <- BL.readFile (exDir ++ mondoJSON) >>> mapo = toMappage json >>> length mapo 2 >>> Map.keys mapo ["Per_Article","Total"] TA-DAH! --}
geophf/1HaskellADay
exercises/HAD/Y2018/M07/D09/Solution.hs
mit
2,880
0
11
592
188
111
77
18
1
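Aside: a standalone sketch of the decode-then-pretty-print trick used above, on a tiny inline document shaped like the "Total" structure described in the comments. It decodes to a generic Value (the solution above goes one step further, into a Map String Value); the inline JSON is illustrative, not the real 280k-entity file.

import Data.Aeson (Value, decode)
import Data.Aeson.Encode.Pretty (encodePretty)
import qualified Data.ByteString.Lazy.Char8 as BL
import Data.Maybe (fromJust)

main :: IO ()
main = do
  let raw = BL.pack "{\"Total\": {\"\": {\"Total\": 3, \"Articles_Freq\": {\"269066\": 1}}}}"
      val = fromJust (decode raw) :: Value
  -- pretty-printing the generic value exposes the nesting at a glance
  BL.putStrLn (encodePretty val)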
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TemplateHaskell #-} module Main where import Control.Concurrent import Control.Exception (try) import qualified Data.ByteString.Char8 as BS import qualified Data.ByteString.Lazy as BL import qualified Data.Text as T import qualified Data.Text.Encoding as T import qualified Data.Text.Read as T import Network.Wai (Application) import Network.Wai.Application.Static import Network.Wai.Handler.Warp (run) import Network.Wai.Handler.WebSockets import Network.WebSockets import Static (mkEmbedded) import System.Environment (getArgs) import System.IO.Error import System.Posix.Daemonize (daemonize) import System.Posix.Pty import System.Process (ProcessHandle, terminateProcess, waitForProcess) import WaiAppStatic.Storage.Embedded import WaiAppStatic.Types main :: IO () main = getArgs >>= parse where parse :: [String] -> IO () parse ["-h"] = usage parse ["-v"] = version parse [port] = case parseTextNum $ T.pack port of Just p -> do putStrLn $ "hterm started @" ++ port putStrLn "daemonizing..." daemonize $ hterm p Nothing -> usage parse _ = usage usage = putStrLn "Usage: hterm [-vh] port" version = putStrLn "hterm 0.4.0" hterm :: Int -> IO () hterm port = run port $ websocketsOr defaultConnectionOptions socketServerApp staticServerApp staticServerApp :: Application staticServerApp = staticApp settingsWithIndex settingsWithIndex :: StaticSettings settingsWithIndex = settings { ssLookupFile = indexLookUp $ ssLookupFile settings } where settings = $(mkSettings mkEmbedded) indexLookUp :: (Pieces -> IO LookupResult) -> Pieces -> IO LookupResult indexLookUp lookup p = case p of [] -> lookup [unsafeToPiece "index.html"] p' -> lookup p' initPty :: IO (Pty, ProcessHandle) initPty = do (pty, hd) <- spawnWithPty (Just env) True "login" [] (100, 10) attrs <- getTerminalAttributes pty setTerminalAttributes pty (setCCs attrs) Immediately return (pty, hd) where setCCs = withCCs [ (Erase, '\DEL') , (Kill , '\NAK') ] withCCs ccs tty = foldl withCC tty ccs env = [("TERM", "xterm"), ("LC_ALL", "C")] socketServerApp :: PendingConnection -> IO () socketServerApp pc = do c <- acceptRequest pc forkPingThread c 30 (pty, hd) <- initPty pid <- forkIO $ respondToWs c (pty, hd) readFromWS c (pty, hd) pid where readFromWS :: Connection -> (Pty, ProcessHandle) -> ThreadId -> IO () readFromWS c (pty, hd) pid = do msg <- try $ receiveDataMessage c :: IO (Either ConnectionException DataMessage) case msg of Right (Text m) -> sendToPty pty m >> readFromWS c (pty, hd) pid _ -> writePty pty $ BS.singleton '\ETB' respondToWs :: Connection -> (Pty, ProcessHandle) -> IO () respondToWs c (pty, hd) = do res <- tryIOError $ readPty pty case res of Left _ -> cleanUp hd Right res' -> sendByteString c res' >> respondToWs c (pty, hd) where sendByteString c bs = catchIOError (send c . DataMessage . Text $ BL.fromStrict bs) $ \_ -> cleanUp hd cleanUp :: ProcessHandle -> IO () cleanUp hd = terminateProcess hd >> waitForProcess hd >> return () sendToPty :: Pty -> BL.ByteString -> IO () sendToPty pty input = do let input' = T.decodeUtf8 $ BL.toStrict input let first = T.head input' case first of 'R' -> case parsePtySize $ T.tail input' of Just size -> resizePty pty size Nothing -> return () 'S' -> writePty pty $ BL.toStrict $ BL.tail input _ -> return () parsePtySize :: T.Text -> Maybe (Int, Int) parsePtySize t = case map parseTextNum (T.splitOn "," t) of [Just w, Just h] -> Just (w, h) _ -> Nothing parseTextNum :: T.Text -> Maybe Int parseTextNum x = case T.decimal x of Right (x', _) -> Just x' _ -> Nothing
bitemyapp/hterm
Main.hs
mit
4,512
0
16
1,499
1,348
699
649
104
7
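Aside: a self-contained sketch of how sendToPty above handles resize messages: the payload after the leading 'R' is parsed as "<width>,<height>". The two helpers are re-stated from the file so the example runs on its own.

{-# LANGUAGE OverloadedStrings #-}
import qualified Data.Text as T
import qualified Data.Text.Read as T

parseTextNum :: T.Text -> Maybe Int
parseTextNum x = case T.decimal x of
  Right (n, _) -> Just n
  _            -> Nothing

parsePtySize :: T.Text -> Maybe (Int, Int)
parsePtySize t = case map parseTextNum (T.splitOn "," t) of
  [Just w, Just h] -> Just (w, h)
  _                -> Nothing

main :: IO ()
main = do
  print (parsePtySize "100,30")    -- Just (100,30): resize the pty to 100x30
  print (parsePtySize "100,30,1")  -- Nothing: malformed payloads are ignored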
import qualified Data.Matrix as Matrix import qualified Data.Map as Map import Data.Maybe relation_mat = Matrix.fromList 2 2 [4,1,1,0] init_mat = Matrix.fromList 2 1 [8,2] ------------------------------------------------------- -- compute_vector calculates the calculates a vector, -- given a relation matrix 'x' by multiplying it with -- 'init_mat' [8,2]. This function is to compute nth -- element of the even fibonacci series. ------------------------------------------------------- compute_vector :: Matrix.Matrix Integer -> [Integer] compute_vector x = Matrix.toList(x * init_mat) -------------------------------------------------------------------- -- exponentiate_until: -- Input: -- -> limit Integer -- -> pow Integer -- -> y Matrix.Matrix Integer -- -> z Map Integer (Matrix Integer) -- y is the last computed relation matrix -- z is a dictionary in which all the computed powers of initial -- matrix are stored and accessed from. pow shows to which power -- we have exponentiated. It is used as key in z. Value in a pair in -- z is a matrix of Integer(s), which is computed by exponentiating, -- with the previous matrix 'y' from the dictionary. -- Output: -- It recursively exponentiates the intial matrix until a value in -- the series is reached which: fn > limit -------------------------------------------------------------------- exponentiate_until :: Integer -> Integer -> Matrix.Matrix Integer -> Map.Map (Integer) (Matrix.Matrix Integer) -> Map.Map (Integer) (Matrix.Matrix Integer) exponentiate_until limit pow y z | fn > limit = z3 | otherwise = exponentiate_until limit p2 y2 z2 where y2 = y*y fn = last(compute_vector(y2)) p2 = 2*pow z2 = Map.insert pow y z z3 = Map.insert p2 y2 z2 -------------------------------------------------------------------- -------------------------------------------------------------------- -- binary_search gets the value for which -- >> f(n) < limit && f(n+1) >= limit -- by searching between two powers provided in array 'pow'. -- It also computes new powers of matrices between exponentiated -- powers in the 'dict'. -------------------------------------------------------------------- binary_search :: [Integer] -> Map.Map (Integer) (Matrix.Matrix Integer) -> Integer -> [Integer] binary_search pow dict limit | (last f_n < limit && head f_n >= limit) = f_n | (last f_nhalf < limit) = binary_search [p+p_half, p2] new_dict limit | otherwise = binary_search [p, p+p_half] new_dict limit where p = head pow p2 = last pow m_n = snd $ Map.elemAt (fromJust $ Map.lookupIndex p dict) dict f_2n = compute_vector $ snd $ Map.elemAt (fromJust $ Map.lookupIndex p2 dict) dict f_n = compute_vector $ m_n p_half = toInteger $ ceiling $ fromIntegral (p2-p)/2 m_nhalf = m_n * (snd $ Map.elemAt (fromJust $ Map.lookupIndex p_half dict) dict) f_nhalf = compute_vector $ m_nhalf new_dict = Map.insert (p + p_half) m_nhalf dict -------------------------------------------------------------------- -------------------------------------------------------------------- -- find_sum_even_fibonacci calculates sum of even fibonacci values -- which are less than 'limit'. It first exponentiates and then -- binary searches between f(m) and f(2m), such that f(n) < limit -- and f(n+1) >= limit and m < n <= 2m. 
-------------------------------------------------------------------- find_sum_even_fibonacci :: Integer -> Integer find_sum_even_fibonacci limit | limit <= 2 = 0 | otherwise = s where new_dict = Map.empty :: Map.Map (Integer) (Matrix.Matrix Integer) dict = exponentiate_until limit 1 relation_mat new_dict max_power = fst $ Map.findMax dict max_power_by_2 = toInteger $ ceiling $ fromIntegral max_power/2 fn_and_fn_plus_1 = binary_search [max_power_by_2, max_power] dict limit s = toInteger $ ceiling $ fromIntegral (sum(fn_and_fn_plus_1)- 2)/4 -------------------------------------------------------------------- main = do let limit = 4000000 putStrLn $ show $ find_sum_even_fibonacci limit
LuqmanSahaf/Solve-Project-Euler
Problem2/haskell/problem2.hs
mit
4,147
41
18
745
838
446
392
43
1
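Aside: a quick check of the recurrence that the relation matrix above encodes. Even Fibonacci numbers satisfy E(n+1) = 4*E(n) + E(n-1), so multiplying [[4,1],[1,0]] into the column vector [E(n), E(n-1)] steps the sequence forward; this assumes the same Data.Matrix package used in the row, and the expected values are the well-known even terms 2, 8, 34, 144.

import qualified Data.Matrix as Matrix

relationMat :: Matrix.Matrix Integer
relationMat = Matrix.fromList 2 2 [4, 1, 1, 0]

step :: Matrix.Matrix Integer -> Matrix.Matrix Integer
step v = relationMat * v

main :: IO ()
main = do
  let v0 = Matrix.fromList 2 1 [8, 2]     -- [E(2), E(1)]
  print (Matrix.toList (step v0))         -- [34,8],   so E(3) = 34
  print (Matrix.toList (step (step v0)))  -- [144,34], so E(4) = 144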
{-# LANGUAGE CPP, TypeFamilies, DeriveDataTypeable #-} module PGIP.GraphQL.Result.OMSSimple where import Data.Data data OMSSimple = OMSSimple { description :: Maybe String , displayName :: String , labelHasFree :: Bool , labelHasHiding :: Bool , locId :: String , name :: String , nameExtension :: String , nameExtensionIndex :: Int , origin :: String } deriving (Show, Typeable, Data)
spechub/Hets
PGIP/GraphQL/Result/OMSSimple.hs
gpl-2.0
632
0
9
303
95
60
35
13
0
{-# LANGUAGE TemplateHaskell #-} module Test.QuickFuzz.Derive.NFData where import Control.DeepSeq import Data.Derive.NFData import Data.DeriveTH import Language.Haskell.TH import Language.Haskell.TH.Syntax import Megadeth.Prim isArbInsName = isinsName ''NFData devNFData :: Name -> Q [Dec] devNFData = megaderive (derive makeNFData) isArbInsName
CIFASIS/QuickFuzz
src/Test/QuickFuzz/Derive/NFData.hs
gpl-3.0
352
0
7
41
84
50
34
11
1
module Connection (getConnection) where import Network.Connection import Network.IMAP import Network.IMAP.Types import Config (readConfig) import Types import Data.Yaml import Control.Monad.IO.Class (liftIO) import Data.Maybe import qualified Data.Text as T getConnection :: AccountConfig -> IO IMAPConnection getConnection acc = do let tls = TLSSettingsSimple False False False let params = ConnectionParams (T.unpack $ accountServer acc) (fromInteger $ accountPort acc) (Just tls) Nothing conn <- connectServer params Nothing login conn (accountLogin acc) (accountPassword acc) return conn
mkawalec/email
src/Connection.hs
gpl-3.0
635
0
13
115
188
99
89
19
1
module Dmp.SourceUpdate (updateSource) where import Text.ParserCombinators.Parsec qqString :: String qqString = "\n{-# LANGUAGE QuasiQuotes #-}\n" lhString :: String lhString = "\n\nimport LiquidHaskell\n" updateSource :: String -> Either ParseError String updateSource = parse pSourceTop "" pSourceTop :: CharParser st String pSourceTop = do header <- pHeader imports <- pImports return $ qqString ++ header ++ lhString ++ imports pIsImport = lookAhead $ try $ do string "\n" spaces string "import " pModuleDecl = try $ do string "module" anyChar `manyTill` try (string "where") pHeader = anyChar `manyTill` (pModuleDecl <|> pIsImport) pImports = many anyChar pBody = undefined -- TODO: Find some way to tell when import list has ended -- this is needed to eventually move options pragmas
christetreault/liquid-haskell-converter
lhconverter/Dmp/SourceUpdate.hs
gpl-3.0
843
0
11
170
203
106
97
24
1
{- Merch.Race.MapGen.Substep - Substep implementation for map generation code. Copyright 2013 Alan Manuel K. Gloria This file is part of Merchant's Race. Merchant's Race is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Merchant's Race is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with Merchant's Race. If not, see <http://www.gnu.org/licenses/>. -} module Merch.Race.MapGen.Substep ( SubMG , substep ) where import Merch.Race.MapGen.Monad newtype SubMG m a = SubMG { run :: (Rational -> Rational -> m a) } instance Monad m => Monad (SubMG m) where return = lift . return fail = lift . fail ma >>= f = SubMG $ \mult add -> run ma mult add >>= flip uncurry (mult, add) . run . f lift :: m a -> SubMG m a lift ma = SubMG $ \_ _ -> ma instance MapGenM m => MapGenM (SubMG m) where mgMapBounds = lift mgMapBounds mgGetTerrain = lift . mgGetTerrain mgPutTerrain h t = lift $ mgPutTerrain h t mgGetRoad = lift . mgGetRoad mgPutRoad h r = lift $ mgPutRoad h r mgAddSettlement s st h = lift $ mgAddSettlement s st h mgPutDistance s1 s2 d = lift $ mgPutDistance s1 s2 d mgStep = lift . mgStep mgProgress p = SubMG $ \mult add -> do mgProgress $ p * mult + add mgRandom = lift mgRandom mgNameGenerator = lift mgNameGenerator mgSettlementGenerator = lift mgSettlementGenerator mgRequiredTerrain = lift . mgRequiredTerrain substep :: MapGenM m => Rational -> Rational -> SubMG m a -> m a substep add mult ma = run ma mult add
AmkG/merchants-race
Merch/Race/MapGen/Substep.hs
gpl-3.0
1,908
0
12
399
438
226
212
31
1
{-# LANGUAGE TemplateHaskell #-} module NetEngine ( NetEngineOutput(..), NetEngineInput(..) , netEngine , gNEOMove, gNEOPacket, gNEOSetIterTimer , gNEOPeerConnected, gNEOGameIteration ) where import Control.Applicative import Control.Category import Control.FilterCategory import Codec.Compression.Zlib (decompress, compress) import Control.Monad ((>=>)) import Data.ADT.Getters import Data.ByteString.Lazy.Char8 (ByteString, pack, unpack) import Data.Function (on) import Data.List (isPrefixOf, partition) import Data.Map (Map, delete, fromList, insert, lookup, toList) import Data.Monoid (Monoid(..)) import FRP.Peakachu.Program import Network.Socket (SockAddr) import Prelude hiding ((.), id, lookup) data NetEngineState moveType idType = NetEngineState { neLocalQueue :: [(Integer, moveType)] , neQueue :: Map (Integer, idType) moveType , nePeers :: [idType] , nePeerAddrs :: [SockAddr] , neWaitingForPeers :: Bool , neGameIteration :: Integer , neOutputMove :: moveType , neLatencyIters :: Integer , neMyPeerId :: idType } data NetEngineOutput moveType idType = NEOMove moveType | NEOPeerConnected idType | NEOPacket String SockAddr | NEOSetIterTimer | NEOGameIteration Integer deriving Show $(mkADTGetters ''NetEngineOutput) data NetEngineInput moveType = NEIMove Integer moveType | NEIPacket String SockAddr | NEIMatching [SockAddr] | NEIIterTimer | NEITransmitTimer $(mkADTGetters ''NetEngineInput) data NetEngineMid moveType idType = AInput (NetEngineInput moveType) | AState (NetEngineState moveType idType) $(mkADTGetters ''NetEngineMid) data HelloType = LetsPlay | WereOn deriving (Read, Show, Eq) data NetEngPacket m i = Moves (Map (Integer, i) m) | Hello i HelloType deriving (Read, Show) magic :: String magic = "dtkffod!" atP :: FilterCategory cat => (a -> Maybe b) -> cat a b atP = mapMaybeC outPacket :: (Show a, Show i) => NetEngPacket a i -> SockAddr -> NetEngineOutput a i outPacket = NEOPacket . (magic ++) . withPack compress . show withPrev :: Program a (a, a) withPrev = (,) <$> delayP (1::Int) <*> id atChgOf :: Eq b => (a -> b) -> Program a a atChgOf onfunc = arrC snd . filterC (uncurry (on (/=) onfunc)) . withPrev netEngine :: (Monoid moveType, Ord peerIdType, Read moveType, Read peerIdType, Show moveType, Show peerIdType) => peerIdType -> Program (NetEngineInput moveType) (NetEngineOutput moveType peerIdType) netEngine myPeerId = mconcat [ NEOSetIterTimer <$ singleValueP , mconcat [ mconcat [ NEOMove . neOutputMove <$> id , NEOSetIterTimer <$ id , NEOGameIteration . neGameIteration <$> id ] . atChgOf neGameIteration . atP gAState , mconcat [ outPacket (Hello myPeerId WereOn) <$> flattenC . arrC nePeerAddrs , arrC NEOPeerConnected . filterC (/= myPeerId) . flattenC . arrC nePeers ] . atChgOf nePeerAddrs . atP gAState , flattenC . ( sendMoves <$> (lstP gAState <* atP (gAInput >=> gNEITransmitTimer)) ) ] . mconcat [ AState <$> scanlP netEngineStep (startState myPeerId) , arrC AInput ] , outPacket (Hello myPeerId LetsPlay) <$> flattenC . atP gNEIMatching -- for warnings , unwarn gNEIIterTimer , unwarn gNEIMove , unwarn gNEIPacket ] where unwarn x = undefined <$> filterC (const False) . atP x sendMoves state = outPacket (Moves (neQueue state)) <$> nePeerAddrs state startState :: (Monoid moveType, Ord peerIdType) => peerIdType -> NetEngineState moveType peerIdType startState myPeerId = NetEngineState { neLocalQueue = [] , neQueue = fromList [ ((i, myPeerId), mempty) | i <- [0 .. 
latencyIters-1] ] , nePeers = [myPeerId] , nePeerAddrs = mempty , neWaitingForPeers = False , neGameIteration = 0 , neOutputMove = mempty , neLatencyIters = latencyIters , neMyPeerId = myPeerId } where latencyIters = 5 netEngineStep :: (Monoid a, Ord i, Read a, Read i) => NetEngineState a i -> NetEngineInput a -> NetEngineState a i netEngineStep state (NEIMove iter move) = state { neLocalQueue = (iter, move) : neLocalQueue state } netEngineStep state NEIIterTimer = netEngineNextIter state netEngineStep state (NEIPacket contents sender) | magic `isPrefixOf` contents = processPacket state sender . read . withPack decompress . drop (length magic) $ contents | otherwise = state netEngineStep state _ = state withPack :: (ByteString -> ByteString) -> String -> String withPack = (unpack .) . (. pack) processPacket :: (Monoid a, Ord i) => NetEngineState a i -> SockAddr -> NetEngPacket a i -> NetEngineState a i processPacket state sender (Hello peerId _) | length (nePeers state) > 1 = state | otherwise = (startState myPeerId) { nePeers = [myPeerId, peerId] , nePeerAddrs = [sender] } where myPeerId = neMyPeerId state processPacket state _ (Moves moves) | neWaitingForPeers state = netEngineNextIter updState | otherwise = updState where updState = state { neQueue = mappend (neQueue state) . fromList . filter ((>= neGameIteration state) . fst . fst) . toList $ moves } netEngineNextIter :: (Monoid a, Ord i) => NetEngineState a i -> NetEngineState a i netEngineNextIter ne = case peerMoves of Nothing -> ne { neWaitingForPeers = True } Just move -> ne { neLocalQueue = futureMoves , neQueue = insert moveKey (mconcat (snd <$> nextMoves)) $ foldr delete (neQueue ne) delKeys , neGameIteration = iter + 1 , neOutputMove = move , neWaitingForPeers = False } where (nextMoves, futureMoves) = partition ((<= nextMoveIter) . fst) . neLocalQueue $ ne nextMoveIter = iter + neLatencyIters ne moveKey = (nextMoveIter, neMyPeerId ne) iter = neGameIteration ne moveKeys = (,) iter <$> nePeers ne delKeys = (,) (iter - neLatencyIters ne) <$> nePeers ne peerMoves = mconcat <$> sequence ((`lookup` neQueue ne) <$> moveKeys)
yairchu/defend
src/NetEngine.hs
gpl-3.0
6,122
0
18
1,400
1,971
1,069
902
180
2
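
For context on the API above, a minimal, hypothetical instantiation of netEngine; the concrete move type ([Int], standing in for a list of key codes) and the Int peer id are illustrative assumptions rather than types taken from the defend game itself.

-- Hypothetical instantiation (not from the repo): [Int] provides the required
-- Monoid instance, so mempty acts as the "no input" move that startState uses
-- to pre-fill the latency window; Int peer ids satisfy Ord/Read/Show.
exampleEngine :: Program (NetEngineInput [Int]) (NetEngineOutput [Int] Int)
exampleEngine = netEngine 0  -- 0 is this node's made-up peer id
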
module Utils
    ( module Utils.Generation
    , module Utils.Patch
    , module Utils.Console
    , module Utils.Shrink
    , module Utils.Unique
    ) where

import Utils.Generation
import Utils.Patch
import Utils.Console
import Utils.Shrink
import Utils.Unique
elopez/QuickFuzz
app/Utils.hs
gpl-3.0
241
0
5
31
61
39
22
11
0
{-# LANGUAGE ScopedTypeVariables #-} import Control.Applicative import Control.Monad.Trans import Control.Monad import Control.Concurrent import Data.IORef import Data.String import Network.HTTP.Client import System.Posix.Directory import System.Glib.GType import Graphics.UI.Gtk import Graphics.UI.Gtk.WebKit.WebView import Graphics.UI.Gtk.WebKit.WebFrame import Graphics.UI.Gtk.WebKit.WebInspector import Graphics.UI.Gtk.WebKit.WebSettings import Graphics.UI.Gtk.WebKit.DOM.Node import Graphics.UI.Gtk.WebKit.DOM.Document import Graphics.UI.Gtk.WebKit.DOM.Element import Graphics.UI.Gtk.WebKit.DOM.HTMLInputElement import Graphics.UI.Gtk.WebKit.DOM.HTMLScriptElement import Graphics.UI.Gtk.WebKit.DOM.EventM import Graphics.UI.Gtk.WebKit.DOM.UIEvent import Graphics.UI.Gtk.WebKit.DOM.MouseEvent webScrolledWindow wv = do sw <- scrolledWindowNew Nothing Nothing sw `containerAdd` wv return sw addressBar w wv uri = do addressBar <- entryNew entrySetText addressBar uri webViewLoadUri wv uri onEntryActivate addressBar $ do uri' <- entryGetText addressBar :: IO String webViewLoadUri wv uri' wv `on` loadCommitted $ \frame -> do muri' <- webFrameGetUri frame :: IO (Maybe String) case muri' of Just uri' -> entrySetText addressBar uri' Nothing -> return () return addressBar browserVBox ab sw = do box <- vBoxNew False 0 boxPackStart box ab PackNatural 0 boxPackStart box sw PackGrow 0 return box browserWindow uri = do w <- windowNew wv <- webViewNew sw <- webScrolledWindow wv ab <- addressBar w wv uri box <- browserVBox ab sw w `containerAdd` box w `onDestroy` mainQuit return (w,wv,ab) showTheFuckingWebInspector wv = do ws <- webViewGetWebSettings wv set ws [ webSettingsEnableDeveloperExtras := True ] webViewSetWebSettings wv ws i <- webViewGetInspector wv on i inspectWebView $ \_iwv -> do iwv <- webViewNew iw <- webScrolledWindow iwv widgetShowAll iw return iwv webInspectorShow i withMainGUI action = do _ <- initGUI w <- action widgetShowAll w mainGUI main = withMainGUI $ do cwd <- getWorkingDirectory (w,wv,ab) <- browserWindow "http://google.at" domCookies <- read <$> readFile "domCookies" :: IO [String] withDocument wv $ \d -> do putStrLn . ("Cookies: "++) =<< documentGetCookie d withDocumentOnce wv $ \d -> do putStrLn . 
("Cookies before injection: "++) =<< documentGetCookie d putStrLn $ "Cookies to be injected:\n>>>>>\n" ++ jsSetDomCookies domCookies ++ "<<<<" webViewExecuteScript wv $ (jsSetDomCookies domCookies) webViewReloadBypassCache wv return w -- _ <- documentOnsubmit d $ do -- liftIO $ putStrLn "documentOnsubmit" -- _ <- documentOnclick d $ do -- ctrl <- mouseCtrlKey -- coords@(x,y) <- liftM2 (,) uiPageX uiPageY -- liftIO $ do -- Just el <- documentElementFromPoint d x y -- -- putStrLn $ typeName $ typeFromInstance n -- nn <- nodeGetNodeName el -- t <- nodeGetTextContent el -- putStrLn ("textContent: " ++ t) -- putStrLn ("nodename: " ++ nn) -- print coords jsSetDomCookies :: [String] -> String jsSetDomCookies cs = concat $ map (\c -> "document.cookie = \"" ++ c ++ "\";\n") cs jsReload :: String jsReload = "window.location.reload(true);\n" withDocumentOnce :: WebView -> (Document -> IO ()) -> IO () withDocumentOnce wv action = void $ once wv documentLoadFinished $ \mask _ -> do mask Just d <- webViewGetDomDocument wv action d withDocument :: WebView -> (Document -> IO ()) -> IO () withDocument wv action = void $ on wv documentLoadFinished $ \_ -> do Just d <- webViewGetDomDocument wv action d injectScript :: Document -> String -> IO () injectScript d src = do Just body <- documentGetBody d Just sc' <- documentCreateElement d "script" let sc = castToHTMLScriptElement sc' htmlScriptElementSetText sc src nodeAppendChild body (Just sc) return () once :: forall object callback. GObjectClass object => object -> Graphics.UI.Gtk.Signal object callback -> (IO () -> callback) -> IO () once object signal action = do ref <- newIORef undefined :: IO (IORef (ConnectId object)) cid <- on object signal (action (readIORef ref >>= signalDisconnect)) writeIORef ref cid
DanielG/ohs
client-old/GTK.hs
gpl-3.0
4,577
0
15
1,116
1,249
622
627
112
2
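
As a small worked example of the cookie-injection helper defined above, jsSetDomCookies emits one document.cookie assignment per stored cookie string; the cookie values below are made up.

-- Illustrative only; "a=1" and "b=2" are invented cookie strings.
cookieScriptExample :: Bool
cookieScriptExample =
  jsSetDomCookies ["a=1", "b=2"]
    == "document.cookie = \"a=1\";\ndocument.cookie = \"b=2\";\n"
-- evaluates to True with the definition of jsSetDomCookies given above
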
module HEP.Automation.MadGraph.Dataset.Set20110710set4 where

import HEP.Storage.WebDAV.Type
import HEP.Automation.MadGraph.Model
import HEP.Automation.MadGraph.Machine
import HEP.Automation.MadGraph.UserCut
import HEP.Automation.MadGraph.SetupType
import HEP.Automation.MadGraph.Model.C8V
import HEP.Automation.MadGraph.Dataset.Processes
import HEP.Automation.JobQueue.JobType

processSetup :: ProcessSetup C8V
processSetup = PS { model = C8V
                  , process = preDefProcess TTBar0or1J
                  , processBrief = "TTBar0or1J"
                  , workname = "710_C8V_TTBar0or1J_TEV"
                  }

paramSet :: [ModelParam C8V]
paramSet = [ C8VParam { mnp = m, gnpR = g, gnpL = 0 } | m <- [600], g <- [1] ]

sets :: [Int]
sets = [1]

ucut :: UserCut
ucut = UserCut { uc_metcut = 15.0
               , uc_etacutlep = 2.7
               , uc_etcutlep = 18.0
               , uc_etacutjet = 2.7
               , uc_etcutjet = 15.0
               }

eventsets :: [EventSet]
eventsets =
  [ EventSet processSetup
             (RS { param = p
                 , numevent = 1000
                 , machine = TeVatron
                 , rgrun = Fixed
                 , rgscale = 200.0
                 , match = MLM
                 , cut = DefCut
                 , pythia = RunPYTHIA
                 , usercut = UserCutDef ucut -- NoUserCutDef --
                 , pgs = RunPGS
                 , jetalgo = Cone 0.4
                 , uploadhep = NoUploadHEP
                 , setnum = num
                 })
  | p <- paramSet
  , num <- sets
  ]

webdavdir :: WebDAVRemoteDir
webdavdir = WebDAVRemoteDir "paper3/test"
wavewave/madgraph-auto-dataset
src/HEP/Automation/MadGraph/Dataset/Set20110710set4.hs
gpl-3.0
1,623
0
10
563
368
232
136
47
1
module LocatorSpec where

import OHS.Types
import OHS.FormSubmission

import Data.List
import Text.XML.HXT.Core
import qualified Text.XML.HXT.DOM.ShowXml as XS

import Test.Hspec

spec = describe "Locator" $ do
  return ()

referenceSitesHaventChanged = undefined

runLArrow arrow = runLA (xshow (hread >>> arrow >>> indentDoc))

someLocator = [ LocatorNode "html" 0 (Just "id1") []
              , LocatorNode "body" 1 (Just "id2") []
              , LocatorNode "form" 0 (Just "id3") ["box", "important", "pretty"]
              ]

someHtml = "<html><head></head><body><a><b><c>\
           \<form id=\"id3\" class=\"pretty red imporant\">Hello world</form>\
           \</c></b></a><z class=\"important box\">OMG BY OUR SHOES</z></body></html>"

foo :: (ArrowXml a) => a XmlTree XmlTree
foo = deep (hasName "form")

-- test = do
-- let doc = readString[withParseHTML yes, withWarnings no] someHtml
-- res <- (runX $ doc >>> locate someLocator)
-- let sres = map (second (XS.xshow . (:[]))) res
-- nres = sort >>> group >>> map (length &&& head) >>> sortBy (\a b -> fst a `compare` fst b) $ sres
-- return nres

-- test2 :: [Locator]
-- test2 = runLA (hread >>> locatorFromId "id3") someHtml

-- id: "SignInForm"
DanielG/ohs
tests/LocatorSpec.hs
gpl-3.0
1,211
0
10
241
214
122
92
18
1
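
A rough sketch of what the helper combinators in this spec are for; the exact serialisation depends on HXT's hread and indentDoc, so the expected output is only indicated approximately.

-- foo selects <form> elements, so applying it via runLArrow to the fixture
-- should yield a serialisation containing the form subtree, roughly:
--
--   runLArrow foo someHtml
--     -- expected to include: <form id="id3" ...>Hello world</form>
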
module Syntax ( Name , Operation(..) , BindingKind(..) , BitCount , TupleFiled(Field) , Type(..) , isArray, isPointer, isTuple, isEmptyTuple, isArrayPointer, isFunction , ValueBinding(..), bindingName, bindingType, FunctionDeclaration(..) , funDeclToType, nameOfFunDecl , Expression(..), tagOfExpr , Statement(..) ) where import qualified Data.List as List import qualified Data.Maybe as Maybe type Name = String data Operation = Add | Sub | Mul | Div -- basic arithmetic | BitAnd | BitOr -- bitwise operations | LogNot -- logical not | Eq | Neq | Lt | Lte | Gt | Gte -- comparison | ArrayLen -- array lenght | ValRef | PtrDeRef -- value reference and pointer drreference | MemberOf | DeRefMemberOf -- . and -> deriving (Eq, Ord) -- TODO: keep this somehow in sync with similar table Parser instance Show Operation where show Add = "+" show Sub = "-" show Mul = "*" show Div = "/" show BitAnd = "&" show BitOr = "|" show LogNot = "not" show Eq = "==" show Neq = "/=" show Lt = "<" show Lte = "<=" show Gt = ">" show Gte = ">=" show ArrayLen = "#" show ValRef = "@" show PtrDeRef = "$" show MemberOf = "." show DeRefMemberOf = "->" data BindingKind = Immutable | Mutable deriving (Eq, Ord, Show) type BitCount = Int data TupleFiled = Field Name Type deriving (Eq, Ord, Show) data Type = TypeFloating BitCount | TypeInteger BitCount | TypeBoolean | TypeUnit | TypeArray Type | TypePointer Type | TypeFunction [Type] Type | TypeTuple Name [TupleFiled] | TypeUnknow Name | TypeBottom -- used by type checker for typing errors | TypeAuto -- used for type inference deriving (Eq, Ord) printFloatingType n | n == 64 = "double_t" | n == 32 = "float_t" | otherwise = "float" ++ show n ++ "_t" printIntegerType n | n == 64 = "int_t" | n == 8 = "byte_t" | otherwise = "int" ++ show n ++ "_t" showCommaSep :: Show a => [a] -> String showCommaSep = List.intercalate ", " . map show instance Show Type where show (TypeFloating n) = printFloatingType n show (TypeInteger n) = printIntegerType n show TypeBoolean = "bool_t" show TypeUnit = "unit_t" show (TypeArray t) = "[" ++ show t ++ "]" show (TypePointer t) = "^" ++ show t show (TypeUnknow name) = name ++ "?" show (TypeFunction args ret) = "(" ++ showCommaSep args ++ ") -> " ++ show ret show (TypeTuple name fields) = if name /= "" then name else "(" ++ fieldsStr ++ ")" where fieldsStr = showCommaSep $ map (\(Field _ t) -> t) fields show TypeBottom = "_bottom_" show TypeAuto = "_auto_" isArray :: Type -> Bool isArray TypeArray{} = True isArray _ = False isPointer :: Type -> Bool isPointer TypePointer{} = True isPointer _ = False isTuple :: Type -> Bool isTuple TypeTuple{} = True isTuple _ = False isEmptyTuple :: Type -> Bool isEmptyTuple (TypeTuple _ []) = True isEmptyTuple _ = False isArrayPointer :: Type -> Bool isArrayPointer (TypePointer ty) = isArray ty isArrayPointer _ = False isFunction :: Type -> Bool isFunction TypeFunction{} = True isFunction _ = False data ValueBinding = ValBind BindingKind Name Type deriving (Eq, Ord) bindingName :: ValueBinding -> Name bindingName (ValBind _ n _) = n bindingType :: ValueBinding -> Type bindingType (ValBind _ _ t) = t showBindingKind :: BindingKind -> String showBindingKind Immutable = "" showBindingKind Mutable = "!" 
instance Show ValueBinding where show (ValBind k n t) = showBindingKind k ++ n ++ " " ++ show t data FunctionDeclaration = FunDecl Name [ValueBinding] Type deriving (Eq, Ord, Show) funDeclToType :: FunctionDeclaration -> Type funDeclToType (FunDecl _ args retType) = TypeFunction (map (\(ValBind _ _ ty) -> ty) args) retType nameOfFunDecl :: FunctionDeclaration -> String nameOfFunDecl (FunDecl name _ _) = name data Expression tag = UnitExpr tag | UndefinedExpr tag | BooleanExpr Bool tag | IntegerExpr Int tag | CharacterExpr Char tag | FloatExpr Double tag | ArrayExpr [Expression tag] tag | AnonTupleExpr [Expression tag] tag | PrefixOpExpr Operation (Expression tag) tag | BinOpExpr Operation (Expression tag) (Expression tag) tag | ElementOfExpr (Expression tag) (Expression tag) tag | VarExpr Name tag | ValDeclExpr ValueBinding (Expression tag) tag | ValDestructuringExpr [ValueBinding] (Expression tag) tag | FunDeclExpr FunctionDeclaration (Statement tag) tag | ExtFunDeclExpr FunctionDeclaration tag | NamedTupleDeclExpr Name [ValueBinding] tag | CallExpr Name [Expression tag] tag | CastExpr Type (Expression tag) tag deriving (Eq, Ord) isCharLiteral :: Expression a -> Bool isCharLiteral CharacterExpr{} = True isCharLiteral _ = False isStringLiteral :: Expression a -> Bool isStringLiteral (ArrayExpr es _) = all isCharLiteral es isStringLiteral _ = False getLiteralChar :: Expression a -> Maybe Char getLiteralChar (CharacterExpr c _) = Just c getLiteralChar _ = Nothing getLiteralString :: Expression a -> String getLiteralString (ArrayExpr es _) = Maybe.mapMaybe getLiteralChar es getLiteralString _ = "" instance Show (Expression a) where show (BooleanExpr b _) = if b then "true" else "false" show (IntegerExpr i _) = show i show (CharacterExpr c _) = show c show (FloatExpr d _) = show d show e@(ArrayExpr es _) | isStringLiteral e = "\"" ++ getLiteralString e ++ "\"" | otherwise = "[" ++ showCommaSep es ++ "]" show (AnonTupleExpr es _) = "(" ++ showCommaSep es ++ ")" show (PrefixOpExpr op e _) = show op ++ show e show (BinOpExpr op e1 e2 _) = show e1 ++ " " ++ show op ++ " " ++ show e2 show (ElementOfExpr arr e _) = show arr ++ "[" ++ show e ++ "]" show (VarExpr n _) = n show (ValDeclExpr b e _) = "val " ++ show b ++ " = " ++ show e show (ValDestructuringExpr bs e _) = "val (" ++ showCommaSep bs ++ ") = " ++ show e show (FunDeclExpr decl stmt _) = "<TODO: fun decl>" show (NamedTupleDeclExpr n bs _) = "<TODO: tuple decl>" show (CallExpr n es _) = n ++ "(" ++ showCommaSep es ++ ")" show (CastExpr t e _) = show t ++ "(" ++ show e ++ ")" show expr = "<failed to print expression>" tagOfExpr :: Expression a -> a tagOfExpr (UnitExpr tag) = tag tagOfExpr (UndefinedExpr tag) = tag tagOfExpr (BooleanExpr _ tag) = tag tagOfExpr (IntegerExpr _ tag) = tag tagOfExpr (FloatExpr _ tag) = tag tagOfExpr (ArrayExpr _ tag) = tag tagOfExpr (AnonTupleExpr _ tag) = tag tagOfExpr (PrefixOpExpr _ _ tag) = tag tagOfExpr (BinOpExpr _ _ _ tag) = tag tagOfExpr (ElementOfExpr _ _ tag) = tag tagOfExpr (VarExpr _ tag) = tag tagOfExpr (ValDeclExpr _ _ tag) = tag tagOfExpr (ValDestructuringExpr _ _ tag) = tag tagOfExpr (FunDeclExpr _ _ tag) = tag tagOfExpr (ExtFunDeclExpr _ tag) = tag tagOfExpr (CallExpr _ _ tag) = tag tagOfExpr (CastExpr _ _ tag) = tag data Statement a = ReturnStmt (Expression a) | ExpressionStmt (Expression a) | BlockStmt [Statement a] | IfStmt (Expression a) (Statement a) | WhileStmt (Expression a) (Maybe (Statement a)) (Statement a) | AssignmentStmt (Expression a) (Expression a) deriving (Eq, Ord) instance Show 
(Statement a) where show s = printStmt 0 s indentation :: Int -> String indentation n = concat $ replicate n " " printStmt :: Int -> Statement a -> String printStmt n (ReturnStmt e) = indentation n ++ "return " ++ show e printStmt n (ExpressionStmt e) = indentation n ++ show e printStmt n (BlockStmt stmts) = "{\n" ++ List.intercalate "\n" (map (printStmt (n + 1)) stmts) ++ "\n" ++ indentation n ++ "}" printStmt n (IfStmt cond body) = indentation n ++ "if " ++ show cond ++ " " ++ printStmt n body printStmt n (WhileStmt cond update body) = indentation n ++ "while " ++ show cond ++ Maybe.maybe "" (\s -> "; " ++ printStmt 0 s) update ++ " " ++ printStmt n body printStmt n (AssignmentStmt lhs rhs) = indentation n ++ show lhs ++ " = " ++ show rhs
mbelicki/valdemar
src/Syntax.hs
gpl-3.0
8,396
0
15
2,190
2,968
1,530
1,438
218
1
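
To make the Show instances above concrete, a few worked renderings; the () annotation tag is an illustrative choice, since the real tag type used elsewhere in the compiler is not shown here.

-- All of these equalities hold with the instances defined above.
showExamples :: [Bool]
showExamples =
  [ show (TypePointer (TypeArray (TypeInteger 8)))      == "^[byte_t]"
  , show (TypeFunction [TypeBoolean] (TypeFloating 64)) == "(bool_t) -> double_t"
  , show (BinOpExpr Add (IntegerExpr 1 ()) (IntegerExpr 2 ()) ()) == "1 + 2"
  ]
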
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.Blogger.Posts.Get -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Get a post by ID. -- -- /See:/ <https://developers.google.com/blogger/docs/3.0/getting_started Blogger API Reference> for @blogger.posts.get@. module Network.Google.Resource.Blogger.Posts.Get ( -- * REST Resource PostsGetResource -- * Creating a Request , postsGet , PostsGet -- * Request Lenses , pggFetchBody , pggFetchImages , pggBlogId , pggMaxComments , pggView , pggPostId ) where import Network.Google.Blogger.Types import Network.Google.Prelude -- | A resource alias for @blogger.posts.get@ method which the -- 'PostsGet' request conforms to. type PostsGetResource = "blogger" :> "v3" :> "blogs" :> Capture "blogId" Text :> "posts" :> Capture "postId" Text :> QueryParam "fetchBody" Bool :> QueryParam "fetchImages" Bool :> QueryParam "maxComments" (Textual Word32) :> QueryParam "view" PostsGetView :> QueryParam "alt" AltJSON :> Get '[JSON] Post' -- | Get a post by ID. -- -- /See:/ 'postsGet' smart constructor. data PostsGet = PostsGet' { _pggFetchBody :: !Bool , _pggFetchImages :: !(Maybe Bool) , _pggBlogId :: !Text , _pggMaxComments :: !(Maybe (Textual Word32)) , _pggView :: !(Maybe PostsGetView) , _pggPostId :: !Text } deriving (Eq,Show,Data,Typeable,Generic) -- | Creates a value of 'PostsGet' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'pggFetchBody' -- -- * 'pggFetchImages' -- -- * 'pggBlogId' -- -- * 'pggMaxComments' -- -- * 'pggView' -- -- * 'pggPostId' postsGet :: Text -- ^ 'pggBlogId' -> Text -- ^ 'pggPostId' -> PostsGet postsGet pPggBlogId_ pPggPostId_ = PostsGet' { _pggFetchBody = True , _pggFetchImages = Nothing , _pggBlogId = pPggBlogId_ , _pggMaxComments = Nothing , _pggView = Nothing , _pggPostId = pPggPostId_ } -- | Whether the body content of the post is included (default: true). This -- should be set to false when the post bodies are not required, to help -- minimize traffic. pggFetchBody :: Lens' PostsGet Bool pggFetchBody = lens _pggFetchBody (\ s a -> s{_pggFetchBody = a}) -- | Whether image URL metadata for each post is included (default: false). pggFetchImages :: Lens' PostsGet (Maybe Bool) pggFetchImages = lens _pggFetchImages (\ s a -> s{_pggFetchImages = a}) -- | ID of the blog to fetch the post from. pggBlogId :: Lens' PostsGet Text pggBlogId = lens _pggBlogId (\ s a -> s{_pggBlogId = a}) -- | Maximum number of comments to pull back on a post. pggMaxComments :: Lens' PostsGet (Maybe Word32) pggMaxComments = lens _pggMaxComments (\ s a -> s{_pggMaxComments = a}) . mapping _Coerce -- | Access level with which to view the returned result. Note that some -- fields require elevated access. 
pggView :: Lens' PostsGet (Maybe PostsGetView) pggView = lens _pggView (\ s a -> s{_pggView = a}) -- | The ID of the post pggPostId :: Lens' PostsGet Text pggPostId = lens _pggPostId (\ s a -> s{_pggPostId = a}) instance GoogleRequest PostsGet where type Rs PostsGet = Post' type Scopes PostsGet = '["https://www.googleapis.com/auth/blogger", "https://www.googleapis.com/auth/blogger.readonly"] requestClient PostsGet'{..} = go _pggBlogId _pggPostId (Just _pggFetchBody) _pggFetchImages _pggMaxComments _pggView (Just AltJSON) bloggerService where go = buildClient (Proxy :: Proxy PostsGetResource) mempty
rueshyna/gogol
gogol-blogger/gen/Network/Google/Resource/Blogger/Posts/Get.hs
mpl-2.0
4,573
0
18
1,189
720
420
300
104
1
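
A hedged usage sketch for the generated request type above: gogol clients conventionally fill optional fields with lens operators (taken here from Control.Lens), and the blog and post ids below are placeholders rather than real Blogger ids.

{-# LANGUAGE OverloadedStrings #-}
import Control.Lens ((&), (?~))

-- Placeholder ids; a real call would pass actual Blogger blog/post ids and
-- then hand the value to the surrounding gogol runner (e.g. via 'send').
examplePostsGet :: PostsGet
examplePostsGet =
  postsGet "blogIdPlaceholder" "postIdPlaceholder"
    & pggFetchImages ?~ True  -- also fetch image URL metadata
    & pggMaxComments ?~ 10    -- cap comments pulled back per post
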
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.DoubleClickSearch.Types.Product -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- module Network.Google.DoubleClickSearch.Types.Product where import Network.Google.DoubleClickSearch.Types.Sum import Network.Google.Prelude -- | A row in a DoubleClick Search report. -- -- /See:/ 'reportRow' smart constructor. newtype ReportRow = ReportRow' { _rrAddtional :: HashMap Text JSONValue } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'ReportRow' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'rrAddtional' reportRow :: HashMap Text JSONValue -- ^ 'rrAddtional' -> ReportRow reportRow pRrAddtional_ = ReportRow' {_rrAddtional = _Coerce # pRrAddtional_} -- | Indicates the columns that are represented in this row. That is, each -- key corresponds to a column with a non-empty cell in this row. rrAddtional :: Lens' ReportRow (HashMap Text JSONValue) rrAddtional = lens _rrAddtional (\ s a -> s{_rrAddtional = a}) . _Coerce instance FromJSON ReportRow where parseJSON = withObject "ReportRow" (\ o -> ReportRow' <$> (parseJSONObject o)) instance ToJSON ReportRow where toJSON = toJSON . _rrAddtional -- | A request object used to create a DoubleClick Search report. -- -- /See:/ 'reportRequest' smart constructor. data ReportRequest = ReportRequest' { _rrMaxRowsPerFile :: !(Maybe (Textual Int32)) , _rrReportScope :: !(Maybe ReportRequestReportScope) , _rrStatisticsCurrency :: !(Maybe Text) , _rrTimeRange :: !(Maybe ReportRequestTimeRange) , _rrOrderBy :: !(Maybe [ReportRequestOrderByItem]) , _rrFilters :: !(Maybe [ReportRequestFiltersItem]) , _rrIncludeRemovedEntities :: !(Maybe Bool) , _rrIncludeDeletedEntities :: !(Maybe Bool) , _rrDownloadFormat :: !(Maybe Text) , _rrStartRow :: !(Maybe (Textual Int32)) , _rrColumns :: !(Maybe [ReportAPIColumnSpec]) , _rrReportType :: !(Maybe Text) , _rrVerifySingleTimeZone :: !(Maybe Bool) , _rrRowCount :: !(Maybe (Textual Int32)) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'ReportRequest' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'rrMaxRowsPerFile' -- -- * 'rrReportScope' -- -- * 'rrStatisticsCurrency' -- -- * 'rrTimeRange' -- -- * 'rrOrderBy' -- -- * 'rrFilters' -- -- * 'rrIncludeRemovedEntities' -- -- * 'rrIncludeDeletedEntities' -- -- * 'rrDownloadFormat' -- -- * 'rrStartRow' -- -- * 'rrColumns' -- -- * 'rrReportType' -- -- * 'rrVerifySingleTimeZone' -- -- * 'rrRowCount' reportRequest :: ReportRequest reportRequest = ReportRequest' { _rrMaxRowsPerFile = Nothing , _rrReportScope = Nothing , _rrStatisticsCurrency = Nothing , _rrTimeRange = Nothing , _rrOrderBy = Nothing , _rrFilters = Nothing , _rrIncludeRemovedEntities = Nothing , _rrIncludeDeletedEntities = Nothing , _rrDownloadFormat = Nothing , _rrStartRow = Nothing , _rrColumns = Nothing , _rrReportType = Nothing , _rrVerifySingleTimeZone = Nothing , _rrRowCount = Nothing } -- | Asynchronous report only. The maximum number of rows per report file. 
A -- large report is split into many files based on this field. Acceptable -- values are \`1000000\` to \`100000000\`, inclusive. rrMaxRowsPerFile :: Lens' ReportRequest (Maybe Int32) rrMaxRowsPerFile = lens _rrMaxRowsPerFile (\ s a -> s{_rrMaxRowsPerFile = a}) . mapping _Coerce -- | The reportScope is a set of IDs that are used to determine which subset -- of entities will be returned in the report. The full lineage of IDs from -- the lowest scoped level desired up through agency is required. rrReportScope :: Lens' ReportRequest (Maybe ReportRequestReportScope) rrReportScope = lens _rrReportScope (\ s a -> s{_rrReportScope = a}) -- | Specifies the currency in which monetary will be returned. Possible -- values are: \`usd\`, \`agency\` (valid if the report is scoped to agency -- or lower), \`advertiser\` (valid if the report is scoped to * advertiser -- or lower), or \`account\` (valid if the report is scoped to engine -- account or lower). rrStatisticsCurrency :: Lens' ReportRequest (Maybe Text) rrStatisticsCurrency = lens _rrStatisticsCurrency (\ s a -> s{_rrStatisticsCurrency = a}) -- | If metrics are requested in a report, this argument will be used to -- restrict the metrics to a specific time range. rrTimeRange :: Lens' ReportRequest (Maybe ReportRequestTimeRange) rrTimeRange = lens _rrTimeRange (\ s a -> s{_rrTimeRange = a}) -- | Synchronous report only. A list of columns and directions defining -- sorting to be performed on the report rows.\\ The maximum number of -- orderings per request is 300. rrOrderBy :: Lens' ReportRequest [ReportRequestOrderByItem] rrOrderBy = lens _rrOrderBy (\ s a -> s{_rrOrderBy = a}) . _Default . _Coerce -- | A list of filters to be applied to the report.\\ The maximum number of -- filters per request is 300. rrFilters :: Lens' ReportRequest [ReportRequestFiltersItem] rrFilters = lens _rrFilters (\ s a -> s{_rrFilters = a}) . _Default . _Coerce -- | Determines if removed entities should be included in the report. -- Defaults to \`false\`. rrIncludeRemovedEntities :: Lens' ReportRequest (Maybe Bool) rrIncludeRemovedEntities = lens _rrIncludeRemovedEntities (\ s a -> s{_rrIncludeRemovedEntities = a}) -- | Determines if removed entities should be included in the report. -- Defaults to \`false\`. Deprecated, please use \`includeRemovedEntities\` -- instead. rrIncludeDeletedEntities :: Lens' ReportRequest (Maybe Bool) rrIncludeDeletedEntities = lens _rrIncludeDeletedEntities (\ s a -> s{_rrIncludeDeletedEntities = a}) -- | Format that the report should be returned in. Currently \`csv\` or -- \`tsv\` is supported. rrDownloadFormat :: Lens' ReportRequest (Maybe Text) rrDownloadFormat = lens _rrDownloadFormat (\ s a -> s{_rrDownloadFormat = a}) -- | Synchronous report only. Zero-based index of the first row to return. -- Acceptable values are \`0\` to \`50000\`, inclusive. Defaults to \`0\`. rrStartRow :: Lens' ReportRequest (Maybe Int32) rrStartRow = lens _rrStartRow (\ s a -> s{_rrStartRow = a}) . mapping _Coerce -- | The columns to include in the report. This includes both DoubleClick -- Search columns and saved columns. For DoubleClick Search columns, only -- the \`columnName\` parameter is required. For saved columns only the -- \`savedColumnName\` parameter is required. Both \`columnName\` and -- \`savedColumnName\` cannot be set in the same stanza.\\ The maximum -- number of columns per request is 300. rrColumns :: Lens' ReportRequest [ReportAPIColumnSpec] rrColumns = lens _rrColumns (\ s a -> s{_rrColumns = a}) . _Default . 
_Coerce -- | Determines the type of rows that are returned in the report. For -- example, if you specify \`reportType: keyword\`, each row in the report -- will contain data about a keyword. See the [Types of -- Reports](\/search-ads\/v2\/report-types\/) reference for the columns -- that are available for each type. rrReportType :: Lens' ReportRequest (Maybe Text) rrReportType = lens _rrReportType (\ s a -> s{_rrReportType = a}) -- | If \`true\`, the report would only be created if all the requested stat -- data are sourced from a single timezone. Defaults to \`false\`. rrVerifySingleTimeZone :: Lens' ReportRequest (Maybe Bool) rrVerifySingleTimeZone = lens _rrVerifySingleTimeZone (\ s a -> s{_rrVerifySingleTimeZone = a}) -- | Synchronous report only. The maximum number of rows to return; -- additional rows are dropped. Acceptable values are \`0\` to \`10000\`, -- inclusive. Defaults to \`10000\`. rrRowCount :: Lens' ReportRequest (Maybe Int32) rrRowCount = lens _rrRowCount (\ s a -> s{_rrRowCount = a}) . mapping _Coerce instance FromJSON ReportRequest where parseJSON = withObject "ReportRequest" (\ o -> ReportRequest' <$> (o .:? "maxRowsPerFile") <*> (o .:? "reportScope") <*> (o .:? "statisticsCurrency") <*> (o .:? "timeRange") <*> (o .:? "orderBy" .!= mempty) <*> (o .:? "filters" .!= mempty) <*> (o .:? "includeRemovedEntities") <*> (o .:? "includeDeletedEntities") <*> (o .:? "downloadFormat") <*> (o .:? "startRow") <*> (o .:? "columns" .!= mempty) <*> (o .:? "reportType") <*> (o .:? "verifySingleTimeZone") <*> (o .:? "rowCount")) instance ToJSON ReportRequest where toJSON ReportRequest'{..} = object (catMaybes [("maxRowsPerFile" .=) <$> _rrMaxRowsPerFile, ("reportScope" .=) <$> _rrReportScope, ("statisticsCurrency" .=) <$> _rrStatisticsCurrency, ("timeRange" .=) <$> _rrTimeRange, ("orderBy" .=) <$> _rrOrderBy, ("filters" .=) <$> _rrFilters, ("includeRemovedEntities" .=) <$> _rrIncludeRemovedEntities, ("includeDeletedEntities" .=) <$> _rrIncludeDeletedEntities, ("downloadFormat" .=) <$> _rrDownloadFormat, ("startRow" .=) <$> _rrStartRow, ("columns" .=) <$> _rrColumns, ("reportType" .=) <$> _rrReportType, ("verifySingleTimeZone" .=) <$> _rrVerifySingleTimeZone, ("rowCount" .=) <$> _rrRowCount]) -- -- /See:/ 'reportRequestOrderByItem' smart constructor. data ReportRequestOrderByItem = ReportRequestOrderByItem' { _rrobiSortOrder :: !(Maybe Text) , _rrobiColumn :: !(Maybe ReportAPIColumnSpec) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'ReportRequestOrderByItem' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'rrobiSortOrder' -- -- * 'rrobiColumn' reportRequestOrderByItem :: ReportRequestOrderByItem reportRequestOrderByItem = ReportRequestOrderByItem' {_rrobiSortOrder = Nothing, _rrobiColumn = Nothing} -- | The sort direction, which is either \`ascending\` or \`descending\`. rrobiSortOrder :: Lens' ReportRequestOrderByItem (Maybe Text) rrobiSortOrder = lens _rrobiSortOrder (\ s a -> s{_rrobiSortOrder = a}) -- | Column to perform the sort on. This can be a DoubleClick Search-defined -- column or a saved column. rrobiColumn :: Lens' ReportRequestOrderByItem (Maybe ReportAPIColumnSpec) rrobiColumn = lens _rrobiColumn (\ s a -> s{_rrobiColumn = a}) instance FromJSON ReportRequestOrderByItem where parseJSON = withObject "ReportRequestOrderByItem" (\ o -> ReportRequestOrderByItem' <$> (o .:? "sortOrder") <*> (o .:? 
"column")) instance ToJSON ReportRequestOrderByItem where toJSON ReportRequestOrderByItem'{..} = object (catMaybes [("sortOrder" .=) <$> _rrobiSortOrder, ("column" .=) <$> _rrobiColumn]) -- | A DoubleClick Search report. This object contains the report request, -- some report metadata such as currency code, and the generated report -- rows or report files. -- -- /See:/ 'report' smart constructor. data Report = Report' { _rKind :: !(Maybe Text) , _rRows :: !(Maybe [ReportRow]) , _rStatisticsCurrencyCode :: !(Maybe Text) , _rIsReportReady :: !(Maybe Bool) , _rFiles :: !(Maybe [ReportFilesItem]) , _rId :: !(Maybe Text) , _rStatisticsTimeZone :: !(Maybe Text) , _rRowCount :: !(Maybe (Textual Int32)) , _rRequest :: !(Maybe ReportRequest) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'Report' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'rKind' -- -- * 'rRows' -- -- * 'rStatisticsCurrencyCode' -- -- * 'rIsReportReady' -- -- * 'rFiles' -- -- * 'rId' -- -- * 'rStatisticsTimeZone' -- -- * 'rRowCount' -- -- * 'rRequest' report :: Report report = Report' { _rKind = Nothing , _rRows = Nothing , _rStatisticsCurrencyCode = Nothing , _rIsReportReady = Nothing , _rFiles = Nothing , _rId = Nothing , _rStatisticsTimeZone = Nothing , _rRowCount = Nothing , _rRequest = Nothing } -- | Identifies this as a Report resource. Value: the fixed string -- \`doubleclicksearch#report\`. rKind :: Lens' Report (Maybe Text) rKind = lens _rKind (\ s a -> s{_rKind = a}) -- | Synchronous report only. Generated report rows. rRows :: Lens' Report [ReportRow] rRows = lens _rRows (\ s a -> s{_rRows = a}) . _Default . _Coerce -- | The currency code of all monetary values produced in the report, -- including values that are set by users (e.g., keyword bid settings) and -- metrics (e.g., cost and revenue). The currency code of a report is -- determined by the \`statisticsCurrency\` field of the report request. rStatisticsCurrencyCode :: Lens' Report (Maybe Text) rStatisticsCurrencyCode = lens _rStatisticsCurrencyCode (\ s a -> s{_rStatisticsCurrencyCode = a}) -- | Asynchronous report only. True if and only if the report has completed -- successfully and the report files are ready to be downloaded. rIsReportReady :: Lens' Report (Maybe Bool) rIsReportReady = lens _rIsReportReady (\ s a -> s{_rIsReportReady = a}) -- | Asynchronous report only. Contains a list of generated report files once -- the report has successfully completed. rFiles :: Lens' Report [ReportFilesItem] rFiles = lens _rFiles (\ s a -> s{_rFiles = a}) . _Default . _Coerce -- | Asynchronous report only. Id of the report. rId :: Lens' Report (Maybe Text) rId = lens _rId (\ s a -> s{_rId = a}) -- | If all statistics of the report are sourced from the same time zone, -- this would be it. Otherwise the field is unset. rStatisticsTimeZone :: Lens' Report (Maybe Text) rStatisticsTimeZone = lens _rStatisticsTimeZone (\ s a -> s{_rStatisticsTimeZone = a}) -- | The number of report rows generated by the report, not including -- headers. rRowCount :: Lens' Report (Maybe Int32) rRowCount = lens _rRowCount (\ s a -> s{_rRowCount = a}) . mapping _Coerce -- | The request that created the report. Optional fields not specified in -- the original request are filled with default values. rRequest :: Lens' Report (Maybe ReportRequest) rRequest = lens _rRequest (\ s a -> s{_rRequest = a}) instance FromJSON Report where parseJSON = withObject "Report" (\ o -> Report' <$> (o .:? 
"kind") <*> (o .:? "rows" .!= mempty) <*> (o .:? "statisticsCurrencyCode") <*> (o .:? "isReportReady") <*> (o .:? "files" .!= mempty) <*> (o .:? "id") <*> (o .:? "statisticsTimeZone") <*> (o .:? "rowCount") <*> (o .:? "request")) instance ToJSON Report where toJSON Report'{..} = object (catMaybes [("kind" .=) <$> _rKind, ("rows" .=) <$> _rRows, ("statisticsCurrencyCode" .=) <$> _rStatisticsCurrencyCode, ("isReportReady" .=) <$> _rIsReportReady, ("files" .=) <$> _rFiles, ("id" .=) <$> _rId, ("statisticsTimeZone" .=) <$> _rStatisticsTimeZone, ("rowCount" .=) <$> _rRowCount, ("request" .=) <$> _rRequest]) -- -- /See:/ 'reportFilesItem' smart constructor. data ReportFilesItem = ReportFilesItem' { _rfiURL :: !(Maybe Text) , _rfiByteCount :: !(Maybe (Textual Int64)) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'ReportFilesItem' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'rfiURL' -- -- * 'rfiByteCount' reportFilesItem :: ReportFilesItem reportFilesItem = ReportFilesItem' {_rfiURL = Nothing, _rfiByteCount = Nothing} -- | Use this url to download the report file. rfiURL :: Lens' ReportFilesItem (Maybe Text) rfiURL = lens _rfiURL (\ s a -> s{_rfiURL = a}) -- | The size of this report file in bytes. rfiByteCount :: Lens' ReportFilesItem (Maybe Int64) rfiByteCount = lens _rfiByteCount (\ s a -> s{_rfiByteCount = a}) . mapping _Coerce instance FromJSON ReportFilesItem where parseJSON = withObject "ReportFilesItem" (\ o -> ReportFilesItem' <$> (o .:? "url") <*> (o .:? "byteCount")) instance ToJSON ReportFilesItem where toJSON ReportFilesItem'{..} = object (catMaybes [("url" .=) <$> _rfiURL, ("byteCount" .=) <$> _rfiByteCount]) -- -- /See:/ 'reportRequestFiltersItem' smart constructor. data ReportRequestFiltersItem = ReportRequestFiltersItem' { _rrfiOperator :: !(Maybe Text) , _rrfiValues :: !(Maybe [JSONValue]) , _rrfiColumn :: !(Maybe ReportAPIColumnSpec) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'ReportRequestFiltersItem' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'rrfiOperator' -- -- * 'rrfiValues' -- -- * 'rrfiColumn' reportRequestFiltersItem :: ReportRequestFiltersItem reportRequestFiltersItem = ReportRequestFiltersItem' {_rrfiOperator = Nothing, _rrfiValues = Nothing, _rrfiColumn = Nothing} -- | Operator to use in the filter. See the filter reference for a list of -- available operators. rrfiOperator :: Lens' ReportRequestFiltersItem (Maybe Text) rrfiOperator = lens _rrfiOperator (\ s a -> s{_rrfiOperator = a}) -- | A list of values to filter the column value against.\\ The maximum -- number of filter values per request is 300. rrfiValues :: Lens' ReportRequestFiltersItem [JSONValue] rrfiValues = lens _rrfiValues (\ s a -> s{_rrfiValues = a}) . _Default . _Coerce -- | Column to perform the filter on. This can be a DoubleClick Search column -- or a saved column. rrfiColumn :: Lens' ReportRequestFiltersItem (Maybe ReportAPIColumnSpec) rrfiColumn = lens _rrfiColumn (\ s a -> s{_rrfiColumn = a}) instance FromJSON ReportRequestFiltersItem where parseJSON = withObject "ReportRequestFiltersItem" (\ o -> ReportRequestFiltersItem' <$> (o .:? "operator") <*> (o .:? "values" .!= mempty) <*> (o .:? 
"column")) instance ToJSON ReportRequestFiltersItem where toJSON ReportRequestFiltersItem'{..} = object (catMaybes [("operator" .=) <$> _rrfiOperator, ("values" .=) <$> _rrfiValues, ("column" .=) <$> _rrfiColumn]) -- | A message containing availability data relevant to DoubleClick Search. -- -- /See:/ 'availability' smart constructor. data Availability = Availability' { _aAgencyId :: !(Maybe (Textual Int64)) , _aAdvertiserId :: !(Maybe (Textual Int64)) , _aSegmentationId :: !(Maybe (Textual Int64)) , _aSegmentationName :: !(Maybe Text) , _aAvailabilityTimestamp :: !(Maybe (Textual Int64)) , _aSegmentationType :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'Availability' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'aAgencyId' -- -- * 'aAdvertiserId' -- -- * 'aSegmentationId' -- -- * 'aSegmentationName' -- -- * 'aAvailabilityTimestamp' -- -- * 'aSegmentationType' availability :: Availability availability = Availability' { _aAgencyId = Nothing , _aAdvertiserId = Nothing , _aSegmentationId = Nothing , _aSegmentationName = Nothing , _aAvailabilityTimestamp = Nothing , _aSegmentationType = Nothing } -- | DS agency ID. aAgencyId :: Lens' Availability (Maybe Int64) aAgencyId = lens _aAgencyId (\ s a -> s{_aAgencyId = a}) . mapping _Coerce -- | DS advertiser ID. aAdvertiserId :: Lens' Availability (Maybe Int64) aAdvertiserId = lens _aAdvertiserId (\ s a -> s{_aAdvertiserId = a}) . mapping _Coerce -- | The numeric segmentation identifier (for example, DoubleClick Search -- Floodlight activity ID). aSegmentationId :: Lens' Availability (Maybe Int64) aSegmentationId = lens _aSegmentationId (\ s a -> s{_aSegmentationId = a}) . mapping _Coerce -- | The friendly segmentation identifier (for example, DoubleClick Search -- Floodlight activity name). aSegmentationName :: Lens' Availability (Maybe Text) aSegmentationName = lens _aSegmentationName (\ s a -> s{_aSegmentationName = a}) -- | The time by which all conversions have been uploaded, in epoch millis -- UTC. aAvailabilityTimestamp :: Lens' Availability (Maybe Int64) aAvailabilityTimestamp = lens _aAvailabilityTimestamp (\ s a -> s{_aAvailabilityTimestamp = a}) . mapping _Coerce -- | The segmentation type that this availability is for (its default value -- is \`FLOODLIGHT\`). aSegmentationType :: Lens' Availability (Maybe Text) aSegmentationType = lens _aSegmentationType (\ s a -> s{_aSegmentationType = a}) instance FromJSON Availability where parseJSON = withObject "Availability" (\ o -> Availability' <$> (o .:? "agencyId") <*> (o .:? "advertiserId") <*> (o .:? "segmentationId") <*> (o .:? "segmentationName") <*> (o .:? "availabilityTimestamp") <*> (o .:? "segmentationType")) instance ToJSON Availability where toJSON Availability'{..} = object (catMaybes [("agencyId" .=) <$> _aAgencyId, ("advertiserId" .=) <$> _aAdvertiserId, ("segmentationId" .=) <$> _aSegmentationId, ("segmentationName" .=) <$> _aSegmentationName, ("availabilityTimestamp" .=) <$> _aAvailabilityTimestamp, ("segmentationType" .=) <$> _aSegmentationType]) -- | The request to update availability. -- -- /See:/ 'updateAvailabilityRequest' smart constructor. newtype UpdateAvailabilityRequest = UpdateAvailabilityRequest' { _uarAvailabilities :: Maybe [Availability] } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'UpdateAvailabilityRequest' with the minimum fields required to make a request. 
-- -- Use one of the following lenses to modify other fields as desired: -- -- * 'uarAvailabilities' updateAvailabilityRequest :: UpdateAvailabilityRequest updateAvailabilityRequest = UpdateAvailabilityRequest' {_uarAvailabilities = Nothing} -- | The availabilities being requested. uarAvailabilities :: Lens' UpdateAvailabilityRequest [Availability] uarAvailabilities = lens _uarAvailabilities (\ s a -> s{_uarAvailabilities = a}) . _Default . _Coerce instance FromJSON UpdateAvailabilityRequest where parseJSON = withObject "UpdateAvailabilityRequest" (\ o -> UpdateAvailabilityRequest' <$> (o .:? "availabilities" .!= mempty)) instance ToJSON UpdateAvailabilityRequest where toJSON UpdateAvailabilityRequest'{..} = object (catMaybes [("availabilities" .=) <$> _uarAvailabilities]) -- | A message containing the custom metric. -- -- /See:/ 'customMetric' smart constructor. data CustomMetric = CustomMetric' { _cmValue :: !(Maybe (Textual Double)) , _cmName :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'CustomMetric' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'cmValue' -- -- * 'cmName' customMetric :: CustomMetric customMetric = CustomMetric' {_cmValue = Nothing, _cmName = Nothing} -- | Custom metric numeric value. cmValue :: Lens' CustomMetric (Maybe Double) cmValue = lens _cmValue (\ s a -> s{_cmValue = a}) . mapping _Coerce -- | Custom metric name. cmName :: Lens' CustomMetric (Maybe Text) cmName = lens _cmName (\ s a -> s{_cmName = a}) instance FromJSON CustomMetric where parseJSON = withObject "CustomMetric" (\ o -> CustomMetric' <$> (o .:? "value") <*> (o .:? "name")) instance ToJSON CustomMetric where toJSON CustomMetric'{..} = object (catMaybes [("value" .=) <$> _cmValue, ("name" .=) <$> _cmName]) -- | A list of conversions. -- -- /See:/ 'conversionList' smart constructor. data ConversionList = ConversionList' { _clKind :: !(Maybe Text) , _clConversion :: !(Maybe [Conversion]) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'ConversionList' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'clKind' -- -- * 'clConversion' conversionList :: ConversionList conversionList = ConversionList' {_clKind = Nothing, _clConversion = Nothing} -- | Identifies this as a ConversionList resource. Value: the fixed string -- doubleclicksearch#conversionList. clKind :: Lens' ConversionList (Maybe Text) clKind = lens _clKind (\ s a -> s{_clKind = a}) -- | The conversions being requested. clConversion :: Lens' ConversionList [Conversion] clConversion = lens _clConversion (\ s a -> s{_clConversion = a}) . _Default . _Coerce instance FromJSON ConversionList where parseJSON = withObject "ConversionList" (\ o -> ConversionList' <$> (o .:? "kind") <*> (o .:? "conversion" .!= mempty)) instance ToJSON ConversionList where toJSON ConversionList'{..} = object (catMaybes [("kind" .=) <$> _clKind, ("conversion" .=) <$> _clConversion]) -- | A request object used to create a DoubleClick Search report. -- -- /See:/ 'reportAPIColumnSpec' smart constructor. 
data ReportAPIColumnSpec = ReportAPIColumnSpec' { _racsCustomDimensionName :: !(Maybe Text) , _racsSavedColumnName :: !(Maybe Text) , _racsGroupByColumn :: !(Maybe Bool) , _racsCustomMetricName :: !(Maybe Text) , _racsEndDate :: !(Maybe Text) , _racsProductReportPerspective :: !(Maybe Text) , _racsStartDate :: !(Maybe Text) , _racsHeaderText :: !(Maybe Text) , _racsPlatformSource :: !(Maybe Text) , _racsColumnName :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'ReportAPIColumnSpec' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'racsCustomDimensionName' -- -- * 'racsSavedColumnName' -- -- * 'racsGroupByColumn' -- -- * 'racsCustomMetricName' -- -- * 'racsEndDate' -- -- * 'racsProductReportPerspective' -- -- * 'racsStartDate' -- -- * 'racsHeaderText' -- -- * 'racsPlatformSource' -- -- * 'racsColumnName' reportAPIColumnSpec :: ReportAPIColumnSpec reportAPIColumnSpec = ReportAPIColumnSpec' { _racsCustomDimensionName = Nothing , _racsSavedColumnName = Nothing , _racsGroupByColumn = Nothing , _racsCustomMetricName = Nothing , _racsEndDate = Nothing , _racsProductReportPerspective = Nothing , _racsStartDate = Nothing , _racsHeaderText = Nothing , _racsPlatformSource = Nothing , _racsColumnName = Nothing } -- | Segments a report by a custom dimension. The report must be scoped to an -- advertiser or lower, and the custom dimension must already be set up in -- DoubleClick Search. The custom dimension name, which appears in -- DoubleClick Search, is case sensitive.\\ If used in a conversion report, -- returns the value of the specified custom dimension for the given -- conversion, if set. This column does not segment the conversion report. racsCustomDimensionName :: Lens' ReportAPIColumnSpec (Maybe Text) racsCustomDimensionName = lens _racsCustomDimensionName (\ s a -> s{_racsCustomDimensionName = a}) -- | Name of a saved column to include in the report. The report must be -- scoped at advertiser or lower, and this saved column must already be -- created in the DoubleClick Search UI. racsSavedColumnName :: Lens' ReportAPIColumnSpec (Maybe Text) racsSavedColumnName = lens _racsSavedColumnName (\ s a -> s{_racsSavedColumnName = a}) -- | Synchronous report only. Set to \`true\` to group by this column. -- Defaults to \`false\`. racsGroupByColumn :: Lens' ReportAPIColumnSpec (Maybe Bool) racsGroupByColumn = lens _racsGroupByColumn (\ s a -> s{_racsGroupByColumn = a}) -- | Name of a custom metric to include in the report. The report must be -- scoped to an advertiser or lower, and the custom metric must already be -- set up in DoubleClick Search. The custom metric name, which appears in -- DoubleClick Search, is case sensitive. racsCustomMetricName :: Lens' ReportAPIColumnSpec (Maybe Text) racsCustomMetricName = lens _racsCustomMetricName (\ s a -> s{_racsCustomMetricName = a}) -- | Inclusive day in YYYY-MM-DD format. When provided, this overrides the -- overall time range of the report for this column only. Must be provided -- together with \`startDate\`. racsEndDate :: Lens' ReportAPIColumnSpec (Maybe Text) racsEndDate = lens _racsEndDate (\ s a -> s{_racsEndDate = a}) -- | Returns metrics only for a specific type of product activity. 
Accepted -- values are: - \"\`sold\`\": returns metrics only for products that were -- sold - \"\`advertised\`\": returns metrics only for products that were -- advertised in a Shopping campaign, and that might or might not have been -- sold racsProductReportPerspective :: Lens' ReportAPIColumnSpec (Maybe Text) racsProductReportPerspective = lens _racsProductReportPerspective (\ s a -> s{_racsProductReportPerspective = a}) -- | Inclusive date in YYYY-MM-DD format. When provided, this overrides the -- overall time range of the report for this column only. Must be provided -- together with \`endDate\`. racsStartDate :: Lens' ReportAPIColumnSpec (Maybe Text) racsStartDate = lens _racsStartDate (\ s a -> s{_racsStartDate = a}) -- | Text used to identify this column in the report output; defaults to -- \`columnName\` or \`savedColumnName\` when not specified. This can be -- used to prevent collisions between DoubleClick Search columns and saved -- columns with the same name. racsHeaderText :: Lens' ReportAPIColumnSpec (Maybe Text) racsHeaderText = lens _racsHeaderText (\ s a -> s{_racsHeaderText = a}) -- | The platform that is used to provide data for the custom dimension. -- Acceptable values are \"floodlight\". racsPlatformSource :: Lens' ReportAPIColumnSpec (Maybe Text) racsPlatformSource = lens _racsPlatformSource (\ s a -> s{_racsPlatformSource = a}) -- | Name of a DoubleClick Search column to include in the report. racsColumnName :: Lens' ReportAPIColumnSpec (Maybe Text) racsColumnName = lens _racsColumnName (\ s a -> s{_racsColumnName = a}) instance FromJSON ReportAPIColumnSpec where parseJSON = withObject "ReportAPIColumnSpec" (\ o -> ReportAPIColumnSpec' <$> (o .:? "customDimensionName") <*> (o .:? "savedColumnName") <*> (o .:? "groupByColumn") <*> (o .:? "customMetricName") <*> (o .:? "endDate") <*> (o .:? "productReportPerspective") <*> (o .:? "startDate") <*> (o .:? "headerText") <*> (o .:? "platformSource") <*> (o .:? "columnName")) instance ToJSON ReportAPIColumnSpec where toJSON ReportAPIColumnSpec'{..} = object (catMaybes [("customDimensionName" .=) <$> _racsCustomDimensionName, ("savedColumnName" .=) <$> _racsSavedColumnName, ("groupByColumn" .=) <$> _racsGroupByColumn, ("customMetricName" .=) <$> _racsCustomMetricName, ("endDate" .=) <$> _racsEndDate, ("productReportPerspective" .=) <$> _racsProductReportPerspective, ("startDate" .=) <$> _racsStartDate, ("headerText" .=) <$> _racsHeaderText, ("platformSource" .=) <$> _racsPlatformSource, ("columnName" .=) <$> _racsColumnName]) -- | If metrics are requested in a report, this argument will be used to -- restrict the metrics to a specific time range. -- -- /See:/ 'reportRequestTimeRange' smart constructor. data ReportRequestTimeRange = ReportRequestTimeRange' { _rrtrEndDate :: !(Maybe Text) , _rrtrChangedAttributesSinceTimestamp :: !(Maybe Text) , _rrtrStartDate :: !(Maybe Text) , _rrtrChangedMetricsSinceTimestamp :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'ReportRequestTimeRange' with the minimum fields required to make a request. 
-- -- Use one of the following lenses to modify other fields as desired: -- -- * 'rrtrEndDate' -- -- * 'rrtrChangedAttributesSinceTimestamp' -- -- * 'rrtrStartDate' -- -- * 'rrtrChangedMetricsSinceTimestamp' reportRequestTimeRange :: ReportRequestTimeRange reportRequestTimeRange = ReportRequestTimeRange' { _rrtrEndDate = Nothing , _rrtrChangedAttributesSinceTimestamp = Nothing , _rrtrStartDate = Nothing , _rrtrChangedMetricsSinceTimestamp = Nothing } -- | Inclusive date in YYYY-MM-DD format. rrtrEndDate :: Lens' ReportRequestTimeRange (Maybe Text) rrtrEndDate = lens _rrtrEndDate (\ s a -> s{_rrtrEndDate = a}) -- | Inclusive UTC timestamp in RFC format, e.g., -- \`2013-07-16T10:16:23.555Z\`. See additional references on how changed -- attribute reports work. rrtrChangedAttributesSinceTimestamp :: Lens' ReportRequestTimeRange (Maybe Text) rrtrChangedAttributesSinceTimestamp = lens _rrtrChangedAttributesSinceTimestamp (\ s a -> s{_rrtrChangedAttributesSinceTimestamp = a}) -- | Inclusive date in YYYY-MM-DD format. rrtrStartDate :: Lens' ReportRequestTimeRange (Maybe Text) rrtrStartDate = lens _rrtrStartDate (\ s a -> s{_rrtrStartDate = a}) -- | Inclusive UTC timestamp in RFC format, e.g., -- \`2013-07-16T10:16:23.555Z\`. See additional references on how changed -- metrics reports work. rrtrChangedMetricsSinceTimestamp :: Lens' ReportRequestTimeRange (Maybe Text) rrtrChangedMetricsSinceTimestamp = lens _rrtrChangedMetricsSinceTimestamp (\ s a -> s{_rrtrChangedMetricsSinceTimestamp = a}) instance FromJSON ReportRequestTimeRange where parseJSON = withObject "ReportRequestTimeRange" (\ o -> ReportRequestTimeRange' <$> (o .:? "endDate") <*> (o .:? "changedAttributesSinceTimestamp") <*> (o .:? "startDate") <*> (o .:? "changedMetricsSinceTimestamp")) instance ToJSON ReportRequestTimeRange where toJSON ReportRequestTimeRange'{..} = object (catMaybes [("endDate" .=) <$> _rrtrEndDate, ("changedAttributesSinceTimestamp" .=) <$> _rrtrChangedAttributesSinceTimestamp, ("startDate" .=) <$> _rrtrStartDate, ("changedMetricsSinceTimestamp" .=) <$> _rrtrChangedMetricsSinceTimestamp]) -- | A conversion containing data relevant to DoubleClick Search. -- -- /See:/ 'conversion' smart constructor. 
data Conversion = Conversion' { _cAdGroupId :: !(Maybe (Textual Int64)) , _cConversionModifiedTimestamp :: !(Maybe (Textual Int64)) , _cState :: !(Maybe Text) , _cEngineAccountId :: !(Maybe (Textual Int64)) , _cAgencyId :: !(Maybe (Textual Int64)) , _cCurrencyCode :: !(Maybe Text) , _cStoreId :: !(Maybe Text) , _cDsConversionId :: !(Maybe (Textual Int64)) , _cConversionId :: !(Maybe Text) , _cAdvertiserId :: !(Maybe (Textual Int64)) , _cSegmentationId :: !(Maybe (Textual Int64)) , _cChannel :: !(Maybe Text) , _cProductCountry :: !(Maybe Text) , _cCampaignId :: !(Maybe (Textual Int64)) , _cCriterionId :: !(Maybe (Textual Int64)) , _cConversionTimestamp :: !(Maybe Text) , _cAttributionModel :: !(Maybe Text) , _cSegmentationName :: !(Maybe Text) , _cProductLanguage :: !(Maybe Text) , _cCustomMetric :: !(Maybe [CustomMetric]) , _cCountMillis :: !(Maybe (Textual Int64)) , _cQuantityMillis :: !(Maybe (Textual Int64)) , _cAdId :: !(Maybe (Textual Int64)) , _cDeviceType :: !(Maybe Text) , _cType :: !(Maybe Text) , _cCustomDimension :: !(Maybe [CustomDimension]) , _cFloodlightOrderId :: !(Maybe Text) , _cRevenueMicros :: !(Maybe Text) , _cClickId :: !(Maybe Text) , _cInventoryAccountId :: !(Maybe (Textual Int64)) , _cSegmentationType :: !(Maybe Text) , _cProductId :: !(Maybe Text) , _cProductGroupId :: !(Maybe (Textual Int64)) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'Conversion' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'cAdGroupId' -- -- * 'cConversionModifiedTimestamp' -- -- * 'cState' -- -- * 'cEngineAccountId' -- -- * 'cAgencyId' -- -- * 'cCurrencyCode' -- -- * 'cStoreId' -- -- * 'cDsConversionId' -- -- * 'cConversionId' -- -- * 'cAdvertiserId' -- -- * 'cSegmentationId' -- -- * 'cChannel' -- -- * 'cProductCountry' -- -- * 'cCampaignId' -- -- * 'cCriterionId' -- -- * 'cConversionTimestamp' -- -- * 'cAttributionModel' -- -- * 'cSegmentationName' -- -- * 'cProductLanguage' -- -- * 'cCustomMetric' -- -- * 'cCountMillis' -- -- * 'cQuantityMillis' -- -- * 'cAdId' -- -- * 'cDeviceType' -- -- * 'cType' -- -- * 'cCustomDimension' -- -- * 'cFloodlightOrderId' -- -- * 'cRevenueMicros' -- -- * 'cClickId' -- -- * 'cInventoryAccountId' -- -- * 'cSegmentationType' -- -- * 'cProductId' -- -- * 'cProductGroupId' conversion :: Conversion conversion = Conversion' { _cAdGroupId = Nothing , _cConversionModifiedTimestamp = Nothing , _cState = Nothing , _cEngineAccountId = Nothing , _cAgencyId = Nothing , _cCurrencyCode = Nothing , _cStoreId = Nothing , _cDsConversionId = Nothing , _cConversionId = Nothing , _cAdvertiserId = Nothing , _cSegmentationId = Nothing , _cChannel = Nothing , _cProductCountry = Nothing , _cCampaignId = Nothing , _cCriterionId = Nothing , _cConversionTimestamp = Nothing , _cAttributionModel = Nothing , _cSegmentationName = Nothing , _cProductLanguage = Nothing , _cCustomMetric = Nothing , _cCountMillis = Nothing , _cQuantityMillis = Nothing , _cAdId = Nothing , _cDeviceType = Nothing , _cType = Nothing , _cCustomDimension = Nothing , _cFloodlightOrderId = Nothing , _cRevenueMicros = Nothing , _cClickId = Nothing , _cInventoryAccountId = Nothing , _cSegmentationType = Nothing , _cProductId = Nothing , _cProductGroupId = Nothing } -- | DS ad group ID. cAdGroupId :: Lens' Conversion (Maybe Int64) cAdGroupId = lens _cAdGroupId (\ s a -> s{_cAdGroupId = a}) . mapping _Coerce -- | The time at which the conversion was last modified, in epoch millis UTC. 
cConversionModifiedTimestamp :: Lens' Conversion (Maybe Int64) cConversionModifiedTimestamp = lens _cConversionModifiedTimestamp (\ s a -> s{_cConversionModifiedTimestamp = a}) . mapping _Coerce -- | The state of the conversion, that is, either \`ACTIVE\` or \`REMOVED\`. -- Note: state DELETED is deprecated. cState :: Lens' Conversion (Maybe Text) cState = lens _cState (\ s a -> s{_cState = a}) -- | DS engine account ID. cEngineAccountId :: Lens' Conversion (Maybe Int64) cEngineAccountId = lens _cEngineAccountId (\ s a -> s{_cEngineAccountId = a}) . mapping _Coerce -- | DS agency ID. cAgencyId :: Lens' Conversion (Maybe Int64) cAgencyId = lens _cAgencyId (\ s a -> s{_cAgencyId = a}) . mapping _Coerce -- | The currency code for the conversion\'s revenue. Should be in ISO 4217 -- alphabetic (3-char) format. cCurrencyCode :: Lens' Conversion (Maybe Text) cCurrencyCode = lens _cCurrencyCode (\ s a -> s{_cCurrencyCode = a}) -- | The ID of the local store for which the product was advertised. -- Applicable only when the channel is \"\`local\`\". cStoreId :: Lens' Conversion (Maybe Text) cStoreId = lens _cStoreId (\ s a -> s{_cStoreId = a}) -- | ID that DoubleClick Search generates for each conversion. cDsConversionId :: Lens' Conversion (Maybe Int64) cDsConversionId = lens _cDsConversionId (\ s a -> s{_cDsConversionId = a}) . mapping _Coerce -- | For offline conversions, advertisers provide this ID. Advertisers can -- specify any ID that is meaningful to them. Each conversion in a request -- must specify a unique ID, and the combination of ID and timestamp must -- be unique amongst all conversions within the advertiser. For online -- conversions, DS copies the \`dsConversionId\` or \`floodlightOrderId\` -- into this property depending on the advertiser\'s Floodlight -- instructions. cConversionId :: Lens' Conversion (Maybe Text) cConversionId = lens _cConversionId (\ s a -> s{_cConversionId = a}) -- | DS advertiser ID. cAdvertiserId :: Lens' Conversion (Maybe Int64) cAdvertiserId = lens _cAdvertiserId (\ s a -> s{_cAdvertiserId = a}) . mapping _Coerce -- | The numeric segmentation identifier (for example, DoubleClick Search -- Floodlight activity ID). cSegmentationId :: Lens' Conversion (Maybe Int64) cSegmentationId = lens _cSegmentationId (\ s a -> s{_cSegmentationId = a}) . mapping _Coerce -- | Sales channel for the product. Acceptable values are: - \"\`local\`\": a -- physical store - \"\`online\`\": an online store cChannel :: Lens' Conversion (Maybe Text) cChannel = lens _cChannel (\ s a -> s{_cChannel = a}) -- | The country registered for the Merchant Center feed that contains the -- product. Use an ISO 3166 code to specify a country. cProductCountry :: Lens' Conversion (Maybe Text) cProductCountry = lens _cProductCountry (\ s a -> s{_cProductCountry = a}) -- | DS campaign ID. cCampaignId :: Lens' Conversion (Maybe Int64) cCampaignId = lens _cCampaignId (\ s a -> s{_cCampaignId = a}) . mapping _Coerce -- | DS criterion (keyword) ID. cCriterionId :: Lens' Conversion (Maybe Int64) cCriterionId = lens _cCriterionId (\ s a -> s{_cCriterionId = a}) . mapping _Coerce -- | The time at which the conversion took place, in epoch millis UTC. cConversionTimestamp :: Lens' Conversion (Maybe Text) cConversionTimestamp = lens _cConversionTimestamp (\ s a -> s{_cConversionTimestamp = a}) -- | Available to advertisers only after contacting DoubleClick Search -- customer support. 
cAttributionModel :: Lens' Conversion (Maybe Text) cAttributionModel = lens _cAttributionModel (\ s a -> s{_cAttributionModel = a}) -- | The friendly segmentation identifier (for example, DoubleClick Search -- Floodlight activity name). cSegmentationName :: Lens' Conversion (Maybe Text) cSegmentationName = lens _cSegmentationName (\ s a -> s{_cSegmentationName = a}) -- | The language registered for the Merchant Center feed that contains the -- product. Use an ISO 639 code to specify a language. cProductLanguage :: Lens' Conversion (Maybe Text) cProductLanguage = lens _cProductLanguage (\ s a -> s{_cProductLanguage = a}) -- | Custom metrics for the conversion. cCustomMetric :: Lens' Conversion [CustomMetric] cCustomMetric = lens _cCustomMetric (\ s a -> s{_cCustomMetric = a}) . _Default . _Coerce -- | Available to advertisers only after contacting DoubleClick Search -- customer support. cCountMillis :: Lens' Conversion (Maybe Int64) cCountMillis = lens _cCountMillis (\ s a -> s{_cCountMillis = a}) . mapping _Coerce -- | The quantity of this conversion, in millis. cQuantityMillis :: Lens' Conversion (Maybe Int64) cQuantityMillis = lens _cQuantityMillis (\ s a -> s{_cQuantityMillis = a}) . mapping _Coerce -- | DS ad ID. cAdId :: Lens' Conversion (Maybe Int64) cAdId = lens _cAdId (\ s a -> s{_cAdId = a}) . mapping _Coerce -- | The type of device on which the conversion occurred. cDeviceType :: Lens' Conversion (Maybe Text) cDeviceType = lens _cDeviceType (\ s a -> s{_cDeviceType = a}) -- | The type of the conversion, that is, either \`ACTION\` or -- \`TRANSACTION\`. An \`ACTION\` conversion is an action by the user that -- has no monetarily quantifiable value, while a \`TRANSACTION\` conversion -- is an action that does have a monetarily quantifiable value. Examples -- are email list signups (\`ACTION\`) versus ecommerce purchases -- (\`TRANSACTION\`). cType :: Lens' Conversion (Maybe Text) cType = lens _cType (\ s a -> s{_cType = a}) -- | Custom dimensions for the conversion, which can be used to filter data -- in a report. cCustomDimension :: Lens' Conversion [CustomDimension] cCustomDimension = lens _cCustomDimension (\ s a -> s{_cCustomDimension = a}) . _Default . _Coerce -- | The Floodlight order ID provided by the advertiser for the conversion. cFloodlightOrderId :: Lens' Conversion (Maybe Text) cFloodlightOrderId = lens _cFloodlightOrderId (\ s a -> s{_cFloodlightOrderId = a}) -- | The revenue amount of this \`TRANSACTION\` conversion, in micros (value -- multiplied by 1000000, no decimal). For example, to specify a revenue -- value of \"10\" enter \"10000000\" (10 million) in your request. cRevenueMicros :: Lens' Conversion (Maybe Text) cRevenueMicros = lens _cRevenueMicros (\ s a -> s{_cRevenueMicros = a}) -- | DS click ID for the conversion. cClickId :: Lens' Conversion (Maybe Text) cClickId = lens _cClickId (\ s a -> s{_cClickId = a}) -- | ID that DS generates and uses to uniquely identify the inventory account -- that contains the product. cInventoryAccountId :: Lens' Conversion (Maybe Int64) cInventoryAccountId = lens _cInventoryAccountId (\ s a -> s{_cInventoryAccountId = a}) . mapping _Coerce -- | The segmentation type of this conversion (for example, \`FLOODLIGHT\`). cSegmentationType :: Lens' Conversion (Maybe Text) cSegmentationType = lens _cSegmentationType (\ s a -> s{_cSegmentationType = a}) -- | The product ID (SKU). cProductId :: Lens' Conversion (Maybe Text) cProductId = lens _cProductId (\ s a -> s{_cProductId = a}) -- | DS product group ID. 
cProductGroupId :: Lens' Conversion (Maybe Int64) cProductGroupId = lens _cProductGroupId (\ s a -> s{_cProductGroupId = a}) . mapping _Coerce instance FromJSON Conversion where parseJSON = withObject "Conversion" (\ o -> Conversion' <$> (o .:? "adGroupId") <*> (o .:? "conversionModifiedTimestamp") <*> (o .:? "state") <*> (o .:? "engineAccountId") <*> (o .:? "agencyId") <*> (o .:? "currencyCode") <*> (o .:? "storeId") <*> (o .:? "dsConversionId") <*> (o .:? "conversionId") <*> (o .:? "advertiserId") <*> (o .:? "segmentationId") <*> (o .:? "channel") <*> (o .:? "productCountry") <*> (o .:? "campaignId") <*> (o .:? "criterionId") <*> (o .:? "conversionTimestamp") <*> (o .:? "attributionModel") <*> (o .:? "segmentationName") <*> (o .:? "productLanguage") <*> (o .:? "customMetric" .!= mempty) <*> (o .:? "countMillis") <*> (o .:? "quantityMillis") <*> (o .:? "adId") <*> (o .:? "deviceType") <*> (o .:? "type") <*> (o .:? "customDimension" .!= mempty) <*> (o .:? "floodlightOrderId") <*> (o .:? "revenueMicros") <*> (o .:? "clickId") <*> (o .:? "inventoryAccountId") <*> (o .:? "segmentationType") <*> (o .:? "productId") <*> (o .:? "productGroupId")) instance ToJSON Conversion where toJSON Conversion'{..} = object (catMaybes [("adGroupId" .=) <$> _cAdGroupId, ("conversionModifiedTimestamp" .=) <$> _cConversionModifiedTimestamp, ("state" .=) <$> _cState, ("engineAccountId" .=) <$> _cEngineAccountId, ("agencyId" .=) <$> _cAgencyId, ("currencyCode" .=) <$> _cCurrencyCode, ("storeId" .=) <$> _cStoreId, ("dsConversionId" .=) <$> _cDsConversionId, ("conversionId" .=) <$> _cConversionId, ("advertiserId" .=) <$> _cAdvertiserId, ("segmentationId" .=) <$> _cSegmentationId, ("channel" .=) <$> _cChannel, ("productCountry" .=) <$> _cProductCountry, ("campaignId" .=) <$> _cCampaignId, ("criterionId" .=) <$> _cCriterionId, ("conversionTimestamp" .=) <$> _cConversionTimestamp, ("attributionModel" .=) <$> _cAttributionModel, ("segmentationName" .=) <$> _cSegmentationName, ("productLanguage" .=) <$> _cProductLanguage, ("customMetric" .=) <$> _cCustomMetric, ("countMillis" .=) <$> _cCountMillis, ("quantityMillis" .=) <$> _cQuantityMillis, ("adId" .=) <$> _cAdId, ("deviceType" .=) <$> _cDeviceType, ("type" .=) <$> _cType, ("customDimension" .=) <$> _cCustomDimension, ("floodlightOrderId" .=) <$> _cFloodlightOrderId, ("revenueMicros" .=) <$> _cRevenueMicros, ("clickId" .=) <$> _cClickId, ("inventoryAccountId" .=) <$> _cInventoryAccountId, ("segmentationType" .=) <$> _cSegmentationType, ("productId" .=) <$> _cProductId, ("productGroupId" .=) <$> _cProductGroupId]) -- | A saved column -- -- /See:/ 'savedColumn' smart constructor. data SavedColumn = SavedColumn' { _scSavedColumnName :: !(Maybe Text) , _scKind :: !(Maybe Text) , _scType :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'SavedColumn' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'scSavedColumnName' -- -- * 'scKind' -- -- * 'scType' savedColumn :: SavedColumn savedColumn = SavedColumn' {_scSavedColumnName = Nothing, _scKind = Nothing, _scType = Nothing} -- | The name of the saved column. scSavedColumnName :: Lens' SavedColumn (Maybe Text) scSavedColumnName = lens _scSavedColumnName (\ s a -> s{_scSavedColumnName = a}) -- | Identifies this as a SavedColumn resource. Value: the fixed string -- doubleclicksearch#savedColumn. 
scKind :: Lens' SavedColumn (Maybe Text) scKind = lens _scKind (\ s a -> s{_scKind = a}) -- | The type of data this saved column will produce. scType :: Lens' SavedColumn (Maybe Text) scType = lens _scType (\ s a -> s{_scType = a}) instance FromJSON SavedColumn where parseJSON = withObject "SavedColumn" (\ o -> SavedColumn' <$> (o .:? "savedColumnName") <*> (o .:? "kind") <*> (o .:? "type")) instance ToJSON SavedColumn where toJSON SavedColumn'{..} = object (catMaybes [("savedColumnName" .=) <$> _scSavedColumnName, ("kind" .=) <$> _scKind, ("type" .=) <$> _scType]) -- | A message containing the custom dimension. -- -- /See:/ 'customDimension' smart constructor. data CustomDimension = CustomDimension' { _cdValue :: !(Maybe Text) , _cdName :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'CustomDimension' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'cdValue' -- -- * 'cdName' customDimension :: CustomDimension customDimension = CustomDimension' {_cdValue = Nothing, _cdName = Nothing} -- | Custom dimension value. cdValue :: Lens' CustomDimension (Maybe Text) cdValue = lens _cdValue (\ s a -> s{_cdValue = a}) -- | Custom dimension name. cdName :: Lens' CustomDimension (Maybe Text) cdName = lens _cdName (\ s a -> s{_cdName = a}) instance FromJSON CustomDimension where parseJSON = withObject "CustomDimension" (\ o -> CustomDimension' <$> (o .:? "value") <*> (o .:? "name")) instance ToJSON CustomDimension where toJSON CustomDimension'{..} = object (catMaybes [("value" .=) <$> _cdValue, ("name" .=) <$> _cdName]) -- | The response to a update availability request. -- -- /See:/ 'updateAvailabilityResponse' smart constructor. newtype UpdateAvailabilityResponse = UpdateAvailabilityResponse' { _uAvailabilities :: Maybe [Availability] } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'UpdateAvailabilityResponse' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'uAvailabilities' updateAvailabilityResponse :: UpdateAvailabilityResponse updateAvailabilityResponse = UpdateAvailabilityResponse' {_uAvailabilities = Nothing} -- | The availabilities being returned. uAvailabilities :: Lens' UpdateAvailabilityResponse [Availability] uAvailabilities = lens _uAvailabilities (\ s a -> s{_uAvailabilities = a}) . _Default . _Coerce instance FromJSON UpdateAvailabilityResponse where parseJSON = withObject "UpdateAvailabilityResponse" (\ o -> UpdateAvailabilityResponse' <$> (o .:? "availabilities" .!= mempty)) instance ToJSON UpdateAvailabilityResponse where toJSON UpdateAvailabilityResponse'{..} = object (catMaybes [("availabilities" .=) <$> _uAvailabilities]) -- | The reportScope is a set of IDs that are used to determine which subset -- of entities will be returned in the report. The full lineage of IDs from -- the lowest scoped level desired up through agency is required. -- -- /See:/ 'reportRequestReportScope' smart constructor. 
data ReportRequestReportScope = ReportRequestReportScope' { _rrrsKeywordId :: !(Maybe (Textual Int64)) , _rrrsAdGroupId :: !(Maybe (Textual Int64)) , _rrrsEngineAccountId :: !(Maybe (Textual Int64)) , _rrrsAgencyId :: !(Maybe (Textual Int64)) , _rrrsAdvertiserId :: !(Maybe (Textual Int64)) , _rrrsCampaignId :: !(Maybe (Textual Int64)) , _rrrsAdId :: !(Maybe (Textual Int64)) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'ReportRequestReportScope' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'rrrsKeywordId' -- -- * 'rrrsAdGroupId' -- -- * 'rrrsEngineAccountId' -- -- * 'rrrsAgencyId' -- -- * 'rrrsAdvertiserId' -- -- * 'rrrsCampaignId' -- -- * 'rrrsAdId' reportRequestReportScope :: ReportRequestReportScope reportRequestReportScope = ReportRequestReportScope' { _rrrsKeywordId = Nothing , _rrrsAdGroupId = Nothing , _rrrsEngineAccountId = Nothing , _rrrsAgencyId = Nothing , _rrrsAdvertiserId = Nothing , _rrrsCampaignId = Nothing , _rrrsAdId = Nothing } -- | DS keyword ID. rrrsKeywordId :: Lens' ReportRequestReportScope (Maybe Int64) rrrsKeywordId = lens _rrrsKeywordId (\ s a -> s{_rrrsKeywordId = a}) . mapping _Coerce -- | DS ad group ID. rrrsAdGroupId :: Lens' ReportRequestReportScope (Maybe Int64) rrrsAdGroupId = lens _rrrsAdGroupId (\ s a -> s{_rrrsAdGroupId = a}) . mapping _Coerce -- | DS engine account ID. rrrsEngineAccountId :: Lens' ReportRequestReportScope (Maybe Int64) rrrsEngineAccountId = lens _rrrsEngineAccountId (\ s a -> s{_rrrsEngineAccountId = a}) . mapping _Coerce -- | DS agency ID. rrrsAgencyId :: Lens' ReportRequestReportScope (Maybe Int64) rrrsAgencyId = lens _rrrsAgencyId (\ s a -> s{_rrrsAgencyId = a}) . mapping _Coerce -- | DS advertiser ID. rrrsAdvertiserId :: Lens' ReportRequestReportScope (Maybe Int64) rrrsAdvertiserId = lens _rrrsAdvertiserId (\ s a -> s{_rrrsAdvertiserId = a}) . mapping _Coerce -- | DS campaign ID. rrrsCampaignId :: Lens' ReportRequestReportScope (Maybe Int64) rrrsCampaignId = lens _rrrsCampaignId (\ s a -> s{_rrrsCampaignId = a}) . mapping _Coerce -- | DS ad ID. rrrsAdId :: Lens' ReportRequestReportScope (Maybe Int64) rrrsAdId = lens _rrrsAdId (\ s a -> s{_rrrsAdId = a}) . mapping _Coerce instance FromJSON ReportRequestReportScope where parseJSON = withObject "ReportRequestReportScope" (\ o -> ReportRequestReportScope' <$> (o .:? "keywordId") <*> (o .:? "adGroupId") <*> (o .:? "engineAccountId") <*> (o .:? "agencyId") <*> (o .:? "advertiserId") <*> (o .:? "campaignId") <*> (o .:? "adId")) instance ToJSON ReportRequestReportScope where toJSON ReportRequestReportScope'{..} = object (catMaybes [("keywordId" .=) <$> _rrrsKeywordId, ("adGroupId" .=) <$> _rrrsAdGroupId, ("engineAccountId" .=) <$> _rrrsEngineAccountId, ("agencyId" .=) <$> _rrrsAgencyId, ("advertiserId" .=) <$> _rrrsAdvertiserId, ("campaignId" .=) <$> _rrrsCampaignId, ("adId" .=) <$> _rrrsAdId]) -- | A list of saved columns. Advertisers create saved columns to report on -- Floodlight activities, Google Analytics goals, or custom KPIs. To -- request reports with saved columns, you\'ll need the saved column names -- that are available from this list. -- -- /See:/ 'savedColumnList' smart constructor. data SavedColumnList = SavedColumnList' { _sclKind :: !(Maybe Text) , _sclItems :: !(Maybe [SavedColumn]) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'SavedColumnList' with the minimum fields required to make a request. 
-- -- Use one of the following lenses to modify other fields as desired: -- -- * 'sclKind' -- -- * 'sclItems' savedColumnList :: SavedColumnList savedColumnList = SavedColumnList' {_sclKind = Nothing, _sclItems = Nothing} -- | Identifies this as a SavedColumnList resource. Value: the fixed string -- doubleclicksearch#savedColumnList. sclKind :: Lens' SavedColumnList (Maybe Text) sclKind = lens _sclKind (\ s a -> s{_sclKind = a}) -- | The saved columns being requested. sclItems :: Lens' SavedColumnList [SavedColumn] sclItems = lens _sclItems (\ s a -> s{_sclItems = a}) . _Default . _Coerce instance FromJSON SavedColumnList where parseJSON = withObject "SavedColumnList" (\ o -> SavedColumnList' <$> (o .:? "kind") <*> (o .:? "items" .!= mempty)) instance ToJSON SavedColumnList where toJSON SavedColumnList'{..} = object (catMaybes [("kind" .=) <$> _sclKind, ("items" .=) <$> _sclItems])
brendanhay/gogol
gogol-doubleclick-search/gen/Network/Google/DoubleClickSearch/Types/Product.hs
mpl-2.0
61,551
0
43
15,047
11,751
6,767
4,984
1,286
1
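For orientation, a Conversion value is normally built from the conversion smart constructor above and then filled in through its lenses. A purely hypothetical sketch follows: every ID, timestamp and amount is a placeholder, and it assumes the (&) and (?~) operators from Control.Lens plus OverloadedStrings for the Text fields, with the definitions above in scope.

{-# LANGUAGE OverloadedStrings #-}
import Control.Lens ((&), (?~))

exampleConversion :: Conversion
exampleConversion =
  conversion
    & cAdvertiserId        ?~ 12345           -- placeholder DS advertiser ID
    & cCampaignId          ?~ 67890           -- placeholder DS campaign ID
    & cConversionId        ?~ "order-0001"    -- advertiser-supplied conversion ID
    & cConversionTimestamp ?~ "1609459200000" -- epoch millis UTC, as Text
    & cRevenueMicros       ?~ "10000000"      -- revenue of 10 units, in micros
    & cType                ?~ "TRANSACTION"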
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.Directory.Resources.Features.Get -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Retrieves a feature. -- -- /See:/ <https://developers.google.com/admin-sdk/ Admin SDK API Reference> for @directory.resources.features.get@. module Network.Google.Resource.Directory.Resources.Features.Get ( -- * REST Resource ResourcesFeaturesGetResource -- * Creating a Request , resourcesFeaturesGet , ResourcesFeaturesGet -- * Request Lenses , rfgXgafv , rfgUploadProtocol , rfgAccessToken , rfgUploadType , rfgCustomer , rfgFeatureKey , rfgCallback ) where import Network.Google.Directory.Types import Network.Google.Prelude -- | A resource alias for @directory.resources.features.get@ method which the -- 'ResourcesFeaturesGet' request conforms to. type ResourcesFeaturesGetResource = "admin" :> "directory" :> "v1" :> "customer" :> Capture "customer" Text :> "resources" :> "features" :> Capture "featureKey" Text :> QueryParam "$.xgafv" Xgafv :> QueryParam "upload_protocol" Text :> QueryParam "access_token" Text :> QueryParam "uploadType" Text :> QueryParam "callback" Text :> QueryParam "alt" AltJSON :> Get '[JSON] Feature -- | Retrieves a feature. -- -- /See:/ 'resourcesFeaturesGet' smart constructor. data ResourcesFeaturesGet = ResourcesFeaturesGet' { _rfgXgafv :: !(Maybe Xgafv) , _rfgUploadProtocol :: !(Maybe Text) , _rfgAccessToken :: !(Maybe Text) , _rfgUploadType :: !(Maybe Text) , _rfgCustomer :: !Text , _rfgFeatureKey :: !Text , _rfgCallback :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'ResourcesFeaturesGet' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'rfgXgafv' -- -- * 'rfgUploadProtocol' -- -- * 'rfgAccessToken' -- -- * 'rfgUploadType' -- -- * 'rfgCustomer' -- -- * 'rfgFeatureKey' -- -- * 'rfgCallback' resourcesFeaturesGet :: Text -- ^ 'rfgCustomer' -> Text -- ^ 'rfgFeatureKey' -> ResourcesFeaturesGet resourcesFeaturesGet pRfgCustomer_ pRfgFeatureKey_ = ResourcesFeaturesGet' { _rfgXgafv = Nothing , _rfgUploadProtocol = Nothing , _rfgAccessToken = Nothing , _rfgUploadType = Nothing , _rfgCustomer = pRfgCustomer_ , _rfgFeatureKey = pRfgFeatureKey_ , _rfgCallback = Nothing } -- | V1 error format. rfgXgafv :: Lens' ResourcesFeaturesGet (Maybe Xgafv) rfgXgafv = lens _rfgXgafv (\ s a -> s{_rfgXgafv = a}) -- | Upload protocol for media (e.g. \"raw\", \"multipart\"). rfgUploadProtocol :: Lens' ResourcesFeaturesGet (Maybe Text) rfgUploadProtocol = lens _rfgUploadProtocol (\ s a -> s{_rfgUploadProtocol = a}) -- | OAuth access token. rfgAccessToken :: Lens' ResourcesFeaturesGet (Maybe Text) rfgAccessToken = lens _rfgAccessToken (\ s a -> s{_rfgAccessToken = a}) -- | Legacy upload protocol for media (e.g. \"media\", \"multipart\"). 
rfgUploadType :: Lens' ResourcesFeaturesGet (Maybe Text) rfgUploadType = lens _rfgUploadType (\ s a -> s{_rfgUploadType = a}) -- | The unique ID for the customer\'s Google Workspace account. As an -- account administrator, you can also use the \`my_customer\` alias to -- represent your account\'s customer ID. rfgCustomer :: Lens' ResourcesFeaturesGet Text rfgCustomer = lens _rfgCustomer (\ s a -> s{_rfgCustomer = a}) -- | The unique ID of the feature to retrieve. rfgFeatureKey :: Lens' ResourcesFeaturesGet Text rfgFeatureKey = lens _rfgFeatureKey (\ s a -> s{_rfgFeatureKey = a}) -- | JSONP rfgCallback :: Lens' ResourcesFeaturesGet (Maybe Text) rfgCallback = lens _rfgCallback (\ s a -> s{_rfgCallback = a}) instance GoogleRequest ResourcesFeaturesGet where type Rs ResourcesFeaturesGet = Feature type Scopes ResourcesFeaturesGet = '["https://www.googleapis.com/auth/admin.directory.resource.calendar", "https://www.googleapis.com/auth/admin.directory.resource.calendar.readonly"] requestClient ResourcesFeaturesGet'{..} = go _rfgCustomer _rfgFeatureKey _rfgXgafv _rfgUploadProtocol _rfgAccessToken _rfgUploadType _rfgCallback (Just AltJSON) directoryService where go = buildClient (Proxy :: Proxy ResourcesFeaturesGetResource) mempty
brendanhay/gogol
gogol-admin-directory/gen/Network/Google/Resource/Directory/Resources/Features/Get.hs
mpl-2.0
5,392
0
21
1,314
795
463
332
119
1
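In the same spirit, a request value for this endpoint can be sketched as below; the customer alias and feature key are placeholders, and (&) and (?~) again come from Control.Lens with OverloadedStrings enabled.

{-# LANGUAGE OverloadedStrings #-}
import Control.Lens ((&), (?~))

exampleGet :: ResourcesFeaturesGet
exampleGet =
  resourcesFeaturesGet "my_customer" "feature-key-123"
    & rfgCallback ?~ "handleResponse"   -- optional JSONP callback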
module Notation.DiracNotation ((*|) ,(>|) , (|.|) , (><) , Scalar , Tuple(..) , Ket(..) , Bra(toKet) , toBra , applyOp, QuantumState(normalize, dim) ) where import Data.Complex import Data.List (nub) import Notation.QuantumState -------------------------------------------------------------------------------- -- Infix operators for nice and simple notation -- -------------------------------------------------------------------------------- infixl 5 +| -- Addition of Kets infixl 5 +< -- Addition of Bras infix 6 *| -- Scalar multiplication with Ket infix 6 *< -- Scalar multiplication with Bra infixl 7 >| -- Ket tensor product -- infixl 7 |< -- Bra tensor product, not sure how to properly implement infix 4 |.| -- Inner product infix 5 >< -- Outer product -------------------------------------------------------------------------------- -- Data type for multidimensional Ket spaces -- -------------------------------------------------------------------------------- data Tuple a b = a :* b deriving (Eq) -------------------------------------------------------------------------------- -- Ket constructors -- -------------------------------------------------------------------------------- data Ket a= KetZero -- 0 vector | Ket a | Scalar :*| Ket a | Ket a :+| Ket a -------------------------------------------------------------------------------- -- Make our Ket vectors instances of the QuantumState type class -- -- and define appropriate functions on Kets -- -------------------------------------------------------------------------------- instance Eq a => QuantumState (Ket a) where add = (+|) scale = (*|) reduce = reduceKet basis = ketBasis components x = [toBra e |.| x | e <- basis x] compose coeffs v= foldl1 (:+|) [uncurry (:*|) z | z <- zip coeffs v] norm KetZero = 0 norm x = sqrt $ realPart (toBra x |.| x) bracket = bracketKet -------------------------------------------------------------------------------- -- Define Bra vectors as linear functions from Ket vectors into Scalars -- -------------------------------------------------------------------------------- -- type Bra a = Ket a -> Scalar -------------------------------------------------------------------------------- -- The Bra 'constructor' using bracket b a = <b|a> = Scalar -- -- partial application gives bracket b = <b| = Ket a -> Scalar -- -- which we use as our representation of the Bra vector -- -------------------------------------------------------------------------------- -- bra :: Eq a => Ket a -> Bra a -- bra = bracket -------------------------------------------------------------------------------- -- Ket tensor product -- -------------------------------------------------------------------------------- (>|) :: (Eq a, Eq b) => Ket a -> Ket b -> Ket (Tuple a b) Ket a >| Ket b = Ket (a :* b) _ >| KetZero = KetZero KetZero >| _ = KetZero x >| y = foldl1 (:+|) [((toBra (Ket a) |.| x) * (toBra (Ket b) |.| y)) :*| Ket (a :* b) | Ket a <- basis x, Ket b <- basis y] -------------------------------------------------------------------------------- -- Bra tensor product -- -------------------------------------------------------------------------------- (|<) :: (Eq a, Eq b) => Bra a -> Bra b -> Bra (Tuple a b) a |< b = toBra ( toKet a >| toKet b) -------------------------------------------------------------------------------- -- Addition of two Ket vectors results in a Ket vector -- -------------------------------------------------------------------------------- (+|) :: Eq a => Ket a -> Ket a -> Ket a x +| KetZero = x KetZero +| x = x x +| y = reduce 
(x :+| y) -------------------------------------------------------------------------------- -- Adding two Bra vectors results in a new linear operator Ket -> Scalar -- -- (<a| + <b|)|c> = <a|c> + <b|c> -- -------------------------------------------------------------------------------- (+<) :: Eq a => Bra a -> Bra a -> Bra a x +< y = toBra (reduce (toKet x +| toKet y)) -------------------------------------------------------------------------------- -- Multiplication of a Ket by a scalar results in a Ket vector -- -------------------------------------------------------------------------------- (*|) :: Eq a => Scalar -> Ket a -> Ket a s *| (x :+| y) = (s *| x) +| (s *| y) _ *| KetZero = KetZero 0 *| _ = KetZero s *| (t :*| x) = (s*t) *| x s *| x = s :*| x -------------------------------------------------------------------------------- -- Bra myltiplied by a scalar results in a linear operator Ket -> Scalar -- -- (a * <b|)|c> = a * <b|c> -- -------------------------------------------------------------------------------- (*<) :: Eq a => Scalar -> Bra a -> Bra a s *< x = toBra (s *| toKet x) -- !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -- This part is shaky, at best! !! -- !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -------------------------------------------------------------------------------- -- Make Kets instances of Num, in order to simplify notation a bit -- -- at least (+) and (-) are trivial to implement -- -- the other functions are less obvious, e.g. fromInteger seems impossible -- -------------------------------------------------------------------------------- instance Eq a => Num (Ket a) where x + y = x +| y x - y = x +| ((-1) *| y) -------------------------------------------------------------------------------- -- Two Kets are equal iff all their components are equal -- -------------------------------------------------------------------------------- instance Eq a => Eq (Ket a) where x == y = and [coeff v x == coeff v y | v <- basis x] where coeff v z = toBra v |.| z -------------------------------------------------------------------------------- -- Reduce a Ket to a sum of orthogonal basis Kets -- -------------------------------------------------------------------------------- reduceKet :: Eq a => Ket a -> Ket a reduceKet x = compose coeffs v where v = basis x coeffs = [toBra vi |.| x | vi <- v] -------------------------------------------------------------------------------- -- Extract the basis vectors from a Ket -- -------------------------------------------------------------------------------- ketBasis :: Eq a => Ket a -> [Ket a] ketBasis KetZero = [KetZero] ketBasis (Ket k) = [Ket k] ketBasis (_ :*| x) = [x] ketBasis (k1 :+| k2) = nub (ketBasis k1 ++ ketBasis k2) -------------------------------------------------------------------------------- -- Converting a Ket into a Bra is simply applying bracket to the Ket -- -- bracket a = <a| = (Ket -> Scalar) -- toBra :: Eq a => Ket a -> Bra a -- toBra = bra -------------------------------------------------------------------------------- -- The inner product between two QuantumStates, a and b, is defined as -- -- <a|b> = (<a|)|b>, i.e. 
bracket a b = (bracket a) b -- -------------------------------------------------------------------------------- (|.|) :: (Eq a) => Bra a -> Ket a -> Scalar -- b |.| k = b k b |.| k = bracketBra b k -------------------------------------------------------------------------------- -- In a Bra - Ket representation define the bracket in terms of Ket vectors -- -- bracket |a> |b> = <a|b>, thus defining the dual Bra vectors as well -- -------------------------------------------------------------------------------- bracketKet :: (Eq a) => Ket a -> Ket a -> Scalar bracketKet KetZero _ = 0 bracketKet _ KetZero = 0 bracketKet (Ket i) (Ket j) = d i j -- Assuming the basis Kets are orthonormal bracketKet (p :*| x) y = conjugate p * bracketKet x y bracketKet x (p :*| y) = p * bracketKet x y bracketKet (x1 :+| x2) y = bracketKet x1 y + bracketKet x2 y bracketKet x (y1 :+| y2) = bracketKet x y1 + bracketKet x y2 -------------------------------------------------------------------------------- -- Kroenecker dela function, used as the inner product of two basis Kets -- -------------------------------------------------------------------------------- d :: Eq a => a -> a -> Scalar d i j | i == j = 1 | otherwise = 0 -------------------------------------------------------------------------------- -- nicer notation for applying the closure expansion -- -------------------------------------------------------------------------------- (><) :: (QuantumState b, QuantumState a) => (a -> b) -> a -> b operator >< x = closure operator x -------------------------------------------------------------------------------- -- Helper function to avoid having to use the closure relation -- -- when applying operators to Ket vectors -- -------------------------------------------------------------------------------- applyOp :: (QuantumState a, QuantumState b) => (a -> b) -> a -> b applyOp f x = f >< x -------------------------------------------------------------------------------- -- Monoids, because monoids. MONOIDS!!! -- -------------------------------------------------------------------------------- instance Eq a => Monoid (Ket a) where mempty = KetZero k `mappend` l = k +| l ------------------------------------------------------------------------------- -- Make Ket an instance of Functor. This might be useful for changing basis. -- -- With emphasis on the might part. -- ------------------------------------------------------------------------------- instance Functor Ket where fmap f KetZero = KetZero fmap f (Ket i) = Ket (f i) fmap f (k :+| l) = fmap f k :+| fmap f l fmap f (s :*| k) = s :*| fmap f k -------------------------------------------------------------------------------- -- Make Ket an instance of Applicative. Again this might be useful for -- -- changing basis, but with an heavier emphasis on the might part than for -- -- Functor. -- -------------------------------------------------------------------------------- instance Applicative Ket where pure = Ket KetZero <*> _ = KetZero Ket f <*> k = fmap f k -------------------------------------------------------------------------------- -- Make Ket a an instance of Show, in order to print Ket vectors in a pretty -- -- way. Since Bra vectors are functions in Haskell they cannot be made an -- -- instance of Show and thus cannot be printed -- -------------------------------------------------------------------------------- instance (Show a, Eq a) => Show (Ket a) where showsPrec _ KetZero = showString "Zero-Ket" showsPrec n (Ket j) = showString "|" . showsPrec n j . 
showString ">" showsPrec n (x :*| k) = showScalar n x . showsPrec n k showsPrec n (j :+| k) = showsPrec n j . showSign n k . showsPrec n k -------------------------------------------------------------------------------- -- Function to improve the prettyness of the printing. -- -- This function fixes the printing of negative coefficients. -- -------------------------------------------------------------------------------- showSign :: (Show a, Eq a) => Int -> Ket a -> String -> String showSign n (Ket j) = showString " + " showSign n (s@(a :+ b) :*| k) | b == 0, a < 0 = showString " " | otherwise = showString " + " -------------------------------------------------------------------------------- -- Make Tuple (a b) an instance of Show in order to properly print Kets of -- -- all possible basis Kets -- -------------------------------------------------------------------------------- instance (Show a, Show b) => Show (Tuple a b) where showsPrec n (a :* b) = showsPrec n a . showString "; " . showsPrec n b -- !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -- HIGHLY EXPERIMENTAL STUFF STARTS here -- !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -------------------------------------------------------------------------------- -- Alternative implementation of Bra vectors, using more advanced stuff from -- -- functional programming. -- -------------------------------------------------------------------------------- data Bra a = Bra { bracketBra :: Ket a -> Scalar, toKet :: Ket a } -------------------------------------------------------------------------------- -- Convert Ket vectors into Bra vectors -- -------------------------------------------------------------------------------- toBra :: (Eq a) => Ket a -> Bra a toBra k = Bra (bracket k) k -------------------------------------------------------------------------------- -- As for Ket vectors, this part is shaky but it simplifies notation. -- -------------------------------------------------------------------------------- instance (Eq a) => Num (Bra a) where (Bra f a) + (Bra g b) = Bra f' a' where a' = a + b f' = bracket a' (Bra f a) - (Bra g b) = Bra f' a' where a' = a - b f' = bracket a' -------------------------------------------------------------------------------- -- Monoids! Because MONOIDS!!! -- -------------------------------------------------------------------------------- instance (Eq a) => Monoid (Bra a) where mempty = toBra KetZero (Bra k a) `mappend` (Bra l b) = Bra (bracket a') a' where a' = a + b -------------------------------------------------------------------------------- -- Printing. So pretty! -- -------------------------------------------------------------------------------- instance (Show a, Eq a) => Show (Bra a) where showsPrec _ (Bra _ KetZero) = showString "Zero-Bra" showsPrec n (Bra _ (Ket j)) = showString "<" . showsPrec n j . showString "|" showsPrec n (Bra f (x :*| k)) = showScalar n x' . showsPrec n (Bra f k) where x' = conjugate x showsPrec n (Bra f (j :+| k)) = showsPrec n (Bra f j) . showSign n k . showsPrec n (Bra f k)
johanjoensson/QuantumHaskell
QuantumHaskell-notation/src/Notation/DiracNotation.hs
agpl-3.0
15,303
0
14
3,711
2,781
1,467
1,314
146
1
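A rough usage sketch for the notation above; Scalar (defined in Notation.QuantumState, not shown here) is assumed to be a Complex-Double-like Fractional type.

import Notation.DiracNotation

psi :: Ket Bool
psi = (0.6 *| Ket False) + (0.8 *| Ket True)   -- (+) comes from the Num instance

amp :: Scalar
amp = toBra (Ket True) |.| psi                 -- inner product <1|psi> = 0.8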
module RPM.Version.Tests(vercmpTests) where import Test.Tasty(TestTree, testGroup) import Test.Tasty.HUnit(assertEqual, testCase) import RPM.Version(vercmp) vercmpTests :: TestTree vercmpTests = testGroup "vercmp tests" $ map verTestCase versionCases where verTestCase :: (String, String, Ordering) -> TestTree verTestCase (verA, verB, ord) = testCase (verA ++ " " ++ show ord ++ " " ++ verB) $ assertEqual "" ord (vercmp verA verB) versionCases :: [(String, String, Ordering)] versionCases = [ ("1.0", "1.0", EQ), ("1.0", "2.0", LT), ("2.0", "1.0", GT), ("2.0.1", "2.0.1", EQ), ("2.0", "2.0.1", LT), ("2.0.1", "2.0", GT), ("2.0.1a", "2.0.1a", EQ), ("2.0.1a", "2.0.1", GT), ("2.0.1", "2.0.1a", LT), ("5.5p1", "5.5p1", EQ), ("5.5p1", "5.5p2", LT), ("5.5p2", "5.5p1", GT), ("5.5p10", "5.5p10", EQ), ("5.5p1", "5.5p10", LT), ("5.5p10", "5.5p1", GT), ("10xyz", "10.1xyz", LT), ("10.1xyz", "10xyz", GT), ("xyz10", "xyz10", EQ), ("xyz10", "xyz10.1", LT), ("xyz10.1", "xyz10", GT), ("xyz.4", "xyz.4", EQ), ("xyz.4", "8", LT), ("8", "xyz.4", GT), ("xyz.4", "2", LT), ("2", "xyz.4", GT), ("5.5p2", "5.6p1", LT), ("5.6p1", "5.5p2", GT), ("5.6p1", "6.5p1", LT), ("6.5p1", "5.6p1", GT), ("6.0.rc1", "6.0", GT), ("6.0", "6.0.rc1", LT), ("10b2", "10a1", GT), ("10a2", "10b2", LT), ("1.0aa", "1.0aa", EQ), ("1.0a", "1.0aa", LT), ("1.0aa", "1.0a", GT), ("10.0001", "10.0001", EQ), ("10.0001", "10.1", EQ), ("10.1", "10.0001", EQ), ("10.0001", "10.0039", LT), ("10.0039", "10.0001", GT), ("4.999.9", "5.0", LT), ("5.0", "4.999.9", GT), ("20101121", "20101121", EQ), ("20101121", "20101122", LT), ("20101122", "20101121", GT), ("2_0", "2_0", EQ), ("2.0", "2_0", EQ), ("2_0", "2.0", EQ), ("a", "a", EQ), ("a+", "a+", EQ), ("a+", "a_", EQ), ("a_", "a+", EQ), ("+a", "+a", EQ), ("+a", "_a", EQ), ("_a", "+a", EQ), ("+_", "+_", EQ), ("_+", "+_", EQ), ("_+", "_+", EQ), ("+", "_", EQ), ("_", "+", EQ), ("1.0~rc1", "1.0~rc1", EQ), ("1.0~rc1", "1.0", LT), ("1.0", "1.0~rc1", GT), ("1.0~rc1", "1.0~rc2", LT), ("1.0~rc2", "1.0~rc1", GT), ("1.0~rc1~git123", "1.0~rc1~git123", EQ), ("1.0~rc1~git123", "1.0~rc1", LT), ("1.0~rc1", "1.0~rc1~git123", GT) ]
dashea/bdcs
haskell-rpm/tests/RPM/Version/Tests.hs
lgpl-2.1
3,618
0
13
1,766
1,002
651
351
79
1
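These cases run under the standard tasty driver; a minimal Main module (illustrative only) looks like this.

module Main where

import Test.Tasty (defaultMain)
import RPM.Version.Tests (vercmpTests)

main :: IO ()
main = defaultMain vercmpTests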
-- Copyright 2019-2021 Google LLC -- -- Licensed under the Apache License, Version 2.0 (the "License"); -- you may not use this file except in compliance with the License. -- You may obtain a copy of the License at -- -- http://www.apache.org/licenses/LICENSE-2.0 -- -- Unless required by applicable law or agreed to in writing, software -- distributed under the License is distributed on an "AS IS" BASIS, -- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -- See the License for the specific language governing permissions and -- limitations under the License. {-# LANGUAGE AllowAmbiguousTypes #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TypeApplications #-} module Main where import Control.Monad (replicateM) import Test.Framework (defaultMain) import Test.Framework.Providers.QuickCheck2 (testProperty) import Test.QuickCheck ( (===), Arbitrary(..), Property , Gen, choose, forAll, sized, getPositive ) import Data.RLE (RLE, Run((:><)), fromList, toList) import qualified Data.RLE as RLE arbitrarySizedRLE :: Eq a => Gen a -> Int -> Gen (RLE a) arbitrarySizedRLE genElement n = do maxRun <- arbitrary resultList <- replicateM n $ do element <- genElement runLen <- choose (1, getPositive maxRun) pure (runLen :>< element) pure (RLE.fromRuns resultList) rles :: (Eq a, Arbitrary a) => Gen (RLE a) rles = sized (arbitrarySizedRLE arbitrary) prop_reverse :: (Eq a, Show a) => RLE a -> Property prop_reverse rle = RLE.reverse rle === (fromList . reverse . toList) rle prop_length :: (Eq a, Show a) => RLE a -> Property prop_length rle = RLE.length rle === (length . toList) rle prop_fromList_toList :: (Eq a, Show a) => RLE a -> Property prop_fromList_toList rle = rle === (fromList . toList) rle prop_toList_fromList :: (Eq a, Show a) => [a] -> Property prop_toList_fromList xs = xs === (toList . fromList) xs prop_take :: forall a . (Arbitrary a, Eq a, Show a) => Property prop_take = forAll rles $ \rle -> forAll (choose (-1, RLE.length rle + 1)) $ \i -> prop_take' @a i rle prop_take' :: (Eq a, Show a) => Int -> RLE a -> Property prop_take' i rle = take i (toList rle) === toList (RLE.take i rle) prop_splitAt :: forall a . (Arbitrary a, Eq a, Show a) => Property prop_splitAt = forAll rles $ \rle -> forAll (choose (-1, RLE.length rle + 1)) $ \i -> prop_splitAt' @a i rle prop_splitAt' :: (Eq a, Show a) => Int -> RLE a -> Property prop_splitAt' i rle = (fromList l, fromList r) === RLE.splitAt i rle where (l, r) = splitAt i (toList rle) main :: IO () main = defaultMain [ testProperty "reverse" (forAll rles $ prop_reverse @Int) , testProperty "length" (forAll rles $ prop_length @Int) , testProperty "toList . fromList" (prop_toList_fromList @Int) , testProperty "fromList . toList" (forAll rles $ prop_fromList_toList @Int) , testProperty "splitAt" (prop_splitAt @Int) , testProperty "take" (prop_take @Int) ]
google/hs-niche-containers
rle/test/RLETest.hs
apache-2.0
2,953
0
14
559
968
510
458
54
1
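For reference, the properties above pin down concrete behaviour of the run-length encoding; for example, using only the Data.RLE functions the tests already import:

import qualified Data.RLE as RLE

lenDemo :: Int
lenDemo = RLE.length (RLE.fromList "aaabcc")               -- 6

takeDemo :: String
takeDemo = RLE.toList (RLE.take 4 (RLE.fromList "aaabcc")) -- "aaab"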
module Coins where

data Coin = Good | Fake Bool
    deriving (Show)

generateSequence :: Int -> Int -> [Coin]
generateSequence coinsNumber fakePosition
    | coinsNumber <= 0 = []
    | coinsNumber == fakePosition = nextIteration ++ [Fake True]
    | otherwise = nextIteration ++ [Good]
    where nextIteration = generateSequence (coinsNumber - 1) fakePosition

generateThirteen :: Int -> [Coin]
generateThirteen fakePosition = generateSequence 13 fakePosition

-- generate all possible positions of fake coin
allThirteenSequences :: [[Coin]]
allThirteenSequences = map generateThirteen [1..13]

-- fold a set of coins down to a single coin: the fake coin if one is present,
-- otherwise Good (a constructor pattern match is needed here, since Fake
-- carries a Bool and Coin has no Eq instance)
foldCoinsSet :: [Coin] -> Coin
foldCoinsSet = foldr pick Good
  where pick fake@(Fake _) _   = fake
        pick Good          acc = acc

-- isFirstSetLess :: [Coin] -> [Coin] -> Bool
-- isFirstSetLess [] [] = False
-- isFirstSetLess [] _  = True
-- isFirstSetLess _ []  = False
-- isFirstSetLess first second
--     | length first /= length second = error "Coin sets should be the same size"

-- step one
-- compare first five with second five
vmatelsky/haskell
coins.hs
apache-2.0
1,016
0
9
174
238
131
107
15
2
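For illustration, the fake position passed to the generators above is counted 1-based from the front of the resulting list:

ghci> generateSequence 3 2
[Good,Fake True,Good]
ghci> length (generateThirteen 4)
13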
-- http://www.codewars.com/kata/544aed4c4a30184e960010f4
module Divisors where

divisors :: (Show a, Integral a) => a -> Either String [a]
divisors a = if null divs then Left (show a ++ " is prime") else Right divs
  where divs = filter (\n -> a `mod` n == 0) [2 .. a - 1]
Bodigrim/katas
src/haskell/6-Find-the-divisors.hs
bsd-2-clause
263
0
11
43
114
62
52
4
2
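Two worked calls, for reference:

ghci> divisors 12
Right [2,3,4,6]
ghci> divisors 13
Left "13 is prime"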
-- | This module defines the functions that can be used run the classical subset -- of QIO. That is, QIO computations that only use classical unitary operations. module QIO.QioClass where import Data.Maybe as Maybe import Data.Monoid as Monoid import Control.Monad.State import QIO.QioSyn import QIO.Heap -- | A classical unitary operation is defined as a function that will -- update the current classical state. newtype UnitaryC = U {unU :: Int -> HeapMap -> HeapMap} -- | The classical unitary type forms a Monoid instance Monoid UnitaryC where mempty = U (\ fv bs -> bs) mappend (U f) (U g) = U (\ fv h -> g fv (f fv h)) -- | A single qubit rotation can be converted into the classical unitary type, -- if it is indeed classical (otherwise an error is thrown). uRotC :: Qbit -> Rotation -> UnitaryC uRotC x f | f==rnot = U (\ _ h -> update h x (not (fromJust (h ? x)))) | f==rid = mempty | otherwise = error "not classical" -- | A swap operation can be defined in the classical unitary type. uSwapC :: Qbit -> Qbit -> UnitaryC uSwapC x y = U (\ _ h -> hswap h x y ) -- | A conditional operation can be defined in the classical unitary type. uCondC :: Qbit -> (Bool -> UnitaryC) -> UnitaryC uCondC x br = U (\ fv h -> update (unU (br (fromJust (h ? x))) fv (forget h x)) x (fromJust (h ? x))) -- | A let operation can be defined in the classical unitary type. uLetC :: Bool -> (Qbit -> UnitaryC) -> UnitaryC uLetC b ux = U (\ fv h -> unU (ux (Qbit fv)) (fv+1) (update h (Qbit fv) b)) -- | A unitary can be run by converting it into the classical unitary type. runUC :: U -> UnitaryC runUC UReturn = mempty runUC (Rot x r u) = uRotC x r `mappend` runUC u runUC (Swap x y u) = uSwapC x y `mappend` runUC u runUC (Cond x us u) = uCondC x (runUC.us) `mappend` runUC u runUC (Ulet b xu u) = uLetC b (runUC.xu) `mappend` runUC u -- | A classical state consists of the next free qubit reference, along with -- a Heap that represents the overall state of the current qubits in scope. data StateC = StateC {fv :: Int, heap :: HeapMap} -- | An initial state is defined as an empty heap, with 0 set as the next -- free qubit referece initialStateC :: StateC initialStateC = StateC 0 initial -- | A QIO computation can be converted into a stateful computation, over -- a state of type "StateC". runQStateC :: QIO a -> State StateC a runQStateC (QReturn a) = return a runQStateC (MkQbit b xq) = do (StateC fv h) <- get put (StateC (fv+1) (update h (Qbit fv) b)) runQStateC (xq (Qbit fv)) runQStateC (ApplyU u q) = do (StateC fv h) <- get put (StateC fv (unU (runUC u) fv h)) runQStateC q runQStateC (Meas x qs) = do (StateC _ h) <- get runQStateC (qs (fromJust (h ? x))) -- | We can run a classical QIO computation by converting it into a stateful -- computation, and evaluating that using the initial state. runC :: QIO a -> a runC q = evalState (runQStateC q) initialStateC
alexandersgreen/qio-haskell
QIO/QioClass.hs
bsd-2-clause
3,085
0
16
783
940
489
451
41
1
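A rough sketch of how runC is meant to be used, written directly against the QIO constructors that the interpreter above pattern-matches on; the package also provides friendlier smart constructors not shown here, and it is an assumption that the raw constructors are exported from QIO.QioSyn.

import QIO.QioSyn
import QIO.QioClass (runC)

-- Initialise a single qubit to True and measure it classically.
classicalMeasure :: Bool
classicalMeasure = runC (MkQbit True (\q -> Meas q QReturn))   -- evaluates to True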
module Env where import Prelude hiding (FilePath) data Env a = Env { setup :: IO a, teardown :: IO () } runEnv :: Env a -> (a -> IO r) -> IO r runEnv env operation = do identity <- setup env result <- operation identity teardown env return result
jfeltz/tasty-integrate
tests/Env.hs
bsd-2-clause
257
0
10
61
114
57
57
9
1
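A small, self-contained usage sketch; the resource and the messages are invented.

import Env

demo :: IO ()
demo = runEnv env print           -- prints "acquiring", then 42, then "releasing"
  where
    env = Env { setup    = putStrLn "acquiring" >> return (42 :: Int)
              , teardown = putStrLn "releasing" }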
{-# LANGUAGE CPP #-} ---------------------------------------------------------------------------- -- -- Stg to C--: primitive operations -- -- (c) The University of Glasgow 2004-2006 -- ----------------------------------------------------------------------------- module StgCmmPrim ( cgOpApp, cgPrimOp, -- internal(ish), used by cgCase to get code for a -- comparison without also turning it into a Bool. shouldInlinePrimOp ) where #include "HsVersions.h" import StgCmmLayout import StgCmmForeign import StgCmmEnv import StgCmmMonad import StgCmmUtils import StgCmmTicky import StgCmmHeap import StgCmmProf ( costCentreFrom, curCCS ) import DynFlags import Platform import BasicTypes import MkGraph import StgSyn import Cmm import CmmInfo import Type ( Type, tyConAppTyCon ) import TyCon import CLabel import CmmUtils import PrimOp import SMRep import FastString import Outputable import Util #if __GLASGOW_HASKELL__ >= 709 import Prelude hiding ((<*>)) #endif import Data.Bits ((.&.), bit) import Control.Monad (liftM, when) ------------------------------------------------------------------------ -- Primitive operations and foreign calls ------------------------------------------------------------------------ {- Note [Foreign call results] ~~~~~~~~~~~~~~~~~~~~~~~~~~~ A foreign call always returns an unboxed tuple of results, one of which is the state token. This seems to happen even for pure calls. Even if we returned a single result for pure calls, it'd still be right to wrap it in a singleton unboxed tuple, because the result might be a Haskell closure pointer, we don't want to evaluate it. -} ---------------------------------- cgOpApp :: StgOp -- The op -> [StgArg] -- Arguments -> Type -- Result type (always an unboxed tuple) -> FCode ReturnKind -- Foreign calls cgOpApp (StgFCallOp fcall _) stg_args res_ty = cgForeignCall fcall stg_args res_ty -- Note [Foreign call results] -- tagToEnum# is special: we need to pull the constructor -- out of the table, and perform an appropriate return. cgOpApp (StgPrimOp TagToEnumOp) [arg] res_ty = ASSERT(isEnumerationTyCon tycon) do { dflags <- getDynFlags ; args' <- getNonVoidArgAmodes [arg] ; let amode = case args' of [amode] -> amode _ -> panic "TagToEnumOp had void arg" ; emitReturn [tagToClosure dflags tycon amode] } where -- If you're reading this code in the attempt to figure -- out why the compiler panic'ed here, it is probably because -- you used tagToEnum# in a non-monomorphic setting, e.g., -- intToTg :: Enum a => Int -> a ; intToTg (I# x#) = tagToEnum# x# -- That won't work. tycon = tyConAppTyCon res_ty cgOpApp (StgPrimOp primop) args res_ty = do dflags <- getDynFlags cmm_args <- getNonVoidArgAmodes args case shouldInlinePrimOp dflags primop cmm_args of Nothing -> do -- out-of-line let fun = CmmLit (CmmLabel (mkRtsPrimOpLabel primop)) emitCall (NativeNodeCall, NativeReturn) fun cmm_args Just f -- inline | ReturnsPrim VoidRep <- result_info -> do f [] emitReturn [] | ReturnsPrim rep <- result_info -> do dflags <- getDynFlags res <- newTemp (primRepCmmType dflags rep) f [res] emitReturn [CmmReg (CmmLocal res)] | ReturnsAlg tycon <- result_info, isUnboxedTupleTyCon tycon -> do (regs, _hints) <- newUnboxedTupleRegs res_ty f regs emitReturn (map (CmmReg . 
CmmLocal) regs) | otherwise -> panic "cgPrimop" where result_info = getPrimOpResultInfo primop cgOpApp (StgPrimCallOp primcall) args _res_ty = do { cmm_args <- getNonVoidArgAmodes args ; let fun = CmmLit (CmmLabel (mkPrimCallLabel primcall)) ; emitCall (NativeNodeCall, NativeReturn) fun cmm_args } -- | Interpret the argument as an unsigned value, assuming the value -- is given in two-complement form in the given width. -- -- Example: @asUnsigned W64 (-1)@ is 18446744073709551615. -- -- This function is used to work around the fact that many array -- primops take Int# arguments, but we interpret them as unsigned -- quantities in the code gen. This means that we have to be careful -- every time we work on e.g. a CmmInt literal that corresponds to the -- array size, as it might contain a negative Integer value if the -- user passed a value larger than 2^(wORD_SIZE_IN_BITS-1) as the Int# -- literal. asUnsigned :: Width -> Integer -> Integer asUnsigned w n = n .&. (bit (widthInBits w) - 1) -- TODO: Several primop implementations (e.g. 'doNewByteArrayOp') use -- ByteOff (or some other fixed width signed type) to represent -- array sizes or indices. This means that these will overflow for -- large enough sizes. -- | Decide whether an out-of-line primop should be replaced by an -- inline implementation. This might happen e.g. if there's enough -- static information, such as statically know arguments, to emit a -- more efficient implementation inline. -- -- Returns 'Nothing' if this primop should use its out-of-line -- implementation (defined elsewhere) and 'Just' together with a code -- generating function that takes the output regs as arguments -- otherwise. shouldInlinePrimOp :: DynFlags -> PrimOp -- ^ The primop -> [CmmExpr] -- ^ The primop arguments -> Maybe ([LocalReg] -> FCode ()) shouldInlinePrimOp dflags NewByteArrayOp_Char [(CmmLit (CmmInt n w))] | asUnsigned w n <= fromIntegral (maxInlineAllocSize dflags) = Just $ \ [res] -> doNewByteArrayOp res (fromInteger n) shouldInlinePrimOp dflags NewArrayOp [(CmmLit (CmmInt n w)), init] | wordsToBytes dflags (asUnsigned w n) <= fromIntegral (maxInlineAllocSize dflags) = Just $ \ [res] -> doNewArrayOp res (arrPtrsRep dflags (fromInteger n)) mkMAP_DIRTY_infoLabel [ (mkIntExpr dflags (fromInteger n), fixedHdrSize dflags + oFFSET_StgMutArrPtrs_ptrs dflags) , (mkIntExpr dflags (nonHdrSizeW (arrPtrsRep dflags (fromInteger n))), fixedHdrSize dflags + oFFSET_StgMutArrPtrs_size dflags) ] (fromInteger n) init shouldInlinePrimOp _ CopyArrayOp [src, src_off, dst, dst_off, (CmmLit (CmmInt n _))] = Just $ \ [] -> doCopyArrayOp src src_off dst dst_off (fromInteger n) shouldInlinePrimOp _ CopyMutableArrayOp [src, src_off, dst, dst_off, (CmmLit (CmmInt n _))] = Just $ \ [] -> doCopyMutableArrayOp src src_off dst dst_off (fromInteger n) shouldInlinePrimOp _ CopyArrayArrayOp [src, src_off, dst, dst_off, (CmmLit (CmmInt n _))] = Just $ \ [] -> doCopyArrayOp src src_off dst dst_off (fromInteger n) shouldInlinePrimOp _ CopyMutableArrayArrayOp [src, src_off, dst, dst_off, (CmmLit (CmmInt n _))] = Just $ \ [] -> doCopyMutableArrayOp src src_off dst dst_off (fromInteger n) shouldInlinePrimOp dflags CloneArrayOp [src, src_off, (CmmLit (CmmInt n w))] | wordsToBytes dflags (asUnsigned w n) <= fromIntegral (maxInlineAllocSize dflags) = Just $ \ [res] -> emitCloneArray mkMAP_FROZEN_infoLabel res src src_off (fromInteger n) shouldInlinePrimOp dflags CloneMutableArrayOp [src, src_off, (CmmLit (CmmInt n w))] | wordsToBytes dflags (asUnsigned w n) <= fromIntegral 
(maxInlineAllocSize dflags) = Just $ \ [res] -> emitCloneArray mkMAP_DIRTY_infoLabel res src src_off (fromInteger n) shouldInlinePrimOp dflags FreezeArrayOp [src, src_off, (CmmLit (CmmInt n w))] | wordsToBytes dflags (asUnsigned w n) <= fromIntegral (maxInlineAllocSize dflags) = Just $ \ [res] -> emitCloneArray mkMAP_FROZEN_infoLabel res src src_off (fromInteger n) shouldInlinePrimOp dflags ThawArrayOp [src, src_off, (CmmLit (CmmInt n w))] | wordsToBytes dflags (asUnsigned w n) <= fromIntegral (maxInlineAllocSize dflags) = Just $ \ [res] -> emitCloneArray mkMAP_DIRTY_infoLabel res src src_off (fromInteger n) shouldInlinePrimOp dflags NewSmallArrayOp [(CmmLit (CmmInt n w)), init] | wordsToBytes dflags (asUnsigned w n) <= fromIntegral (maxInlineAllocSize dflags) = Just $ \ [res] -> doNewArrayOp res (smallArrPtrsRep (fromInteger n)) mkSMAP_DIRTY_infoLabel [ (mkIntExpr dflags (fromInteger n), fixedHdrSize dflags + oFFSET_StgSmallMutArrPtrs_ptrs dflags) ] (fromInteger n) init shouldInlinePrimOp _ CopySmallArrayOp [src, src_off, dst, dst_off, (CmmLit (CmmInt n _))] = Just $ \ [] -> doCopySmallArrayOp src src_off dst dst_off (fromInteger n) shouldInlinePrimOp _ CopySmallMutableArrayOp [src, src_off, dst, dst_off, (CmmLit (CmmInt n _))] = Just $ \ [] -> doCopySmallMutableArrayOp src src_off dst dst_off (fromInteger n) shouldInlinePrimOp dflags CloneSmallArrayOp [src, src_off, (CmmLit (CmmInt n w))] | wordsToBytes dflags (asUnsigned w n) <= fromIntegral (maxInlineAllocSize dflags) = Just $ \ [res] -> emitCloneSmallArray mkSMAP_FROZEN_infoLabel res src src_off (fromInteger n) shouldInlinePrimOp dflags CloneSmallMutableArrayOp [src, src_off, (CmmLit (CmmInt n w))] | wordsToBytes dflags (asUnsigned w n) <= fromIntegral (maxInlineAllocSize dflags) = Just $ \ [res] -> emitCloneSmallArray mkSMAP_DIRTY_infoLabel res src src_off (fromInteger n) shouldInlinePrimOp dflags FreezeSmallArrayOp [src, src_off, (CmmLit (CmmInt n w))] | wordsToBytes dflags (asUnsigned w n) <= fromIntegral (maxInlineAllocSize dflags) = Just $ \ [res] -> emitCloneSmallArray mkSMAP_FROZEN_infoLabel res src src_off (fromInteger n) shouldInlinePrimOp dflags ThawSmallArrayOp [src, src_off, (CmmLit (CmmInt n w))] | wordsToBytes dflags (asUnsigned w n) <= fromIntegral (maxInlineAllocSize dflags) = Just $ \ [res] -> emitCloneSmallArray mkSMAP_DIRTY_infoLabel res src src_off (fromInteger n) shouldInlinePrimOp dflags primop args | primOpOutOfLine primop = Nothing | otherwise = Just $ \ regs -> emitPrimOp dflags regs primop args -- TODO: Several primops, such as 'copyArray#', only have an inline -- implementation (below) but could possibly have both an inline -- implementation and an out-of-line implementation, just like -- 'newArray#'. This would lower the amount of code generated, -- hopefully without a performance impact (needs to be measured). --------------------------------------------------- cgPrimOp :: [LocalReg] -- where to put the results -> PrimOp -- the op -> [StgArg] -- arguments -> FCode () cgPrimOp results op args = do dflags <- getDynFlags arg_exprs <- getNonVoidArgAmodes args emitPrimOp dflags results op arg_exprs ------------------------------------------------------------------------ -- Emitting code for a primop ------------------------------------------------------------------------ emitPrimOp :: DynFlags -> [LocalReg] -- where to put the results -> PrimOp -- the op -> [CmmExpr] -- arguments -> FCode () -- First we handle various awkward cases specially. 
The remaining -- easy cases are then handled by translateOp, defined below. emitPrimOp _ [res] ParOp [arg] = -- for now, just implement this in a C function -- later, we might want to inline it. emitCCall [(res,NoHint)] (CmmLit (CmmLabel (mkForeignLabel (fsLit "newSpark") Nothing ForeignLabelInExternalPackage IsFunction))) [(CmmReg (CmmGlobal BaseReg), AddrHint), (arg,AddrHint)] emitPrimOp dflags [res] SparkOp [arg] = do -- returns the value of arg in res. We're going to therefore -- refer to arg twice (once to pass to newSpark(), and once to -- assign to res), so put it in a temporary. tmp <- assignTemp arg tmp2 <- newTemp (bWord dflags) emitCCall [(tmp2,NoHint)] (CmmLit (CmmLabel (mkForeignLabel (fsLit "newSpark") Nothing ForeignLabelInExternalPackage IsFunction))) [(CmmReg (CmmGlobal BaseReg), AddrHint), ((CmmReg (CmmLocal tmp)), AddrHint)] emitAssign (CmmLocal res) (CmmReg (CmmLocal tmp)) emitPrimOp dflags [res] GetCCSOfOp [arg] = emitAssign (CmmLocal res) val where val | gopt Opt_SccProfilingOn dflags = costCentreFrom dflags (cmmUntag dflags arg) | otherwise = CmmLit (zeroCLit dflags) emitPrimOp _ [res] GetCurrentCCSOp [_dummy_arg] = emitAssign (CmmLocal res) curCCS emitPrimOp dflags [res] ReadMutVarOp [mutv] = emitAssign (CmmLocal res) (cmmLoadIndexW dflags mutv (fixedHdrSizeW dflags) (gcWord dflags)) emitPrimOp dflags [] WriteMutVarOp [mutv,var] = do emitStore (cmmOffsetW dflags mutv (fixedHdrSizeW dflags)) var emitCCall [{-no results-}] (CmmLit (CmmLabel mkDirty_MUT_VAR_Label)) [(CmmReg (CmmGlobal BaseReg), AddrHint), (mutv,AddrHint)] -- #define sizzeofByteArrayzh(r,a) \ -- r = ((StgArrWords *)(a))->bytes emitPrimOp dflags [res] SizeofByteArrayOp [arg] = emit $ mkAssign (CmmLocal res) (cmmLoadIndexW dflags arg (fixedHdrSizeW dflags) (bWord dflags)) -- #define sizzeofMutableByteArrayzh(r,a) \ -- r = ((StgArrWords *)(a))->bytes emitPrimOp dflags [res] SizeofMutableByteArrayOp [arg] = emitPrimOp dflags [res] SizeofByteArrayOp [arg] -- #define touchzh(o) /* nothing */ emitPrimOp _ res@[] TouchOp args@[_arg] = do emitPrimCall res MO_Touch args -- #define byteArrayContentszh(r,a) r = BYTE_ARR_CTS(a) emitPrimOp dflags [res] ByteArrayContents_Char [arg] = emitAssign (CmmLocal res) (cmmOffsetB dflags arg (arrWordsHdrSize dflags)) -- #define stableNameToIntzh(r,s) (r = ((StgStableName *)s)->sn) emitPrimOp dflags [res] StableNameToIntOp [arg] = emitAssign (CmmLocal res) (cmmLoadIndexW dflags arg (fixedHdrSizeW dflags) (bWord dflags)) -- #define eqStableNamezh(r,sn1,sn2) \ -- (r = (((StgStableName *)sn1)->sn == ((StgStableName *)sn2)->sn)) emitPrimOp dflags [res] EqStableNameOp [arg1,arg2] = emitAssign (CmmLocal res) (CmmMachOp (mo_wordEq dflags) [ cmmLoadIndexW dflags arg1 (fixedHdrSizeW dflags) (bWord dflags), cmmLoadIndexW dflags arg2 (fixedHdrSizeW dflags) (bWord dflags) ]) emitPrimOp dflags [res] ReallyUnsafePtrEqualityOp [arg1,arg2] = emitAssign (CmmLocal res) (CmmMachOp (mo_wordEq dflags) [arg1,arg2]) -- #define addrToHValuezh(r,a) r=(P_)a emitPrimOp _ [res] AddrToAnyOp [arg] = emitAssign (CmmLocal res) arg -- #define dataToTagzh(r,a) r=(GET_TAG(((StgClosure *)a)->header.info)) -- Note: argument may be tagged! emitPrimOp dflags [res] DataToTagOp [arg] = emitAssign (CmmLocal res) (getConstrTag dflags (cmmUntag dflags arg)) {- Freezing arrays-of-ptrs requires changing an info table, for the benefit of the generational collector. It needs to scavenge mutable objects, even if they are in old space. When they become immutable, they can be removed from this scavenge list. 
-} -- #define unsafeFreezzeArrayzh(r,a) -- { -- SET_INFO((StgClosure *)a,&stg_MUT_ARR_PTRS_FROZEN0_info); -- r = a; -- } emitPrimOp _ [res] UnsafeFreezeArrayOp [arg] = emit $ catAGraphs [ setInfo arg (CmmLit (CmmLabel mkMAP_FROZEN0_infoLabel)), mkAssign (CmmLocal res) arg ] emitPrimOp _ [res] UnsafeFreezeArrayArrayOp [arg] = emit $ catAGraphs [ setInfo arg (CmmLit (CmmLabel mkMAP_FROZEN0_infoLabel)), mkAssign (CmmLocal res) arg ] emitPrimOp _ [res] UnsafeFreezeSmallArrayOp [arg] = emit $ catAGraphs [ setInfo arg (CmmLit (CmmLabel mkSMAP_FROZEN0_infoLabel)), mkAssign (CmmLocal res) arg ] -- #define unsafeFreezzeByteArrayzh(r,a) r=(a) emitPrimOp _ [res] UnsafeFreezeByteArrayOp [arg] = emitAssign (CmmLocal res) arg -- Reading/writing pointer arrays emitPrimOp _ [res] ReadArrayOp [obj,ix] = doReadPtrArrayOp res obj ix emitPrimOp _ [res] IndexArrayOp [obj,ix] = doReadPtrArrayOp res obj ix emitPrimOp _ [] WriteArrayOp [obj,ix,v] = doWritePtrArrayOp obj ix v emitPrimOp _ [res] IndexArrayArrayOp_ByteArray [obj,ix] = doReadPtrArrayOp res obj ix emitPrimOp _ [res] IndexArrayArrayOp_ArrayArray [obj,ix] = doReadPtrArrayOp res obj ix emitPrimOp _ [res] ReadArrayArrayOp_ByteArray [obj,ix] = doReadPtrArrayOp res obj ix emitPrimOp _ [res] ReadArrayArrayOp_MutableByteArray [obj,ix] = doReadPtrArrayOp res obj ix emitPrimOp _ [res] ReadArrayArrayOp_ArrayArray [obj,ix] = doReadPtrArrayOp res obj ix emitPrimOp _ [res] ReadArrayArrayOp_MutableArrayArray [obj,ix] = doReadPtrArrayOp res obj ix emitPrimOp _ [] WriteArrayArrayOp_ByteArray [obj,ix,v] = doWritePtrArrayOp obj ix v emitPrimOp _ [] WriteArrayArrayOp_MutableByteArray [obj,ix,v] = doWritePtrArrayOp obj ix v emitPrimOp _ [] WriteArrayArrayOp_ArrayArray [obj,ix,v] = doWritePtrArrayOp obj ix v emitPrimOp _ [] WriteArrayArrayOp_MutableArrayArray [obj,ix,v] = doWritePtrArrayOp obj ix v emitPrimOp _ [res] ReadSmallArrayOp [obj,ix] = doReadSmallPtrArrayOp res obj ix emitPrimOp _ [res] IndexSmallArrayOp [obj,ix] = doReadSmallPtrArrayOp res obj ix emitPrimOp _ [] WriteSmallArrayOp [obj,ix,v] = doWriteSmallPtrArrayOp obj ix v -- Getting the size of pointer arrays emitPrimOp dflags [res] SizeofArrayOp [arg] = emit $ mkAssign (CmmLocal res) (cmmLoadIndexW dflags arg (fixedHdrSizeW dflags + oFFSET_StgMutArrPtrs_ptrs dflags) (bWord dflags)) emitPrimOp dflags [res] SizeofMutableArrayOp [arg] = emitPrimOp dflags [res] SizeofArrayOp [arg] emitPrimOp dflags [res] SizeofArrayArrayOp [arg] = emitPrimOp dflags [res] SizeofArrayOp [arg] emitPrimOp dflags [res] SizeofMutableArrayArrayOp [arg] = emitPrimOp dflags [res] SizeofArrayOp [arg] emitPrimOp dflags [res] SizeofSmallArrayOp [arg] = emit $ mkAssign (CmmLocal res) (cmmLoadIndexW dflags arg (fixedHdrSizeW dflags + oFFSET_StgSmallMutArrPtrs_ptrs dflags) (bWord dflags)) emitPrimOp dflags [res] SizeofSmallMutableArrayOp [arg] = emitPrimOp dflags [res] SizeofSmallArrayOp [arg] -- IndexXXXoffAddr emitPrimOp dflags res IndexOffAddrOp_Char args = doIndexOffAddrOp (Just (mo_u_8ToWord dflags)) b8 res args emitPrimOp dflags res IndexOffAddrOp_WideChar args = doIndexOffAddrOp (Just (mo_u_32ToWord dflags)) b32 res args emitPrimOp dflags res IndexOffAddrOp_Int args = doIndexOffAddrOp Nothing (bWord dflags) res args emitPrimOp dflags res IndexOffAddrOp_Word args = doIndexOffAddrOp Nothing (bWord dflags) res args emitPrimOp dflags res IndexOffAddrOp_Addr args = doIndexOffAddrOp Nothing (bWord dflags) res args emitPrimOp _ res IndexOffAddrOp_Float args = doIndexOffAddrOp Nothing f32 res args emitPrimOp _ res IndexOffAddrOp_Double args = 
doIndexOffAddrOp Nothing f64 res args emitPrimOp dflags res IndexOffAddrOp_StablePtr args = doIndexOffAddrOp Nothing (bWord dflags) res args emitPrimOp dflags res IndexOffAddrOp_Int8 args = doIndexOffAddrOp (Just (mo_s_8ToWord dflags)) b8 res args emitPrimOp dflags res IndexOffAddrOp_Int16 args = doIndexOffAddrOp (Just (mo_s_16ToWord dflags)) b16 res args emitPrimOp dflags res IndexOffAddrOp_Int32 args = doIndexOffAddrOp (Just (mo_s_32ToWord dflags)) b32 res args emitPrimOp _ res IndexOffAddrOp_Int64 args = doIndexOffAddrOp Nothing b64 res args emitPrimOp dflags res IndexOffAddrOp_Word8 args = doIndexOffAddrOp (Just (mo_u_8ToWord dflags)) b8 res args emitPrimOp dflags res IndexOffAddrOp_Word16 args = doIndexOffAddrOp (Just (mo_u_16ToWord dflags)) b16 res args emitPrimOp dflags res IndexOffAddrOp_Word32 args = doIndexOffAddrOp (Just (mo_u_32ToWord dflags)) b32 res args emitPrimOp _ res IndexOffAddrOp_Word64 args = doIndexOffAddrOp Nothing b64 res args -- ReadXXXoffAddr, which are identical, for our purposes, to IndexXXXoffAddr. emitPrimOp dflags res ReadOffAddrOp_Char args = doIndexOffAddrOp (Just (mo_u_8ToWord dflags)) b8 res args emitPrimOp dflags res ReadOffAddrOp_WideChar args = doIndexOffAddrOp (Just (mo_u_32ToWord dflags)) b32 res args emitPrimOp dflags res ReadOffAddrOp_Int args = doIndexOffAddrOp Nothing (bWord dflags) res args emitPrimOp dflags res ReadOffAddrOp_Word args = doIndexOffAddrOp Nothing (bWord dflags) res args emitPrimOp dflags res ReadOffAddrOp_Addr args = doIndexOffAddrOp Nothing (bWord dflags) res args emitPrimOp _ res ReadOffAddrOp_Float args = doIndexOffAddrOp Nothing f32 res args emitPrimOp _ res ReadOffAddrOp_Double args = doIndexOffAddrOp Nothing f64 res args emitPrimOp dflags res ReadOffAddrOp_StablePtr args = doIndexOffAddrOp Nothing (bWord dflags) res args emitPrimOp dflags res ReadOffAddrOp_Int8 args = doIndexOffAddrOp (Just (mo_s_8ToWord dflags)) b8 res args emitPrimOp dflags res ReadOffAddrOp_Int16 args = doIndexOffAddrOp (Just (mo_s_16ToWord dflags)) b16 res args emitPrimOp dflags res ReadOffAddrOp_Int32 args = doIndexOffAddrOp (Just (mo_s_32ToWord dflags)) b32 res args emitPrimOp _ res ReadOffAddrOp_Int64 args = doIndexOffAddrOp Nothing b64 res args emitPrimOp dflags res ReadOffAddrOp_Word8 args = doIndexOffAddrOp (Just (mo_u_8ToWord dflags)) b8 res args emitPrimOp dflags res ReadOffAddrOp_Word16 args = doIndexOffAddrOp (Just (mo_u_16ToWord dflags)) b16 res args emitPrimOp dflags res ReadOffAddrOp_Word32 args = doIndexOffAddrOp (Just (mo_u_32ToWord dflags)) b32 res args emitPrimOp _ res ReadOffAddrOp_Word64 args = doIndexOffAddrOp Nothing b64 res args -- IndexXXXArray emitPrimOp dflags res IndexByteArrayOp_Char args = doIndexByteArrayOp (Just (mo_u_8ToWord dflags)) b8 res args emitPrimOp dflags res IndexByteArrayOp_WideChar args = doIndexByteArrayOp (Just (mo_u_32ToWord dflags)) b32 res args emitPrimOp dflags res IndexByteArrayOp_Int args = doIndexByteArrayOp Nothing (bWord dflags) res args emitPrimOp dflags res IndexByteArrayOp_Word args = doIndexByteArrayOp Nothing (bWord dflags) res args emitPrimOp dflags res IndexByteArrayOp_Addr args = doIndexByteArrayOp Nothing (bWord dflags) res args emitPrimOp _ res IndexByteArrayOp_Float args = doIndexByteArrayOp Nothing f32 res args emitPrimOp _ res IndexByteArrayOp_Double args = doIndexByteArrayOp Nothing f64 res args emitPrimOp dflags res IndexByteArrayOp_StablePtr args = doIndexByteArrayOp Nothing (bWord dflags) res args emitPrimOp dflags res IndexByteArrayOp_Int8 args = doIndexByteArrayOp (Just 
(mo_s_8ToWord dflags)) b8 res args emitPrimOp dflags res IndexByteArrayOp_Int16 args = doIndexByteArrayOp (Just (mo_s_16ToWord dflags)) b16 res args emitPrimOp dflags res IndexByteArrayOp_Int32 args = doIndexByteArrayOp (Just (mo_s_32ToWord dflags)) b32 res args emitPrimOp _ res IndexByteArrayOp_Int64 args = doIndexByteArrayOp Nothing b64 res args emitPrimOp dflags res IndexByteArrayOp_Word8 args = doIndexByteArrayOp (Just (mo_u_8ToWord dflags)) b8 res args emitPrimOp dflags res IndexByteArrayOp_Word16 args = doIndexByteArrayOp (Just (mo_u_16ToWord dflags)) b16 res args emitPrimOp dflags res IndexByteArrayOp_Word32 args = doIndexByteArrayOp (Just (mo_u_32ToWord dflags)) b32 res args emitPrimOp _ res IndexByteArrayOp_Word64 args = doIndexByteArrayOp Nothing b64 res args -- ReadXXXArray, identical to IndexXXXArray. emitPrimOp dflags res ReadByteArrayOp_Char args = doIndexByteArrayOp (Just (mo_u_8ToWord dflags)) b8 res args emitPrimOp dflags res ReadByteArrayOp_WideChar args = doIndexByteArrayOp (Just (mo_u_32ToWord dflags)) b32 res args emitPrimOp dflags res ReadByteArrayOp_Int args = doIndexByteArrayOp Nothing (bWord dflags) res args emitPrimOp dflags res ReadByteArrayOp_Word args = doIndexByteArrayOp Nothing (bWord dflags) res args emitPrimOp dflags res ReadByteArrayOp_Addr args = doIndexByteArrayOp Nothing (bWord dflags) res args emitPrimOp _ res ReadByteArrayOp_Float args = doIndexByteArrayOp Nothing f32 res args emitPrimOp _ res ReadByteArrayOp_Double args = doIndexByteArrayOp Nothing f64 res args emitPrimOp dflags res ReadByteArrayOp_StablePtr args = doIndexByteArrayOp Nothing (bWord dflags) res args emitPrimOp dflags res ReadByteArrayOp_Int8 args = doIndexByteArrayOp (Just (mo_s_8ToWord dflags)) b8 res args emitPrimOp dflags res ReadByteArrayOp_Int16 args = doIndexByteArrayOp (Just (mo_s_16ToWord dflags)) b16 res args emitPrimOp dflags res ReadByteArrayOp_Int32 args = doIndexByteArrayOp (Just (mo_s_32ToWord dflags)) b32 res args emitPrimOp _ res ReadByteArrayOp_Int64 args = doIndexByteArrayOp Nothing b64 res args emitPrimOp dflags res ReadByteArrayOp_Word8 args = doIndexByteArrayOp (Just (mo_u_8ToWord dflags)) b8 res args emitPrimOp dflags res ReadByteArrayOp_Word16 args = doIndexByteArrayOp (Just (mo_u_16ToWord dflags)) b16 res args emitPrimOp dflags res ReadByteArrayOp_Word32 args = doIndexByteArrayOp (Just (mo_u_32ToWord dflags)) b32 res args emitPrimOp _ res ReadByteArrayOp_Word64 args = doIndexByteArrayOp Nothing b64 res args -- WriteXXXoffAddr emitPrimOp dflags res WriteOffAddrOp_Char args = doWriteOffAddrOp (Just (mo_WordTo8 dflags)) b8 res args emitPrimOp dflags res WriteOffAddrOp_WideChar args = doWriteOffAddrOp (Just (mo_WordTo32 dflags)) b32 res args emitPrimOp dflags res WriteOffAddrOp_Int args = doWriteOffAddrOp Nothing (bWord dflags) res args emitPrimOp dflags res WriteOffAddrOp_Word args = doWriteOffAddrOp Nothing (bWord dflags) res args emitPrimOp dflags res WriteOffAddrOp_Addr args = doWriteOffAddrOp Nothing (bWord dflags) res args emitPrimOp _ res WriteOffAddrOp_Float args = doWriteOffAddrOp Nothing f32 res args emitPrimOp _ res WriteOffAddrOp_Double args = doWriteOffAddrOp Nothing f64 res args emitPrimOp dflags res WriteOffAddrOp_StablePtr args = doWriteOffAddrOp Nothing (bWord dflags) res args emitPrimOp dflags res WriteOffAddrOp_Int8 args = doWriteOffAddrOp (Just (mo_WordTo8 dflags)) b8 res args emitPrimOp dflags res WriteOffAddrOp_Int16 args = doWriteOffAddrOp (Just (mo_WordTo16 dflags)) b16 res args emitPrimOp dflags res WriteOffAddrOp_Int32 args = 
doWriteOffAddrOp (Just (mo_WordTo32 dflags)) b32 res args emitPrimOp _ res WriteOffAddrOp_Int64 args = doWriteOffAddrOp Nothing b64 res args emitPrimOp dflags res WriteOffAddrOp_Word8 args = doWriteOffAddrOp (Just (mo_WordTo8 dflags)) b8 res args emitPrimOp dflags res WriteOffAddrOp_Word16 args = doWriteOffAddrOp (Just (mo_WordTo16 dflags)) b16 res args emitPrimOp dflags res WriteOffAddrOp_Word32 args = doWriteOffAddrOp (Just (mo_WordTo32 dflags)) b32 res args emitPrimOp _ res WriteOffAddrOp_Word64 args = doWriteOffAddrOp Nothing b64 res args -- WriteXXXArray emitPrimOp dflags res WriteByteArrayOp_Char args = doWriteByteArrayOp (Just (mo_WordTo8 dflags)) b8 res args emitPrimOp dflags res WriteByteArrayOp_WideChar args = doWriteByteArrayOp (Just (mo_WordTo32 dflags)) b32 res args emitPrimOp dflags res WriteByteArrayOp_Int args = doWriteByteArrayOp Nothing (bWord dflags) res args emitPrimOp dflags res WriteByteArrayOp_Word args = doWriteByteArrayOp Nothing (bWord dflags) res args emitPrimOp dflags res WriteByteArrayOp_Addr args = doWriteByteArrayOp Nothing (bWord dflags) res args emitPrimOp _ res WriteByteArrayOp_Float args = doWriteByteArrayOp Nothing f32 res args emitPrimOp _ res WriteByteArrayOp_Double args = doWriteByteArrayOp Nothing f64 res args emitPrimOp dflags res WriteByteArrayOp_StablePtr args = doWriteByteArrayOp Nothing (bWord dflags) res args emitPrimOp dflags res WriteByteArrayOp_Int8 args = doWriteByteArrayOp (Just (mo_WordTo8 dflags)) b8 res args emitPrimOp dflags res WriteByteArrayOp_Int16 args = doWriteByteArrayOp (Just (mo_WordTo16 dflags)) b16 res args emitPrimOp dflags res WriteByteArrayOp_Int32 args = doWriteByteArrayOp (Just (mo_WordTo32 dflags)) b32 res args emitPrimOp _ res WriteByteArrayOp_Int64 args = doWriteByteArrayOp Nothing b64 res args emitPrimOp dflags res WriteByteArrayOp_Word8 args = doWriteByteArrayOp (Just (mo_WordTo8 dflags)) b8 res args emitPrimOp dflags res WriteByteArrayOp_Word16 args = doWriteByteArrayOp (Just (mo_WordTo16 dflags)) b16 res args emitPrimOp dflags res WriteByteArrayOp_Word32 args = doWriteByteArrayOp (Just (mo_WordTo32 dflags)) b32 res args emitPrimOp _ res WriteByteArrayOp_Word64 args = doWriteByteArrayOp Nothing b64 res args -- Copying and setting byte arrays emitPrimOp _ [] CopyByteArrayOp [src,src_off,dst,dst_off,n] = doCopyByteArrayOp src src_off dst dst_off n emitPrimOp _ [] CopyMutableByteArrayOp [src,src_off,dst,dst_off,n] = doCopyMutableByteArrayOp src src_off dst dst_off n emitPrimOp _ [] CopyByteArrayToAddrOp [src,src_off,dst,n] = doCopyByteArrayToAddrOp src src_off dst n emitPrimOp _ [] CopyMutableByteArrayToAddrOp [src,src_off,dst,n] = doCopyMutableByteArrayToAddrOp src src_off dst n emitPrimOp _ [] CopyAddrToByteArrayOp [src,dst,dst_off,n] = doCopyAddrToByteArrayOp src dst dst_off n emitPrimOp _ [] SetByteArrayOp [ba,off,len,c] = doSetByteArrayOp ba off len c emitPrimOp _ [res] BSwap16Op [w] = emitBSwapCall res w W16 emitPrimOp _ [res] BSwap32Op [w] = emitBSwapCall res w W32 emitPrimOp _ [res] BSwap64Op [w] = emitBSwapCall res w W64 emitPrimOp dflags [res] BSwapOp [w] = emitBSwapCall res w (wordWidth dflags) -- Population count emitPrimOp _ [res] PopCnt8Op [w] = emitPopCntCall res w W8 emitPrimOp _ [res] PopCnt16Op [w] = emitPopCntCall res w W16 emitPrimOp _ [res] PopCnt32Op [w] = emitPopCntCall res w W32 emitPrimOp _ [res] PopCnt64Op [w] = emitPopCntCall res w W64 emitPrimOp dflags [res] PopCntOp [w] = emitPopCntCall res w (wordWidth dflags) -- count leading zeros emitPrimOp _ [res] Clz8Op [w] = emitClzCall res w W8 
emitPrimOp _ [res] Clz16Op [w] = emitClzCall res w W16 emitPrimOp _ [res] Clz32Op [w] = emitClzCall res w W32 emitPrimOp _ [res] Clz64Op [w] = emitClzCall res w W64 emitPrimOp dflags [res] ClzOp [w] = emitClzCall res w (wordWidth dflags) -- count trailing zeros emitPrimOp _ [res] Ctz8Op [w] = emitCtzCall res w W8 emitPrimOp _ [res] Ctz16Op [w] = emitCtzCall res w W16 emitPrimOp _ [res] Ctz32Op [w] = emitCtzCall res w W32 emitPrimOp _ [res] Ctz64Op [w] = emitCtzCall res w W64 emitPrimOp dflags [res] CtzOp [w] = emitCtzCall res w (wordWidth dflags) -- Unsigned int to floating point conversions emitPrimOp _ [res] Word2FloatOp [w] = emitPrimCall [res] (MO_UF_Conv W32) [w] emitPrimOp _ [res] Word2DoubleOp [w] = emitPrimCall [res] (MO_UF_Conv W64) [w] -- SIMD primops emitPrimOp dflags [res] (VecBroadcastOp vcat n w) [e] = do checkVecCompatibility dflags vcat n w doVecPackOp (vecElemInjectCast dflags vcat w) ty zeros (replicate n e) res where zeros :: CmmExpr zeros = CmmLit $ CmmVec (replicate n zero) zero :: CmmLit zero = case vcat of IntVec -> CmmInt 0 w WordVec -> CmmInt 0 w FloatVec -> CmmFloat 0 w ty :: CmmType ty = vecVmmType vcat n w emitPrimOp dflags [res] (VecPackOp vcat n w) es = do checkVecCompatibility dflags vcat n w when (length es /= n) $ panic "emitPrimOp: VecPackOp has wrong number of arguments" doVecPackOp (vecElemInjectCast dflags vcat w) ty zeros es res where zeros :: CmmExpr zeros = CmmLit $ CmmVec (replicate n zero) zero :: CmmLit zero = case vcat of IntVec -> CmmInt 0 w WordVec -> CmmInt 0 w FloatVec -> CmmFloat 0 w ty :: CmmType ty = vecVmmType vcat n w emitPrimOp dflags res (VecUnpackOp vcat n w) [arg] = do checkVecCompatibility dflags vcat n w when (length res /= n) $ panic "emitPrimOp: VecUnpackOp has wrong number of results" doVecUnpackOp (vecElemProjectCast dflags vcat w) ty arg res where ty :: CmmType ty = vecVmmType vcat n w emitPrimOp dflags [res] (VecInsertOp vcat n w) [v,e,i] = do checkVecCompatibility dflags vcat n w doVecInsertOp (vecElemInjectCast dflags vcat w) ty v e i res where ty :: CmmType ty = vecVmmType vcat n w emitPrimOp dflags res (VecIndexByteArrayOp vcat n w) args = do checkVecCompatibility dflags vcat n w doIndexByteArrayOp Nothing ty res args where ty :: CmmType ty = vecVmmType vcat n w emitPrimOp dflags res (VecReadByteArrayOp vcat n w) args = do checkVecCompatibility dflags vcat n w doIndexByteArrayOp Nothing ty res args where ty :: CmmType ty = vecVmmType vcat n w emitPrimOp dflags res (VecWriteByteArrayOp vcat n w) args = do checkVecCompatibility dflags vcat n w doWriteByteArrayOp Nothing ty res args where ty :: CmmType ty = vecVmmType vcat n w emitPrimOp dflags res (VecIndexOffAddrOp vcat n w) args = do checkVecCompatibility dflags vcat n w doIndexOffAddrOp Nothing ty res args where ty :: CmmType ty = vecVmmType vcat n w emitPrimOp dflags res (VecReadOffAddrOp vcat n w) args = do checkVecCompatibility dflags vcat n w doIndexOffAddrOp Nothing ty res args where ty :: CmmType ty = vecVmmType vcat n w emitPrimOp dflags res (VecWriteOffAddrOp vcat n w) args = do checkVecCompatibility dflags vcat n w doWriteOffAddrOp Nothing ty res args where ty :: CmmType ty = vecVmmType vcat n w emitPrimOp dflags res (VecIndexScalarByteArrayOp vcat n w) args = do checkVecCompatibility dflags vcat n w doIndexByteArrayOpAs Nothing vecty ty res args where vecty :: CmmType vecty = vecVmmType vcat n w ty :: CmmType ty = vecCmmCat vcat w emitPrimOp dflags res (VecReadScalarByteArrayOp vcat n w) args = do checkVecCompatibility dflags vcat n w doIndexByteArrayOpAs 
Nothing vecty ty res args where vecty :: CmmType vecty = vecVmmType vcat n w ty :: CmmType ty = vecCmmCat vcat w emitPrimOp dflags res (VecWriteScalarByteArrayOp vcat n w) args = do checkVecCompatibility dflags vcat n w doWriteByteArrayOp Nothing ty res args where ty :: CmmType ty = vecCmmCat vcat w emitPrimOp dflags res (VecIndexScalarOffAddrOp vcat n w) args = do checkVecCompatibility dflags vcat n w doIndexOffAddrOpAs Nothing vecty ty res args where vecty :: CmmType vecty = vecVmmType vcat n w ty :: CmmType ty = vecCmmCat vcat w emitPrimOp dflags res (VecReadScalarOffAddrOp vcat n w) args = do checkVecCompatibility dflags vcat n w doIndexOffAddrOpAs Nothing vecty ty res args where vecty :: CmmType vecty = vecVmmType vcat n w ty :: CmmType ty = vecCmmCat vcat w emitPrimOp dflags res (VecWriteScalarOffAddrOp vcat n w) args = do checkVecCompatibility dflags vcat n w doWriteOffAddrOp Nothing ty res args where ty :: CmmType ty = vecCmmCat vcat w -- Prefetch emitPrimOp _ [] PrefetchByteArrayOp3 args = doPrefetchByteArrayOp 3 args emitPrimOp _ [] PrefetchMutableByteArrayOp3 args = doPrefetchMutableByteArrayOp 3 args emitPrimOp _ [] PrefetchAddrOp3 args = doPrefetchAddrOp 3 args emitPrimOp _ [] PrefetchValueOp3 args = doPrefetchValueOp 3 args emitPrimOp _ [] PrefetchByteArrayOp2 args = doPrefetchByteArrayOp 2 args emitPrimOp _ [] PrefetchMutableByteArrayOp2 args = doPrefetchMutableByteArrayOp 2 args emitPrimOp _ [] PrefetchAddrOp2 args = doPrefetchAddrOp 2 args emitPrimOp _ [] PrefetchValueOp2 args = doPrefetchValueOp 2 args emitPrimOp _ [] PrefetchByteArrayOp1 args = doPrefetchByteArrayOp 1 args emitPrimOp _ [] PrefetchMutableByteArrayOp1 args = doPrefetchMutableByteArrayOp 1 args emitPrimOp _ [] PrefetchAddrOp1 args = doPrefetchAddrOp 1 args emitPrimOp _ [] PrefetchValueOp1 args = doPrefetchValueOp 1 args emitPrimOp _ [] PrefetchByteArrayOp0 args = doPrefetchByteArrayOp 0 args emitPrimOp _ [] PrefetchMutableByteArrayOp0 args = doPrefetchMutableByteArrayOp 0 args emitPrimOp _ [] PrefetchAddrOp0 args = doPrefetchAddrOp 0 args emitPrimOp _ [] PrefetchValueOp0 args = doPrefetchValueOp 0 args -- Atomic read-modify-write emitPrimOp dflags [res] FetchAddByteArrayOp_Int [mba, ix, n] = doAtomicRMW res AMO_Add mba ix (bWord dflags) n emitPrimOp dflags [res] FetchSubByteArrayOp_Int [mba, ix, n] = doAtomicRMW res AMO_Sub mba ix (bWord dflags) n emitPrimOp dflags [res] FetchAndByteArrayOp_Int [mba, ix, n] = doAtomicRMW res AMO_And mba ix (bWord dflags) n emitPrimOp dflags [res] FetchNandByteArrayOp_Int [mba, ix, n] = doAtomicRMW res AMO_Nand mba ix (bWord dflags) n emitPrimOp dflags [res] FetchOrByteArrayOp_Int [mba, ix, n] = doAtomicRMW res AMO_Or mba ix (bWord dflags) n emitPrimOp dflags [res] FetchXorByteArrayOp_Int [mba, ix, n] = doAtomicRMW res AMO_Xor mba ix (bWord dflags) n emitPrimOp dflags [res] AtomicReadByteArrayOp_Int [mba, ix] = doAtomicReadByteArray res mba ix (bWord dflags) emitPrimOp dflags [] AtomicWriteByteArrayOp_Int [mba, ix, val] = doAtomicWriteByteArray mba ix (bWord dflags) val emitPrimOp dflags [res] CasByteArrayOp_Int [mba, ix, old, new] = doCasByteArray res mba ix (bWord dflags) old new -- The rest just translate straightforwardly emitPrimOp dflags [res] op [arg] | nopOp op = emitAssign (CmmLocal res) arg | Just (mop,rep) <- narrowOp op = emitAssign (CmmLocal res) $ CmmMachOp (mop rep (wordWidth dflags)) [CmmMachOp (mop (wordWidth dflags) rep) [arg]] emitPrimOp dflags r@[res] op args | Just prim <- callishOp op = do emitPrimCall r prim args | Just mop <- translateOp dflags op = 
let stmt = mkAssign (CmmLocal res) (CmmMachOp mop args) in emit stmt emitPrimOp dflags results op args = case callishPrimOpSupported dflags op of Left op -> emit $ mkUnsafeCall (PrimTarget op) results args Right gen -> gen results args type GenericOp = [CmmFormal] -> [CmmActual] -> FCode () callishPrimOpSupported :: DynFlags -> PrimOp -> Either CallishMachOp GenericOp callishPrimOpSupported dflags op = case op of IntQuotRemOp | ncg && x86ish -> Left (MO_S_QuotRem (wordWidth dflags)) | otherwise -> Right (genericIntQuotRemOp dflags) WordQuotRemOp | ncg && x86ish -> Left (MO_U_QuotRem (wordWidth dflags)) | otherwise -> Right (genericWordQuotRemOp dflags) WordQuotRem2Op | ncg && x86ish -> Left (MO_U_QuotRem2 (wordWidth dflags)) | otherwise -> Right (genericWordQuotRem2Op dflags) WordAdd2Op | (ncg && x86ish) || llvm -> Left (MO_Add2 (wordWidth dflags)) | otherwise -> Right genericWordAdd2Op IntAddCOp | (ncg && x86ish) || llvm -> Left (MO_AddIntC (wordWidth dflags)) | otherwise -> Right genericIntAddCOp IntSubCOp | (ncg && x86ish) || llvm -> Left (MO_SubIntC (wordWidth dflags)) | otherwise -> Right genericIntSubCOp WordMul2Op | ncg && x86ish -> Left (MO_U_Mul2 (wordWidth dflags)) | otherwise -> Right genericWordMul2Op _ -> pprPanic "emitPrimOp: can't translate PrimOp " (ppr op) where ncg = case hscTarget dflags of HscAsm -> True _ -> False llvm = case hscTarget dflags of HscLlvm -> True _ -> False x86ish = case platformArch (targetPlatform dflags) of ArchX86 -> True ArchX86_64 -> True _ -> False genericIntQuotRemOp :: DynFlags -> GenericOp genericIntQuotRemOp dflags [res_q, res_r] [arg_x, arg_y] = emit $ mkAssign (CmmLocal res_q) (CmmMachOp (MO_S_Quot (wordWidth dflags)) [arg_x, arg_y]) <*> mkAssign (CmmLocal res_r) (CmmMachOp (MO_S_Rem (wordWidth dflags)) [arg_x, arg_y]) genericIntQuotRemOp _ _ _ = panic "genericIntQuotRemOp" genericWordQuotRemOp :: DynFlags -> GenericOp genericWordQuotRemOp dflags [res_q, res_r] [arg_x, arg_y] = emit $ mkAssign (CmmLocal res_q) (CmmMachOp (MO_U_Quot (wordWidth dflags)) [arg_x, arg_y]) <*> mkAssign (CmmLocal res_r) (CmmMachOp (MO_U_Rem (wordWidth dflags)) [arg_x, arg_y]) genericWordQuotRemOp _ _ _ = panic "genericWordQuotRemOp" genericWordQuotRem2Op :: DynFlags -> GenericOp genericWordQuotRem2Op dflags [res_q, res_r] [arg_x_high, arg_x_low, arg_y] = emit =<< f (widthInBits (wordWidth dflags)) zero arg_x_high arg_x_low where ty = cmmExprType dflags arg_x_high shl x i = CmmMachOp (MO_Shl (wordWidth dflags)) [x, i] shr x i = CmmMachOp (MO_U_Shr (wordWidth dflags)) [x, i] or x y = CmmMachOp (MO_Or (wordWidth dflags)) [x, y] ge x y = CmmMachOp (MO_U_Ge (wordWidth dflags)) [x, y] ne x y = CmmMachOp (MO_Ne (wordWidth dflags)) [x, y] minus x y = CmmMachOp (MO_Sub (wordWidth dflags)) [x, y] times x y = CmmMachOp (MO_Mul (wordWidth dflags)) [x, y] zero = lit 0 one = lit 1 negone = lit (fromIntegral (widthInBits (wordWidth dflags)) - 1) lit i = CmmLit (CmmInt i (wordWidth dflags)) f :: Int -> CmmExpr -> CmmExpr -> CmmExpr -> FCode CmmAGraph f 0 acc high _ = return (mkAssign (CmmLocal res_q) acc <*> mkAssign (CmmLocal res_r) high) f i acc high low = do roverflowedBit <- newTemp ty rhigh' <- newTemp ty rhigh'' <- newTemp ty rlow' <- newTemp ty risge <- newTemp ty racc' <- newTemp ty let high' = CmmReg (CmmLocal rhigh') isge = CmmReg (CmmLocal risge) overflowedBit = CmmReg (CmmLocal roverflowedBit) let this = catAGraphs [mkAssign (CmmLocal roverflowedBit) (shr high negone), mkAssign (CmmLocal rhigh') (or (shl high one) (shr low negone)), mkAssign (CmmLocal rlow') (shl low 
one), mkAssign (CmmLocal risge) (or (overflowedBit `ne` zero) (high' `ge` arg_y)), mkAssign (CmmLocal rhigh'') (high' `minus` (arg_y `times` isge)), mkAssign (CmmLocal racc') (or (shl acc one) isge)] rest <- f (i - 1) (CmmReg (CmmLocal racc')) (CmmReg (CmmLocal rhigh'')) (CmmReg (CmmLocal rlow')) return (this <*> rest) genericWordQuotRem2Op _ _ _ = panic "genericWordQuotRem2Op" genericWordAdd2Op :: GenericOp genericWordAdd2Op [res_h, res_l] [arg_x, arg_y] = do dflags <- getDynFlags r1 <- newTemp (cmmExprType dflags arg_x) r2 <- newTemp (cmmExprType dflags arg_x) let topHalf x = CmmMachOp (MO_U_Shr (wordWidth dflags)) [x, hww] toTopHalf x = CmmMachOp (MO_Shl (wordWidth dflags)) [x, hww] bottomHalf x = CmmMachOp (MO_And (wordWidth dflags)) [x, hwm] add x y = CmmMachOp (MO_Add (wordWidth dflags)) [x, y] or x y = CmmMachOp (MO_Or (wordWidth dflags)) [x, y] hww = CmmLit (CmmInt (fromIntegral (widthInBits (halfWordWidth dflags))) (wordWidth dflags)) hwm = CmmLit (CmmInt (halfWordMask dflags) (wordWidth dflags)) emit $ catAGraphs [mkAssign (CmmLocal r1) (add (bottomHalf arg_x) (bottomHalf arg_y)), mkAssign (CmmLocal r2) (add (topHalf (CmmReg (CmmLocal r1))) (add (topHalf arg_x) (topHalf arg_y))), mkAssign (CmmLocal res_h) (topHalf (CmmReg (CmmLocal r2))), mkAssign (CmmLocal res_l) (or (toTopHalf (CmmReg (CmmLocal r2))) (bottomHalf (CmmReg (CmmLocal r1))))] genericWordAdd2Op _ _ = panic "genericWordAdd2Op" genericIntAddCOp :: GenericOp genericIntAddCOp [res_r, res_c] [aa, bb] {- With some bit-twiddling, we can define int{Add,Sub}Czh portably in C, and without needing any comparisons. This may not be the fastest way to do it - if you have better code, please send it! --SDM Return : r = a + b, c = 0 if no overflow, 1 on overflow. We currently don't make use of the r value if c is != 0 (i.e. overflow), we just convert to big integers and try again. This could be improved by making r and c the correct values for plugging into a new J#. 
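   As a quick sanity check of the carry formula that the macro below reduces
   to, here it is over 8-bit words in ordinary Haskell (a sketch only, not part
   of this file's logic; an arithmetic shift plus a mask stands in for the
   unsigned shift):

       import Data.Bits
       import Data.Int (Int8)

       addC :: Int8 -> Int8 -> (Int8, Int8)
       addC a b = (r, c)
         where
           r = a + b                                    -- wraps on overflow
           c = (complement (a `xor` b) .&. (a `xor` r))
                 `shiftR` (finiteBitSize a - 1) .&. 1   -- 1 iff signed overflow

       -- addC 100 100    == (-56, 1)   (overflow)
       -- addC 100 (-100) == (0, 0)     (no overflow)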
{ r = ((I_)(a)) + ((I_)(b)); \ c = ((StgWord)(~(((I_)(a))^((I_)(b))) & (((I_)(a))^r))) \ >> (BITS_IN (I_) - 1); \ } Wading through the mass of bracketry, it seems to reduce to: c = ( (~(a^b)) & (a^r) ) >>unsigned (BITS_IN(I_)-1) -} = do dflags <- getDynFlags emit $ catAGraphs [ mkAssign (CmmLocal res_r) (CmmMachOp (mo_wordAdd dflags) [aa,bb]), mkAssign (CmmLocal res_c) $ CmmMachOp (mo_wordUShr dflags) [ CmmMachOp (mo_wordAnd dflags) [ CmmMachOp (mo_wordNot dflags) [CmmMachOp (mo_wordXor dflags) [aa,bb]], CmmMachOp (mo_wordXor dflags) [aa, CmmReg (CmmLocal res_r)] ], mkIntExpr dflags (wORD_SIZE_IN_BITS dflags - 1) ] ] genericIntAddCOp _ _ = panic "genericIntAddCOp" genericIntSubCOp :: GenericOp genericIntSubCOp [res_r, res_c] [aa, bb] {- Similarly: #define subIntCzh(r,c,a,b) \ { r = ((I_)(a)) - ((I_)(b)); \ c = ((StgWord)((((I_)(a))^((I_)(b))) & (((I_)(a))^r))) \ >> (BITS_IN (I_) - 1); \ } c = ((a^b) & (a^r)) >>unsigned (BITS_IN(I_)-1) -} = do dflags <- getDynFlags emit $ catAGraphs [ mkAssign (CmmLocal res_r) (CmmMachOp (mo_wordSub dflags) [aa,bb]), mkAssign (CmmLocal res_c) $ CmmMachOp (mo_wordUShr dflags) [ CmmMachOp (mo_wordAnd dflags) [ CmmMachOp (mo_wordXor dflags) [aa,bb], CmmMachOp (mo_wordXor dflags) [aa, CmmReg (CmmLocal res_r)] ], mkIntExpr dflags (wORD_SIZE_IN_BITS dflags - 1) ] ] genericIntSubCOp _ _ = panic "genericIntSubCOp" genericWordMul2Op :: GenericOp genericWordMul2Op [res_h, res_l] [arg_x, arg_y] = do dflags <- getDynFlags let t = cmmExprType dflags arg_x xlyl <- liftM CmmLocal $ newTemp t xlyh <- liftM CmmLocal $ newTemp t xhyl <- liftM CmmLocal $ newTemp t r <- liftM CmmLocal $ newTemp t -- This generic implementation is very simple and slow. We might -- well be able to do better, but for now this at least works. let topHalf x = CmmMachOp (MO_U_Shr (wordWidth dflags)) [x, hww] toTopHalf x = CmmMachOp (MO_Shl (wordWidth dflags)) [x, hww] bottomHalf x = CmmMachOp (MO_And (wordWidth dflags)) [x, hwm] add x y = CmmMachOp (MO_Add (wordWidth dflags)) [x, y] sum = foldl1 add mul x y = CmmMachOp (MO_Mul (wordWidth dflags)) [x, y] or x y = CmmMachOp (MO_Or (wordWidth dflags)) [x, y] hww = CmmLit (CmmInt (fromIntegral (widthInBits (halfWordWidth dflags))) (wordWidth dflags)) hwm = CmmLit (CmmInt (halfWordMask dflags) (wordWidth dflags)) emit $ catAGraphs [mkAssign xlyl (mul (bottomHalf arg_x) (bottomHalf arg_y)), mkAssign xlyh (mul (bottomHalf arg_x) (topHalf arg_y)), mkAssign xhyl (mul (topHalf arg_x) (bottomHalf arg_y)), mkAssign r (sum [topHalf (CmmReg xlyl), bottomHalf (CmmReg xhyl), bottomHalf (CmmReg xlyh)]), mkAssign (CmmLocal res_l) (or (bottomHalf (CmmReg xlyl)) (toTopHalf (CmmReg r))), mkAssign (CmmLocal res_h) (sum [mul (topHalf arg_x) (topHalf arg_y), topHalf (CmmReg xhyl), topHalf (CmmReg xlyh), topHalf (CmmReg r)])] genericWordMul2Op _ _ = panic "genericWordMul2Op" -- These PrimOps are NOPs in Cmm nopOp :: PrimOp -> Bool nopOp Int2WordOp = True nopOp Word2IntOp = True nopOp Int2AddrOp = True nopOp Addr2IntOp = True nopOp ChrOp = True -- Int# and Char# are rep'd the same nopOp OrdOp = True nopOp _ = False -- These PrimOps turn into double casts narrowOp :: PrimOp -> Maybe (Width -> Width -> MachOp, Width) narrowOp Narrow8IntOp = Just (MO_SS_Conv, W8) narrowOp Narrow16IntOp = Just (MO_SS_Conv, W16) narrowOp Narrow32IntOp = Just (MO_SS_Conv, W32) narrowOp Narrow8WordOp = Just (MO_UU_Conv, W8) narrowOp Narrow16WordOp = Just (MO_UU_Conv, W16) narrowOp Narrow32WordOp = Just (MO_UU_Conv, W32) narrowOp _ = Nothing -- Native word signless ops translateOp :: DynFlags -> 
PrimOp -> Maybe MachOp translateOp dflags IntAddOp = Just (mo_wordAdd dflags) translateOp dflags IntSubOp = Just (mo_wordSub dflags) translateOp dflags WordAddOp = Just (mo_wordAdd dflags) translateOp dflags WordSubOp = Just (mo_wordSub dflags) translateOp dflags AddrAddOp = Just (mo_wordAdd dflags) translateOp dflags AddrSubOp = Just (mo_wordSub dflags) translateOp dflags IntEqOp = Just (mo_wordEq dflags) translateOp dflags IntNeOp = Just (mo_wordNe dflags) translateOp dflags WordEqOp = Just (mo_wordEq dflags) translateOp dflags WordNeOp = Just (mo_wordNe dflags) translateOp dflags AddrEqOp = Just (mo_wordEq dflags) translateOp dflags AddrNeOp = Just (mo_wordNe dflags) translateOp dflags AndOp = Just (mo_wordAnd dflags) translateOp dflags OrOp = Just (mo_wordOr dflags) translateOp dflags XorOp = Just (mo_wordXor dflags) translateOp dflags NotOp = Just (mo_wordNot dflags) translateOp dflags SllOp = Just (mo_wordShl dflags) translateOp dflags SrlOp = Just (mo_wordUShr dflags) translateOp dflags AddrRemOp = Just (mo_wordURem dflags) -- Native word signed ops translateOp dflags IntMulOp = Just (mo_wordMul dflags) translateOp dflags IntMulMayOfloOp = Just (MO_S_MulMayOflo (wordWidth dflags)) translateOp dflags IntQuotOp = Just (mo_wordSQuot dflags) translateOp dflags IntRemOp = Just (mo_wordSRem dflags) translateOp dflags IntNegOp = Just (mo_wordSNeg dflags) translateOp dflags IntGeOp = Just (mo_wordSGe dflags) translateOp dflags IntLeOp = Just (mo_wordSLe dflags) translateOp dflags IntGtOp = Just (mo_wordSGt dflags) translateOp dflags IntLtOp = Just (mo_wordSLt dflags) translateOp dflags AndIOp = Just (mo_wordAnd dflags) translateOp dflags OrIOp = Just (mo_wordOr dflags) translateOp dflags XorIOp = Just (mo_wordXor dflags) translateOp dflags NotIOp = Just (mo_wordNot dflags) translateOp dflags ISllOp = Just (mo_wordShl dflags) translateOp dflags ISraOp = Just (mo_wordSShr dflags) translateOp dflags ISrlOp = Just (mo_wordUShr dflags) -- Native word unsigned ops translateOp dflags WordGeOp = Just (mo_wordUGe dflags) translateOp dflags WordLeOp = Just (mo_wordULe dflags) translateOp dflags WordGtOp = Just (mo_wordUGt dflags) translateOp dflags WordLtOp = Just (mo_wordULt dflags) translateOp dflags WordMulOp = Just (mo_wordMul dflags) translateOp dflags WordQuotOp = Just (mo_wordUQuot dflags) translateOp dflags WordRemOp = Just (mo_wordURem dflags) translateOp dflags AddrGeOp = Just (mo_wordUGe dflags) translateOp dflags AddrLeOp = Just (mo_wordULe dflags) translateOp dflags AddrGtOp = Just (mo_wordUGt dflags) translateOp dflags AddrLtOp = Just (mo_wordULt dflags) -- Char# ops translateOp dflags CharEqOp = Just (MO_Eq (wordWidth dflags)) translateOp dflags CharNeOp = Just (MO_Ne (wordWidth dflags)) translateOp dflags CharGeOp = Just (MO_U_Ge (wordWidth dflags)) translateOp dflags CharLeOp = Just (MO_U_Le (wordWidth dflags)) translateOp dflags CharGtOp = Just (MO_U_Gt (wordWidth dflags)) translateOp dflags CharLtOp = Just (MO_U_Lt (wordWidth dflags)) -- Double ops translateOp _ DoubleEqOp = Just (MO_F_Eq W64) translateOp _ DoubleNeOp = Just (MO_F_Ne W64) translateOp _ DoubleGeOp = Just (MO_F_Ge W64) translateOp _ DoubleLeOp = Just (MO_F_Le W64) translateOp _ DoubleGtOp = Just (MO_F_Gt W64) translateOp _ DoubleLtOp = Just (MO_F_Lt W64) translateOp _ DoubleAddOp = Just (MO_F_Add W64) translateOp _ DoubleSubOp = Just (MO_F_Sub W64) translateOp _ DoubleMulOp = Just (MO_F_Mul W64) translateOp _ DoubleDivOp = Just (MO_F_Quot W64) translateOp _ DoubleNegOp = Just (MO_F_Neg W64) -- Float ops translateOp _ 
FloatEqOp = Just (MO_F_Eq W32) translateOp _ FloatNeOp = Just (MO_F_Ne W32) translateOp _ FloatGeOp = Just (MO_F_Ge W32) translateOp _ FloatLeOp = Just (MO_F_Le W32) translateOp _ FloatGtOp = Just (MO_F_Gt W32) translateOp _ FloatLtOp = Just (MO_F_Lt W32) translateOp _ FloatAddOp = Just (MO_F_Add W32) translateOp _ FloatSubOp = Just (MO_F_Sub W32) translateOp _ FloatMulOp = Just (MO_F_Mul W32) translateOp _ FloatDivOp = Just (MO_F_Quot W32) translateOp _ FloatNegOp = Just (MO_F_Neg W32) -- Vector ops translateOp _ (VecAddOp FloatVec n w) = Just (MO_VF_Add n w) translateOp _ (VecSubOp FloatVec n w) = Just (MO_VF_Sub n w) translateOp _ (VecMulOp FloatVec n w) = Just (MO_VF_Mul n w) translateOp _ (VecDivOp FloatVec n w) = Just (MO_VF_Quot n w) translateOp _ (VecNegOp FloatVec n w) = Just (MO_VF_Neg n w) translateOp _ (VecAddOp IntVec n w) = Just (MO_V_Add n w) translateOp _ (VecSubOp IntVec n w) = Just (MO_V_Sub n w) translateOp _ (VecMulOp IntVec n w) = Just (MO_V_Mul n w) translateOp _ (VecQuotOp IntVec n w) = Just (MO_VS_Quot n w) translateOp _ (VecRemOp IntVec n w) = Just (MO_VS_Rem n w) translateOp _ (VecNegOp IntVec n w) = Just (MO_VS_Neg n w) translateOp _ (VecAddOp WordVec n w) = Just (MO_V_Add n w) translateOp _ (VecSubOp WordVec n w) = Just (MO_V_Sub n w) translateOp _ (VecMulOp WordVec n w) = Just (MO_V_Mul n w) translateOp _ (VecQuotOp WordVec n w) = Just (MO_VU_Quot n w) translateOp _ (VecRemOp WordVec n w) = Just (MO_VU_Rem n w) -- Conversions translateOp dflags Int2DoubleOp = Just (MO_SF_Conv (wordWidth dflags) W64) translateOp dflags Double2IntOp = Just (MO_FS_Conv W64 (wordWidth dflags)) translateOp dflags Int2FloatOp = Just (MO_SF_Conv (wordWidth dflags) W32) translateOp dflags Float2IntOp = Just (MO_FS_Conv W32 (wordWidth dflags)) translateOp _ Float2DoubleOp = Just (MO_FF_Conv W32 W64) translateOp _ Double2FloatOp = Just (MO_FF_Conv W64 W32) -- Word comparisons masquerading as more exotic things. translateOp dflags SameMutVarOp = Just (mo_wordEq dflags) translateOp dflags SameMVarOp = Just (mo_wordEq dflags) translateOp dflags SameMutableArrayOp = Just (mo_wordEq dflags) translateOp dflags SameMutableByteArrayOp = Just (mo_wordEq dflags) translateOp dflags SameMutableArrayArrayOp= Just (mo_wordEq dflags) translateOp dflags SameSmallMutableArrayOp= Just (mo_wordEq dflags) translateOp dflags SameTVarOp = Just (mo_wordEq dflags) translateOp dflags EqStablePtrOp = Just (mo_wordEq dflags) translateOp _ _ = Nothing -- These primops are implemented by CallishMachOps, because they sometimes -- turn into foreign calls depending on the backend. 
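-- For instance, sinDouble# reaches this table as DoubleSinOp and is emitted
-- as MO_F64_Sin, which a backend may lower either to a machine instruction or
-- to a call into the C math library.  A user-level sketch of code that ends up
-- here (illustrative only, not part of this module; needs MagicHash):
--
--     import GHC.Exts (Double(D#), sinDouble#)
--
--     sinViaPrimOp :: Double -> Double
--     sinViaPrimOp (D# x) = D# (sinDouble# x)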
callishOp :: PrimOp -> Maybe CallishMachOp callishOp DoublePowerOp = Just MO_F64_Pwr callishOp DoubleSinOp = Just MO_F64_Sin callishOp DoubleCosOp = Just MO_F64_Cos callishOp DoubleTanOp = Just MO_F64_Tan callishOp DoubleSinhOp = Just MO_F64_Sinh callishOp DoubleCoshOp = Just MO_F64_Cosh callishOp DoubleTanhOp = Just MO_F64_Tanh callishOp DoubleAsinOp = Just MO_F64_Asin callishOp DoubleAcosOp = Just MO_F64_Acos callishOp DoubleAtanOp = Just MO_F64_Atan callishOp DoubleLogOp = Just MO_F64_Log callishOp DoubleExpOp = Just MO_F64_Exp callishOp DoubleSqrtOp = Just MO_F64_Sqrt callishOp FloatPowerOp = Just MO_F32_Pwr callishOp FloatSinOp = Just MO_F32_Sin callishOp FloatCosOp = Just MO_F32_Cos callishOp FloatTanOp = Just MO_F32_Tan callishOp FloatSinhOp = Just MO_F32_Sinh callishOp FloatCoshOp = Just MO_F32_Cosh callishOp FloatTanhOp = Just MO_F32_Tanh callishOp FloatAsinOp = Just MO_F32_Asin callishOp FloatAcosOp = Just MO_F32_Acos callishOp FloatAtanOp = Just MO_F32_Atan callishOp FloatLogOp = Just MO_F32_Log callishOp FloatExpOp = Just MO_F32_Exp callishOp FloatSqrtOp = Just MO_F32_Sqrt callishOp _ = Nothing ------------------------------------------------------------------------------ -- Helpers for translating various minor variants of array indexing. doIndexOffAddrOp :: Maybe MachOp -> CmmType -> [LocalReg] -> [CmmExpr] -> FCode () doIndexOffAddrOp maybe_post_read_cast rep [res] [addr,idx] = mkBasicIndexedRead 0 maybe_post_read_cast rep res addr rep idx doIndexOffAddrOp _ _ _ _ = panic "StgCmmPrim: doIndexOffAddrOp" doIndexOffAddrOpAs :: Maybe MachOp -> CmmType -> CmmType -> [LocalReg] -> [CmmExpr] -> FCode () doIndexOffAddrOpAs maybe_post_read_cast rep idx_rep [res] [addr,idx] = mkBasicIndexedRead 0 maybe_post_read_cast rep res addr idx_rep idx doIndexOffAddrOpAs _ _ _ _ _ = panic "StgCmmPrim: doIndexOffAddrOpAs" doIndexByteArrayOp :: Maybe MachOp -> CmmType -> [LocalReg] -> [CmmExpr] -> FCode () doIndexByteArrayOp maybe_post_read_cast rep [res] [addr,idx] = do dflags <- getDynFlags mkBasicIndexedRead (arrWordsHdrSize dflags) maybe_post_read_cast rep res addr rep idx doIndexByteArrayOp _ _ _ _ = panic "StgCmmPrim: doIndexByteArrayOp" doIndexByteArrayOpAs :: Maybe MachOp -> CmmType -> CmmType -> [LocalReg] -> [CmmExpr] -> FCode () doIndexByteArrayOpAs maybe_post_read_cast rep idx_rep [res] [addr,idx] = do dflags <- getDynFlags mkBasicIndexedRead (arrWordsHdrSize dflags) maybe_post_read_cast rep res addr idx_rep idx doIndexByteArrayOpAs _ _ _ _ _ = panic "StgCmmPrim: doIndexByteArrayOpAs" doReadPtrArrayOp :: LocalReg -> CmmExpr -> CmmExpr -> FCode () doReadPtrArrayOp res addr idx = do dflags <- getDynFlags mkBasicIndexedRead (arrPtrsHdrSize dflags) Nothing (gcWord dflags) res addr (gcWord dflags) idx doWriteOffAddrOp :: Maybe MachOp -> CmmType -> [LocalReg] -> [CmmExpr] -> FCode () doWriteOffAddrOp maybe_pre_write_cast idx_ty [] [addr,idx,val] = mkBasicIndexedWrite 0 maybe_pre_write_cast addr idx_ty idx val doWriteOffAddrOp _ _ _ _ = panic "StgCmmPrim: doWriteOffAddrOp" doWriteByteArrayOp :: Maybe MachOp -> CmmType -> [LocalReg] -> [CmmExpr] -> FCode () doWriteByteArrayOp maybe_pre_write_cast idx_ty [] [addr,idx,val] = do dflags <- getDynFlags mkBasicIndexedWrite (arrWordsHdrSize dflags) maybe_pre_write_cast addr idx_ty idx val doWriteByteArrayOp _ _ _ _ = panic "StgCmmPrim: doWriteByteArrayOp" doWritePtrArrayOp :: CmmExpr -> CmmExpr -> CmmExpr -> FCode () doWritePtrArrayOp addr idx val = do dflags <- getDynFlags let ty = cmmExprType dflags val mkBasicIndexedWrite (arrPtrsHdrSize dflags) 
Nothing addr ty idx val emit (setInfo addr (CmmLit (CmmLabel mkMAP_DIRTY_infoLabel))) -- the write barrier. We must write a byte into the mark table: -- bits8[a + header_size + StgMutArrPtrs_size(a) + x >> N] emit $ mkStore ( cmmOffsetExpr dflags (cmmOffsetExprW dflags (cmmOffsetB dflags addr (arrPtrsHdrSize dflags)) (loadArrPtrsSize dflags addr)) (CmmMachOp (mo_wordUShr dflags) [idx, mkIntExpr dflags (mUT_ARR_PTRS_CARD_BITS dflags)]) ) (CmmLit (CmmInt 1 W8)) loadArrPtrsSize :: DynFlags -> CmmExpr -> CmmExpr loadArrPtrsSize dflags addr = CmmLoad (cmmOffsetB dflags addr off) (bWord dflags) where off = fixedHdrSize dflags + oFFSET_StgMutArrPtrs_ptrs dflags mkBasicIndexedRead :: ByteOff -- Initial offset in bytes -> Maybe MachOp -- Optional result cast -> CmmType -- Type of element we are accessing -> LocalReg -- Destination -> CmmExpr -- Base address -> CmmType -- Type of element by which we are indexing -> CmmExpr -- Index -> FCode () mkBasicIndexedRead off Nothing ty res base idx_ty idx = do dflags <- getDynFlags emitAssign (CmmLocal res) (cmmLoadIndexOffExpr dflags off ty base idx_ty idx) mkBasicIndexedRead off (Just cast) ty res base idx_ty idx = do dflags <- getDynFlags emitAssign (CmmLocal res) (CmmMachOp cast [ cmmLoadIndexOffExpr dflags off ty base idx_ty idx]) mkBasicIndexedWrite :: ByteOff -- Initial offset in bytes -> Maybe MachOp -- Optional value cast -> CmmExpr -- Base address -> CmmType -- Type of element by which we are indexing -> CmmExpr -- Index -> CmmExpr -- Value to write -> FCode () mkBasicIndexedWrite off Nothing base idx_ty idx val = do dflags <- getDynFlags emitStore (cmmIndexOffExpr dflags off (typeWidth idx_ty) base idx) val mkBasicIndexedWrite off (Just cast) base idx_ty idx val = mkBasicIndexedWrite off Nothing base idx_ty idx (CmmMachOp cast [val]) -- ---------------------------------------------------------------------------- -- Misc utils cmmIndexOffExpr :: DynFlags -> ByteOff -- Initial offset in bytes -> Width -- Width of element by which we are indexing -> CmmExpr -- Base address -> CmmExpr -- Index -> CmmExpr cmmIndexOffExpr dflags off width base idx = cmmIndexExpr dflags width (cmmOffsetB dflags base off) idx cmmLoadIndexOffExpr :: DynFlags -> ByteOff -- Initial offset in bytes -> CmmType -- Type of element we are accessing -> CmmExpr -- Base address -> CmmType -- Type of element by which we are indexing -> CmmExpr -- Index -> CmmExpr cmmLoadIndexOffExpr dflags off ty base idx_ty idx = CmmLoad (cmmIndexOffExpr dflags off (typeWidth idx_ty) base idx) ty setInfo :: CmmExpr -> CmmExpr -> CmmAGraph setInfo closure_ptr info_ptr = mkStore closure_ptr info_ptr ------------------------------------------------------------------------------ -- Helpers for translating vector primops. 
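-- For example, for a FloatX4# value the helpers below give
--
--     vecVmmType FloatVec 4 W32  ==  vec 4 (cmmFloat W32)
--
-- i.e. a 128-bit vector of four 32-bit floats, and the lane casts
-- (vecElemInjectCast/vecElemProjectCast) are only needed for the narrow
-- (8/16/32-bit) integer and word lanes.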
vecVmmType :: PrimOpVecCat -> Length -> Width -> CmmType
vecVmmType pocat n w = vec n (vecCmmCat pocat w)

vecCmmCat :: PrimOpVecCat -> Width -> CmmType
vecCmmCat IntVec   = cmmBits
vecCmmCat WordVec  = cmmBits
vecCmmCat FloatVec = cmmFloat

vecElemInjectCast :: DynFlags -> PrimOpVecCat -> Width -> Maybe MachOp
vecElemInjectCast _      FloatVec _   = Nothing
vecElemInjectCast dflags IntVec   W8  = Just (mo_WordTo8  dflags)
vecElemInjectCast dflags IntVec   W16 = Just (mo_WordTo16 dflags)
vecElemInjectCast dflags IntVec   W32 = Just (mo_WordTo32 dflags)
vecElemInjectCast _      IntVec   W64 = Nothing
vecElemInjectCast dflags WordVec  W8  = Just (mo_WordTo8  dflags)
vecElemInjectCast dflags WordVec  W16 = Just (mo_WordTo16 dflags)
vecElemInjectCast dflags WordVec  W32 = Just (mo_WordTo32 dflags)
vecElemInjectCast _      WordVec  W64 = Nothing
vecElemInjectCast _      _        _   = Nothing

vecElemProjectCast :: DynFlags -> PrimOpVecCat -> Width -> Maybe MachOp
vecElemProjectCast _      FloatVec _   = Nothing
vecElemProjectCast dflags IntVec   W8  = Just (mo_s_8ToWord  dflags)
vecElemProjectCast dflags IntVec   W16 = Just (mo_s_16ToWord dflags)
vecElemProjectCast dflags IntVec   W32 = Just (mo_s_32ToWord dflags)
vecElemProjectCast _      IntVec   W64 = Nothing
vecElemProjectCast dflags WordVec  W8  = Just (mo_u_8ToWord  dflags)
vecElemProjectCast dflags WordVec  W16 = Just (mo_u_16ToWord dflags)
vecElemProjectCast dflags WordVec  W32 = Just (mo_u_32ToWord dflags)
vecElemProjectCast _      WordVec  W64 = Nothing
vecElemProjectCast _      _        _   = Nothing

-- Check to make sure that we can generate code for the specified vector type
-- given the current set of dynamic flags.
checkVecCompatibility :: DynFlags -> PrimOpVecCat -> Length -> Width -> FCode ()
checkVecCompatibility dflags vcat l w = do
    when (hscTarget dflags /= HscLlvm) $ do
        sorry $ unlines ["SIMD vector instructions require the LLVM back-end."
                        ,"Please use -fllvm."]
    check vecWidth vcat l w
  where
    check :: Width -> PrimOpVecCat -> Length -> Width -> FCode ()
    check W128 FloatVec 4 W32 | not (isSseEnabled dflags) =
        sorry $ "128-bit wide single-precision floating point " ++
                "SIMD vector instructions require at least -msse."
    check W128 _ _ _ | not (isSse2Enabled dflags) =
        sorry $ "128-bit wide integer and double precision " ++
                "SIMD vector instructions require at least -msse2."
    check W256 FloatVec _ _ | not (isAvxEnabled dflags) =
        sorry $ "256-bit wide floating point " ++
                "SIMD vector instructions require at least -mavx."
    check W256 _ _ _ | not (isAvx2Enabled dflags) =
        sorry $ "256-bit wide integer " ++
                "SIMD vector instructions require at least -mavx2."
    check W512 _ _ _ | not (isAvx512fEnabled dflags) =
        sorry $ "512-bit wide " ++
                "SIMD vector instructions require -mavx512f."
    check _ _ _ _ = return ()

    vecWidth = typeWidth (vecVmmType vcat l w)

------------------------------------------------------------------------------
-- Helpers for translating vector packing and unpacking.
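-- The packing code below builds the result by starting from an all-zero
-- vector and inserting one lane at a time with MO_VF_Insert/MO_V_Insert.
-- A pure-list model of that strategy (a sketch only, not used here):
--
--     packModel :: [Int] -> [Int]     -- a list stands in for an n-lane vector
--     packModel es = foldl insertLane (replicate (length es) 0) (zip [0..] es)
--       where
--         insertLane v (i, e) = take i v ++ [e] ++ drop (i + 1) v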
doVecPackOp :: Maybe MachOp -- Cast from element to vector component -> CmmType -- Type of vector -> CmmExpr -- Initial vector -> [CmmExpr] -- Elements -> CmmFormal -- Destination for result -> FCode () doVecPackOp maybe_pre_write_cast ty z es res = do dst <- newTemp ty emitAssign (CmmLocal dst) z vecPack dst es 0 where vecPack :: CmmFormal -> [CmmExpr] -> Int -> FCode () vecPack src [] _ = emitAssign (CmmLocal res) (CmmReg (CmmLocal src)) vecPack src (e : es) i = do dst <- newTemp ty if isFloatType (vecElemType ty) then emitAssign (CmmLocal dst) (CmmMachOp (MO_VF_Insert len wid) [CmmReg (CmmLocal src), cast e, iLit]) else emitAssign (CmmLocal dst) (CmmMachOp (MO_V_Insert len wid) [CmmReg (CmmLocal src), cast e, iLit]) vecPack dst es (i + 1) where -- vector indices are always 32-bits iLit = CmmLit (CmmInt (toInteger i) W32) cast :: CmmExpr -> CmmExpr cast val = case maybe_pre_write_cast of Nothing -> val Just cast -> CmmMachOp cast [val] len :: Length len = vecLength ty wid :: Width wid = typeWidth (vecElemType ty) doVecUnpackOp :: Maybe MachOp -- Cast from vector component to element result -> CmmType -- Type of vector -> CmmExpr -- Vector -> [CmmFormal] -- Element results -> FCode () doVecUnpackOp maybe_post_read_cast ty e res = vecUnpack res 0 where vecUnpack :: [CmmFormal] -> Int -> FCode () vecUnpack [] _ = return () vecUnpack (r : rs) i = do if isFloatType (vecElemType ty) then emitAssign (CmmLocal r) (cast (CmmMachOp (MO_VF_Extract len wid) [e, iLit])) else emitAssign (CmmLocal r) (cast (CmmMachOp (MO_V_Extract len wid) [e, iLit])) vecUnpack rs (i + 1) where -- vector indices are always 32-bits iLit = CmmLit (CmmInt (toInteger i) W32) cast :: CmmExpr -> CmmExpr cast val = case maybe_post_read_cast of Nothing -> val Just cast -> CmmMachOp cast [val] len :: Length len = vecLength ty wid :: Width wid = typeWidth (vecElemType ty) doVecInsertOp :: Maybe MachOp -- Cast from element to vector component -> CmmType -- Vector type -> CmmExpr -- Source vector -> CmmExpr -- Element -> CmmExpr -- Index at which to insert element -> CmmFormal -- Destination for result -> FCode () doVecInsertOp maybe_pre_write_cast ty src e idx res = do dflags <- getDynFlags -- vector indices are always 32-bits let idx' :: CmmExpr idx' = CmmMachOp (MO_SS_Conv (wordWidth dflags) W32) [idx] if isFloatType (vecElemType ty) then emitAssign (CmmLocal res) (CmmMachOp (MO_VF_Insert len wid) [src, cast e, idx']) else emitAssign (CmmLocal res) (CmmMachOp (MO_V_Insert len wid) [src, cast e, idx']) where cast :: CmmExpr -> CmmExpr cast val = case maybe_pre_write_cast of Nothing -> val Just cast -> CmmMachOp cast [val] len :: Length len = vecLength ty wid :: Width wid = typeWidth (vecElemType ty) ------------------------------------------------------------------------------ -- Helpers for translating prefetching. -- | Translate byte array prefetch operations into proper primcalls. doPrefetchByteArrayOp :: Int -> [CmmExpr] -> FCode () doPrefetchByteArrayOp locality [addr,idx] = do dflags <- getDynFlags mkBasicPrefetch locality (arrWordsHdrSize dflags) addr idx doPrefetchByteArrayOp _ _ = panic "StgCmmPrim: doPrefetchByteArrayOp" -- | Translate mutable byte array prefetch operations into proper primcalls. 
doPrefetchMutableByteArrayOp :: Int -> [CmmExpr] -> FCode () doPrefetchMutableByteArrayOp locality [addr,idx] = do dflags <- getDynFlags mkBasicPrefetch locality (arrWordsHdrSize dflags) addr idx doPrefetchMutableByteArrayOp _ _ = panic "StgCmmPrim: doPrefetchByteArrayOp" -- | Translate address prefetch operations into proper primcalls. doPrefetchAddrOp ::Int -> [CmmExpr] -> FCode () doPrefetchAddrOp locality [addr,idx] = mkBasicPrefetch locality 0 addr idx doPrefetchAddrOp _ _ = panic "StgCmmPrim: doPrefetchAddrOp" -- | Translate value prefetch operations into proper primcalls. doPrefetchValueOp :: Int -> [CmmExpr] -> FCode () doPrefetchValueOp locality [addr] = do dflags <- getDynFlags mkBasicPrefetch locality 0 addr (CmmLit (CmmInt 0 (wordWidth dflags))) doPrefetchValueOp _ _ = panic "StgCmmPrim: doPrefetchValueOp" -- | helper to generate prefetch primcalls mkBasicPrefetch :: Int -- Locality level 0-3 -> ByteOff -- Initial offset in bytes -> CmmExpr -- Base address -> CmmExpr -- Index -> FCode () mkBasicPrefetch locality off base idx = do dflags <- getDynFlags emitPrimCall [] (MO_Prefetch_Data locality) [cmmIndexExpr dflags W8 (cmmOffsetB dflags base off) idx] return () -- ---------------------------------------------------------------------------- -- Allocating byte arrays -- | Takes a register to return the newly allocated array in and the -- size of the new array in bytes. Allocates a new -- 'MutableByteArray#'. doNewByteArrayOp :: CmmFormal -> ByteOff -> FCode () doNewByteArrayOp res_r n = do dflags <- getDynFlags let info_ptr = mkLblExpr mkArrWords_infoLabel rep = arrWordsRep dflags n tickyAllocPrim (mkIntExpr dflags (arrWordsHdrSize dflags)) (mkIntExpr dflags (nonHdrSize dflags rep)) (zeroExpr dflags) let hdr_size = fixedHdrSize dflags base <- allocHeapClosure rep info_ptr curCCS [ (mkIntExpr dflags n, hdr_size + oFFSET_StgArrWords_bytes dflags) ] emit $ mkAssign (CmmLocal res_r) base -- ---------------------------------------------------------------------------- -- Copying byte arrays -- | Takes a source 'ByteArray#', an offset in the source array, a -- destination 'MutableByteArray#', an offset into the destination -- array, and the number of bytes to copy. Copies the given number of -- bytes from the source array to the destination array. doCopyByteArrayOp :: CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> FCode () doCopyByteArrayOp = emitCopyByteArray copy where -- Copy data (we assume the arrays aren't overlapping since -- they're of different types) copy _src _dst dst_p src_p bytes = emitMemcpyCall dst_p src_p bytes 1 -- | Takes a source 'MutableByteArray#', an offset in the source -- array, a destination 'MutableByteArray#', an offset into the -- destination array, and the number of bytes to copy. Copies the -- given number of bytes from the source array to the destination -- array. doCopyMutableByteArrayOp :: CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> FCode () doCopyMutableByteArrayOp = emitCopyByteArray copy where -- The only time the memory might overlap is when the two arrays -- we were provided are the same array! -- TODO: Optimize branch for common case of no aliasing. 
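        -- Why the branch matters: a memcpy-style forward copy reads from
        -- slots it may already have overwritten when the two ranges overlap
        -- in the same array.  A pure-list model of that failure (sketch only):
        --
        --     naiveCopy :: [a] -> Int -> Int -> Int -> [a]
        --     naiveCopy xs src dst n = foldl step xs [0 .. n-1]
        --       where step ys i = take (dst+i) ys ++ [ys !! (src+i)]
        --                                         ++ drop (dst+i+1) ys
        --
        --     -- naiveCopy [1,2,3,4,5] 0 2 3 == [1,2,1,2,1], not [1,2,1,2,3],
        --     -- which is why the overlapping case goes through memmove.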
copy src dst dst_p src_p bytes = do dflags <- getDynFlags [moveCall, cpyCall] <- forkAlts [ getCode $ emitMemmoveCall dst_p src_p bytes 1, getCode $ emitMemcpyCall dst_p src_p bytes 1 ] emit =<< mkCmmIfThenElse (cmmEqWord dflags src dst) moveCall cpyCall emitCopyByteArray :: (CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> FCode ()) -> CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> FCode () emitCopyByteArray copy src src_off dst dst_off n = do dflags <- getDynFlags dst_p <- assignTempE $ cmmOffsetExpr dflags (cmmOffsetB dflags dst (arrWordsHdrSize dflags)) dst_off src_p <- assignTempE $ cmmOffsetExpr dflags (cmmOffsetB dflags src (arrWordsHdrSize dflags)) src_off copy src dst dst_p src_p n -- | Takes a source 'ByteArray#', an offset in the source array, a -- destination 'Addr#', and the number of bytes to copy. Copies the given -- number of bytes from the source array to the destination memory region. doCopyByteArrayToAddrOp :: CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> FCode () doCopyByteArrayToAddrOp src src_off dst_p bytes = do -- Use memcpy (we are allowed to assume the arrays aren't overlapping) dflags <- getDynFlags src_p <- assignTempE $ cmmOffsetExpr dflags (cmmOffsetB dflags src (arrWordsHdrSize dflags)) src_off emitMemcpyCall dst_p src_p bytes 1 -- | Takes a source 'MutableByteArray#', an offset in the source array, a -- destination 'Addr#', and the number of bytes to copy. Copies the given -- number of bytes from the source array to the destination memory region. doCopyMutableByteArrayToAddrOp :: CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> FCode () doCopyMutableByteArrayToAddrOp = doCopyByteArrayToAddrOp -- | Takes a source 'Addr#', a destination 'MutableByteArray#', an offset into -- the destination array, and the number of bytes to copy. Copies the given -- number of bytes from the source memory region to the destination array. doCopyAddrToByteArrayOp :: CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> FCode () doCopyAddrToByteArrayOp src_p dst dst_off bytes = do -- Use memcpy (we are allowed to assume the arrays aren't overlapping) dflags <- getDynFlags dst_p <- assignTempE $ cmmOffsetExpr dflags (cmmOffsetB dflags dst (arrWordsHdrSize dflags)) dst_off emitMemcpyCall dst_p src_p bytes 1 -- ---------------------------------------------------------------------------- -- Setting byte arrays -- | Takes a 'MutableByteArray#', an offset into the array, a length, -- and a byte, and sets each of the selected bytes in the array to the -- character. doSetByteArrayOp :: CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> FCode () doSetByteArrayOp ba off len c = do dflags <- getDynFlags p <- assignTempE $ cmmOffsetExpr dflags (cmmOffsetB dflags ba (arrWordsHdrSize dflags)) off emitMemsetCall p c len 1 -- ---------------------------------------------------------------------------- -- Allocating arrays -- | Allocate a new array. 
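-- The definition below allocates the closure and then emits a small loop
-- that stores the initial element into every slot; the generated Cmm is
-- roughly (a sketch, not the literal output):
--
--     p = arr + hdr;
--   for:
--     if (p < arr + hdr + n * wordSize) {
--       *p = init;
--       p = p + wordSize;
--       goto for;
--     }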
doNewArrayOp :: CmmFormal -- ^ return register -> SMRep -- ^ representation of the array -> CLabel -- ^ info pointer -> [(CmmExpr, ByteOff)] -- ^ header payload -> WordOff -- ^ array size -> CmmExpr -- ^ initial element -> FCode () doNewArrayOp res_r rep info payload n init = do dflags <- getDynFlags let info_ptr = mkLblExpr info tickyAllocPrim (mkIntExpr dflags (hdrSize dflags rep)) (mkIntExpr dflags (nonHdrSize dflags rep)) (zeroExpr dflags) base <- allocHeapClosure rep info_ptr curCCS payload arr <- CmmLocal `fmap` newTemp (bWord dflags) emit $ mkAssign arr base -- Initialise all elements of the the array p <- assignTemp $ cmmOffsetB dflags (CmmReg arr) (hdrSize dflags rep) for <- newLabelC emitLabel for let loopBody = [ mkStore (CmmReg (CmmLocal p)) init , mkAssign (CmmLocal p) (cmmOffsetW dflags (CmmReg (CmmLocal p)) 1) , mkBranch for ] emit =<< mkCmmIfThen (cmmULtWord dflags (CmmReg (CmmLocal p)) (cmmOffsetW dflags (CmmReg arr) (hdrSizeW dflags rep + n))) (catAGraphs loopBody) emit $ mkAssign (CmmLocal res_r) (CmmReg arr) -- ---------------------------------------------------------------------------- -- Copying pointer arrays -- EZY: This code has an unusually high amount of assignTemp calls, seen -- nowhere else in the code generator. This is mostly because these -- "primitive" ops result in a surprisingly large amount of code. It -- will likely be worthwhile to optimize what is emitted here, so that -- our optimization passes don't waste time repeatedly optimizing the -- same bits of code. -- More closely imitates 'assignTemp' from the old code generator, which -- returns a CmmExpr rather than a LocalReg. assignTempE :: CmmExpr -> FCode CmmExpr assignTempE e = do t <- assignTemp e return (CmmReg (CmmLocal t)) -- | Takes a source 'Array#', an offset in the source array, a -- destination 'MutableArray#', an offset into the destination array, -- and the number of elements to copy. Copies the given number of -- elements from the source array to the destination array. doCopyArrayOp :: CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> WordOff -> FCode () doCopyArrayOp = emitCopyArray copy where -- Copy data (we assume the arrays aren't overlapping since -- they're of different types) copy _src _dst dst_p src_p bytes = do dflags <- getDynFlags emitMemcpyCall dst_p src_p (mkIntExpr dflags bytes) (wORD_SIZE dflags) -- | Takes a source 'MutableArray#', an offset in the source array, a -- destination 'MutableArray#', an offset into the destination array, -- and the number of elements to copy. Copies the given number of -- elements from the source array to the destination array. doCopyMutableArrayOp :: CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> WordOff -> FCode () doCopyMutableArrayOp = emitCopyArray copy where -- The only time the memory might overlap is when the two arrays -- we were provided are the same array! -- TODO: Optimize branch for common case of no aliasing. 
copy src dst dst_p src_p bytes = do dflags <- getDynFlags [moveCall, cpyCall] <- forkAlts [ getCode $ emitMemmoveCall dst_p src_p (mkIntExpr dflags bytes) (wORD_SIZE dflags), getCode $ emitMemcpyCall dst_p src_p (mkIntExpr dflags bytes) (wORD_SIZE dflags) ] emit =<< mkCmmIfThenElse (cmmEqWord dflags src dst) moveCall cpyCall emitCopyArray :: (CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> ByteOff -> FCode ()) -- ^ copy function -> CmmExpr -- ^ source array -> CmmExpr -- ^ offset in source array -> CmmExpr -- ^ destination array -> CmmExpr -- ^ offset in destination array -> WordOff -- ^ number of elements to copy -> FCode () emitCopyArray copy src0 src_off dst0 dst_off0 n = do dflags <- getDynFlags when (n /= 0) $ do -- Passed as arguments (be careful) src <- assignTempE src0 dst <- assignTempE dst0 dst_off <- assignTempE dst_off0 -- Set the dirty bit in the header. emit (setInfo dst (CmmLit (CmmLabel mkMAP_DIRTY_infoLabel))) dst_elems_p <- assignTempE $ cmmOffsetB dflags dst (arrPtrsHdrSize dflags) dst_p <- assignTempE $ cmmOffsetExprW dflags dst_elems_p dst_off src_p <- assignTempE $ cmmOffsetExprW dflags (cmmOffsetB dflags src (arrPtrsHdrSize dflags)) src_off let bytes = wordsToBytes dflags n copy src dst dst_p src_p bytes -- The base address of the destination card table dst_cards_p <- assignTempE $ cmmOffsetExprW dflags dst_elems_p (loadArrPtrsSize dflags dst) emitSetCards dst_off dst_cards_p n doCopySmallArrayOp :: CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> WordOff -> FCode () doCopySmallArrayOp = emitCopySmallArray copy where -- Copy data (we assume the arrays aren't overlapping since -- they're of different types) copy _src _dst dst_p src_p bytes = do dflags <- getDynFlags emitMemcpyCall dst_p src_p (mkIntExpr dflags bytes) (wORD_SIZE dflags) doCopySmallMutableArrayOp :: CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> WordOff -> FCode () doCopySmallMutableArrayOp = emitCopySmallArray copy where -- The only time the memory might overlap is when the two arrays -- we were provided are the same array! -- TODO: Optimize branch for common case of no aliasing. copy src dst dst_p src_p bytes = do dflags <- getDynFlags [moveCall, cpyCall] <- forkAlts [ getCode $ emitMemmoveCall dst_p src_p (mkIntExpr dflags bytes) (wORD_SIZE dflags) , getCode $ emitMemcpyCall dst_p src_p (mkIntExpr dflags bytes) (wORD_SIZE dflags) ] emit =<< mkCmmIfThenElse (cmmEqWord dflags src dst) moveCall cpyCall emitCopySmallArray :: (CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> ByteOff -> FCode ()) -- ^ copy function -> CmmExpr -- ^ source array -> CmmExpr -- ^ offset in source array -> CmmExpr -- ^ destination array -> CmmExpr -- ^ offset in destination array -> WordOff -- ^ number of elements to copy -> FCode () emitCopySmallArray copy src0 src_off dst0 dst_off n = do dflags <- getDynFlags -- Passed as arguments (be careful) src <- assignTempE src0 dst <- assignTempE dst0 -- Set the dirty bit in the header. emit (setInfo dst (CmmLit (CmmLabel mkSMAP_DIRTY_infoLabel))) dst_p <- assignTempE $ cmmOffsetExprW dflags (cmmOffsetB dflags dst (smallArrPtrsHdrSize dflags)) dst_off src_p <- assignTempE $ cmmOffsetExprW dflags (cmmOffsetB dflags src (smallArrPtrsHdrSize dflags)) src_off let bytes = wordsToBytes dflags n copy src dst dst_p src_p bytes -- | Takes an info table label, a register to return the newly -- allocated array in, a source array, an offset in the source array, -- and the number of elements to copy. Allocates a new array and -- initializes it from the source array. 
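-- In outline (matching the definition that follows): allocate a fresh
-- MUT_ARR_PTRS closure, fill in its `ptrs` field with n and its `size`
-- field with the non-header size in words, and then copy the n element
-- words from the source with a single word-aligned memcpy.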
emitCloneArray :: CLabel -> CmmFormal -> CmmExpr -> CmmExpr -> WordOff -> FCode () emitCloneArray info_p res_r src src_off n = do dflags <- getDynFlags let info_ptr = mkLblExpr info_p rep = arrPtrsRep dflags n tickyAllocPrim (mkIntExpr dflags (arrPtrsHdrSize dflags)) (mkIntExpr dflags (nonHdrSize dflags rep)) (zeroExpr dflags) let hdr_size = fixedHdrSize dflags base <- allocHeapClosure rep info_ptr curCCS [ (mkIntExpr dflags n, hdr_size + oFFSET_StgMutArrPtrs_ptrs dflags) , (mkIntExpr dflags (nonHdrSizeW rep), hdr_size + oFFSET_StgMutArrPtrs_size dflags) ] arr <- CmmLocal `fmap` newTemp (bWord dflags) emit $ mkAssign arr base dst_p <- assignTempE $ cmmOffsetB dflags (CmmReg arr) (arrPtrsHdrSize dflags) src_p <- assignTempE $ cmmOffsetExprW dflags src (cmmAddWord dflags (mkIntExpr dflags (arrPtrsHdrSizeW dflags)) src_off) emitMemcpyCall dst_p src_p (mkIntExpr dflags (wordsToBytes dflags n)) (wORD_SIZE dflags) emit $ mkAssign (CmmLocal res_r) (CmmReg arr) -- | Takes an info table label, a register to return the newly -- allocated array in, a source array, an offset in the source array, -- and the number of elements to copy. Allocates a new array and -- initializes it from the source array. emitCloneSmallArray :: CLabel -> CmmFormal -> CmmExpr -> CmmExpr -> WordOff -> FCode () emitCloneSmallArray info_p res_r src src_off n = do dflags <- getDynFlags let info_ptr = mkLblExpr info_p rep = smallArrPtrsRep n tickyAllocPrim (mkIntExpr dflags (smallArrPtrsHdrSize dflags)) (mkIntExpr dflags (nonHdrSize dflags rep)) (zeroExpr dflags) let hdr_size = fixedHdrSize dflags base <- allocHeapClosure rep info_ptr curCCS [ (mkIntExpr dflags n, hdr_size + oFFSET_StgSmallMutArrPtrs_ptrs dflags) ] arr <- CmmLocal `fmap` newTemp (bWord dflags) emit $ mkAssign arr base dst_p <- assignTempE $ cmmOffsetB dflags (CmmReg arr) (smallArrPtrsHdrSize dflags) src_p <- assignTempE $ cmmOffsetExprW dflags src (cmmAddWord dflags (mkIntExpr dflags (smallArrPtrsHdrSizeW dflags)) src_off) emitMemcpyCall dst_p src_p (mkIntExpr dflags (wordsToBytes dflags n)) (wORD_SIZE dflags) emit $ mkAssign (CmmLocal res_r) (CmmReg arr) -- | Takes an offset in the destination array, the base address of -- the card table, and the number of elements affected (*not* the -- number of cards). The number of elements may not be zero. -- Marks the relevant cards as dirty.
emitSetCards :: CmmExpr -> CmmExpr -> WordOff -> FCode () emitSetCards dst_start dst_cards_start n = do dflags <- getDynFlags start_card <- assignTempE $ cardCmm dflags dst_start let end_card = cardCmm dflags (cmmSubWord dflags (cmmAddWord dflags dst_start (mkIntExpr dflags n)) (mkIntExpr dflags 1)) emitMemsetCall (cmmAddWord dflags dst_cards_start start_card) (mkIntExpr dflags 1) (cmmAddWord dflags (cmmSubWord dflags end_card start_card) (mkIntExpr dflags 1)) 1 -- no alignment (1 byte) -- Convert an element index to a card index cardCmm :: DynFlags -> CmmExpr -> CmmExpr cardCmm dflags i = cmmUShrWord dflags i (mkIntExpr dflags (mUT_ARR_PTRS_CARD_BITS dflags)) ------------------------------------------------------------------------------ -- SmallArray PrimOp implementations doReadSmallPtrArrayOp :: LocalReg -> CmmExpr -> CmmExpr -> FCode () doReadSmallPtrArrayOp res addr idx = do dflags <- getDynFlags mkBasicIndexedRead (smallArrPtrsHdrSize dflags) Nothing (gcWord dflags) res addr (gcWord dflags) idx doWriteSmallPtrArrayOp :: CmmExpr -> CmmExpr -> CmmExpr -> FCode () doWriteSmallPtrArrayOp addr idx val = do dflags <- getDynFlags let ty = cmmExprType dflags val mkBasicIndexedWrite (smallArrPtrsHdrSize dflags) Nothing addr ty idx val emit (setInfo addr (CmmLit (CmmLabel mkSMAP_DIRTY_infoLabel))) ------------------------------------------------------------------------------ -- Atomic read-modify-write -- | Emit an atomic modification to a byte array element. The result -- reg contains that previous value of the element. Implies a full -- memory barrier. doAtomicRMW :: LocalReg -- ^ Result reg -> AtomicMachOp -- ^ Atomic op (e.g. add) -> CmmExpr -- ^ MutableByteArray# -> CmmExpr -- ^ Index -> CmmType -- ^ Type of element by which we are indexing -> CmmExpr -- ^ Op argument (e.g. amount to add) -> FCode () doAtomicRMW res amop mba idx idx_ty n = do dflags <- getDynFlags let width = typeWidth idx_ty addr = cmmIndexOffExpr dflags (arrWordsHdrSize dflags) width mba idx emitPrimCall [ res ] (MO_AtomicRMW width amop) [ addr, n ] -- | Emit an atomic read to a byte array that acts as a memory barrier. doAtomicReadByteArray :: LocalReg -- ^ Result reg -> CmmExpr -- ^ MutableByteArray# -> CmmExpr -- ^ Index -> CmmType -- ^ Type of element by which we are indexing -> FCode () doAtomicReadByteArray res mba idx idx_ty = do dflags <- getDynFlags let width = typeWidth idx_ty addr = cmmIndexOffExpr dflags (arrWordsHdrSize dflags) width mba idx emitPrimCall [ res ] (MO_AtomicRead width) [ addr ] -- | Emit an atomic write to a byte array that acts as a memory barrier. 
doAtomicWriteByteArray :: CmmExpr -- ^ MutableByteArray# -> CmmExpr -- ^ Index -> CmmType -- ^ Type of element by which we are indexing -> CmmExpr -- ^ Value to write -> FCode () doAtomicWriteByteArray mba idx idx_ty val = do dflags <- getDynFlags let width = typeWidth idx_ty addr = cmmIndexOffExpr dflags (arrWordsHdrSize dflags) width mba idx emitPrimCall [ {- no results -} ] (MO_AtomicWrite width) [ addr, val ] doCasByteArray :: LocalReg -- ^ Result reg -> CmmExpr -- ^ MutableByteArray# -> CmmExpr -- ^ Index -> CmmType -- ^ Type of element by which we are indexing -> CmmExpr -- ^ Old value -> CmmExpr -- ^ New value -> FCode () doCasByteArray res mba idx idx_ty old new = do dflags <- getDynFlags let width = (typeWidth idx_ty) addr = cmmIndexOffExpr dflags (arrWordsHdrSize dflags) width mba idx emitPrimCall [ res ] (MO_Cmpxchg width) [ addr, old, new ] ------------------------------------------------------------------------------ -- Helpers for emitting function calls -- | Emit a call to @memcpy@. emitMemcpyCall :: CmmExpr -> CmmExpr -> CmmExpr -> Int -> FCode () emitMemcpyCall dst src n align = do emitPrimCall [ {-no results-} ] (MO_Memcpy align) [ dst, src, n ] -- | Emit a call to @memmove@. emitMemmoveCall :: CmmExpr -> CmmExpr -> CmmExpr -> Int -> FCode () emitMemmoveCall dst src n align = do emitPrimCall [ {- no results -} ] (MO_Memmove align) [ dst, src, n ] -- | Emit a call to @memset@. The second argument must fit inside an -- unsigned char. emitMemsetCall :: CmmExpr -> CmmExpr -> CmmExpr -> Int -> FCode () emitMemsetCall dst c n align = do emitPrimCall [ {- no results -} ] (MO_Memset align) [ dst, c, n ] emitBSwapCall :: LocalReg -> CmmExpr -> Width -> FCode () emitBSwapCall res x width = do emitPrimCall [ res ] (MO_BSwap width) [ x ] emitPopCntCall :: LocalReg -> CmmExpr -> Width -> FCode () emitPopCntCall res x width = do emitPrimCall [ res ] (MO_PopCnt width) [ x ] emitClzCall :: LocalReg -> CmmExpr -> Width -> FCode () emitClzCall res x width = do emitPrimCall [ res ] (MO_Clz width) [ x ] emitCtzCall :: LocalReg -> CmmExpr -> Width -> FCode () emitCtzCall res x width = do emitPrimCall [ res ] (MO_Ctz width) [ x ]
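-- A standalone model (not GHC code) of the card-marking arithmetic that
-- 'cardCmm' and 'emitSetCards' above generate Cmm for: an element index is
-- shifted right by the card-size exponent, and every card between the first
-- and the last element written is marked dirty.  The card-bits value below is
-- an illustrative assumption; the real one comes from DynFlags
-- (mUT_ARR_PTRS_CARD_BITS).
--
-- import Data.Bits (shiftR)
--
-- cardBits :: Int
-- cardBits = 7                        -- assumed stand-in for mUT_ARR_PTRS_CARD_BITS
--
-- card :: Int -> Int                  -- analogue of cardCmm
-- card i = i `shiftR` cardBits
--
-- dirtyCards :: Int -> Int -> [Int]   -- cards covered by elements [off .. off + n - 1]
-- dirtyCards off n = [card off .. card (off + n - 1)]
--
-- For example, dirtyCards 120 20 == [0,1]: elements 120..139 straddle cards 0 and 1.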
urbanslug/ghc
compiler/codeGen/StgCmmPrim.hs
bsd-3-clause
96,243
0
19
25,659
24,834
12,417
12,417
-1
-1
import Test.Framework (defaultMain, testGroup)
import Test.Framework.Providers.HUnit
import Test.HUnit

import TestArithmetic
import TestOperators
import TestExpectations
import TestTransformations
import TestTrace
import TestPositiveP

tests = [
    TestArithmetic.test_group,
    TestOperators.test_group,
    TestExpectations.test_group,
    TestTransformations.test_group,
    TestTrace.test_group,
    TestPositiveP.test_group
    ]

main = defaultMain tests
fjarri/wigner
test/test.hs
bsd-3-clause
464
0
6
65
87
53
34
17
1
module Main where import Control.Concurrent import Control.Monad import Network import qualified Network.Socket as S import Network.IRC import System.IO import Text.Printf server :: String server = "irc.freenode.net" port :: Int port = 6667 nickname :: String nickname = "HaskNag" channels :: [String] channels = ["#haskell-alerts"] bot :: String -> IO Handle bot nickname' = withSocketsDo $ do h <- connectTo server (PortNumber (fromIntegral port)) hSetBuffering h NoBuffering write h "NICK" nickname' write h "USER" (nickname ++ " 0 * :Haskell Nagios Bot") mapM_ (write h "JOIN") channels return h write :: Handle -> String -> String -> IO () write h s t = do _ <- hPrintf h "%s %s\r\n" s t printf "> %s %s\n" s t botPrivmsg :: Handle -> String -> IO () botPrivmsg h msg = mapM_ (\c -> write h "PRIVMSG" $ c ++ " :" ++ msg) channels botPrivmsgChannel :: Handle -> String -> String -> IO () botPrivmsgChannel h ch msg = write h "PRIVMSG" $ ch ++ " :" ++ msg botListen :: Handle -> IO () botListen h = forever $ do s <- hGetLine h handleLine h $ decode s putStrLn s handleLine :: Handle -> Maybe Message -> IO () handleLine _ Nothing = return () handleLine h (Just m) = reply $ msg_command m where params = msg_params m reply "PRIVMSG" = case response of Just s -> botPrivmsgChannel h (head params) s >> return () _ -> return () where response = case params !! 1 of "!hello" -> Just "hey there" _ -> Nothing reply "PING" = write h "PONG " $ ":" ++ params !! 0 reply _ = return () udpServer :: Handle -> IO () udpServer h = withSocketsDo $ do sock <- S.socket S.AF_INET S.Datagram 0 S.bindSocket sock (S.SockAddrInet 2000 S.iNADDR_ANY) forever $ do (mesg, _, client) <- S.recvFrom sock 1024 _ <- S.sendTo sock mesg client botPrivmsg h mesg putStrLn mesg main :: IO () main = do h <- bot nickname _ <- forkIO $ do botListen h udpServer h
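-- A possible way to exercise the UDP alert path from another process (an
-- illustrative, untested sketch using the same old-style network API imported
-- above; port 2000 mirrors udpServer, while the loopback address and the
-- function name sendAlert are assumptions):
--
-- sendAlert :: String -> IO ()
-- sendAlert msg = withSocketsDo $ do
--   sock <- S.socket S.AF_INET S.Datagram 0
--   addr <- S.inet_addr "127.0.0.1"
--   _ <- S.sendTo sock msg (S.SockAddrInet 2000 addr)
--   S.sClose sock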
haskell-infra/HaskNag
hasknag.hs
bsd-3-clause
2,005
0
13
509
782
378
404
65
5
----------------------------------------------------------------------------- -- | -- Module : XMonad.Prompt.Window -- Description : A prompt for bringing windows to you, and bring you to windows. -- Copyright : Devin Mullins <[email protected]> -- Andrea Rossato <[email protected]> -- License : BSD-style (see LICENSE) -- -- Maintainer : Devin Mullins <[email protected]> -- Andrea Rossato <[email protected]> -- Stability : unstable -- Portability : unportable -- -- xprompt operations to bring windows to you, and bring you to windows. -- ----------------------------------------------------------------------------- module XMonad.Prompt.Window ( -- * Usage -- $usage WindowPrompt(..), windowPrompt, windowMultiPrompt, allWindows, allApplications, wsWindows, XWindowMap, -- * Deprecated windowPromptGoto, windowPromptBring, windowPromptBringCopy, ) where import XMonad.Prelude (forM) import qualified Data.Map as M import qualified XMonad.StackSet as W import XMonad import XMonad.Prompt import XMonad.Actions.CopyWindow import XMonad.Actions.WindowBringer import XMonad.Util.NamedWindows -- $usage -- WindowPrompt brings windows to you and you to windows. That is to -- say, it pops up a prompt with window names, in case you forgot -- where you left your XChat. It also offers helpers to build the -- subset of windows which is used for the prompt completion. -- -- You can use this module with the following in your @~\/.xmonad\/xmonad.hs@: -- -- > import XMonad.Prompt -- > import XMonad.Prompt.Window -- -- and in the keys definition: -- -- > , ((modm .|. shiftMask, xK_g ), windowPrompt def Goto wsWindows) -- > , ((modm .|. shiftMask, xK_b ), windowPrompt def Bring allWindows) -- -- The autoComplete option is a handy complement here: -- -- > , ((modm .|. shiftMask, xK_g ), windowPrompt -- > def { autoComplete = Just 500000 } -- > Goto allWindows) -- -- The \'500000\' is the number of microseconds to pause before sending you to -- your new window. This is useful so that you don't accidentally send some -- keystrokes to the selected client. -- -- For detailed instruction on editing the key binding see -- "XMonad.Doc.Extending#Editing_key_bindings". -- Describe actions that can applied on the selected window data WindowPrompt = Goto | Bring | BringCopy | BringToMaster | WithWindow String (Window -> X()) instance XPrompt WindowPrompt where showXPrompt Goto = "Go to window: " showXPrompt Bring = "Bring window: " showXPrompt BringToMaster = "Bring window to master: " showXPrompt BringCopy = "Bring a copy: " showXPrompt (WithWindow xs _) = xs commandToComplete _ c = c nextCompletion _ = getNextCompletion -- | Internal type used for the multiple mode prompt. data WindowModePrompt = WindowModePrompt WindowPrompt (M.Map String Window) (String -> String -> Bool) instance XPrompt WindowModePrompt where showXPrompt (WindowModePrompt action _ _) = showXPrompt action completionFunction (WindowModePrompt _ winmap predicate) s = return . filter (predicate s) . map fst . M.toList $ winmap modeAction (WindowModePrompt action winmap _) buf auto = do let name = if null auto then buf else auto a = case action of Goto -> gotoAction Bring -> bringAction BringCopy -> bringCopyAction BringToMaster -> bringToMaster WithWindow _ f -> withWindow f a name where withWindow f = flip whenJust f . flip M.lookup winmap winAction a = withWindow (windows . a) gotoAction = winAction W.focusWindow bringAction = winAction bringWindow bringCopyAction = winAction bringCopyWindow bringToMaster = winAction (\w s -> W.shiftMaster . 
W.focusWindow w $ bringWindow w s) -- | Deprecated. Use windowPrompt instead. {-# DEPRECATED windowPromptGoto "Use windowPrompt instead." #-} {-# DEPRECATED windowPromptBring "Use windowPrompt instead." #-} {-# DEPRECATED windowPromptBringCopy "Use windowPrompt instead." #-} windowPromptGoto, windowPromptBring, windowPromptBringCopy :: XPConfig -> X () windowPromptGoto c = windowPrompt c Goto windowMap windowPromptBring c = windowPrompt c Bring windowMap windowPromptBringCopy c = windowPrompt c BringCopy windowMap -- | A helper to get the map of all windows. allWindows :: XWindowMap allWindows = windowMap -- | A helper to get the map of all applications allApplications :: XWindowMap allApplications = windowAppMap -- | A helper to get the map of windows of the current workspace. wsWindows :: XWindowMap wsWindows = withWindowSet (return . W.index) >>= winmap where winmap = fmap M.fromList . mapM pair pair w = do name <- show <$> getName w return (name, w) -- | A Map where keys are pretty printable window names and values are -- Xmonad windows identifier. type XWindowMap = X (M.Map String Window) -- | Pops open a prompt with window titles belonging to -- winmap. Choose one, and an action is applied on the -- selected window, according to WindowPrompt. windowPrompt :: XPConfig -> WindowPrompt -> XWindowMap -> X () windowPrompt c t winmap = do wm <- winmap let mode = WindowModePrompt t wm (searchPredicate c) action = modeAction mode compList = completionFunction mode mkXPrompt t c compList (\s -> action s s) -- | Like 'windowPrompt', but uses the multiple modes feature of -- @Prompt@ (via 'mkXPromptWithModes'). -- -- Given a list of actions along with the windows they should work -- with, display the appropriate prompt with the ability to switch -- between them using the @changeModeKey@. -- -- For example, to have a prompt that first shows you all windows, but -- allows you to narrow the list down to just the windows on the -- current workspace: -- -- > windowMultiPrompt config [(Goto, allWindows), (Goto, wsWindows)] windowMultiPrompt :: XPConfig -> [(WindowPrompt, XWindowMap)] -> X () windowMultiPrompt c modes = do modes' <- forM modes $ \(t, wm) -> do wm' <- wm return . XPT $ WindowModePrompt t wm' (searchPredicate c) mkXPromptWithModes modes' c -- | Brings a copy of the specified window into the current workspace. bringCopyWindow :: Window -> WindowSet -> WindowSet bringCopyWindow w ws = copyWindow w (W.currentTag ws) ws
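-- A possible keybinding for the multi-mode prompt above, in the style of the
-- usage notes in this module (the modifier and key chosen here are
-- assumptions; the mode list is the one from the windowMultiPrompt example):
--
-- > , ((modm .|. shiftMask, xK_w), windowMultiPrompt def [(Goto, allWindows), (Goto, wsWindows)])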
xmonad/xmonad-contrib
XMonad/Prompt/Window.hs
bsd-3-clause
6,626
0
15
1,530
1,019
563
456
84
1
module MediaFileSpec (spec) where import Test.Hspec import Data.ByteString.IsoBaseFileFormat.Box import Data.ByteString.IsoBaseFileFormat.MediaFile import Data.ByteString.IsoBaseFileFormat.ReExports import Data.ByteString.Lazy (unpack) spec :: Spec spec = describe "mediaBuilder" $ do describe "Empty BoxLayout" $ it "accepts valid box content types" $ let test = mediaBuilder (Proxy :: Proxy TestBrandEmpty) NoBoxes in printBuilderStdOut test describe "Single Box BoxLayout" $ it "accepts valid box content types" $ let test = mediaBuilder (Proxy :: Proxy TestBrandSingle) (singletonBox testBox1) in printBuilderStdOut test describe "Multiple nested Boxes BoxLayout" $ it "accepts valid box content types" $ let test = mediaBuilder (Proxy :: Proxy TestBrandNested) (singletonBox (testParentBox1 $: testBox1)) in printBuilderStdOut test printBuilderStdOut :: Builder -> IO () printBuilderStdOut b = putStrLn $ unlines $ (" "++) <$> lines (show (unpack (toLazyByteString b))) data TestBox1 instance IsBox TestBox1 where type BoxContent TestBox1 = () type instance BoxTypeSymbol TestBox1 = "tst1" testBox1 :: Box TestBox1 testBox1 = Box () data TestParentBox1 instance IsBox TestParentBox1 where type BoxContent TestParentBox1 = () type instance BoxTypeSymbol TestParentBox1 = "par1" testParentBox1 :: Boxes ts -> Box (ContainerBox TestParentBox1 ts) testParentBox1 = containerBox () data TestBrandEmpty instance IsMediaFileFormat TestBrandEmpty where type BoxLayout TestBrandEmpty = Boxes '[] data TestBrandSingle instance IsMediaFileFormat TestBrandSingle where type BoxLayout TestBrandSingle = Boxes '[OM_ TestBox1] data TestBrandNested instance IsMediaFileFormat TestBrandNested where type BoxLayout TestBrandNested = Boxes '[OM TestParentBox1 '[OM_ TestBox1]]
sheyll/isobmff-builder
spec/MediaFileSpec.hs
bsd-3-clause
2,012
0
17
465
490
252
238
-1
-1
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE FlexibleContexts #-}

module Matrix.Traversable
  ( matrixTraverse
  ) where

import Numeric.LinearAlgebra.HMatrix hiding (corr)
import Data.Traversable
import Util.Tuples

-- Matrix cannot be an instance of Functor, so it cannot be an instance of Traversable;
-- instead, convert to lists and use the list mapAccumL.
matrixTraverse :: (Element b, Element c) => (a -> b -> (a,c)) -> a -> Matrix b -> (a, Matrix c)
matrixTraverse f a m = applySnd fromLists $ mapAccumL (mapAccumL f) a (toLists m)
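-- A small usage sketch (an addition for illustration, not part of the original
-- module; 'runningSums' is an invented name): thread a running total through
-- the elements in row-major order, pairing the grand total with a matrix of
-- prefix sums.  It only uses names already imported above, so it should
-- compile alongside matrixTraverse given hmatrix's Element Double instance.
runningSums :: Matrix Double -> (Double, Matrix Double)
runningSums = matrixTraverse (\acc x -> let s = acc + x in (s, s)) 0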
eklinkhammer/neural-algorithms
src/Matrix/Traversable.hs
bsd-3-clause
543
0
10
96
139
78
61
10
1
module Glazier.React.Obj
    ( -- | constructor not exported
      -- Ref
      -- , strongRef
      -- , _strongRef
      -- , weakRef
      -- , _weakRef
      -- -- | constructor not exported
      -- , PlanRef
      -- , ModelVar
      -- | constructor not exported
      Obj
      -- , objPlan
      -- , modelVar
      -- , _modelVar
      -- , planRef
      -- , _planRef
      -- | constructor not exported
    , WeakObj
    , weakObj
    , _weakObj
    , fromWeakObj
    ) where

import Glazier.React.Obj.Internal
louispan/glazier-react
src/Glazier/React/Obj.hs
bsd-3-clause
503
0
4
173
47
37
10
8
0
{-# LANGUAGE ImplicitParams #-} module Tct.Its.Strategies ( itsDeclarations , runtime , runtime' , runtimeDeclaration , module Tct.Its.Processors ) where import Tct.Core import qualified Tct.Core.Data as T import Tct.Its.Data.Selector import Tct.Its.Data.Problem import Tct.Its.Processors itsDeclarations :: [StrategyDeclaration Its Its] itsDeclarations = [ SD emptyDeclaration , SD farkasDeclaration , SD knowledgePropagationDeclaration , SD leafRulesDeclaration , SD pathAnalysisDeclaration , SD polyDeclaration , SD sizeboundsDeclaration , SD unreachableRulesDeclaration , SD unsatRulesDeclaration , SD runtimeDeclaration ] runtimeDeclaration :: T.Declaration ('[Argument 'Optional Bool, Argument 'Optional Bool] T.:-> ItsStrategy) runtimeDeclaration = strategy "runtime" (atarg, afarg) def where atarg = bool "useTransitionAbstraction" ["Whether predicate abstraction should be used."] `optional` False afarg = bool "useArgumentFilter" ["Whether argument filtering should be used."] `optional` False wellformed :: ItsStrategy wellformed = withProblem $ \prob -> when (not $ validate prob) (failing "Problem is not well-formed.") runtime :: ItsStrategy runtime = T.deflFun runtimeDeclaration runtime' :: Bool -> Bool -> ItsStrategy runtime' = T.declFun runtimeDeclaration def :: Bool -> Bool -> ItsStrategy def useAT useAF = let ?maxChain = 2 :: Int ?nInChain = 5 :: Int ?nOutChain = 10 :: Int ?useAT = useAT ?useAF = useAF in wellformed .>>> try simpl1 .>>> try (when ?useAT (withProblem (transitionAbstraction . monotonicityPredicates))) .>>> try (when ?useAF (withProblem (argumentFilter . unusedFilter))) -- .>>> try pathAnalysis -- FIXME: update rvgraph error; just re-compute it .>>> try st .>>> withChaining st .>>> empty where st = try simpl2 .>>> te (withKnowledgePropagation farkas) -- .>>> te (try sizebounds .>>> withKnowledgePropagation farkas) .>>> te (try sizebounds .>>> usingTimebounds) usingTimebounds = withProblem $ \prob -> es $ fastestN 8 [ withKnowledgePropagation (timebounds c) | c <- timeboundsCandidates (selNextSCC prob) ] -- FIXME: boundtrivialsccs is not always 1 in the recursive case; take max label simpl1 :: ItsStrategy simpl1 = force $ try boundTrivialSCCs .>>> try unsatRules simpl2 :: ItsStrategy simpl2 = force $ try unsatPaths .>>> try unreachableRules .>>> try leafRules -- withArgumentFilter :: ItsStrategy -> ItsStrategy -- withArgumentFilter st = st .>>> try af -- where af = withProblem (argumentFilter . unusedFilter) withKnowledgePropagation :: ItsStrategy -> ItsStrategy withKnowledgePropagation st = st .>>> try knowledgePropagation innerChaining :: ItsStrategy innerChaining = withProblem $ \prob -> chaining . chainingCandidates k prob $ selNextSCC prob where k prob r = maxCost 2 prob r && maxOuts 3 prob r outerChaining :: ItsStrategy outerChaining = withProblem $ \prob -> chaining . chainingCandidates k prob $ selToNextSCC prob where k prob r = isUnknown prob r && maxCost 20 prob r && maxOuts 4 prob r withChaining :: (?maxChain :: Int, ?nInChain :: Int, ?nOutChain :: Int) => ItsStrategy -> ItsStrategy -- withChaining st = es $ try st .>>> (exhaustivelyN ?nInChain innerChaining <|> exhaustivelyN ?nOutChain outerChaining) withChaining st = exhaustivelyN ?maxChain $ try st .>>> (exhaustivelyN ?nInChain innerChaining .<|> exhaustivelyN ?nOutChain outerChaining) .>>> try empty
ComputationWithBoundedResources/tct-its
src/Tct/Its/Strategies.hs
bsd-3-clause
3,551
0
18
674
855
452
403
-1
-1
module Saves
  ( saveName
  , saveGame
  , loadGame
  ) where

import Text.Read

import Game

saveName :: FilePath
saveName = "7drl2017.sav"

saveGame :: FilePath -> GameState -> IO ()
saveGame fp gs = writeFile fp (show gs)

loadGame :: FilePath -> IO (Maybe GameState)
loadGame fp = readMaybe <$> readFile fp
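-- A small usage sketch (not part of the original module; 'saveAndReload' is an
-- invented name): write the current state to the default save file and read it
-- straight back, keeping the in-memory state if the file fails to parse.
saveAndReload :: GameState -> IO GameState
saveAndReload gs = do
  saveGame saveName gs
  maybe gs id <$> loadGame saveName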
dagit/7drl2017
src/Saves.hs
bsd-3-clause
303
0
8
55
105
56
49
12
1
{-# LANGUAGE OverloadedStrings #-} ----------------------------------------------------------------------------- -- | -- Module : -- Copyright : (c) 2012 Boyun Tang -- License : BSD-style -- Maintainer : [email protected] -- Stability : experimental -- Portability : ghc -- -- -- ----------------------------------------------------------------------------- module Bio.Seq.EMBL ( extractEMBL , extractUnparseable , readEMBL , splitEMBL , module Bio.Seq.EMBL.Parser , module Bio.Seq.EMBL.Types -- * References -- $references ) where import Bio.Seq.EMBL.Parser import Bio.Seq.EMBL.Types import Data.Attoparsec.ByteString.Lazy import qualified Data.ByteString.Lazy.Char8 as B8 import qualified Data.ByteString.Lazy.Builder as B8 import Data.ByteString.Lazy (ByteString) import Data.Monoid import Data.Maybe splitEMBL :: ByteString -> [ByteString] splitEMBL str = if B8.null str then [] else go $ B8.findIndices (== '/') str where go [] = [] go (_:[]) = [] go (i:j:is) = if j - i == 1 then if i > 1 && B8.index str (i-1) == '\n' then let (lhs,rhs) = B8.splitAt (j+1) str remain = case B8.dropWhile (/= '\n') rhs of "" -> "" re -> B8.tail re in lhs : splitEMBL remain else go is else go (j:is) -- | Extract `EMBL` record from a lazy `ByteString` , unparseable parts will be -- thrown away. extractEMBL :: ByteString -> [EMBL] extractEMBL = catMaybes . map (maybeResult . parse parseEMBL) . splitEMBL readEMBL :: FilePath -> IO [EMBL] readEMBL fp = B8.readFile fp >>= return . extractEMBL -- | Lazily extract unparseable record from a large embl file, -- mainly for debugging purposes. extractUnparseable :: ByteString -> ByteString extractUnparseable = B8.toLazyByteString . foldr (\a b -> B8.lazyByteString a `mappend` B8.byteString "\n" `mappend` b) mempty . filter (isNothing . maybeResult . parse parseEMBL) . splitEMBL -- $references -- -- * The European Nucleotide Archive/EMBL-Bank User Manual: -- <ftp://ftp.ebi.ac.uk/pub/databases/embl/release/usrman.txt> -- -- * The DDBJ\/EMBL\/GenBank Feature Table Definition: -- <ftp://ftp.ebi.ac.uk/pub/databases/embl/doc/FT_current.txt> -- -- * European Nucleotide Archive: Webin - Features & Qualifiers: -- <http://www.ebi.ac.uk/ena/WebFeat/>
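-- A small usage sketch (not part of the original module; 'summarizeEMBLFile'
-- is an invented name): count how many records parse and dump whatever could
-- not be parsed, using only names already in scope in this module.
summarizeEMBLFile :: FilePath -> IO ()
summarizeEMBLFile fp = do
  records <- readEMBL fp
  putStrLn $ "parsed records: " ++ show (length records)
  raw <- B8.readFile fp
  B8.putStrLn (extractUnparseable raw)
-- e.g. summarizeEMBLFile "sample.embl", where "sample.embl" is a hypothetical input file.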
tangboyun/bio-seq-embl
src/Bio/Seq/EMBL.hs
bsd-3-clause
2,551
0
17
655
525
305
220
50
7
{-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE KindSignatures #-} {-# LANGUAGE PolyKinds #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE UndecidableInstances #-} {-# LANGUAGE FlexibleContexts #-} {-# OPTIONS_GHC -fno-warn-incomplete-patterns #-} module Math.Multicategory ( Forest(..) , C(..) , Multicategory(..) -- * Utilities , inputs, outputs , splitForest ) where import Data.Constraint import Data.Proxy import Data.Type.Equality import Math.Category import Math.Functor import Math.Monad import Math.Polycategory.PRO import Math.Rec import Prelude (($)) import qualified Prelude -------------------------------------------------------------------------------- -- * Forests -------------------------------------------------------------------------------- data Forest :: ([i] -> i -> *) -> [i] -> [i] -> * where Nil :: Forest f '[] '[] (:-) :: f k o -> Forest f m n -> Forest f (k ++ m) (o ': n) inputs :: forall f g is os. (forall as b. f as b -> Rec g as) -> Forest f is os -> Rec g is inputs _ Nil = RNil inputs f (a :- as) = f a `appendRec` inputs f as outputs :: (forall as b. f as b -> p b) -> Forest f is os -> Rec p os outputs _ Nil = RNil outputs f (a :- as) = f a :& outputs f as splitForest :: forall f g ds is js os r. Rec f is -> Forest g js os -> Forest g ds (is ++ js) -> (forall bs cs. (ds ~ (bs ++ cs)) => Forest g bs is -> Forest g cs js -> r) -> r splitForest RNil _ as k = k Nil as splitForest (_ :& is) bs ((j :: g as o) :- js) k = splitForest is bs js $ \ (l :: Forest g bs as1) (r :: Forest g cs js) -> case appendAssocAxiom (Proxy :: Proxy as) (Proxy :: Proxy bs) (Proxy :: Proxy cs) of Dict -> k (j :- l) r instance Multicategory f => Functor (Forest f) where type Dom (Forest f) = Op (Forest f) type Cod (Forest f) = Nat (Forest f) (->) fmap (Op f) = Nat (. f) instance (Multicategory f) => Functor (Forest f is) where type Dom (Forest f is) = Forest f type Cod (Forest f is) = (->) fmap = (.) -------------------------------------------------------------------------------- -- * Forgetting the multicategory structure -------------------------------------------------------------------------------- data C (f :: [i] -> i -> *) (a :: i) (b :: i) where C :: { runC :: f '[a] b } -> C f a b instance Multicategory f => Category (C f) where type Ob (C f) = Mob f id = C ident C f . C g = C (compose f (g :- Nil)) source (C f) = case sources f of Dict1 :& RNil -> Dict target (C f) = case mtarget f of Dict1 -> Dict -------------------------------------------------------------------------------- -- * Multicategories -------------------------------------------------------------------------------- class ( Category (MDom f) , Bifunctor f , Dom f ~ Op (Forest f) , Dom2 f ~ C f , Cod2 f ~ (->) ) => Multicategory (f :: [i] -> i -> *) where type MDom f :: i -> i -> * ident :: Mob f a => f '[a] a sources :: f as b -> Rec (Dict1 (Mob f)) as mtarget :: f as b -> Dict1 (Mob f) b type Mob f = Ob (MDom f) compose :: Multicategory f => f bs c -> Forest f as bs -> f as c compose f es = case mtarget f of Dict1 -> lmap es f instance Multicategory f => Category (Forest f) where type Ob (Forest f) = All (Mob f) id = go proofs where go :: Rec (Dict1 (Mob f)) is -> Forest f is is go (Dict1 :& as) = ident :- go as go RNil = Nil Nil . Nil = Nil (b :- bs) . as = splitForest (sources b) bs as $ \es fs -> compose b es :- (bs . fs) source = reproof . inputs sources target = reproof . 
outputs mtarget instance Multicategory f => PRO (Forest f) where pro Nil rs = rs pro (l :- ls) rs = case appendAssocAxiom (sources l) (go (source ls)) (go (source rs)) of Dict -> l :- pro ls rs where go :: Dict (All p as) -> Rec (Dict1 p) as go Dict = proofs data IM :: ([k] -> k -> *) -> (k -> *) -> k -> * where IM :: f is o -> Rec a is -> IM f a o -- A subcategory of (:~:) satisfying a constraint `p` data Dat p i j where Dat :: p i => Dat p i i dum :: Dat p i j -> i :~: j dum Dat = Refl instance Category (Dat p) where type Ob (Dat p) = p id = Dat Dat . Dat = Dat source Dat{} = Dict target Dat{} = Dict instance Functor (Dat p) where type Dom (Dat p) = Op (Dat p) type Cod (Dat p) = Nat (Dat p) (->) instance Functor (Dat p a) where type Dom (Dat p a) = Dat p type Cod (Dat p a) = (->) --instance Functor IM where -- type Dom IM = Nat (:~:) (Nat (:~:) (->)) -- type Cod IM = Nat (Nat (:~:) (->)) (Nat (:~:) (->)) instance Multicategory f => Functor (IM f) where type Dom (IM f) = Nat (Dat (Mob f)) (->) type Cod (IM f) = Nat (Dat (Mob f)) (->) fmap f = Nat $ \(IM s d) -> IM s (runNat (fmap (go f)) d) where go :: Nat (Dat (Mob f)) (->) i j -> Nat (:~:) (->) i j go = Prelude.undefined -- TODO -- what we really need is a restricted 'Rec' which can only be mapped with a Dat (Mob f) rather than (:~:) instance Multicategory f => Functor (IM f a) where type Dom (IM f a) = Dat (Mob f) type Cod (IM f a) = (->) fmap Dat a = a instance Multicategory f => Monad (IM f) where return = Nat $ \a -> IM ident (a :& RNil)
ekmett/categories
src/Math/Multicategory.hs
bsd-3-clause
5,304
0
16
1,272
2,321
1,215
1,106
121
1
module Main where import Control.Applicative import Control.Monad import Data.Attoparsec.Text.Lazy import Data.Counter (Counter) import Data.Csv (ToRecord, Record) import Data.Monoid import Data.Text (Text) import Data.Vector (Vector) import System.Environment (getArgs) import System.Exit (exitFailure) import Text.Read import qualified Data.ByteString.Lazy.Char8 as ByteString import qualified Data.Counter as Counter import qualified Data.Csv as CSV import qualified Data.Map as Map import qualified Data.Text as Text import qualified Data.Text.Lazy.IO as Text import qualified Data.Vector as Vector import Data.ByteString.Lazy.Char8 (ByteString) data Calc = Neural NeuralCalc | Naive NaiveCalc deriving (Show, Read, Eq) instance ToRecord Calc where toRecord (Neural c) = CSV.toRecord c toRecord (Naive c) = CSV.toRecord c data NeuralCalc = NeuralCalc [Int] Int Double Double (Counter Res) deriving (Show, Read, Eq) instance ToRecord NeuralCalc where toRecord (NeuralCalc ls t ct tt c) = CSV.record [ "neural" , CSV.toField $ show ls , CSV.toField t , CSV.toField ct , CSV.toField tt ] <> resToRecord c resToRecord :: Counter Res -> Record resToRecord c = CSV.record $ map (\x -> CSV.toField $ Counter.lookup x c) [TruePositive .. UnknownNegative] <> [CSV.toField $ Counter.total c] data NaiveCalc = NaiveCalc Double Double (Counter Res) deriving (Show, Read, Eq) instance ToRecord NaiveCalc where toRecord (NaiveCalc ct tt c) = CSV.record [ "naive" , mempty , mempty , CSV.toField ct , CSV.toField tt ] <> resToRecord c data Res = TruePositive | TrueNegative | FalsePositive | FalseNegative | UnknownPositive | UnknownNegative deriving (Show, Read, Eq, Ord, Enum) boolsToRes :: (Bool, Maybe Bool) -> Res boolsToRes x = case x of (True, Just True) -> TruePositive (False, Just False) -> TrueNegative (False, Just True) -> FalsePositive (True, Just False) -> FalseNegative (True, Nothing) -> UnknownPositive (False, Nothing) -> UnknownNegative vectToRes :: (Vector Double, Vector Double) -> Res vectToRes (x, y) = case (Vector.toList x, Vector.toList y) of ([1], [1]) -> TruePositive ([-1], [-1]) -> TrueNegative ([-1], [1]) -> FalsePositive ([1], [-1]) -> FalseNegative ([1], [0]) -> UnknownPositive ([-1], [0]) -> UnknownNegative _ -> error "invalid result" main :: IO () main = getArgs >>= \case [filename] -> go filename >>= ByteString.putStrLn x -> do putStrLn "Invalid arguments:" print x exitFailure go :: FilePath -> IO ByteString go filename = do file <- Text.readFile filename case eitherResult $ parse resultFile file of Left err -> do print err exitFailure Right xs -> return $ CSV.encode xs resultFile :: Parser [Calc] resultFile = many ((Neural <$> neuralCalc) <|> (Naive <$> naiveCalc)) neuralCalc :: Parser NeuralCalc neuralCalc = do layers <- label "layers" $ ss $ lineRemainder >>= readParser train <- label "train" $ ss decimal label "network" $ skipMany $ (("Network" <|> "[" <|> ",") *> ss lineRemainder) ct <- label "ct" $ ss timingLine results <- label "results" $ ss lineRemainder >>= readParser ft <- label "ft" $ ss timingLine return $ NeuralCalc layers train ct ft (mapKey vectToRes results) lineRemainder :: Parser Text lineRemainder = ss $ takeTill isEndOfLine label :: String -> Parser a -> Parser a label s x = x <?> s mapKey :: (Ord a, Ord b) => (a -> b) -> Counter a -> Counter b mapKey f (Counter.toMap -> m) = Counter.fromMap $ Map.mapKeys f m naiveCalc :: Parser NaiveCalc naiveCalc = do void $ ss $ lineRemainder ct <- ss timingLine results <- ss $ lineRemainder >>= readParser ft <- ss timingLine return $ 
NaiveCalc ct ft (mapKey boolsToRes results) readParser :: Read a => Text -> Parser a readParser x = case readMaybe (Text.unpack x) of Just y -> return y Nothing -> fail "read failed" timingLine :: Parser Double timingLine = double <* "s" ss :: Parser a -> Parser a ss x = x <* skipSpace
intolerable/project-utilities
parse-output/Main.hs
bsd-3-clause
4,082
0
12
848
1,543
807
736
-1
-1
----------------------------------------------------------------------------- -- | -- Module : Data.Array.Base -- Copyright : (c) The University of Glasgow 2001 -- License : BSD-style (see the file libraries/base/LICENSE) -- -- Maintainer : [email protected] -- Stability : experimental -- Portability : non-portable -- -- Basis for IArray and MArray. Not intended for external consumption; -- use IArray or MArray instead. -- ----------------------------------------------------------------------------- -- #hide module Data.Array.Base where import Prelude import Data.Ix ( Ix, range, index, rangeSize ) import Data.Int import Data.Word import Foreign.Ptr import Foreign.StablePtr import Data.Bits import Foreign.Storable import qualified Hugs.Array as Arr import qualified Hugs.ST as ArrST import Hugs.Array ( unsafeIndex ) import Hugs.ST ( STArray, ST(..), runST ) import Hugs.ByteArray import Data.Typeable ----------------------------------------------------------------------------- -- Class of immutable arrays -- | Class of array types with bounds class HasBounds a where -- | Extracts the bounds of an array bounds :: Ix i => a i e -> (i,i) {- | Class of immutable array types. An array type has the form @(a i e)@ where @a@ is the array type constructor (kind @* -> * -> *@), @i@ is the index type (a member of the class 'Ix'), and @e@ is the element type. The @IArray@ class is parameterised over both @a@ and @e@, so that instances specialised to certain element types can be defined. -} class HasBounds a => IArray a e where unsafeArray :: Ix i => (i,i) -> [(Int, e)] -> a i e unsafeAt :: Ix i => a i e -> Int -> e unsafeReplace :: Ix i => a i e -> [(Int, e)] -> a i e unsafeAccum :: Ix i => (e -> e' -> e) -> a i e -> [(Int, e')] -> a i e unsafeAccumArray :: Ix i => (e -> e' -> e) -> e -> (i,i) -> [(Int, e')] -> a i e unsafeReplace arr ies = runST (unsafeReplaceST arr ies >>= unsafeFreeze) unsafeAccum f arr ies = runST (unsafeAccumST f arr ies >>= unsafeFreeze) unsafeAccumArray f e lu ies = runST (unsafeAccumArrayST f e lu ies >>= unsafeFreeze) {-# INLINE unsafeReplaceST #-} unsafeReplaceST :: (IArray a e, Ix i) => a i e -> [(Int, e)] -> ST s (STArray s i e) unsafeReplaceST arr ies = do marr <- thaw arr sequence_ [unsafeWrite marr i e | (i, e) <- ies] return marr {-# INLINE unsafeAccumST #-} unsafeAccumST :: (IArray a e, Ix i) => (e -> e' -> e) -> a i e -> [(Int, e')] -> ST s (STArray s i e) unsafeAccumST f arr ies = do marr <- thaw arr sequence_ [do old <- unsafeRead marr i unsafeWrite marr i (f old new) | (i, new) <- ies] return marr {-# INLINE unsafeAccumArrayST #-} unsafeAccumArrayST :: Ix i => (e -> e' -> e) -> e -> (i,i) -> [(Int, e')] -> ST s (STArray s i e) unsafeAccumArrayST f e (l,u) ies = do marr <- newArray (l,u) e sequence_ [do old <- unsafeRead marr i unsafeWrite marr i (f old new) | (i, new) <- ies] return marr {-# INLINE array #-} {-| Constructs an immutable array from a pair of bounds and a list of initial associations. The bounds are specified as a pair of the lowest and highest bounds in the array respectively. For example, a one-origin vector of length 10 has bounds (1,10), and a one-origin 10 by 10 matrix has bounds ((1,1),(10,10)). An association is a pair of the form @(i,x)@, which defines the value of the array at index @i@ to be @x@. The array is undefined if any index in the list is out of bounds. If any two associations in the list have the same index, the value at that index is undefined. 
Because the indices must be checked for these errors, 'array' is strict in the bounds argument and in the indices of the association list. Whether @array@ is strict or non-strict in the elements depends on the array type: 'Data.Array.Array' is a non-strict array type, but all of the 'Data.Array.Unboxed.UArray' arrays are strict. Thus in a non-strict array, recurrences such as the following are possible: > a = array (1,100) ((1,1) : [(i, i * a!(i-1)) | i \<- [2..100]]) Not every index within the bounds of the array need appear in the association list, but the values associated with indices that do not appear will be undefined. If, in any dimension, the lower bound is greater than the upper bound, then the array is legal, but empty. Indexing an empty array always gives an array-bounds error, but 'bounds' still yields the bounds with which the array was constructed. -} array :: (IArray a e, Ix i) => (i,i) -- ^ bounds of the array: (lowest,highest) -> [(i, e)] -- ^ list of associations -> a i e array (l,u) ies = unsafeArray (l,u) [(index (l,u) i, e) | (i, e) <- ies] -- Since unsafeFreeze is not guaranteed to be only a cast, we will -- use unsafeArray and zip instead of a specialized loop to implement -- listArray, unlike Array.listArray, even though it generates some -- unnecessary heap allocation. Will use the loop only when we have -- fast unsafeFreeze, namely for Array and UArray (well, they cover -- almost all cases). {-# INLINE listArray #-} -- | Constructs an immutable array from a list of initial elements. -- The list gives the elements of the array in ascending order -- beginning with the lowest index. listArray :: (IArray a e, Ix i) => (i,i) -> [e] -> a i e listArray (l,u) es = unsafeArray (l,u) (zip [0 .. rangeSize (l,u) - 1] es) {-# INLINE listArrayST #-} listArrayST :: Ix i => (i,i) -> [e] -> ST s (STArray s i e) listArrayST (l,u) es = do marr <- newArray_ (l,u) let n = rangeSize (l,u) let fillFromList i xs | i == n = return () | otherwise = case xs of [] -> return () y:ys -> unsafeWrite marr i y >> fillFromList (i+1) ys fillFromList 0 es return marr {-# RULES "listArray/Array" listArray = \lu es -> runST (listArrayST lu es >>= ArrST.unsafeFreezeSTArray) #-} {-# INLINE listUArrayST #-} listUArrayST :: (MArray (STUArray s) e (ST s), Ix i) => (i,i) -> [e] -> ST s (STUArray s i e) listUArrayST (l,u) es = do marr <- newArray_ (l,u) let n = rangeSize (l,u) let fillFromList i xs | i == n = return () | otherwise = case xs of [] -> return () y:ys -> unsafeWrite marr i y >> fillFromList (i+1) ys fillFromList 0 es return marr -- I don't know how to write a single rule for listUArrayST, because -- the type looks like constrained over 's', which runST doesn't -- like. In fact all MArray (STUArray s) instances are polymorphic -- wrt. 's', but runST can't know that. -- I would like to write a rule for listUArrayST (or listArray or -- whatever) applied to unpackCString#. Unfortunately unpackCString# -- calls seem to be floated out, then floated back into the middle -- of listUArrayST, so I was not able to do this. 
{-# RULES "listArray/UArray/Bool" listArray = \lu (es :: [Bool]) -> runST (listUArrayST lu es >>= unsafeFreezeSTUArray) "listArray/UArray/Char" listArray = \lu (es :: [Char]) -> runST (listUArrayST lu es >>= unsafeFreezeSTUArray) "listArray/UArray/Int" listArray = \lu (es :: [Int]) -> runST (listUArrayST lu es >>= unsafeFreezeSTUArray) "listArray/UArray/Word" listArray = \lu (es :: [Word]) -> runST (listUArrayST lu es >>= unsafeFreezeSTUArray) "listArray/UArray/Ptr" listArray = \lu (es :: [Ptr a]) -> runST (listUArrayST lu es >>= unsafeFreezeSTUArray) "listArray/UArray/FunPtr" listArray = \lu (es :: [FunPtr a]) -> runST (listUArrayST lu es >>= unsafeFreezeSTUArray) "listArray/UArray/Float" listArray = \lu (es :: [Float]) -> runST (listUArrayST lu es >>= unsafeFreezeSTUArray) "listArray/UArray/Double" listArray = \lu (es :: [Double]) -> runST (listUArrayST lu es >>= unsafeFreezeSTUArray) "listArray/UArray/StablePtr" listArray = \lu (es :: [StablePtr a]) -> runST (listUArrayST lu es >>= unsafeFreezeSTUArray) "listArray/UArray/Int8" listArray = \lu (es :: [Int8]) -> runST (listUArrayST lu es >>= unsafeFreezeSTUArray) "listArray/UArray/Int16" listArray = \lu (es :: [Int16]) -> runST (listUArrayST lu es >>= unsafeFreezeSTUArray) "listArray/UArray/Int32" listArray = \lu (es :: [Int32]) -> runST (listUArrayST lu es >>= unsafeFreezeSTUArray) "listArray/UArray/Int64" listArray = \lu (es :: [Int64]) -> runST (listUArrayST lu es >>= unsafeFreezeSTUArray) "listArray/UArray/Word8" listArray = \lu (es :: [Word8]) -> runST (listUArrayST lu es >>= unsafeFreezeSTUArray) "listArray/UArray/Word16" listArray = \lu (es :: [Word16]) -> runST (listUArrayST lu es >>= unsafeFreezeSTUArray) "listArray/UArray/Word32" listArray = \lu (es :: [Word32]) -> runST (listUArrayST lu es >>= unsafeFreezeSTUArray) "listArray/UArray/Word64" listArray = \lu (es :: [Word64]) -> runST (listUArrayST lu es >>= unsafeFreezeSTUArray) #-} {-# INLINE (!) #-} -- | Returns the element of an immutable array at the specified index. (!) :: (IArray a e, Ix i) => a i e -> i -> e arr ! i = case bounds arr of (l,u) -> unsafeAt arr (index (l,u) i) {-# INLINE indices #-} -- | Returns a list of all the valid indices in an array. indices :: (HasBounds a, Ix i) => a i e -> [i] indices arr = case bounds arr of (l,u) -> range (l,u) {-# INLINE elems #-} -- | Returns a list of all the elements of an array, in the same order -- as their indices. elems :: (IArray a e, Ix i) => a i e -> [e] elems arr = case bounds arr of (l,u) -> [unsafeAt arr i | i <- [0 .. rangeSize (l,u) - 1]] {-# INLINE assocs #-} -- | Returns the contents of an array as a list of associations. assocs :: (IArray a e, Ix i) => a i e -> [(i, e)] assocs arr = case bounds arr of (l,u) -> [(i, unsafeAt arr (unsafeIndex (l,u) i)) | i <- range (l,u)] {-# INLINE accumArray #-} {-| Constructs an immutable array from a list of associations. Unlike 'array', the same index is allowed to occur multiple times in the list of associations; an /accumulating function/ is used to combine the values of elements with the same index. 
For example, given a list of values of some index type, hist produces a histogram of the number of occurrences of each index within a specified range: > hist :: (Ix a, Num b) => (a,a) -> [a] -> Array a b > hist bnds is = accumArray (+) 0 bnds [(i, 1) | i\<-is, inRange bnds i] -} accumArray :: (IArray a e, Ix i) => (e -> e' -> e) -- ^ An accumulating function -> e -- ^ A default element -> (i,i) -- ^ The bounds of the array -> [(i, e')] -- ^ List of associations -> a i e -- ^ Returns: the array accumArray f init (l,u) ies = unsafeAccumArray f init (l,u) [(index (l,u) i, e) | (i, e) <- ies] {-# INLINE (//) #-} {-| Takes an array and a list of pairs and returns an array identical to the left argument except that it has been updated by the associations in the right argument. (As with the array function, the indices in the association list must be unique for the updated elements to be defined.) For example, if m is a 1-origin, n by n matrix, then @m\/\/[((i,i), 0) | i \<- [1..n]]@ is the same matrix, except with the diagonal zeroed. For most array types, this operation is O(/n/) where /n/ is the size of the array. However, the 'Data.Array.Diff.DiffArray' type provides this operation with complexity linear in the number of updates. -} (//) :: (IArray a e, Ix i) => a i e -> [(i, e)] -> a i e arr // ies = case bounds arr of (l,u) -> unsafeReplace arr [(index (l,u) i, e) | (i, e) <- ies] {-# INLINE accum #-} {-| @accum f@ takes an array and an association list and accumulates pairs from the list into the array with the accumulating function @f@. Thus 'accumArray' can be defined using 'accum': > accumArray f z b = accum f (array b [(i, z) | i \<- range b]) -} accum :: (IArray a e, Ix i) => (e -> e' -> e) -> a i e -> [(i, e')] -> a i e accum f arr ies = case bounds arr of (l,u) -> unsafeAccum f arr [(index (l,u) i, e) | (i, e) <- ies] {-# INLINE amap #-} -- | Returns a new array derived from the original array by applying a -- function to each of the elements. amap :: (IArray a e', IArray a e, Ix i) => (e' -> e) -> a i e' -> a i e amap f arr = case bounds arr of (l,u) -> unsafeArray (l,u) [(i, f (unsafeAt arr i)) | i <- [0 .. rangeSize (l,u) - 1]] {-# INLINE ixmap #-} -- | Returns a new array derived from the original array by applying a -- function to each of the indices. ixmap :: (IArray a e, Ix i, Ix j) => (i,i) -> (i -> j) -> a j e -> a i e ixmap (l,u) f arr = unsafeArray (l,u) [(unsafeIndex (l,u) i, arr ! f i) | i <- range (l,u)] ----------------------------------------------------------------------------- -- Normal polymorphic arrays instance HasBounds Arr.Array where {-# INLINE bounds #-} bounds = Arr.bounds instance IArray Arr.Array e where {-# INLINE unsafeArray #-} unsafeArray = Arr.unsafeArray {-# INLINE unsafeAt #-} unsafeAt = Arr.unsafeAt {-# INLINE unsafeReplace #-} unsafeReplace = Arr.unsafeReplace {-# INLINE unsafeAccum #-} unsafeAccum = Arr.unsafeAccum {-# INLINE unsafeAccumArray #-} unsafeAccumArray = Arr.unsafeAccumArray ----------------------------------------------------------------------------- -- Flat unboxed arrays -- | Arrays with unboxed elements. Instances of 'IArray' are provided -- for 'UArray' with certain element types ('Int', 'Float', 'Char', -- etc.; see the 'UArray' class for a full list). -- -- A 'UArray' will generally be more efficient (in terms of both time -- and space) than the equivalent 'Data.Array.Array' with the same -- element type. 
However, 'UArray' is strict in its elements - so -- don\'t use 'UArray' if you require the non-strictness that -- 'Data.Array.Array' provides. -- -- Because the @IArray@ interface provides operations overloaded on -- the type of the array, it should be possible to just change the -- array type being used by a program from say @Array@ to @UArray@ to -- get the benefits of unboxed arrays (don\'t forget to import -- "Data.Array.Unboxed" instead of "Data.Array"). -- data UArray i e = UArray !i !i !ByteArray uArrayTc = mkTyCon "UArray"; instance (Typeable a, Typeable b) => Typeable (UArray a b) where { typeOf x = mkAppTy uArrayTc [typeOf ((undefined :: UArray a b -> a) x), typeOf ((undefined :: UArray a b -> b) x)] } instance HasBounds UArray where {-# INLINE bounds #-} bounds (UArray l u _) = (l,u) {-# INLINE unsafeArrayUArray #-} unsafeArrayUArray :: (MArray (STUArray s) e (ST s), Ix i) => (i,i) -> [(Int, e)] -> e -> ST s (UArray i e) unsafeArrayUArray (l,u) ies default_elem = do marr <- newArray (l,u) default_elem sequence_ [unsafeWrite marr i e | (i, e) <- ies] unsafeFreezeSTUArray marr unsafeFreezeSTUArray :: STUArray s i e -> ST s (UArray i e) unsafeFreezeSTUArray (STUArray l u marr) = do arr <- unsafeFreezeMutableByteArray marr return (UArray l u arr) {-# INLINE unsafeReplaceUArray #-} unsafeReplaceUArray :: (MArray (STUArray s) e (ST s), Ix i) => UArray i e -> [(Int, e)] -> ST s (UArray i e) unsafeReplaceUArray arr ies = do marr <- thawSTUArray arr sequence_ [unsafeWrite marr i e | (i, e) <- ies] unsafeFreezeSTUArray marr {-# INLINE unsafeAccumUArray #-} unsafeAccumUArray :: (MArray (STUArray s) e (ST s), Ix i) => (e -> e' -> e) -> UArray i e -> [(Int, e')] -> ST s (UArray i e) unsafeAccumUArray f arr ies = do marr <- thawSTUArray arr sequence_ [do old <- unsafeRead marr i unsafeWrite marr i (f old new) | (i, new) <- ies] unsafeFreezeSTUArray marr {-# INLINE unsafeAccumArrayUArray #-} unsafeAccumArrayUArray :: (MArray (STUArray s) e (ST s), Ix i) => (e -> e' -> e) -> e -> (i,i) -> [(Int, e')] -> ST s (UArray i e) unsafeAccumArrayUArray f init (l,u) ies = do marr <- newArray (l,u) init sequence_ [do old <- unsafeRead marr i unsafeWrite marr i (f old new) | (i, new) <- ies] unsafeFreezeSTUArray marr {-# INLINE eqUArray #-} eqUArray :: (IArray UArray e, Ix i, Eq e) => UArray i e -> UArray i e -> Bool eqUArray arr1@(UArray l1 u1 _) arr2@(UArray l2 u2 _) = if rangeSize (l1,u1) == 0 then rangeSize (l2,u2) == 0 else l1 == l2 && u1 == u2 && and [unsafeAt arr1 i == unsafeAt arr2 i | i <- [0 .. rangeSize (l1,u1) - 1]] {-# INLINE cmpUArray #-} cmpUArray :: (IArray UArray e, Ix i, Ord e) => UArray i e -> UArray i e -> Ordering cmpUArray arr1 arr2 = compare (assocs arr1) (assocs arr2) {-# INLINE cmpIntUArray #-} cmpIntUArray :: (IArray UArray e, Ord e) => UArray Int e -> UArray Int e -> Ordering cmpIntUArray arr1@(UArray l1 u1 _) arr2@(UArray l2 u2 _) = if rangeSize (l1,u1) == 0 then if rangeSize (l2,u2) == 0 then EQ else LT else if rangeSize (l2,u2) == 0 then GT else case compare l1 l2 of EQ -> foldr cmp (compare u1 u2) [0 .. 
rangeSize (l1, min u1 u2) - 1] other -> other where cmp i rest = case compare (unsafeAt arr1 i) (unsafeAt arr2 i) of EQ -> rest other -> other {-# RULES "cmpUArray/Int" cmpUArray = cmpIntUArray #-} ----------------------------------------------------------------------------- -- Showing IArrays {-# SPECIALISE showsIArray :: (IArray UArray e, Ix i, Show i, Show e) => Int -> UArray i e -> ShowS #-} showsIArray :: (IArray a e, Ix i, Show i, Show e) => Int -> a i e -> ShowS showsIArray p a = showParen (p > 9) $ showString "array " . shows (bounds a) . showChar ' ' . shows (assocs a) ----------------------------------------------------------------------------- -- Flat unboxed arrays: instances unsafeAtBArray :: Storable e => UArray i e -> Int -> e unsafeAtBArray (UArray _ _ arr) = readByteArray arr instance IArray UArray Bool where {-# INLINE unsafeArray #-} unsafeArray lu ies = runST (unsafeArrayUArray lu ies False) unsafeAt (UArray _ _ arr) i = testBit (readByteArray arr (bOOL_INDEX i)::BitSet) (bOOL_SUBINDEX i) {-# INLINE unsafeReplace #-} unsafeReplace arr ies = runST (unsafeReplaceUArray arr ies) {-# INLINE unsafeAccum #-} unsafeAccum f arr ies = runST (unsafeAccumUArray f arr ies) {-# INLINE unsafeAccumArray #-} unsafeAccumArray f init lu ies = runST (unsafeAccumArrayUArray f init lu ies) instance IArray UArray Char where {-# INLINE unsafeArray #-} unsafeArray lu ies = runST (unsafeArrayUArray lu ies '\0') {-# INLINE unsafeAt #-} unsafeAt = unsafeAtBArray {-# INLINE unsafeReplace #-} unsafeReplace arr ies = runST (unsafeReplaceUArray arr ies) {-# INLINE unsafeAccum #-} unsafeAccum f arr ies = runST (unsafeAccumUArray f arr ies) {-# INLINE unsafeAccumArray #-} unsafeAccumArray f init lu ies = runST (unsafeAccumArrayUArray f init lu ies) instance IArray UArray Int where {-# INLINE unsafeArray #-} unsafeArray lu ies = runST (unsafeArrayUArray lu ies 0) unsafeAt = unsafeAtBArray {-# INLINE unsafeReplace #-} unsafeReplace arr ies = runST (unsafeReplaceUArray arr ies) {-# INLINE unsafeAccum #-} unsafeAccum f arr ies = runST (unsafeAccumUArray f arr ies) {-# INLINE unsafeAccumArray #-} unsafeAccumArray f init lu ies = runST (unsafeAccumArrayUArray f init lu ies) instance IArray UArray (Ptr a) where {-# INLINE unsafeArray #-} unsafeArray lu ies = runST (unsafeArrayUArray lu ies nullPtr) {-# INLINE unsafeAt #-} unsafeAt = unsafeAtBArray {-# INLINE unsafeReplace #-} unsafeReplace arr ies = runST (unsafeReplaceUArray arr ies) {-# INLINE unsafeAccum #-} unsafeAccum f arr ies = runST (unsafeAccumUArray f arr ies) {-# INLINE unsafeAccumArray #-} unsafeAccumArray f init lu ies = runST (unsafeAccumArrayUArray f init lu ies) instance IArray UArray (FunPtr a) where {-# INLINE unsafeArray #-} unsafeArray lu ies = runST (unsafeArrayUArray lu ies nullFunPtr) unsafeAt = unsafeAtBArray {-# INLINE unsafeReplace #-} unsafeReplace arr ies = runST (unsafeReplaceUArray arr ies) {-# INLINE unsafeAccum #-} unsafeAccum f arr ies = runST (unsafeAccumUArray f arr ies) {-# INLINE unsafeAccumArray #-} unsafeAccumArray f init lu ies = runST (unsafeAccumArrayUArray f init lu ies) instance IArray UArray Float where {-# INLINE unsafeArray #-} unsafeArray lu ies = runST (unsafeArrayUArray lu ies 0) unsafeAt = unsafeAtBArray {-# INLINE unsafeReplace #-} unsafeReplace arr ies = runST (unsafeReplaceUArray arr ies) {-# INLINE unsafeAccum #-} unsafeAccum f arr ies = runST (unsafeAccumUArray f arr ies) {-# INLINE unsafeAccumArray #-} unsafeAccumArray f init lu ies = runST (unsafeAccumArrayUArray f init lu ies) instance IArray UArray 
Double where {-# INLINE unsafeArray #-} unsafeArray lu ies = runST (unsafeArrayUArray lu ies 0) unsafeAt = unsafeAtBArray {-# INLINE unsafeReplace #-} unsafeReplace arr ies = runST (unsafeReplaceUArray arr ies) {-# INLINE unsafeAccum #-} unsafeAccum f arr ies = runST (unsafeAccumUArray f arr ies) {-# INLINE unsafeAccumArray #-} unsafeAccumArray f init lu ies = runST (unsafeAccumArrayUArray f init lu ies) instance IArray UArray (StablePtr a) where {-# INLINE unsafeArray #-} unsafeArray lu ies = runST (unsafeArrayUArray lu ies nullStablePtr) unsafeAt = unsafeAtBArray {-# INLINE unsafeReplace #-} unsafeReplace arr ies = runST (unsafeReplaceUArray arr ies) {-# INLINE unsafeAccum #-} unsafeAccum f arr ies = runST (unsafeAccumUArray f arr ies) {-# INLINE unsafeAccumArray #-} unsafeAccumArray f init lu ies = runST (unsafeAccumArrayUArray f init lu ies) -- bogus StablePtr value for initialising a UArray of StablePtr. nullStablePtr = castPtrToStablePtr nullPtr instance IArray UArray Int8 where {-# INLINE unsafeArray #-} unsafeArray lu ies = runST (unsafeArrayUArray lu ies 0) unsafeAt = unsafeAtBArray {-# INLINE unsafeReplace #-} unsafeReplace arr ies = runST (unsafeReplaceUArray arr ies) {-# INLINE unsafeAccum #-} unsafeAccum f arr ies = runST (unsafeAccumUArray f arr ies) {-# INLINE unsafeAccumArray #-} unsafeAccumArray f init lu ies = runST (unsafeAccumArrayUArray f init lu ies) instance IArray UArray Int16 where {-# INLINE unsafeArray #-} unsafeArray lu ies = runST (unsafeArrayUArray lu ies 0) unsafeAt = unsafeAtBArray {-# INLINE unsafeReplace #-} unsafeReplace arr ies = runST (unsafeReplaceUArray arr ies) {-# INLINE unsafeAccum #-} unsafeAccum f arr ies = runST (unsafeAccumUArray f arr ies) {-# INLINE unsafeAccumArray #-} unsafeAccumArray f init lu ies = runST (unsafeAccumArrayUArray f init lu ies) instance IArray UArray Int32 where {-# INLINE unsafeArray #-} unsafeArray lu ies = runST (unsafeArrayUArray lu ies 0) unsafeAt = unsafeAtBArray {-# INLINE unsafeReplace #-} unsafeReplace arr ies = runST (unsafeReplaceUArray arr ies) {-# INLINE unsafeAccum #-} unsafeAccum f arr ies = runST (unsafeAccumUArray f arr ies) {-# INLINE unsafeAccumArray #-} unsafeAccumArray f init lu ies = runST (unsafeAccumArrayUArray f init lu ies) instance IArray UArray Int64 where {-# INLINE unsafeArray #-} unsafeArray lu ies = runST (unsafeArrayUArray lu ies 0) unsafeAt = unsafeAtBArray {-# INLINE unsafeReplace #-} unsafeReplace arr ies = runST (unsafeReplaceUArray arr ies) {-# INLINE unsafeAccum #-} unsafeAccum f arr ies = runST (unsafeAccumUArray f arr ies) {-# INLINE unsafeAccumArray #-} unsafeAccumArray f init lu ies = runST (unsafeAccumArrayUArray f init lu ies) instance IArray UArray Word8 where {-# INLINE unsafeArray #-} unsafeArray lu ies = runST (unsafeArrayUArray lu ies 0) unsafeAt = unsafeAtBArray {-# INLINE unsafeReplace #-} unsafeReplace arr ies = runST (unsafeReplaceUArray arr ies) {-# INLINE unsafeAccum #-} unsafeAccum f arr ies = runST (unsafeAccumUArray f arr ies) {-# INLINE unsafeAccumArray #-} unsafeAccumArray f init lu ies = runST (unsafeAccumArrayUArray f init lu ies) instance IArray UArray Word16 where {-# INLINE unsafeArray #-} unsafeArray lu ies = runST (unsafeArrayUArray lu ies 0) unsafeAt = unsafeAtBArray {-# INLINE unsafeReplace #-} unsafeReplace arr ies = runST (unsafeReplaceUArray arr ies) {-# INLINE unsafeAccum #-} unsafeAccum f arr ies = runST (unsafeAccumUArray f arr ies) {-# INLINE unsafeAccumArray #-} unsafeAccumArray f init lu ies = runST (unsafeAccumArrayUArray f init lu ies) instance 
IArray UArray Word32 where {-# INLINE unsafeArray #-} unsafeArray lu ies = runST (unsafeArrayUArray lu ies 0) unsafeAt = unsafeAtBArray {-# INLINE unsafeReplace #-} unsafeReplace arr ies = runST (unsafeReplaceUArray arr ies) {-# INLINE unsafeAccum #-} unsafeAccum f arr ies = runST (unsafeAccumUArray f arr ies) {-# INLINE unsafeAccumArray #-} unsafeAccumArray f init lu ies = runST (unsafeAccumArrayUArray f init lu ies) instance IArray UArray Word64 where {-# INLINE unsafeArray #-} unsafeArray lu ies = runST (unsafeArrayUArray lu ies 0) unsafeAt = unsafeAtBArray {-# INLINE unsafeReplace #-} unsafeReplace arr ies = runST (unsafeReplaceUArray arr ies) {-# INLINE unsafeAccum #-} unsafeAccum f arr ies = runST (unsafeAccumUArray f arr ies) {-# INLINE unsafeAccumArray #-} unsafeAccumArray f init lu ies = runST (unsafeAccumArrayUArray f init lu ies) instance Ix ix => Eq (UArray ix Bool) where (==) = eqUArray instance Ix ix => Eq (UArray ix Char) where (==) = eqUArray instance Ix ix => Eq (UArray ix Int) where (==) = eqUArray instance Ix ix => Eq (UArray ix (Ptr a)) where (==) = eqUArray instance Ix ix => Eq (UArray ix (FunPtr a)) where (==) = eqUArray instance Ix ix => Eq (UArray ix Float) where (==) = eqUArray instance Ix ix => Eq (UArray ix Double) where (==) = eqUArray instance Ix ix => Eq (UArray ix Int8) where (==) = eqUArray instance Ix ix => Eq (UArray ix Int16) where (==) = eqUArray instance Ix ix => Eq (UArray ix Int32) where (==) = eqUArray instance Ix ix => Eq (UArray ix Int64) where (==) = eqUArray instance Ix ix => Eq (UArray ix Word8) where (==) = eqUArray instance Ix ix => Eq (UArray ix Word16) where (==) = eqUArray instance Ix ix => Eq (UArray ix Word32) where (==) = eqUArray instance Ix ix => Eq (UArray ix Word64) where (==) = eqUArray instance Ix ix => Ord (UArray ix Bool) where compare = cmpUArray instance Ix ix => Ord (UArray ix Char) where compare = cmpUArray instance Ix ix => Ord (UArray ix Int) where compare = cmpUArray instance Ix ix => Ord (UArray ix (Ptr a)) where compare = cmpUArray instance Ix ix => Ord (UArray ix (FunPtr a)) where compare = cmpUArray instance Ix ix => Ord (UArray ix Float) where compare = cmpUArray instance Ix ix => Ord (UArray ix Double) where compare = cmpUArray instance Ix ix => Ord (UArray ix Int8) where compare = cmpUArray instance Ix ix => Ord (UArray ix Int16) where compare = cmpUArray instance Ix ix => Ord (UArray ix Int32) where compare = cmpUArray instance Ix ix => Ord (UArray ix Int64) where compare = cmpUArray instance Ix ix => Ord (UArray ix Word8) where compare = cmpUArray instance Ix ix => Ord (UArray ix Word16) where compare = cmpUArray instance Ix ix => Ord (UArray ix Word32) where compare = cmpUArray instance Ix ix => Ord (UArray ix Word64) where compare = cmpUArray instance (Ix ix, Show ix) => Show (UArray ix Bool) where showsPrec = showsIArray instance (Ix ix, Show ix) => Show (UArray ix Char) where showsPrec = showsIArray instance (Ix ix, Show ix) => Show (UArray ix Int) where showsPrec = showsIArray instance (Ix ix, Show ix) => Show (UArray ix Float) where showsPrec = showsIArray instance (Ix ix, Show ix) => Show (UArray ix Double) where showsPrec = showsIArray instance (Ix ix, Show ix) => Show (UArray ix Int8) where showsPrec = showsIArray instance (Ix ix, Show ix) => Show (UArray ix Int16) where showsPrec = showsIArray instance (Ix ix, Show ix) => Show (UArray ix Int32) where showsPrec = showsIArray instance (Ix ix, Show ix) => Show (UArray ix Int64) where showsPrec = showsIArray instance (Ix ix, Show ix) => Show (UArray ix 
Word8) where showsPrec = showsIArray instance (Ix ix, Show ix) => Show (UArray ix Word16) where showsPrec = showsIArray instance (Ix ix, Show ix) => Show (UArray ix Word32) where showsPrec = showsIArray instance (Ix ix, Show ix) => Show (UArray ix Word64) where showsPrec = showsIArray ----------------------------------------------------------------------------- -- Mutable arrays {-# NOINLINE arrEleBottom #-} arrEleBottom :: a arrEleBottom = error "MArray: undefined array element" {-| Class of mutable array types. An array type has the form @(a i e)@ where @a@ is the array type constructor (kind @* -> * -> *@), @i@ is the index type (a member of the class 'Ix'), and @e@ is the element type. The @MArray@ class is parameterised over both @a@ and @e@ (so that instances specialised to certain element types can be defined, in the same way as for 'IArray'), and also over the type of the monad, @m@, in which the mutable array will be manipulated. -} class (HasBounds a, Monad m) => MArray a e m where -- | Builds a new array, with every element initialised to the supplied -- value. newArray :: Ix i => (i,i) -> e -> m (a i e) -- | Builds a new array, with every element initialised to undefined. newArray_ :: Ix i => (i,i) -> m (a i e) unsafeRead :: Ix i => a i e -> Int -> m e unsafeWrite :: Ix i => a i e -> Int -> e -> m () newArray (l,u) init = do marr <- newArray_ (l,u) sequence_ [unsafeWrite marr i init | i <- [0 .. rangeSize (l,u) - 1]] return marr newArray_ (l,u) = newArray (l,u) arrEleBottom -- newArray takes an initialiser which all elements of -- the newly created array are initialised to. newArray_ takes -- no initialiser, it is assumed that the array is initialised with -- "undefined" values. -- why not omit newArray_? Because in the unboxed array case we would -- like to omit the initialisation altogether if possible. We can't do -- this for boxed arrays, because the elements must all have valid values -- at all times in case of garbage collection. -- why not omit newArray? Because in the boxed case, we can omit the -- default initialisation with undefined values if we *do* know the -- initial value and it is constant for all elements. {-# INLINE newListArray #-} -- | Constructs a mutable array from a list of initial elements. -- The list gives the elements of the array in ascending order -- beginning with the lowest index. newListArray :: (MArray a e m, Ix i) => (i,i) -> [e] -> m (a i e) newListArray (l,u) es = do marr <- newArray_ (l,u) let n = rangeSize (l,u) let fillFromList i xs | i == n = return () | otherwise = case xs of [] -> return () y:ys -> unsafeWrite marr i y >> fillFromList (i+1) ys fillFromList 0 es return marr {-# INLINE readArray #-} -- | Read an element from a mutable array readArray :: (MArray a e m, Ix i) => a i e -> i -> m e readArray marr i = case bounds marr of (l,u) -> unsafeRead marr (index (l,u) i) {-# INLINE writeArray #-} -- | Write an element in a mutable array writeArray :: (MArray a e m, Ix i) => a i e -> i -> e -> m () writeArray marr i e = case bounds marr of (l,u) -> unsafeWrite marr (index (l,u) i) e {-# INLINE getElems #-} -- | Return a list of all the elements of a mutable array getElems :: (MArray a e m, Ix i) => a i e -> m [e] getElems marr = case bounds marr of (l,u) -> sequence [unsafeRead marr i | i <- [0 .. rangeSize (l,u) - 1]] {-# INLINE getAssocs #-} -- | Return a list of all the associations of a mutable array, in -- index order. 
getAssocs :: (MArray a e m, Ix i) => a i e -> m [(i, e)] getAssocs marr = case bounds marr of (l,u) -> sequence [do e <- unsafeRead marr (index (l,u) i); return (i,e) | i <- range (l,u)] {-# INLINE mapArray #-} -- | Constructs a new array derived from the original array by applying a -- function to each of the elements. mapArray :: (MArray a e' m, MArray a e m, Ix i) => (e' -> e) -> a i e' -> m (a i e) mapArray f marr = case bounds marr of (l,u) -> do marr' <- newArray_ (l,u) sequence_ [do e <- unsafeRead marr i unsafeWrite marr' i (f e) | i <- [0 .. rangeSize (l,u) - 1]] return marr' {-# INLINE mapIndices #-} -- | Constructs a new array derived from the original array by applying a -- function to each of the indices. mapIndices :: (MArray a e m, Ix i, Ix j) => (i,i) -> (i -> j) -> a j e -> m (a i e) mapIndices (l,u) f marr = do marr' <- newArray_ (l,u) sequence_ [do e <- readArray marr (f i) unsafeWrite marr' (unsafeIndex (l,u) i) e | i <- range (l,u)] return marr' ----------------------------------------------------------------------------- -- Polymorphic non-strict mutable arrays (ST monad) instance HasBounds (STArray s) where {-# INLINE bounds #-} bounds = ArrST.boundsSTArray instance MArray (STArray s) e (ST s) where {-# INLINE newArray #-} newArray = ArrST.newSTArray {-# INLINE unsafeRead #-} unsafeRead = ArrST.unsafeReadSTArray {-# INLINE unsafeWrite #-} unsafeWrite = ArrST.unsafeWriteSTArray ----------------------------------------------------------------------------- -- Typeable instance for STArray sTArrayTc :: TyCon sTArrayTc = mkTyCon "STArray" instance (Typeable a, Typeable b, Typeable c) => Typeable (STArray a b c) where typeOf a = mkAppTy sTArrayTc [typeOf ((undefined :: STArray a b c -> a) a), typeOf ((undefined :: STArray a b c -> b) a), typeOf ((undefined :: STArray a b c -> c) a)] ----------------------------------------------------------------------------- -- Flat unboxed mutable arrays (ST monad) -- | A mutable array with unboxed elements, that can be manipulated in -- the 'ST' monad. The type arguments are as follows: -- -- * @s@: the state variable argument for the 'ST' type -- -- * @i@: the index type of the array (should be an instance of @Ix@) -- -- * @e@: the element type of the array. Only certain element types -- are supported. -- -- An 'STUArray' will generally be more efficient (in terms of both time -- and space) than the equivalent boxed version ('STArray') with the same -- element type. However, 'STUArray' is strict in its elements - so -- don\'t use 'STUArray' if you require the non-strictness that -- 'STArray' provides. 
data STUArray s i a = STUArray !i !i !(MutableByteArray s) stUArrayTc = mkTyCon "STUArray"; instance (Typeable a, Typeable b, Typeable c) => Typeable (STUArray a b c) where { typeOf a = mkAppTy stUArrayTc [typeOf ((undefined :: STUArray a b c -> a) a), typeOf ((undefined :: STUArray a b c -> b) a), typeOf ((undefined :: STUArray a b c -> c) a)] } instance HasBounds (STUArray s) where {-# INLINE bounds #-} bounds (STUArray l u _) = (l,u) newMBArray_ :: (Ix i, Storable e) => (i,i) -> ST s (STUArray s i e) newMBArray_ = makeArray undefined where makeArray :: (Ix i, Storable e) => e -> (i,i) -> ST s (STUArray s i e) makeArray dummy (l,u) = do marr <- newMutableByteArray (rangeSize (l,u) * sizeOf dummy) return (STUArray l u marr) unsafeReadMBArray :: Storable e => STUArray s i e -> Int -> ST s e unsafeReadMBArray (STUArray _ _ marr) = readMutableByteArray marr unsafeWriteMBArray :: Storable e => STUArray s i e -> Int -> e -> ST s () unsafeWriteMBArray (STUArray _ _ marr) = writeMutableByteArray marr instance MArray (STUArray s) Bool (ST s) where newArray_ (l,u) = do marr <- newMutableByteArray (bOOL_SCALE (rangeSize (l,u))) return (STUArray l u marr) unsafeRead (STUArray _ _ marr) i = do let ix = bOOL_INDEX i bit = bOOL_SUBINDEX i w <- readMutableByteArray marr ix return (testBit (w::BitSet) bit) unsafeWrite (STUArray _ _ marr) i e = do let ix = bOOL_INDEX i bit = bOOL_SUBINDEX i w <- readMutableByteArray marr ix writeMutableByteArray marr ix (if e then setBit (w::BitSet) bit else clearBit w bit) instance MArray (STUArray s) Char (ST s) where newArray_ = newMBArray_ unsafeRead = unsafeReadMBArray unsafeWrite = unsafeWriteMBArray instance MArray (STUArray s) Int (ST s) where newArray_ = newMBArray_ unsafeRead = unsafeReadMBArray unsafeWrite = unsafeWriteMBArray instance MArray (STUArray s) (Ptr a) (ST s) where newArray_ = newMBArray_ unsafeRead = unsafeReadMBArray unsafeWrite = unsafeWriteMBArray instance MArray (STUArray s) (FunPtr a) (ST s) where newArray_ = newMBArray_ unsafeRead = unsafeReadMBArray unsafeWrite = unsafeWriteMBArray instance MArray (STUArray s) Float (ST s) where newArray_ = newMBArray_ unsafeRead = unsafeReadMBArray unsafeWrite = unsafeWriteMBArray instance MArray (STUArray s) Double (ST s) where newArray_ = newMBArray_ unsafeRead = unsafeReadMBArray unsafeWrite = unsafeWriteMBArray instance MArray (STUArray s) (StablePtr a) (ST s) where newArray_ = newMBArray_ unsafeRead = unsafeReadMBArray unsafeWrite = unsafeWriteMBArray instance MArray (STUArray s) Int8 (ST s) where newArray_ = newMBArray_ unsafeRead = unsafeReadMBArray unsafeWrite = unsafeWriteMBArray instance MArray (STUArray s) Int16 (ST s) where newArray_ = newMBArray_ unsafeRead = unsafeReadMBArray unsafeWrite = unsafeWriteMBArray instance MArray (STUArray s) Int32 (ST s) where newArray_ = newMBArray_ unsafeRead = unsafeReadMBArray unsafeWrite = unsafeWriteMBArray instance MArray (STUArray s) Int64 (ST s) where newArray_ = newMBArray_ unsafeRead = unsafeReadMBArray unsafeWrite = unsafeWriteMBArray instance MArray (STUArray s) Word8 (ST s) where newArray_ = newMBArray_ unsafeRead = unsafeReadMBArray unsafeWrite = unsafeWriteMBArray instance MArray (STUArray s) Word16 (ST s) where newArray_ = newMBArray_ unsafeRead = unsafeReadMBArray unsafeWrite = unsafeWriteMBArray instance MArray (STUArray s) Word32 (ST s) where newArray_ = newMBArray_ unsafeRead = unsafeReadMBArray unsafeWrite = unsafeWriteMBArray instance MArray (STUArray s) Word64 (ST s) where newArray_ = newMBArray_ unsafeRead = unsafeReadMBArray unsafeWrite = 
unsafeWriteMBArray type BitSet = Word8 bitSetSize = bitSize (0::BitSet) bOOL_SCALE :: Int -> Int bOOL_SCALE n = (n + bitSetSize - 1) `div` bitSetSize bOOL_INDEX :: Int -> Int bOOL_INDEX i = i `div` bitSetSize bOOL_SUBINDEX :: Int -> Int bOOL_SUBINDEX i = i `mod` bitSetSize ----------------------------------------------------------------------------- -- Freezing -- | Converts a mutable array (any instance of 'MArray') to an -- immutable array (any instance of 'IArray') by taking a complete -- copy of it. freeze :: (Ix i, MArray a e m, IArray b e) => a i e -> m (b i e) freeze marr = case bounds marr of (l,u) -> do ies <- sequence [do e <- unsafeRead marr i; return (i,e) | i <- [0 .. rangeSize (l,u) - 1]] return (unsafeArray (l,u) ies) -- In-place conversion of mutable arrays to immutable ones places -- a proof obligation on the user: no other parts of your code can -- have a reference to the array at the point where you unsafely -- freeze it (and, subsequently mutate it, I suspect). {- | Converts a mutable array into an immutable array. The implementation may either simply cast the array from one type to the other without copying the array, or it may take a full copy of the array. Note that because the array is possibly not copied, any subsequent modifications made to the mutable version of the array may be shared with the immutable version. It is safe to use, therefore, if the mutable version is never modified after the freeze operation. The non-copying implementation is supported between certain pairs of array types only; one constraint is that the array types must have identical representations. In GHC, the following pairs of array types have a non-copying O(1) implementation of 'unsafeFreeze'. Because the optimised versions are enabled by specialisations, you will need to compile with optimisation (-O) to get them. * 'Data.Array.IO.IOUArray' -> 'Data.Array.Unboxed.UArray' * 'Data.Array.ST.STUArray' -> 'Data.Array.Unboxed.UArray' * 'Data.Array.IO.IOArray' -> 'Data.Array.Array' * 'Data.Array.ST.STArray' -> 'Data.Array.Array' -} {-# INLINE unsafeFreeze #-} unsafeFreeze :: (Ix i, MArray a e m, IArray b e) => a i e -> m (b i e) unsafeFreeze = freeze {-# RULES "unsafeFreeze/STArray" unsafeFreeze = ArrST.unsafeFreezeSTArray "unsafeFreeze/STUArray" unsafeFreeze = unsafeFreezeSTUArray #-} ----------------------------------------------------------------------------- -- Thawing -- | Converts an immutable array (any instance of 'IArray') into a -- mutable array (any instance of 'MArray') by taking a complete copy -- of it. thaw :: (Ix i, IArray a e, MArray b e m) => a i e -> m (b i e) thaw arr = case bounds arr of (l,u) -> do marr <- newArray_ (l,u) sequence_ [unsafeWrite marr i (unsafeAt arr i) | i <- [0 .. rangeSize (l,u) - 1]] return marr thawSTUArray :: Ix i => UArray i e -> ST s (STUArray s i e) thawSTUArray (UArray l u arr) = do marr <- thawByteArray arr return (STUArray l u marr) -- In-place conversion of immutable arrays to mutable ones places -- a proof obligation on the user: no other parts of your code can -- have a reference to the array at the point where you unsafely -- thaw it (and, subsequently mutate it, I suspect). {- | Converts an immutable array into a mutable array. The implementation may either simply cast the array from one type to the other without copying the array, or it may take a full copy of the array. Note that because the array is possibly not copied, any subsequent modifications made to the mutable version of the array may be shared with the immutable version.
It is safe to use, therefore, if the immutable version is never referenced again. The non-copying implementation is supported between certain pairs of array types only; one constraint is that the array types must have identical representations. In GHC, the following pairs of array types have a non-copying O(1) implementation of 'unsafeThaw'. Because the optimised versions are enabled by specialisations, you will need to compile with optimisation (-O) to get them. * 'Data.Array.Unboxed.UArray' -> 'Data.Array.IO.IOUArray' * 'Data.Array.Unboxed.UArray' -> 'Data.Array.ST.STUArray' * 'Data.Array.Array' -> 'Data.Array.IO.IOArray' * 'Data.Array.Array' -> 'Data.Array.ST.STArray' -} {-# INLINE unsafeThaw #-} unsafeThaw :: (Ix i, IArray a e, MArray b e m) => a i e -> m (b i e) unsafeThaw = thaw -- | Casts an 'STUArray' with one element type into one with a -- different element type. All the elements of the resulting array -- are undefined (unless you know what you\'re doing...). castSTUArray :: STUArray s ix a -> ST s (STUArray s ix b) castSTUArray (STUArray l u marr) = return (STUArray l u marr)
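-- ---------------------------------------------------------------------------
-- Usage sketch (not part of the original module): a minimal example of the
-- MArray/IArray round trip documented above -- fill an unboxed array
-- imperatively in the ST monad with 'newArray'/'writeArray', then turn it
-- into an immutable 'UArray' with 'freeze'.  The snippet is kept in comments
-- so it does not disturb the module; it assumes the public wrapper modules
-- (Data.Array.ST, Data.Array.Unboxed), which re-export these names, are
-- available, and the helper 'squares' is hypothetical, for illustration only.
--
-- import Control.Monad (forM_)
-- import Control.Monad.ST (ST, runST)
-- import Data.Array.ST (STUArray, freeze, newArray, writeArray)
-- import Data.Array.Unboxed (UArray)
--
-- squares :: Int -> UArray Int Int
-- squares n = runST build
--   where
--     build :: ST s (UArray Int Int)
--     build = do
--       marr <- newArray (0, n) 0 :: ST s (STUArray s Int Int)
--       forM_ [0 .. n] $ \i -> writeArray marr i (i * i)
--       freeze marr  -- complete copy; 'unsafeFreeze' would be O(1) here and
--                    -- is safe because marr is never written to afterwards
-- ---------------------------------------------------------------------------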
OS2World/DEV-UTIL-HUGS
libraries/Data/Array/Base.hs
bsd-3-clause
44,964
30
18
10,907
11,180
5,842
5,338
-1
-1
{-# LANGUAGE NPlusKPatterns, TypeOperators #-} module Chap03 where import Prelude hiding (foldr, sum, product, length, round, reverse) import GHC.Int -- | Ex 3.1 -- -- FX = 1 + N * X -- -- <[zero, plus] . Foutl, [zero, succ . outr] . Foutr> = [zeross, pluss] を示す. -- foldr (c, f) = u where u [] = c u (x:xs) = f (x, u xs) sum :: Num a => [a] -> a sum = foldr (zero, plus) where zero = 0 plus (x, y) = x + y length = foldr (zero, succ . outr) where zero = 0 outr (x, y) = y succ n = 1 + n average = div' . pair (sum, length) where pair (f, g) x = (f x, g x) div' (x, y) = x / y avg = div' . foldr (zeross, pluss) where zeross = (0, 0) pluss (a, (b, n)) = (a + b, n + 1) div' (x, y) = x / y -- -- <[zero, plus] . Foutl, [zero, succ . outr] . Foutr> -- == -- <[zero, plus] . (id + id * outl), [zero, succ . outr] . (id + id * outr)> -- == -- <[zero, plus . (id, outl)], [zero, succ . outr . (id * outr)]> -- == -- [<zero, zero>, <plus . (id * outl), succ . outr . (id * outr)>] -- -- 第2成分をポイントワイズに解くと -- <plus . (id * outl), succ . outr . (id * outr)> (a, (b, n)) -- == -- (plus . (id * outl) (a, (b, n)), succ . outr . (id * outr) (a, (b, n))) -- == -- (plus (a, b), succ (outr (a, n))) -- == -- (a+b, 1+n) -- -- | Ex 3.2 -- -- <Foutl, Foutr> : FA * FB <- F(A * B) -- -- <Foutl, Foutr> -- FA * FB <----------------- F(A * B) -- | | -- Fh * Fk | | F(h * k) -- | | -- v v -- FC * FD <----------------- F(C * D) -- <Foutl, Foutr> -- -- -- (Fh * Fk) . <Foutl, Foutr> -- == {- 積の吸収則 (2.8) -} -- <Fh . Foutl, Fk . Foutr> -- == {- 下図 -} -- <Foutl . F(h * k), Foutr . F(h * k)> -- == {- 積の融合則 (2.6) -} -- <Foutl, Foutr> . F(h * k) -- -- Fh . Foutl = Foutl . F(h * k) Fk . Foutr = Foutr . F(h * k) -- -- Foutl Foutr -- FA <----- F(A * B) FB <----- F(A * B) -- | | | | -- Fh| | F(h * k) Fk| | F(h * k) -- v v v v -- FC <----- F(C * D) FD <----- F(C * D) -- Foutl Foutr -- -- | Ex 3.3 -- naiveSteep [] = True naiveSteep (a:x) = a > sum x && naiveSteep x -- [nil, cons] -- TB <-------- 1 + B * TB -- /| | -- / | | -- steep/ | | -- / |(|c,f|) |1 + 1 * <steep, sum> -- / | = <steep,sum> | -- / | | -- v | | -- A <- A * B <-------- 1 + B * (A * B) -- outl [c, f] -- steep = outl . foldr (c, f) where outl (x, _) = x c = (True, 0) f (a, (b, x)) = (a > x && b, a + x) test_naiveSteep = naiveSteep $ map (2^) ([5000,4999..0] :: [Integer]) test_steep = steep $ map (2^) ([5000,4999..0] :: [Integer]) -- | Ex 3.4 -- -- a -- T <------- FT -- | | -- f| |F<f, (|h|)> -- | | -- v v -- A <------- F(A * B) -- g -- -- a -- T <------------------------- FT -- /|\ /|\ -- / | \ / | \ -- / | \ / | \ -- f/ | \ Ff/ | \ -- / (|k|) \ (|h|) / F(|k|) \F(|h|) -- / = \ / = \ -- / <f,(|h|)> \ / F<f,(|h|)> \ -- v v v v v v -- A <--- A * B ---> B FA <- F(A * B) -> FB -- ^ outl ^ ^ Foutl / Foutr / -- \ \ \__________________________/ -- \ \ h / -- \ \_____________________ / -- \ k / -- \___________________________/ -- g -- -- k の定義 -- k : A * B <- F(A * B) -- k = <g, h . Foutr> -- -- cata の普遍性から -- (|k|) = <f, (|h|)> == <f, (|h|)> . a = k . F<f, (|h|)> -- -- k . F<f, (|h|)> -- == {- k の定義 -} -- <g, h . Foutr> . F<f, (|h|)> -- == {- 対の融合 -} -- <g . F<f, (|h|)>, h . Foutr . F<f, (|h|)>>> -- == {- 関手則および対の消去則 -} -- <g . F<f, (|h|)>, h . F(|h|)> -- == {- 所与の図式および cata -} -- <f . a, (|h|) . a> -- == {- 対の融合 -} -- <f, (|h|)> . 
a -- -- | Ex 3.5 -- data Tree a = Null | Node (Tree a, a, Tree a) deriving Show -- -- balanced -- x 1/3 <= n/(n+m+1) <= 2/3 -- o n + 1 <= 3(m + 1) /\ m + 1 <= 3(n + 1) -- -- [null, node] -- Ta <------------- 1 + Ta * a * Ta -- | | -- u| | 1 + u * 1 * u -- v v -- X <------------- 1 + X * a * X -- [c, f] foldt :: (a, (a, b, a) -> a) -> Tree b -> a foldt (c, f) = u where u Null = c u (Node (l, x, r)) = f (u l, x, u r) size :: Fractional b => Tree a -> b size = foldt (c, f) where c = 1 -- point! f (n, a, m) = n + 1 + m naiveBalanced :: Tree a -> Bool naiveBalanced Null = True naiveBalanced (Node (x, a, y)) = balanced x && balanced y && 1/3 <= v && v <= 2/3 where (n, m) = (size x, size y) v = n / (n + m + 1) balanced :: Tree a -> Bool balanced = outl . foldt (c, f) where outl (x, _) = x c = (True, 1) f ((b, n), a, (c, m)) = (b && c && 1/3 <= v && v <= 2/3, n + 1 + m) where v = n / (n + m + 1) -- | Ex 3.6 -- foldn (c, f) 0 = c foldn (c, f) (n+1) = f (foldn (c, f) n) preds = outl . foldn (c, f) where outl (x, _) = x c = ([], 0) f (ns, n) = (n+1:ns, n+1) -- | Ex 3.7 -- product = foldr (c, f) where c = 1 f (n, m) = n * m -- fact = product . preds fact = outl . foldn (c, f) where outl (x, _) = x c = (1, 0) f (m, n) = ((n+1)*m, n+1) -- | Ex 3.8 -- -- a a -- T <-------------- FT T <-------------- FT -- | /|\ | /|\ -- | Ff/ | \Fg | Ff/ | \Fg -- | / | \ | / | \ -- | v | v | v | v -- f| FA F<f,g> FB g| FA F<f,g> FB -- | ^ | ^ | ^ | ^ -- | Foutl\ | /Foutr | Foutl\ | /Foutr -- | \ | / | \ | / -- | \v/ | \v/ -- A <----------- F(A x B) A <----------- F(A x B) -- h k -- 上図から -- -- a -- T <-------------------- FT -- /|\ /|\ -- f/ | \g Ff/ | \Fg -- / | \ / | \ -- v | v v | v -- A <f,g> B FA F<f,g> FB -- ^ | ^ ^ | ^ -- \ | / \ | / -- outl\ | /outr Foutl\ | /Foutr -- \v/ \v/ -- A x B <--------------- F(A x B) -- <h,k> -- -- <f,g> = (|<h,k>|) -- -- バナナスプリット則 -- f := (|f|) g := (|g|) -- h := f . Foutl k := g . Foutr -- と置き換えると -- <(|f|), (|g|)> = (|<f . Foutl, g . Foutr>|) -- -- Ex 3.4 -- f := f g := (|h|) -- h := g k := h . Foutr -- と置き換えると -- <f, (|h|)> = (|<g, h . Foutr>|) -- -- | Ex 3.9 -- tri f = foldr (c, g) where c = [] g (x, xs) = x:map f xs slice = tri tail -- | Ex 3.10 -- bhp = prod . tri sqr prod = foldr (1, mul) mul (x, y) = x * y sqr x = x^2 -- bhp -- == {- bhp の定義 -} -- prod . tri sqr -- == {- prod の定義 -} -- (|1, mul|) . tri sqr -- == {- 後述 -} -- (|1, mul . (id x sqr)|) -- -- -- 上記の後述とあるステップは, -- -- ホーナー則 -- (|c, g|) . tri f = (|c, g . (id x f)|) <= f . c = c かつ f . g = g . (f x f) -- -- (|1, mul|) . tri sqr のケースを考える -- c := 1, g := mul, f := sqr と置くと -- sqr . 1 = 1 これは sqr 1 = 1 なので真 -- sqr . mul = mul . (sqr x sqr) これも sqr (mul (a, b)) = sqr (a * b) = sqr a * sqr b = mul (sqr x sqr) (a, b) なので真 -- よって, (|1, mul|) . tri sqr = (|1, mul . (id x sqr)|) とできる bhp' = foldr (c, f) where c = 1 f (a, b) = mul (a, sqr b) -- | Ex 3.11 -- -- tri の定義 -- tri :: (a -> a) -> Ta -> Ta -- tri f = (|a . F(id, Tf)|) -- -- 命題 -- (|g|) . tri f = (|g . F(id, h)|) <= h . g = g . F(f, h) -- -- これは tri f = (|a . F(id, Tf)|) と cata で書けるので融合則(2.12) -- -- h . (|f|) = (|g|) <= h . f = g . Fh -- -- に当てはめることができる. -- 分かりやすくするために命題の tri f を置き換えて -- -- (|g|) . (|a . F(id, Tf)|) = (|g . F(id, h)|) -- ~~~~~ ~~~~~~~~~~~~~ ~~~~~~~~~~~~ -- h f g -- -- と対応を取ればよいので満たすべき融合則の条件式 -- h . f = g . Fh (これは融合則 (2.12) の条件式) -- は -- h . f = g . F(id, h) -- であることに注意すると -- -- (|g|) . a . F(id, Tf) = g . F(id, h) . F(id, (|g|)) -- -- となる.つまりこの条件を満たせば命題の主張の結言を言える. -- したがって,この条件式が命題の前言と同じであることを証明すれば良い? -- -- | Ex 3.12 -- -- tri f [a0, a1 .. 
an] = [a0, f a1 .. f^n an] -- -- なので多項式評価は -- -- ( (|[0, (+)]|) . tri f ) [a0, a1 .. an] = a0 + f a1 + f^2 a2 + .. f^n an -- = a0 + (*x) a1 + ((*x) . (*x)) a2 ... ((*x) . (*x) .. (*x)) an -- = a0 + a1*x + a2*x*x + a3*x*x*x + .. an*x*x..*x -- = a0 + (a1 + (a2 + .. (an + 0) * x) * x) * x -- -- F はリストの台関手なので F(a,x) = 1 + a * x -- g は上記の通り [0, (+)] になる. -- -- h . [0, (+)] = [0, (+)] . F(f, h) -- == -- h . [0, (+)] = [0, (+)] . (1 + f * h) -- == -- h 0 = 0 /\ h . (+) = (+) . (f * h) -- -- h 0 = 0 -- h (x + y) = f x + h y -- -- h x = h (x + 0) = f x + h 0 = f x -- ゆえに h = f しか取りえない. -- -- h = f とすれば, -- (|0, (+)|) . tri f = (|[0, (+)] . F(id, f)|) = (|[0, (+)] . (id + id * f)|) = (|0, (+) . (id * f) |) honer f = foldr (0, \(a, b) -> a + f b) -- | Ex 3.13 -- -- \sum_{i=0}^{n-1} i*a_i = 0*a0 + 1*a1 + 2*a2 + .. * i*ai + .. + (n-1)*a_{n-1} -- -- ws = sum . listr mul . tri (succ * id) . listr <zero, id> -- naiveWs = sum . map mul . tri (cross ((1+), id)) . map (pair (const 0, id)) pair (f, g) x = (f x, g x) cross (f, g) (x, y) = (f x, g y) outl (x, y) = x outr (x, y) = y plus (x, y) = x + y -- ここで tri (cross ((1+), id)) . map (pair (const 0, id)) が何をするか見ておく -- -- tri (cross ((1+), id)) . map (pair (const 0, id)) $ [10,20,30,40,50] -- => [(0,10),(1,20),(2,30),(3,40),(4,50)] -- -- ws -- == {- 仕様 -} -- sum . listr mul . tri (succ * id) . listr <zero, id> -- == {- sum = (|zero, plus|) -} -- (|zero, plus|) . listr mul . tri (succ * id) . listr <zero, id> -- == {- 型関手融合 (2.14) : (|h|) . Tg = (|h . F(g, id)|) -} -- (|[zero, plus] . F(mul, id)|) . tri (succ * id) . listr <zero, id> -- == {- F はリストの台関手 F(a,b) = 1 + a * b -} -- (|zero, plus . (mul * id)|) . tri (succ * id) . listr <zero, id> -- == {- ヒント: ペアの二番目は ws に与えられた引数の sum に当たる -} -- outl . <(|zero, plus . (mul * id)|), (|zero, plus . (outr * id)|)> . tri (succ * id) . listr <zero, id> -- == {- バナナスプリット則 : <(|h|),(|k|)> == (|<h . Foutl, k . Foutr>|) -} -- outl . (|<[zero, plus . (mul * id)] . Foutl, [zero, plus . (outr * id)] . Foutr >|) . tri (succ * id) . listr <zero, id> -- == {- F はリストの台関手 -} -- outl . (|<[zero, plus . (mul * id)] . (id + id * outl), [zero, plus . (outr * id)] . (id + id * outr)>|) -- . tri (succ * id) . listr <zero, id> -- == {- 余積関手の融合則 [f,g] . (h + k) = [f . h, g . k] -} -- outl . (|<[zero, plus . (mul * outl)], [zero, plus . (outr * outr)]>|) . tri (succ * id) . listr <zero, id> -- == {- Ex 2.27 交換則 : <[f,g],[h,k]> == [<f,h>,<g,k>] -} -- outl . (|<zero, zero>, <plus . (mul * outl), plus . (outr * outr)>|) . tri (succ * id) . listr <zero, id> -- == {- h = <plus, outr> としてホーナー則を適用 : (|g|) . tri f = (|g . F(id, h)|) <= h . g = g . F(f, h) -} -- outl . (|[<zero, zero>, <plus . (mul * outl), plus . (outr * outr)>] . F(id, <plus, outr>)|) . listr <zero, id> -- == {- 型関手融合 -} -- outl . (|[<zero, zero>, <plus . (mul * outl), plus . (outr * outr)>] . F(id, <plus, outr>) . F(<zero, id>, id)|) -- == {- 関手則 -} -- outl . (|[<zero, zero>, <plus . (mul * outl), plus . (outr * outr)>] . F(<zero, id>, <plus, outr>)|) -- == {- F はリストの台関手 F(a,b) = 1 + a * b -} -- outl . (|[<zero, zero>, <plus . (mul * outl), plus . (outr * outr)>] . (1 + <zero, id> * <plus, outr>)|) -- == {- 余積関手の融合則 [f,g] . (h + k) = [f . h, g . k] -} -- outl . (|<zero, zero>, <plus . (mul * outl), plus . (outr * outr)> . (<zero, id> * <plus, outr>)|) -- == {- 後述 -} -- outl . (|<zero, zero>, k|) where k (x, (y, z)) = (y+z, x+z) -- 上記においてホーナー則の適用については <plus, outr> . g = g . F((succ * id), <plus, outr>) を示す必要がある. -- f = (succ * id), g = [<zero, zero>, <plus . (mul * outl), plus . 
(outr * outr)>], h = <plus, outr> -- -- [(0,0),<plus . (mul * outl), plus . (outr * outr)>] -- (0,0) + (a*b+c,b+d) <------------------------------------------------------- 1 + ((a, b), (c, d)) -- | | -- <plus, outr> | | F((succ * id), <plus, outr>) -- | | = id + (succ * id) * <plus, outr> -- v v -- (0,0) + (a*b+c+b+d, b+d) <------------------------------------------------------ 1 + ((a+1, b), (c+d, d)) -- = [(0,0),<plus . (mul * outl), plus . (outr * outr)>] -- (0,0) + ((a+1)*b+c+d, b+d) -- ~ (a*b+b+c+d, b+d) -- -- 可換になる. よってホーナー則が適用できる. -- -- 後述の箇所の論証 -- -- <plus . (mul * outl), plus . (outr * outr)> . (<zero, id> * <plus, outr>) $ (x, (y, z)) -- == -- <plus . (mul * outl), plus . (outr * outr)> $ (<zero, id> * <plus, outr>) (x, (y, z)) -- == -- <plus . (mul * outl), plus . (outr * outr)> $ (<zero, id> x, <plus, outr> (y, z)) -- == -- <plus . (mul * outl), plus . (outr * outr)> ((0, x), (y+z, z)) -- == -- ((plus . (mul * outl)) ((0, x), (y+z, z)), (plus . (outr * outr)) ((0, x), (y+z, z))) -- == -- ((plus $ (mul * outl) ((0, x), (y+z, z)), plus $ (outr * outr) ((0, x), (y+z, z))) -- == -- (plus $ (mul (0, x) , outl (y+z, z)), plus $ (outr (0, x), outr (y+z, z))) -- == -- (plus (0 , y+z), plus $ (x, z)) -- == -- (y+z, x+z) -- ws = outl . foldr ((0, 0), k) where k (x, (y, z)) = (y+z, x+z) -- | Ex 3.14 -- data Treee a = Tip a | Nod (Treee a, Treee a) deriving Show -- [tip,node] -- Ta <---- a + Ta * Ta -- | | -- u=(|f,g|)| | id + u * u -- v v -- X <---- a + X * X -- [f,g] foldTreee (f, g) = u where u (Tip x) = f x u (Nod (l, r)) = g (u l, u r) mapTreee f = foldTreee (Tip . f, Nod) -- tri f = (|a . F(id, Tf)|) -- = (|[tip, bin] . (id + Tf * Tf)|) -- = (|tip, bin . (Tf * Tf)|) triTreee f = foldTreee (Tip, Nod . (cross (mapTreee f, mapTreee f))) max' :: Ord a => Treee a -> a max' = foldTreee (id, uncurry max) zero = const 0 one = const 1 depths = triTreee succ . mapTreee zero depth = max' . depths depth' = foldTreee (zero, succ . uncurry max) sumTreee = foldTreee (id, plus) wpl = sumTreee . mapTreee mul . triTreee (cross (succ, id)) . mapTreee (pair (one, id)) -- wpl -- == {- sumTree -} -- (|id, plus|) . map mul . tri (succ * id) . map <one, id> -- == {- 型関手融合 (2.14) : (|h|) . Tg = (|h . F(g, id)|) -} -- (|[id, plus] . F(mul, id)|) . tri (succ * id) . map <one, id> -- == {- F は木型の台関手 F(a, b) = a + b * b -} -- (|[id, plus] . (mul + (id * id))|) . tri (succ * id) . map <one, id> -- == {- 余積関手の融合則 [f,g] . (h + k) = [f . h, g . k] -} -- (|mul, plus|) . tri (succ * id) . map <one, id> -- == {- ヒント: ペアの二番目は wsl に与えられた引数の sum に当たる -} -- outl . <(|mul, plus|), (|outr, plus|)> . tri (succ * id) . map <one, id> -- == {- バナナスプリット則 : <(|h|),(|k|)> == (|<h . Foutl, k . Foutr>|) -} -- outl . (|<[mul, plus] . Foutl, [outr, plus] . Foutr>|) . tri (succ * id) . map <one, id> -- == {- F は木型の台関手 F(a, b) = a + b * b -} -- outl . (|<[mul, plus] . (id + outl * outl), [outr, plus] . (id + outr * outr)>|) . tri (succ * id) . map <one, id> -- == {- 余積関手の融合則 [f,g] . (h + k) = [f . h, g . k] -} -- outl . (|<[mul, plus . (outl * outl)],[outr, plus . (outr * outr)]>|) . tri (succ * id) . map <one, id> -- == {- Ex 2.27 交換則 : <[f,g],[h,k]> == [<f,h>,<g,k>] -} -- outl . (|<mul, outr>, <plus . (outl * outl), plus . (outr * outr)>|) . tri (succ * id) . map <one, id> -- == {- h = <plus, outr> としてホーナー則を適用 : (|g|) . tri f = (|g . F(id, h)|) <= h . g = g . F(f, h) -} -- outl . (|[<mul, outr>, <plus . (outl * outl), plus . (outr * outr)>] . F(id, <plus, outr>)|) . map <one, id> -- == {- 型関手融合 (2.14) : (|h|) . Tg = (|h . 
F(g, id)|) -} -- outl . (|[<mul, outr>, <plus . (outl * outl), plus . (outr * outr)>] . F(id, <plus, outr>) . F(<one, id>, id)|) -- == {- 関手則 -} -- outl . (|[<mul, outr>, <plus . (outl * outl), plus . (outr * outr)>] . F(<one, id>, <plus, outr>)|) -- == {- F は木型の台関手 F(a, b) = a + b * b -} -- outl . (|[<mul, outr>, <plus . (outl * outl), plus . (outr * outr)>] . (<one, id> + (<plus, outr> * <plus, outr>))|) -- == {- 後述 -} -- outl . (|f, g|) where f a = (a, a) /\ g ((a, b), (c, d)) = (a+b+c+d, b+d) wpl' = foldTreee (f, g) where f a = (a, a) g ((a, b), (c, d)) = (a+b+c+d, b+d) -- ホーナー則の適用についての論証は以下. -- f = (succ * id), g = [<mul, outr>, <plus . (outl * outl), plus . (outr * outr)>], h = <plus, outr> として -- h . g = g . F(f, h) を示す必要がある. -- <plus, outr> . g = g . F((succ * id), <plus, outr>) を示す. -- -- -- [<mul, outr>, <plus . (outl * outl), plus . (outr * outr)>] -- (a*b,b) + (x+z,y+w) <--------------------------------------------- (a, b) + ((x, y), (z, w)) -- | | -- <plus, outr> | | F((succ * id), <plus, outr>) -- | | = (succ * id) + (<plus, outr> * <plus, outr>) -- v v -- (a*b+b,b) + (x+z+y+w, y+w) <------------------------------------------ (a+1,b) + ((x+y, y), (z+w, w)) -- = [<mul, outr>, <plus . (outl * outl), plus . (outr * outr)>] -- ((a+1)*b,b) + ((x+y)+(z+w),y+w) -- ~ (a+b+b,b) + (x+y+z+w,y+w) -- -- 可換になる. よってホーナー則が適用できる. -- -- -- 最後のステップはポイントワイズに計算すれば良い. -- -- <mul, outr> . <one, id> a -- == -- <mul, outr> $ (1, a) -- == -- <mul, outr> (1, a) -- == -- (a, a) -- -- <plus . (outl * outl), plus . (outr * outr)> . (<plus, outr> * <plus, outr>) $ ((a,b), (c, d)) -- == -- <plus . (outl * outl), plus . (outr * outr)> $ (<plus, outr> * <plus, outr>) ((a,b), (c, d)) -- == -- <plus . (outl * outl), plus . (outr * outr)> $ ((a+b, b), (c+d, d)) -- == -- (a+b+c+d,b+d) -- val :: [Float] -> Float val = foldr (zero, shift) where zero = 0 shift (d, r) = (d+r)/10 intern :: [Float] -> Int32 intern = round . val round :: Float -> Int32 round r = floor $ (2^17 * r + 1)/2 round' = halve . convert where halve n = (n+1) `div` 2 convert r = floor (2^17 * r) -- | Ex 3.15 -- naiveVal = sum . tri (/10) . map (/10) -- val -- == {- 定義 -} -- sum . tri (/10) . listr (/10) -- == {- sum = (|zero, plus|) -} -- (|zero, plus|) . tri (/10) . listr (/10) -- == {- h = (/10) でホーナー則 (|g|) . tri f = (|g . F(id, h)|) <= h . g = g . F(f, h) -} -- (|[zero,plus] . F(id, (/10))|) . listr (/10) -- == {- -} -- (|[zero,plus] . (id + id * (/10))|) . listr (/10) -- == {- -} -- (|zero, plus . (id * (/10))|) . listr (/10) -- == {- 型関手融合 (2.14) : (|h|) . Tg = (|h . F(g, id)|) -} -- (|[zero, plus . (id * (/10))] . F((/10), id)|) -- == {- -} -- (|[zero, plus . (id * (/10))] . (id + ((/10) * id))|) -- == {- -} -- (|zero, plus . (id * (/10)) . ((/10) * id)|) -- == {- -} -- (|zero, plus . ((/10) * (/10))|) -- == {- 後述 -} -- (|zero, (/10) . plus|) val' = foldr (0, (/10) . plus) -- ホーナー則の適用については g = [zero, plus], f = (/10), h = (/10) として h . g = g . F(f, h) を示せばよい. -- つまり (/10) . [zero, plus] = [zero, plus] . F((/10), (/10)) を示せばよい. -- -- [zero, plus] -- 0+(a+b) <----------- * + (a,b) -- | | -- (/10) | | F((/10), (/10)) -- v v -- 0/10+(a+b)/10 <----------- * + (a/10,b/10) -- [zero, plus] -- -- 可換であることが確認できるのでホーナー則を適用できる. -- intern' = halve . foldr (0, cshift) where halve n = (n+1) `div` 2 cshift (d, n) = (2^17 * d + n) `div` 10 -- | Ex 3.16 -- intern2 = halve . foldr (0, cshift) where halve n = (n+1) `div` 2 cshift (d, n) = (2^3 * d + n) `div` 10 -- ??? -- | Ex 3.17 -- -- ??? -- | Ex 3.18 -- -- 間接的同値則 -- forall k. 
k <= m == k <= n <=> m == n -- -- 任意の k について等しいなら, k := m と取った時 m <= m == m <= n で m <= n が真であり,かつ -- k := n と取った時 n <= m == n <= n で n <= m も真である. よって反対称律から m == n なので => の向きの imply は証明できる. -- 逆は自明. -- -- 上の証明は整数であることや整数上の順序であることを使っていない. -- 反対称律を使っているので半順序集合であることだけが要求される. -- よって任意の poset (半順序集合) において成り立つ. -- -- | Ex 3.19 -- -- 切り下げの普遍性 -- n <= x == n <= floor x -- を使って切り下げ則 -- floor ((a+r)/b) == floor ((a+foor(r))/b) -- を証明する. -- -- n <= floor ((a+r)/b) -- == {- floor の普遍性 -} -- n <= (a+r)/b -- == {- b > 0 と計算から -} -- n * b - a <= r -- == {- floor の普遍性 (n * b - a は整数) -} -- n * b - a <= floor (r) -- == {- 計算 -} -- n <= (a + floor (r))/b -- == {- floor の普遍性 -} -- n <= floor ((a + floor (r))/b) -- -- | Ex 3.20 -- -- 切り下げ則 -- floor ((a+r)/b) == floor ((a + floor(r))/b) -- a = 0 b = 2/3 r = 3/2 とすると左辺は -- floor ((0+3/2)/(2/3)) -- == -- floor ((3/2)/(2/3)) -- == -- floor (9/4) -- == -- 2 -- -- 右辺は -- floor ((0 + floor(3/2))/2/3) -- == -- floor ((1/(2/3))) -- == -- floor (3/2) -- == -- 1 -- | Ex 3.21 -- -- f : A <- B が単射なら任意の二項演算 (+) : B <- C x B に対して -- f (c (+) b) = c (*) f b -- なる二項演算 (*) : A <- A x B の存在を示す. -- -- f が単射なら f^-1 が存在して, f^-1 . f = id. -- (*) を -- c (*) d = f (c (+) (f^-1 d)) -- と定義する. -- すると, -- c (*) f b = f (c (+) (f^-1 (f b))) -- = f (c (+) id b) -- = f (c (+) b) -- | Ex 3.22 -- -- f : A <- B かつ (+) : B <- C x B として -- f b0 = f b1 かつ f (c (+) b0) /= f (c (+) b1) ならば -- f (c (+) b) = c (*) f b となるような (*) は存在しない. -- -- この定理を使って round . val に融合則を適用できないことを示す. -- -- intern = round . val = round . foldr (0, shift) -- where shift (d, r) = (d+r)/10 -- なので,ここで融合則 -- h . (|f|) = (|g|) <= h . f = g . Fh -- を適用しようとすると, round . [0, shift] = g . Fround なる g が存在することを示す必要があるが, -- これが上記の定理を使って使えないことを示せばよい. -- g = [c, (*)] とした場合, round [0, shift] = [c, (*)] . (id + id * round) = [c, (*) . (id * round)] -- よって, round 0 = 0 = c なので c = 0 として, round . shift = (*) . (id * round) が融合則の前件になる. -- ポイントワイズにすると, round (shift (d, n)) = ((*) . (id * round)) (d, n) = (*) (d, round n) = d (*) round n -- 一方shift = (+) とおくと, round (d (+) n) = d (*) round n となる. -- これを定理に当てはめるとすると, -- round (d (+) n) = d (*) round n -- f (c (+) b) = c (*) f b -- なので定理から, round b0 = round b1 かつ round (c `shift` b0) /= round (c `shift` b1) を示せれば融合則の前件を却下できる. ex_3_22 = let (c, b0, b1) = (0, 0.100001, 0.100000) d `shift` r = (d+r)/10 round r = floor $ (2^17 * r + 1)/2 in round b0 == round b1 && round (c `shift` b0) /= round (c `shift` b1) -- ??? -- 原書の回答はどうも Int32 と Float なら微妙に成り立ちそう. -- 2147483647 は 2^31-1 のことで maxBound :: Int32 である. だが現在のGHCだとアンダーフローして同じにはならない. -- 型をInt32とFloatで固めると round 10000.1 = 655366528 と round 10000.0 = 655360000 となり公開されている回答とも整合しそう. -- | Ex 3.23 -- -- outl : A <- A x 0 -- outr : 0 <- A x 0 -- i : A <- 0 -- -- outl = i . outr -- unnull . null = id /\ null . unnull = id となる unnull が存在しなければならないことを示す. -- -- unnull = <i, id>, null = outr ととれば任意の圏で成り立つ. -- -- null . unnull -- == -- outr . <i, id> -- == -- id -- -- 逆向きも論証する. -- -- unnull . null -- == -- <i, id> . outr -- == {- 対構成の融合 -} -- <i . outr, outr> -- == {- outl = i . outr なので -} -- <outl, outr> -- == {- 反射則 -} -- id -- | Ex 3.24 -- -- Rel は分配的ではない. undistr :: Either (a, b) (a, c) -> (a, Either b c) undistr = either (cross (id, inl)) (cross (id, inr)) where inl = Left inr = Right distr :: (a, Either b c) -> Either (a, b) (a, c) distr (a, Left b) = Left (a, b) distr (a, Right c) = Right (a, c) -- Rel における積の定義は余積の定義と一致する. -- すなわち A * B = A + B と定義できる. 
-- 依って分配則 -- -- A * (B + C) == (A * B) + (A * C) -- -- は -- -- A + (B + C) == (A + B) + (A + C) -- -- となるがこれは一致しない. -- (もし分からなければ and と quad の議論などを振り返れ) -- -- | Ex 3.25 -- -- ? の定義 -- -- p? :: A + A <- A -- p? = (unit+unit).distr.<id,p> -- -- ただし p :: Bool <- A -- -- ヒント: (! + !) . distr = (! + !) . outr を証明する. -- (! + !) . distr -- == {- (2.3) !の融合則 !a . f = !b (f:a <- b) -} -- (!.outr + !.outr) . distr -- == {- 余積関手 -} -- (! + !) . [inl . outr, inr . outr] . distr -- == {- outr は outr <- (x) な自然変換 -} -- (! + !) . [outr . (id x inl), outr . (id x inr)] . distr -- == {- 余積関手の融合則 -} -- (! + !) . outr . [(id x inl), (id x inr)] . distr -- == {- undistr = [(id x inl), (id x inr)] -} -- (! + !) . outr . undistr . distr -- == {- undistr . distr = id -} -- (! + !) . outr -- -- (p?)?? -- == {- p? の定義 -} -- ((unit + unit) . distr . <id, p>)?? -- == {- ??の定義 t?? = (!+!).t -} -- (! + !) . ((unit + unit) . distr . <id, p>) -- == {- unit = outl (unit : A <- A x 1) -} -- (! + !) . (outl + outl) . distr . <id, p> -- == {- 余積関手の合成 (f + g) . (h + k) = f . h + g . k -} -- (! . outl + ! . outl) . distr . <id, p> -- == {- (2.3) !の融合則 !a . f = !b (f:a <- b) -} -- (! + !) . distr . <id, p> -- == {- ヒント -} -- (! + !) . outr . <id, p> -- == {- 対構成の消去則 (2.5) -} -- (! + !) . p -- == {- p : 1+1 <- A で !の反射則(2.2) !_1 = id_1 から -} -- p -- -- 最後のところ, ! : 1 <- A であることに注意して, p : 1 + 1 <- A なので, -- p で 1 + 1 に移したものをそれぞれ単に id_1 : 1 <- 1 で写しただけなので -- (! + !) を単に消去してもよくなることから求まる. -- | Ex 3.26 -- -- (unit + unit) . distr が F <- G な自然変換(多相関数)であることを示す. -- F(A) = A + A -- G(A) = A * Bool -- distr : A*(B+C) <- A*B + A*D -- -- 以下図式が可換になることから証明できる. -- -- unit+unit distr -- F(A) = A + A <---- A*1 + A*1 <----- A * Bool = G(A) -- | | -- F(f)=f+f | | f*id=G(f) -- | | -- v v -- F(B) = B + B <---- B*1 + B*1 <----- B * Bool = G(B) -- unit+unit distr -- -- | Ex 3.27 -- -- 分配的な圏で h: 0 <- A がある. -- h が同型射であることと,それゆえ A が始対象でもあることを示す. -- i : A <- 0 なので h . i = id_0 は成り立つ. -- i . h -- == {- 対構成の消去則 -} -- i . outr . <id, h> -- == {- Ex 3.23 から i . outr = outl -} -- outl . <id, h> -- == {- 対構成の消去則 -} -- id -- -- ゆえに h は同型射. よって A ~= 0 なので A は始対象でもある. -- -- | Ex 3.28 -- -- f * [g, h] = [f * g, f * h] . distr を示す -- -- [f * g, f * h] . distr -- == {- 余積の消去則 -} -- [f * ([g, h] . inl), f * ([g, h] . inr)] . distr -- == {- id 単位元 -} -- [(f . id) * ([g, h] . inl), (f, id) * ([g, h] . inr)] . distr -- == {- 積関手 (f * g) . (h * k) = (f . h) * (g . k) -} -- [(f * [g, h]) . (id * inl), (f * [g, h]) . (id * inr)] . distr -- == {- 余積の融合則 m . [h, k] = [m . h, m . k] -} -- (f * [g, h]) . [(id * inl), (id * inr)] . distr -- == {- [id * inl, id * inr] = undistr で undistr . distr = id -} -- f * [g, h] -- -- | Ex 3.29 -- -- Bool = 1 + 1 -- よって -- Bool^2 = Bool * Bool = (1+1) * (1 + 1) -- (1 + 1) * (1 + 1) -- == {- 分配則 A * (B + C) == (A * B) + (A * C) -} -- ((1 + 1) * 1) + ((1 + 1) * 1) -- == {- Ex 2.26 A * B == B * A -} -- (1 * (1 + 1)) + (1 * (1 + 1)) -- == {- 分配則 A * (B + C) == (A * B) + (A * C) -} -- (1 * 1 + 1 * 1) + (1 * 1 + 1 * 1) -- == {- Ex 2.26 A * 1 == A -} -- (1 + 1) + (1 + 1) -- -- | Ex 3.30 -- -- filter p . listr f = listr f . filter (p . f) を示す. -- ただし filter p = concat . listr (p -> wrap, nil) とする. -- -- filter p . listr f -- == {- filter の定義 -} -- concat . listr (p -> wrap, nil) . listr f -- == {- listr 関手 -} -- concat . listr ((p -> wrap, nil) . f) -- == {- (3.4) -} -- concat . listr ((p . f) -> (wrap . f), nil) -- == {- wrap の自然性 -} -- concat . listr ((p . f) -> (listr f . wrap), nil) -- == {- nil の自然性 -} -- concat . listr ((p . f) -> (listr f . 
wrap), listr f . nil) -- == {- (3.3) -} -- concat . listr ((listr f) . ((p . f) -> wrap, nil)) -- == {- 関手則 -} -- concat . listr (listr f) . listr ((p . f) -> wrap, nil) -- == {- concat の自然性 : listr f . concat = concat . listr (listr f) -} -- listr f . concat . listr ((p . f) -> wrap, nil) -- == {- $\mathit{filter}$ の定義 -} -- listr f . filter (p . f) -- -- | Ex 3.31 -- -- cat . (nil * id) = outr -- cat . (cons * id) = cons . (id * cat) . assocr -- -- が等式 (3.6) -- cat . ([nil, cons] * id) = [outr, cons] . (id + id * cat) . phi -- ただし phi = (id + assocr) . distl -- -- と同等であることを示す. -- distl : (A * C) + (B * C) <- (A + B) * C -- assocr : A * (B * C) <- (A * B) * C -- -- 等式 (3.6) を基点に進める. -- 左辺側から -- cat . ([nil, cons] * id) -- == {- Ex 3.28 に双対(distrのケース)が表れている -} -- cat . [nil * id, cons * id] . distl -- == {- 余積の融合則 m . [h, k] = [m . h, m . k] -} -- [cat . (nil * id), cat . (cons * id)] . distl -- -- 念のため, Ex 3.28 の双対を示しておく -- -- [f * h, g * h] . distl -- == {- 余積の消去則 -} -- [([f, g] . inl) * h, ([f, g] . inr) * h] . distl -- == {- id 単位元 -} -- [([f, g] . inl) * (h . id), ([f, g] . inr) * (h . id)] . distl -- == {- 積関手 (f * g) . (h * k) = f . h * g . k -} -- [([f, g] * h) . (inl * id), ([f, g] * h) . (inr * id)] . distl -- == {- 余積の融合則 m . [h, k] = [m . h, m . k] -} -- ([f, g] * h) . [(inl * id), (inr * id)] . distl -- == {- [inl * id, inr * id] = undistl : (A*C)+(B*C)<-(A+B)*C で undistl . distl = id -} -- [f, g] * h -- -- -- 右辺側から -- [outr, cons] . (id + id * cat) . phi -- == {- 余積の吸収則 [f, g] . (h + k) = [f . h, g . k] -} -- [outr, cons . (id * cat)] . phi -- == {- phi の定義 -} -- [outr, cons . (id * cat)] . (id + assocr) . distl -- == {- 余積の吸収則 [f, g] . (h + k) = [f . h, g . k] -} -- [outr, cons . (id * cat) . assocr] . distl -- -- distl が同型射であることと余積の普遍性から同等である. -- | Ex 3.32 -- -- 余積を持つ任意のデカルト閉圏つまり積, 余積, 指数がある任意の圏は分配的であることを示す. -- 分配的な圏の要件は2つだった. -- -- 1. distl : (A * C) + (B * C) <- (A + B) * C が存在して同型射である.(undistlが逆射) -- 2. null : 0 <- A * 0 が存在して同型射である.(unnull が逆射) -- -- i : 0^A <- 0 (= 0 <- A <- 0) -- null : 0 <- A * 0 -- distl : (A * C) + (B * C) <- (A + B) * C -- -- inl : (A * C) + B <- (A * C) -- curry inl : (A * C) + B <- C <- A -- inr : A + (B * C) <- (B * C) -- curry inr : A + (B * C) <- C <- B -- -- 以下の通り null distl が実装できる. -- data Void = Void i a Void = Void null' = uncurry i distl = uncurry (either (curry Left) (curry Right)) -- unnull は実装できるか? -- undistl も実装できるか? -- | Ex 3.33 -- -- * A^0 ~= 1 -- - 1 <- A^0 は ! で実装可能 -- - A^0 <- 1 は以下で合成する. -- -- i unit swap -- A <--- 0 <--- 0 * 1 <--- 1 * 0 -- これで i . unit . swap : A^0 <- 1 * 0 なので -- curry (i . unit . swap) : A <- 0 <- 1 でありしたがって A^0 <- 1 である -- test_3_33_1l :: Exp a Void -> () test_3_33_1l = ban where ban = const () test_3_33_1r :: () -> Exp a Void test_3_33_1r = curry (i . unit . swap) where swap (a, b) = (b, a) unit (x, ()) = x i = undefined -- -- * A^1 ~= A -- - A <- A^1 は以下を合成する. -- -- なお apply : A <- A^B * B -- ! : 1 <- A -- なので, -- -- apply <id, !> -- A <--- A^1 * 1 <------ A^1 -- -- apply . <id, !> : A <- A^1 -- -- - A^1 <- A -- unit : A <- A * 1 なので, -- curry unit : A <- 1 <- A つまり A^1 <- A である. test_3_33_2l :: Exp a () -> a test_3_33_2l = apply . pair (id, ban) where apply = uncurry ($) ban = const () test_3_33_2r :: a -> Exp a () test_3_33_2r = curry unit where unit (x, ()) = x -- -- * A^(B+C) ~= A^B * A^C -- - A^B * A^C <- A^(B+C) は以下で合成する. 
-- まず, -- -- apply id * inl -- A <---------- A^(B+C) * (B+C) <-------- A^(B+C) * B -- -- apply id * inr -- A <---------- A^(B+C) * (B+C) <-------- A^(B+C) * C -- -- これらをそれぞれカリー化すると, -- curry (apply . (id * inl)) : A <- B <- A^(B+C) == A^B <- A^(B+C) -- curry (apply . (id * inr)) : A <- C <- A^(B+C) == A^C <- A^(B+C) -- よって -- pair (curry (apply . (id * inl)), curry (apply . (id * inr))) : A^B * A^C <- A^(B+C) -- type Exp a b = b -> a type a :+: b = Either a b type a :*: b = (a, b) test_3_33_3l :: Exp a (b :+: c) -> (Exp a b) :*: (Exp a c) test_3_33_3l = pair (l, r) where l = curry (apply . (cross (id, Left))) r = curry (apply . (cross (id, Right))) apply = uncurry ($) -- -- - A^(B+C) <- A^B * A^C は以下で合成する. -- まず, -- -- apply outl * id -- A <------- A^B * B <---------- (A^B * A^C) * B -- -- apply outr * id -- A <------- A^C * C <---------- (A^B * A^C) * C -- -- [l, r] distr -- A <---------- ((A^B * A^C) * B) + ((A^B * A^C) * C) <------- (A^B * A^C) * (B + C) -- つまり [l, r] . distr : A <- (A^B * A^C) * (B + C) -- よって curry ([l, r] . distr) : A <- (B + C) <- (A^B * A^C) == A^(B+C) <- (A^B * A^C) -- test_3_33_3r :: (Exp a b) :*: (Exp a c) -> Exp a (b :+: c) test_3_33_3r = curry (either l r . distr) where l = apply . cross (outl, id) r = apply . cross (outr, id) apply = uncurry ($) -- | Ex 3.34 -- -- f : A <- B と g : A^B <- 1 の間の全単射を構成する. -- f を curry (f . unit) に移せばよい. -- -- f unit swap -- A <--- B <--- B * 1 <--- 1 * B -- -- これをカリー化すると curry (f . unit . swap) : A <- B <- 1 test_3_34 :: (b -> a) -> (() -> Exp a b) test_3_34 f = curry (f . unit . swap) where unit (x, ()) = x swap (x, y) = (y, x) -- -- A^B <--- 1 -- || -- (A <--- B <--- 1) test_3_34inv :: (() -> Exp a b) -> (b -> a) test_3_34inv g = g () -- | Ex 3.35 -- -- 前順序(A, <=)は練習問題2.6より -- 対象をAの要素として a <= b のときに a <- b がただ1本だけ存在する圏である. -- -- デカルト閉は積があることと対象aとbに対して指数a^bが存在すること. -- 指数の定義 -- 2つの対象 A と B の指数とは,対象 A^B と射 apply : A <- A^B * B のことであり, -- 各 f : A <- C * B に対して -- apply . (curry f * id) = f -- となるような一意な射 curry f : A^B <- C が存在するようなもの. -- -- 指数の普遍性による定義 -- g = curry f == apply . (g * id) = f -- -- L g = apply . (g * id) = uncurry ($) . (cross (g, id)) = uncurry g が左随伴 -- R f = curry f が右随伴 -- -- ??? -- | Ex 3.36 -- -- f^B = curry (f . apply) -- -- f : A <- C -- apply : C <- C^B * B -- -- f apply -- A <--- C <--- C^B * B -- -- すると curry (f . apply) : A <- B <- C^B == (A <- B) <- (C <- B) == A^B <- C^B == F(A) <- F(C) -- ただし F(X) = X^B とする -- つまり curry (f . apply) が共変関手であることが分かる. -- -- | Ex 3.37 -- -- Aがデカルト閉ならA^Bもデカルト閉であることを示す. -- A^Bは Ex 2.19 によると -- 対象: 関手 A <- B -- 射: 自然変換 -- -- A^Bは圏Bから圏Aへの関手を対象とする圏で,そのコドメインAがデカルト閉である. -- つまりコドメインがデカルト閉であるような関手圏はデカルト閉であることを示す. -- -- B -- -------------- -- -- f -- x ----> y -- -- -- / | \ -- / | \ -- H / |G \ F -- / | \ -- v v v -- -- A -- -------------- -- -- phi -- Gx -------------> Fx -- | | -- Gf| |Ff -- v v -- Gy -------------> Fy -- phi -- -- さて,この圏Aはデカルト閉なので任意の有限積と指数がある. -- つまり任意の対象FとGについて指数F^Gが存在する. -- すなわち指数は (F^G)(x) = Fx^Gx である. -- -- | Ex 3.38 -- -- map : (b -> a) -> [b] -> [a] == a^b -> [a]^[b] -- -- G(a,b) : [a]^[b] -- F(a,b) : a^b -- map : G <- F -- -- map f -- [a]^[b] = G(a,b) <------- F(a, b) = a^b a <--- b -- | | | ^ -- G(h,k)| |F(h,k) |h |k -- v v v | -- [c]^[d] = G(c,d) <------- F(c, d) = c^d c <--- d -- map h . f . k -- -- F,G : Fun <- Fun * Fun^op -- -- G(h,k) . map = map . F(h, k) -- つまり map は G <- F という関手の間の自然変換である. -- | Ex 3.39 -- -- listr を使うというのはいわゆる map を使うという意味になる. 
listr = map -- cpr :: (a, [b]) -> [(a, b)] cpr (a, bs) = [(a, b) | b <- bs] -- cpr (a, bs) = map (\b -> (a, b)) bs -- cpr (a, bs) = map ((,) a) bs spread :: a -> [b] -> [(a, b)] spread = listr . (,) -- | Ex 3.40 -- -- この問題は集合と写像(関数)の圏 Fun において, 任意の関手 F は自然変換 FA^FB <- A^B を持つことを示せというもの. -- いくつか具体的に考えてみる. -- F として例えば Maybe 関手とすると, f : A <- B から Maybe A ^ Maybe B への自然変換(多相関数)がある. -- -- fmap f Nothing = Nothing -- fmap f (Just x) = Just (f x) -- -- F として今度は [] 関手を取ったとすると, f : A <- B から [A] ^ [B] への自然変換(多相関数)がある. -- -- fmap f [] = [] -- fmap f (x:xs) = f x : fmap f xs -- -- つまり fmap がそのような自然変換である. -- ちなみにどんな型構成子であっても fmap は実装できるので全ての F (型構成子) について fmap がある, -- あるいは言い換えるとどんな F (型構成子) もFunctorのインスタンスにできるということでもある. -- -- ところで FA^FB <- A^B なる自然変換は以下の図式を可換にする. -- -- map -- FA^FB <---------- A^B -- | | -- F(f.) | | (f.) where f : C <- A -- v v -- FC^FB <---------- C^B -- map -- -- このような図式が可換でなくなるような圏と関手Fを考える. -- ??? -- -- | Ex 3.41 -- -- f . (id * a) = h . Gf . phi -- -- が任意のhに対して一意解を持つ条件を示したい. -- -- 定理 3.1 -- -- phi が G(h * id) . phi = phi . (Fh * id) の意味で自然変換なら -- f . (a * id) = h . Gf . phi -- である. -- -- したがって,同様の議論により -- phi が G(id * h) . phi = phi . (id * Fh) なる自然変換であればよい. -- -- つまりこの設問は (3.7) において積を交換しても問題ない, -- もしくは定義 3.1 は積について可換であることを意味している. -- -- | Ex 3.42 -- -- g : A <- B -- h : A <- A -- iter : A <- (Nat * B) -- -- iter (g, h) . (zero * id) = g . outr -- iter (g, h) . (succ * id) = h . iter (g, h) -- -- 1つ目の等式を図示する -- -- iter (g, h) (zero*id) -- A <------------ Nat*B <----------- 1*B -- ^ / -- \ g outr / -- +-------------- B <------------+ -- -- 2つ目の等式を図示する -- -- iter (g, h) (succ*id) -- A <------------ Nat*B <----------- Nat*B -- ^ / -- \ h iter (g, h) / -- +-------------- A <------------+ -- -- これら2つの図式を合わせたものを図式に示す. -- -- iter (g, h) ([zero, succ]*id) -- A <------------ Nat*B <-------------------- (1+Nat)*B -- ^ | -- \ |distl -- \ [g . outr, h] (id + iter (g, h)) v -- +---------- (1*B)+A <------------------- (1*B)+(Nat*B) -- -- この図式から等式を導出する. -- (前問の構造再帰定理(定理 3.1)に当てはめてみる) -- -- f . ( a * id) = h . Gf . phi -- || -- iter (g, h) . ([zero, succ] * id) = [g . outr, h] . (id + iter (g, h)) . distl -- -- つまり始型 (a, T) は a = [zero, succ], T = Nat. -- f = iter (g, h) -- Gf = id + f. -- ちなみに始型がNatとなる台関手FはMaybe関手である. -- -- plus ([zero, succ]*id) -- Nat <---------- Nat*Nat <-------------------- (1+Nat)*Nat -- ^ | -- \ |distl -- \ [id . outr, succ] (id + plus) v -- +-------- (1*Nat)+Nat <----------------- (1*Nat)+(Nat*Nat) -- -- plus = iter (id, succ) -- -- | Ex 3.43 -- -- id * iter (id, h) -- Nat * A <-------------------------------------- Nat * (Nat * A) -- | | -- | | -- iter (id, h) | | assocl -- | | -- v v -- Nat <--------------- Nat * A <------------ (Nat * Nat) * A -- iter (id, h) plus * id -- -- iter (id, h) . (plus * id) . assocl = .. -- iter (id, h) . (id * iter (id, h)) = .. -- -- ??? -- -- f : A <- Nat * A -- f . (plus * id) . assoc = f . (id * f) -- -- | Ex 3.44 -- -- a = [tip, node] -- TA <-------------------- A + TA^A -- | | -- (|f, g|) | | id_A + (|f, g|)^id_A -- v | -- X <-------------------- A + X^A -- [f, g] -- -- Fun では a が同型射ではなくなるため意味がない. ??? -- だが以下の様に書くことはできる. -- data Tree' a = Tip' a | Node' (a -> Tree' a) -- | Ex 3.45 -- -- 任意の a に対して k a e = a かつ k . f = g . Fk なる e があるとする. -- -- (|f|) x = (|g|) x e を証明する. -- -- a -- TA <---------- A + TA x TA -- | | -- (|f|) | | F(|f|) -- v f v -- [A] <---------- A + [A] x [A] -- | | -- k | | Fk -- v v -- [A]^[A] <---------- A + [A]^[A] x [A]^[A] -- g -- -- 融合則から (|g|) = k . (|f|) である. 
--
-- (|f|) x
-- == {- k a e = a-}
-- k ((|f|) x) e
-- == {- composition -}
-- (k . (|f|)) x e
-- == {- k . f = g . Fk, so the fusion law applies -}
-- (|g|) x e
--

-- | Ex 3.46
--
-- convert : Listr A <- Listl A
data ListL a = Nil | Snoc (ListL a) a deriving Show

cataListL :: (a, (a, b) -> a) -> ListL b -> a
cataListL (c, f) = u
  where u Nil = c
        u (Snoc xs x) = f (u xs, x)

naiveConvert = cataListL ([], snocr)
  where snocr (xs, x) = xs ++ [x]
--
-- (|f|) = cataListL ([], snocr)
--
--               [Nil, Snoc]
--  ListL A <---------- 1 + ListL A x A
--      |                    |
--  (|f|) |                  | F(|f|) = id + (|f|) x id
--      v                    v
--     [A] <---------- 1 + [A] x A
--      |   [[], snocr]      |
--      |                    |
--  k=(++) |                 | Fk = F(++) = 1 + (++) x id
--      |                    |
--      v                    v
-- [A]^[A] <---------- 1 + [A]^[A] x A
--               g = [g0,g1]
--
-- Since k = (++), the unit is e = [].
--
-- From this diagram we determine g = [g0, g1].
-- It suffices to derive g from k . f = g . Fk, i.e. the condition that the lower square commutes.
-- (++) . [[], snocr] = [g0, g1] . (1 + (++) x id)
-- Expand each component pointwise.
--
-- (++) [] = g0 *
-- = {- add an argument -}
-- [] ++ ys = g0 * ys
-- = {- [] is the unit -}
-- ys = g0 * ys
-- Therefore g0 _ ys = ys.
-- However, since Nil is defined to take no argument here, g0 ys = ys.
-- For instance, if the constructor were Nil () and the c of cataListL (c, f) also
-- took a () argument, we would get g0 () ys = ys, which matches the published answer.
--
-- (++) (snocr (xs, x)) = g1 (((++) xs), x)
-- = {- add an argument -}
-- (snocr (xs, x)) ++ ys = g1 (((++) xs), x) ys
-- = {- snocr (xs, x) = xs ++ [x] -}
-- (xs ++ [x]) ++ ys = g1 ((xs ++), x) ys
-- = {- k = (++) is associative -}
-- (xs ++) ([x] ++ ys) = g1 ((xs ++), x) ys
-- = {- put f = (xs ++) -}
-- f (x:ys) = g1 (f, x) ys
-- Therefore g1 (f, x) ys = f (x:ys).
--
convert xs = cataListL (g0, g1) xs []
  where g0 ys = ys
        g1 (f, x) ys = f (x:ys)

-- | Ex 3.47
--
cataListR :: (a, (b, a) -> a) -> [b] -> a
cataListR (c, f) = u
  where u [] = c
        u (x:xs) = f (x, u xs)

naiveReverse = cataListR ([], snocr)
  where snocr (x, xs) = xs ++ [x]
--
-- (|f|) = cataListR ([], snocr)
--
--              [[], (:)]
--     [A] <---------- 1 + A x [A]
--      |                    |
--  (|f|) |                  | F(|f|) = id + id x (|f|)
--      v                    v
--     [A] <---------- 1 + A x [A]
--      |   [[], snocr]      |
--      |                    |
--  k=(++) |                 | Fk = F(++) = 1 + id x (++)
--      |                    |
--      v                    v
-- [A]^[A] <---------- 1 + A x [A]^[A]
--              g = [g0,g1]
--
-- Since k = (++), the unit is e = [].
--
-- From this diagram we determine g = [g0, g1].
-- It suffices to derive g from k . f = g . Fk, i.e. the condition that the lower square commutes.
-- (++) . [[], snocr] = [g0, g1] . (1 + id x (++))
-- Expand each component pointwise.
--
-- (++) [] = g0 *
-- = {- add an argument -}
-- [] ++ ys = g0 * ys
-- = {- [] is the unit -}
-- ys = g0 * ys
-- Therefore g0 _ ys = ys.
-- However, since [] is defined to take no argument here, g0 ys = ys.
-- For instance, if the constructor were [] () and the c of cataListR (c, f) also
-- took a () argument, we would get g0 () ys = ys, which matches the published answer.
--
-- (++) (snocr (x, xs)) = g1 (x, ((++) xs))
-- = {- add an argument -}
-- (snocr (x, xs)) ++ ys = g1 (x, ((++) xs)) ys
-- = {- snocr (x, xs) = xs ++ [x] -}
-- (xs ++ [x]) ++ ys = g1 (x, (xs ++)) ys
-- = {- k = (++) is associative -}
-- (xs ++) ([x] ++ ys) = g1 (x, (xs ++)) ys
-- = {- put f = (xs ++) -}
-- f (x:ys) = g1 (x, f) ys
-- Therefore g1 (x, f) ys = f (x:ys).
--
reverse xs = cataListR (g0, g1) xs []
  where g0 ys = ys
        g1 (x, f) ys = f (x:ys)
--
-- This is the same as convert for cataListL in the previous exercise.
--

-- | Ex 3.48
--
-- Hint: take k a n = mapTreee (+n) a and e = 0.
--
-- depths is already defined in Ex 3.14.
-- depths
-- = {- naive implementation of depths -}
-- triTreee succ . mapTreee zero
-- = {- triTreee f = foldTreee (Tip, Nod . (mapTreee f * mapTreee f)) -}
-- foldTreee (Tip, Nod . (mapTreee succ * mapTreee succ)) . mapTreee zero
-- = {- type-functor fusion law: (|f|) . Tg = (|f . F(g, 1)|) -}
-- foldTreee ([Tip, Nod . (mapTreee succ * mapTreee succ)] . F(zero, 1))
-- = {- F(a, b) = a + b * b -}
-- foldTreee ([Tip, Nod . (mapTreee succ * mapTreee succ)] . (zero + 1 * 1))
-- = {- distributing over the coproduct -}
-- foldTreee (Tip . zero, Nod . (mapTreee succ * mapTreee succ))

depths'' = foldTreee (Tip . zero, Nod . (cross (mapTreee succ, mapTreee succ)))
--
-- From here we apply Ex 3.45.
--
--                          [Tip, Nod]
--     Ta <------------------------------------- a + Ta * Ta
--      |                                          |
--  (|f|) |                                        | zero + (|f|) * (|f|)
--      v                                          v
--     TI <------------------------------------- I + TI * TI
--      |    f=[Tip . zero, Nod . (Tsucc * Tsucc)] |
-- k a n = |                                       | id + k * k
-- T(+n) a |                                       |
--      v                                          v
--   TI^I <------------------------------------- I + TI^I * TI^I
--                          [g0, g1]
--
-- k :: Treee Int -> (Int -> Treee Int)
-- With n = 0 we have k a 0 = mapTreee (+0) a = a, so the unit is e = 0.
--
-- We derive g from k . f = g . Fk, i.e. the condition that the lower square commutes.
-- (\a n -> T(+n) a) . [Tip . zero, Nod . (Tsucc * Tsucc)] = [g0, g1] . (id + (\a n -> T(+n) a) * (\a n -> T(+n) a))
--
-- First component:
--
-- k (Tip 0) = g0 x
-- =
-- k (Tip 0) n = g0 x n
-- =
-- T(+n) (Tip 0) = g0 x n
-- =
-- Tip n = g0 x n
--
-- Second component:
--
-- k (Nod (Tsucc ls, Tsucc rs)) = g1 (k ls, k rs)
-- =
-- k (Nod (Tsucc ls, Tsucc rs)) n = g1 (k ls, k rs) n
-- =
-- T(+n) (Nod (Tsucc ls, Tsucc rs)) = g1 (k ls, k rs) n
-- =
-- Nod (T(+(n+1)) ls, T(+(n+1)) rs) = g1 (k ls, k rs) n
-- =
-- Nod (k ls (n+1), k rs (n+1)) = g1 (k ls, k rs) n
-- = {- put k ls = sf, k rs = tf -}
-- Nod (sf (n+1), tf (n+1)) = g1 (sf, tf) n
--
depths' xs = foldTreee (g0, g1) xs 0
  where g0 _ n = Tip n
        g1 (sf, tf) n = Nod (sf (n+1), tf (n+1))

-- | Ex 3.49
--
naiveShallow = foldTreee (zero, succ . uncurry min)

-- k a (n, m) = min (a + n, m)
-- e = (0, Inf)
--
--                      [Tip, Nod]
--     Ta <---------------------- a + Ta * Ta
--      |                           |
--  (|f|) |                         | zero + (|f|) * (|f|)
--      v                           v
--      I <---------------------- I + I * I
-- k a (n, m) = |  f=[zero, succ . min]  |
-- min (a+n, m) |                        | id + k * k
--      v                           v
-- I^(I*I) <---------------------- I + I^(I*I) * I^(I*I)
--                      [g0, g1]
--
-- First component:
--
-- k 0 = g0 x
-- =
-- k 0 (n, m) = g0 x (n, m)
-- =
-- min (n+0, m) = g0 x (n, m)
-- =
-- min (n, m) = g0 x (n, m)
--
-- Second component:
--
-- k (succ (min (ls, rs))) = g1 (k ls, k rs)
-- =
-- k (succ (min (ls, rs))) (n, m) = g1 (k ls, k rs) (n, m)
-- =
-- min (succ (min (ls, rs)) + n, m) = g1 (k ls, k rs) (n, m)
-- =
-- min (min (ls, rs) + (n+1), m) = g1 (k ls, k rs) (n, m)
-- =
-- n+1 >= m   => m
-- otherwise  => min (ls+(n+1), min (rs+(n+1), m))
-- =
-- min (ls+(n+1), min (rs+(n+1), m)) = g1 (k ls, k rs) (n, m)
-- =
-- k ls (n+1, min (rs+(n+1), m)) = g1 (k ls, k rs) (n, m)
-- =
-- k ls (n+1, k rs (n+1, m)) = g1 (k ls, k rs) (n, m)
-- =
-- ka (n+1, kb (n+1, m)) = g1 (ka, kb) (n, m)
--
shallow xs = foldTreee (g0, g1) xs (0, 1/0)
  where g0 a (n, m) = n `min` m
        g1 (ka, kb) (n, m) | m <= n + 1 = m
                           | otherwise  = ka (n+1, kb (n+1, m))

-- | Ex 3.50
--
assocr ((x,y),z) = (x,(y,z))
assocl (x,(y,z)) = ((x,y),z)
--
-- First, derive an implementation of loop h by equational reasoning.
--
-- loop h . (a * id) = [id, loop h . (id * h) . assocr] . distl
--   where a = [nil, snoc]
--
-- We show that, for every h, loop h is uniquely determined.
--
--               a * id                    distl
-- La * b <------- (1 + La * a) * b ----------> 1 * b + (La * a) * b
--    |                                             |
--    |                                             | id + assocr
--    |                                             v
-- loop h |                                 1 * b + La * (a * b)
--    |                                             |
--    |                                             | id + (id * h)
--    v                                             v
--    b <-------------------------------------- 1 * b + La * b
--                  [outr, loop h]
--
-- loop h . (a * id) = [outr, loop h] . (id + (id * h)) . (id + assocr) . distl
--
-- Left-hand side:
--
-- loop h . (a * id)
-- = {- a = [nil, snoc] -}
-- loop h . ([nil, snoc] * id)
--
-- Right-hand side:
--
-- [outr, loop h] . (id + (id * h)) . (id + assocr) . distl
-- = {- composing the coproducts -}
-- [outr, loop h] . (id + (id * h) . assocr) . distl
-- = {- coproduct fusion law -}
-- [outr, loop h . (id * h) . assocr] . distl
--
-- loop h (Nil, b) = outr (Nil, b) = b
-- loop h (Snoc as a, b) = loop h (as, h (a, b))
--
-- With this definition, the equation above becomes
-- loop h . (a * id) = [outr, loop h] . (id + (id * h)) . (id + assocr) . distl
-- = {- composing the coproducts -}
-- loop h . (a * id) = [outr, loop h] . (id + (id * h). assocr) . distl
-- = {- coproduct fusion law, applied in reverse -}
-- loop h . (a * id) = [outr, id] . (id + loop h) . (id + (id * h) . assocr) . distl
--
-- Comparing this with Theorem 3.1:
-- loop h . (a * id) = [outr, id] . (id + loop h) . (id + (id * h) . assocr) . distl
-- ~~~~~~~             ~~~~~~~~~~   ~~~~~~~~~~~~~   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--    f                     h            Gf                       phi
-- f = loop h
-- Gh = id + h
-- phi = (id + (id * h) . assocr) . distl
-- Fh = id + (h * id)
--
-- ???
loop :: ((a, b) -> b) -> (ListL a, b) -> b
loop h = u
  where u (Nil, b) = b
        u (Snoc as a, b) = u (as, h (a, b))

loop' :: (a -> Exp b b) -> ListL a -> Exp b b
loop' h = u
  where u Nil = id
        u (Snoc as a) = u as . h a

-- | Ex 3.51
convcat x y = convert x ++ y

prop_check arg = uncurry convcat arg == loop cons arg
  where cons = uncurry (:)

test_3_51 = prop_check (Snoc (Snoc Nil 1) 2, [3,4,5])
--
-- uncurry convcat = loop cons
--
-- Since convcat x y = convert x ++ y, and
-- convert : [a] <- ListL a, we get
-- convcat : [a] <- [a] <- ListL a
-- On the other hand,
-- loop : b <- (ListL a, b) <- (b <- (a, b)),
-- and cons : [a] <- (a, [a]), so loop cons : [a] <- (ListL a, [a]);
-- that is, loop cons is simply the uncurried form of convcat.
-- ???
--
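-- A small, hypothetical smoke test (not part of the original exercises): it
-- checks the accumulator-based 'convert' of Ex 3.46 against the naive
-- catamorphism, and relates 'loop' of Ex 3.50 to 'convcat' as in Ex 3.51.
-- The name 'smokeTest_convert_loop' is illustrative only.
smokeTest_convert_loop :: Bool
smokeTest_convert_loop =
     convert ex == naiveConvert ex
  && loop (uncurry (:)) (ex, [4, 5]) == convert ex ++ [4, 5]
  where ex = Snoc (Snoc (Snoc Nil 1) 2) 3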
cutsea110/aop
src/Chap03.hs
bsd-3-clause
58,201
1
14
18,894
5,946
3,902
2,044
223
2
-- | This module provides fast, validated encoding and decoding functions -- between 'ByteString's and 'String's. It does not exactly match the -- output of the Codec.Binary.UTF8.String output for invalid encodings -- as the number of replacement characters is sometimes longer. module Data.ByteString.UTF8 ( B.ByteString , decode , replacement_char , uncons , splitAt , take , drop , span , break , fromString , toString , foldl , foldr , length , lines , lines' ) where import Data.Bits import Data.Word import qualified Data.ByteString as B import Prelude hiding (take,drop,splitAt,span,break,foldr,foldl,length,lines) import Codec.Binary.UTF8.String(encode) -- | Converts a Haskell string into a UTF8 encoded bytestring. fromString :: String -> B.ByteString fromString xs = B.pack (encode xs) -- | Convert a UTF8 encoded bytestring into a Haskell string. -- Invalid characters are replaced with '\xFFFD'. toString :: B.ByteString -> String toString bs = foldr (:) [] bs -- | This character is used to mark errors in a UTF8 encoded string. replacement_char :: Char replacement_char = '\xfffd' -- | Try to extract a character from a byte string. -- Returns 'Nothing' if there are no more bytes in the byte string. -- Otherwise, it returns a decoded character and the number of -- bytes used in its representation. -- Errors are replaced by character '\0xFFFD'. -- XXX: Should we combine sequences of errors into a single replacement -- character? decode :: B.ByteString -> Maybe (Char,Int) decode bs = do (c,cs) <- B.uncons bs return (choose (fromEnum c) cs) where choose :: Int -> B.ByteString -> (Char, Int) choose c cs | c < 0x80 = (toEnum $ fromEnum c, 1) | c < 0xc0 = (replacement_char, 1) | c < 0xe0 = bytes2 (mask c 0x1f) cs | c < 0xf0 = bytes3 (mask c 0x0f) cs | c < 0xf8 = bytes4 (mask c 0x07) cs | otherwise = (replacement_char, 1) mask :: Int -> Int -> Int mask c m = fromEnum (c .&. m) combine :: Int -> Word8 -> Int combine acc r = shiftL acc 6 .|. fromEnum (r .&. 0x3f) follower :: Int -> Word8 -> Maybe Int follower acc r | r .&. 0xc0 == 0x80 = Just (combine acc r) follower _ _ = Nothing {-# INLINE get_follower #-} get_follower :: Int -> B.ByteString -> Maybe (Int, B.ByteString) get_follower acc cs = do (x,xs) <- B.uncons cs acc1 <- follower acc x return (acc1,xs) bytes2 :: Int -> B.ByteString -> (Char, Int) bytes2 c cs = case get_follower c cs of Just (d, _) | d >= 0x80 -> (toEnum d, 2) | otherwise -> (replacement_char, 1) _ -> (replacement_char, 1) bytes3 :: Int -> B.ByteString -> (Char, Int) bytes3 c cs = case get_follower c cs of Just (d1, cs1) -> case get_follower d1 cs1 of Just (d, _) | (d >= 0x800 && d < 0xd800) || (d > 0xdfff && d < 0xfffe) -> (toEnum d, 3) | otherwise -> (replacement_char, 3) _ -> (replacement_char, 2) _ -> (replacement_char, 1) bytes4 :: Int -> B.ByteString -> (Char, Int) bytes4 c cs = case get_follower c cs of Just (d1, cs1) -> case get_follower d1 cs1 of Just (d2, cs2) -> case get_follower d2 cs2 of Just (d,_) | d >= 0x10000 -> (toEnum d, 4) | otherwise -> (replacement_char, 4) _ -> (replacement_char, 3) _ -> (replacement_char, 2) _ -> (replacement_char, 1) -- | Split after a given number of characters. -- Negative values are treated as if they are 0. splitAt :: Int -> B.ByteString -> (B.ByteString,B.ByteString) splitAt x bs = loop 0 x bs where loop a n _ | n <= 0 = B.splitAt a bs loop a n bs1 = case decode bs1 of Just (_,y) -> loop (a+y) (n-1) (B.drop y bs1) Nothing -> (bs, B.empty) -- | @take n s@ returns the first @n@ characters of @s@. 
-- If @s@ has fewer than @n@ characters, then we return the whole of @s@.
take :: Int -> B.ByteString -> B.ByteString
take n bs = fst (splitAt n bs)

-- | @drop n s@ returns the @s@ without its first @n@ characters.
-- If @s@ has fewer than @n@ characters, then we return an empty string.
drop :: Int -> B.ByteString -> B.ByteString
drop n bs = snd (splitAt n bs)

-- | Split a string into two parts: the first is the longest prefix
-- that contains only characters that satisfy the predicate; the second
-- part is the rest of the string.
-- Invalid characters are passed as '\xFFFD' to the predicate.
span :: (Char -> Bool) -> B.ByteString -> (B.ByteString, B.ByteString)
span p bs = loop 0 bs
  where loop a cs = case decode cs of
                      Just (c,n) | p c -> loop (a+n) (B.drop n cs)
                      _ -> B.splitAt a bs

-- | Split a string into two parts: the first is the longest prefix
-- that contains only characters that do not satisfy the predicate; the second
-- part is the rest of the string.
-- Invalid characters are passed as '\xFFFD' to the predicate.
break :: (Char -> Bool) -> B.ByteString -> (B.ByteString, B.ByteString)
break p bs = span (not . p) bs

-- | Get the first character of a byte string, if any.
-- Malformed characters are replaced by '\xFFFD'.
uncons :: B.ByteString -> Maybe (Char,B.ByteString)
uncons bs = do (c,n) <- decode bs
               return (c, B.drop n bs)

-- | Traverse a bytestring (right biased).
foldr :: (Char -> a -> a) -> a -> B.ByteString -> a
foldr cons nil cs = case uncons cs of
                      Just (a,as) -> cons a (foldr cons nil as)
                      Nothing -> nil

-- | Traverse a bytestring (left biased).
-- This function is strict in the accumulator.
foldl :: (a -> Char -> a) -> a -> B.ByteString -> a
foldl add acc cs = case uncons cs of
                      Just (a,as) -> let v = add acc a
                                     in seq v (foldl add v as)
                      Nothing -> acc

-- | Counts the number of characters encoded in the bytestring.
-- Note that this includes replacement characters.
length :: B.ByteString -> Int
length b = loop 0 b
  where loop n xs = case decode xs of
                      Just (_,m) -> loop (n+1) (B.drop m xs)
                      Nothing -> n

-- | Split a string into a list of lines.
-- Lines are terminated by '\n' or the end of the string.
-- An empty line may not be terminated by the end of the string.
-- See also 'lines\''.
lines :: B.ByteString -> [B.ByteString]
lines bs | B.null bs = []
lines bs = case B.elemIndex 10 bs of
             Just x -> let (xs,ys) = B.splitAt x bs
                       in xs : lines (B.tail ys)
             Nothing -> [bs]

-- | Split a string into a list of lines.
-- Lines are terminated by '\n' or the end of the string.
-- An empty line may not be terminated by the end of the string.
-- This function preserves the terminators.
-- See also 'lines'.
lines' :: B.ByteString -> [B.ByteString]
lines' bs | B.null bs = []
lines' bs = case B.elemIndex 10 bs of
              Just x -> let (xs,ys) = B.splitAt (x+1) bs
                        in xs : lines' ys
              Nothing -> [bs]
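-- A hypothetical usage sketch (not part of the original module): a round trip
-- through 'fromString'/'toString', plus the replacement behaviour on a
-- malformed byte sequence. The name 'utf8Examples' is illustrative only.
utf8Examples :: Bool
utf8Examples =
     toString (fromString "café") == "café"
  && length (fromString "café") == 4
  && toString (B.pack [0x68, 0xc3, 0x28]) == "h\xfffd("
  && decode (B.pack [0xc3, 0x28]) == Just ('\xfffd', 1)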
abuiles/turbinado-blog
tmp/dependencies/utf8-string-0.3.3/Data/ByteString/UTF8.hs
bsd-3-clause
7,253
0
20
2,111
2,098
1,103
995
124
8
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ViewPatterns #-} -- | Main stack tool entry point. module Main where import Control.Exception import Control.Monad import Control.Monad.IO.Class import Control.Monad.Logger import Control.Monad.Reader (ask) import Data.Attoparsec.Args (withInterpreterArgs) import Data.Char (toLower) import Data.List import qualified Data.List as List import Data.Map (Map) import qualified Data.Map as Map import Data.Maybe import Data.Monoid import qualified Data.Set as Set import Data.Text (Text) import qualified Data.Text as T import qualified Data.Text.IO as T import Data.Traversable import Network.HTTP.Client import Options.Applicative.Args import Options.Applicative.Builder.Extra import Options.Applicative.Simple import Options.Applicative.Types (readerAsk) import Path import qualified Paths_stack as Meta import Plugins import Prelude hiding (pi) import Stack.Build import Stack.Build.Types import Stack.Config import Stack.Constants import qualified Stack.Docker as Docker import Stack.Dot import Stack.Exec import Stack.Fetch import Stack.FileWatch import Stack.Init import Stack.New import qualified Stack.PackageIndex import Stack.Repl import Stack.Ide import Stack.Setup import Stack.Solver (solveExtraDeps) import Stack.Types import Stack.Types.Internal import Stack.Types.StackT import Stack.Upgrade import qualified Stack.Upload as Upload import System.Directory (canonicalizePath) import System.Environment (getArgs, getProgName) import System.Exit import System.FilePath (searchPathSeparator,dropTrailingPathSeparator) import System.IO (hIsTerminalDevice, stderr, stdin, stdout, hSetBuffering, BufferMode(..)) import System.Process.Read -- | Commandline dispatcher. main :: IO () main = withInterpreterArgs stackProgName $ \args isInterpreter -> do -- Line buffer the output by default, particularly for non-terminal runs. 
-- See https://github.com/commercialhaskell/stack/pull/360 hSetBuffering stdout LineBuffering hSetBuffering stdin LineBuffering hSetBuffering stderr NoBuffering when False $ do -- https://github.com/commercialhaskell/stack/issues/322 plugins <- findPlugins (T.pack stackProgName) tryRunPlugin plugins progName <- getProgName isTerminal <- hIsTerminalDevice stdout execExtraHelp args dockerHelpOptName (Docker.dockerOptsParser True) ("Only showing --" ++ Docker.dockerCmdName ++ "* options.") let versionString' = $(simpleVersion Meta.version) eGlobalRun <- try $ simpleOptions versionString' "stack - The Haskell Tool Stack" "" (extraHelpOption progName (Docker.dockerCmdName ++ "*") dockerHelpOptName <*> globalOpts isTerminal) (do addCommand "build" "Build the project(s) in this directory/configuration" (buildCmd DoNothing) (buildOpts Build) addCommand "install" "Build executables and install to a user path" installCmd (buildOpts Build) addCommand "test" "Build and test the project(s) in this directory/configuration" (\(rerun, bopts) -> buildCmd (DoTests rerun) bopts) ((,) <$> boolFlags True "rerun-tests" "running already successful tests" idm <*> (buildOpts Test)) addCommand "bench" "Build and benchmark the project(s) in this directory/configuration" (buildCmd DoBenchmarks) (buildOpts Build) addCommand "haddock" "Generate haddocks for the project(s) in this directory/configuration" (buildCmd DoNothing) (buildOpts Haddock) addCommand "new" "Create a brand new project" newCmd initOptsParser addCommand "init" "Initialize a stack project based on one or more cabal packages" initCmd initOptsParser addCommand "solver" "Use a dependency solver to try and determine missing extra-deps" solverCmd solverOptsParser addCommand "setup" "Get the appropriate ghc for your project" setupCmd setupParser addCommand "path" "Print out handy path information" pathCmd (fmap catMaybes (sequenceA (map (\(desc,name,_) -> flag Nothing (Just name) (long (T.unpack name) <> help desc)) paths))) addCommand "unpack" "Unpack one or more packages locally" unpackCmd (some $ strArgument $ metavar "PACKAGE") addCommand "update" "Update the package index" updateCmd (pure ()) addCommand "upgrade" "Upgrade to the latest stack (experimental)" upgradeCmd (switch ( long "git" <> help "Clone from Git instead of downloading from Hackage (more dangerous)" )) addCommand "upload" "Upload a package to Hackage" uploadCmd (many $ strArgument $ metavar "TARBALL/DIR") addCommand "dot" "Visualize your project's dependency graph using Graphviz dot" dotCmd dotOptsParser addCommand "exec" "Execute a command" execCmd (execOptsParser Nothing) addCommand "ghc" "Run ghc" execCmd (execOptsParser $ Just "ghc") addCommand "ghci" "Run ghci in the context of project(s)" replCmd ((,,,) <$> fmap (map T.pack) (many (strArgument (metavar "TARGET" <> help "If none specified, use all packages defined in current directory"))) <*> fmap (fromMaybe []) (optional (argsOption (long "ghc-options" <> metavar "OPTION" <> help "Additional options passed to GHCi"))) <*> fmap (fromMaybe "ghc") (optional (strOption (long "with-ghc" <> metavar "GHC" <> help "Use this command for the GHC to run"))) <*> flag False True (long "no-load" <> help "Don't load modules on start-up")) addCommand "ide" "Run ide-backend-client with the correct arguments" ideCmd ((,) <$> fmap (map T.pack) (many (strArgument (metavar "TARGET" <> help "If none specified, use all packages defined in current directory"))) <*> fmap (fromMaybe []) (optional (argsOption (long "ghc-options" <> metavar "OPTION" <> help 
"Additional options passed to GHCi")))) addCommand "runghc" "Run runghc" execCmd (execOptsParser $ Just "runghc") addCommand "clean" "Clean the local packages" cleanCmd (pure ()) addSubCommands Docker.dockerCmdName "Subcommands specific to Docker use" (do addCommand Docker.dockerPullCmdName "Pull latest version of Docker image from registry" dockerPullCmd (pure ()) addCommand "reset" "Reset the Docker sandbox" dockerResetCmd (flag False True (long "keep-home" <> help "Do not delete sandbox's home directory")) addCommand Docker.dockerCleanupCmdName "Clean up Docker images and containers" dockerCleanupCmd dockerCleanupOpts) ) -- commandsFromPlugins plugins pluginShouldHaveRun) https://github.com/commercialhaskell/stack/issues/322 case eGlobalRun of Left (exitCode :: ExitCode) -> do when isInterpreter $ putStrLn $ concat [ "\nIf you are trying to use " , stackProgName , " as a script interpreter, a\n'-- " , stackProgName , " [options] runghc [options]' comment is required." , "\nSee https://github.com/commercialhaskell/stack/wiki/Script-interpreter" ] throwIO exitCode Right (global,run) -> do when (globalLogLevel global == LevelDebug) $ putStrLn versionString' run global `catch` \e -> do -- This special handler stops "stack: " from being printed before the -- exception case fromException e of Just ec -> exitWith ec Nothing -> do printExceptionStderr e exitFailure where dockerHelpOptName = Docker.dockerCmdName ++ "-help" -- Try to run a plugin tryRunPlugin :: Plugins -> IO () tryRunPlugin plugins = do args <- getArgs case dropWhile (List.isPrefixOf "-") args of ((T.pack -> name):args') | isJust (lookupPlugin plugins name) -> do callPlugin plugins name args' `catch` onPluginErr exitSuccess _ -> return () -- TODO(danburton): use logger onPluginErr :: PluginException -> IO () onPluginErr (PluginNotFound _ name) = do T.hPutStr stderr $ "Stack plugin not found: " <> name exitFailure onPluginErr (PluginExitFailure _ i) = do exitWith (ExitFailure i) -- TODO(danburton): improve this, although it should never happen pluginShouldHaveRun :: Plugin -> GlobalOpts -> IO () pluginShouldHaveRun _plugin _globalOpts = do fail "Plugin should have run" -- | Print out useful path information in a human-readable format (and -- support others later). pathCmd :: [Text] -> GlobalOpts -> IO () pathCmd keys go = withBuildConfig go ExecStrategy (do env <- ask let cfg = envConfig env bc = envConfigBuildConfig cfg menv <- getMinimalEnvOverride snap <- packageDatabaseDeps local <- packageDatabaseLocal snaproot <- installationRootDeps localroot <- installationRootLocal distDir <- distRelativeDir forM_ (filter (\(_,key,_) -> null keys || elem key keys) paths) (\(_,key,path) -> $logInfo ((if length keys == 1 then "" else key <> ": ") <> path (PathInfo bc menv snap local snaproot localroot distDir)))) -- | Passed to all the path printers as a source of info. data PathInfo = PathInfo {piBuildConfig :: BuildConfig ,piEnvOverride :: EnvOverride ,piSnapDb :: Path Abs Dir ,piLocalDb :: Path Abs Dir ,piSnapRoot :: Path Abs Dir ,piLocalRoot :: Path Abs Dir ,piDistDir :: Path Rel Dir } -- | The paths of interest to a user. The first tuple string is used -- for a description that the optparse flag uses, and the second -- string as a machine-readable key and also for @--foo@ flags. The user -- can choose a specific path to list like @--global-stack-root@. But -- really it's mainly for the documentation aspect. -- -- When printing output we generate @PathInfo@ and pass it to the -- function to generate an appropriate string. 
Trailing slashes are -- removed, see #506 paths :: [(String, Text, PathInfo -> Text)] paths = [ ( "Global stack root directory" , "global-stack-root" , \pi -> T.pack (toFilePathNoTrailing (configStackRoot (bcConfig (piBuildConfig pi))))) , ( "Project root (derived from stack.yaml file)" , "project-root" , \pi -> T.pack (toFilePathNoTrailing (bcRoot (piBuildConfig pi)))) , ( "Configuration location (where the stack.yaml file is)" , "config-location" , \pi -> T.pack (toFilePathNoTrailing (bcStackYaml (piBuildConfig pi)))) , ( "PATH environment variable" , "bin-path" , \pi -> T.pack (intercalate ":" (eoPath (piEnvOverride pi)))) , ( "Installed GHCs (unpacked and archives)" , "ghc-paths" , \pi -> T.pack (toFilePathNoTrailing (configLocalPrograms (bcConfig (piBuildConfig pi))))) , ( "Local bin path where stack installs executables" , "local-bin-path" , \pi -> T.pack (toFilePathNoTrailing (configLocalBin (bcConfig (piBuildConfig pi))))) , ( "Extra include directories" , "extra-include-dirs" , \pi -> T.intercalate ", " (Set.elems (configExtraIncludeDirs (bcConfig (piBuildConfig pi))))) , ( "Extra library directories" , "extra-library-dirs" , \pi -> T.intercalate ", " (Set.elems (configExtraLibDirs (bcConfig (piBuildConfig pi))))) , ( "Snapshot package database" , "snapshot-pkg-db" , \pi -> T.pack (toFilePathNoTrailing (piSnapDb pi))) , ( "Local project package database" , "local-pkg-db" , \pi -> T.pack (toFilePathNoTrailing (piLocalDb pi))) , ( "Snapshot installation root" , "snapshot-install-root" , \pi -> T.pack (toFilePathNoTrailing (piSnapRoot pi))) , ( "Local project installation root" , "local-install-root" , \pi -> T.pack (toFilePathNoTrailing (piLocalRoot pi))) , ( "Dist work directory" , "dist-dir" , \pi -> T.pack (toFilePathNoTrailing (piDistDir pi)))] where toFilePathNoTrailing = dropTrailingPathSeparator . 
toFilePath data SetupCmdOpts = SetupCmdOpts { scoGhcVersion :: !(Maybe Version) , scoForceReinstall :: !Bool } setupParser :: Parser SetupCmdOpts setupParser = SetupCmdOpts <$> (optional $ argument readVersion (metavar "VERSION")) <*> boolFlags False "reinstall" "Reinstall GHC, even if available (implies no-system-ghc)" idm where readVersion = do s <- readerAsk case parseVersionFromString s of Nothing -> readerError $ "Invalid version: " ++ s Just x -> return x setupCmd :: SetupCmdOpts -> GlobalOpts -> IO () setupCmd SetupCmdOpts{..} go@GlobalOpts{..} = do (manager,lc) <- loadConfigWithOpts go runStackT manager globalLogLevel (lcConfig lc) globalTerminal $ Docker.reexecWithOptionalContainer (lcProjectRoot lc) (runStackLoggingT manager globalLogLevel globalTerminal $ do (ghc, mstack) <- case scoGhcVersion of Just v -> return (v, Nothing) Nothing -> do bc <- lcLoadBuildConfig lc globalResolver ExecStrategy return (bcGhcVersionExpected bc, Just $ bcStackYaml bc) mpaths <- runStackT manager globalLogLevel (lcConfig lc) globalTerminal $ ensureGHC SetupOpts { soptsInstallIfMissing = True , soptsUseSystem = (configSystemGHC $ lcConfig lc) && not scoForceReinstall , soptsExpected = ghc , soptsStackYaml = mstack , soptsForceReinstall = scoForceReinstall , soptsSanityCheck = True , soptsSkipGhcCheck = False , soptsSkipMsys = configSkipMsys $ lcConfig lc } case mpaths of Nothing -> $logInfo "GHC on PATH would be used" Just ps -> $logInfo $ "Would add the following to PATH: " <> T.pack (intercalate [searchPathSeparator] ps) ) withConfig :: GlobalOpts -> StackT Config IO () -> IO () withConfig go@GlobalOpts{..} inner = do (manager, lc) <- loadConfigWithOpts go runStackT manager globalLogLevel (lcConfig lc) globalTerminal $ Docker.reexecWithOptionalContainer (lcProjectRoot lc) $ runStackT manager globalLogLevel (lcConfig lc) globalTerminal inner withBuildConfig :: GlobalOpts -> NoBuildConfigStrategy -> StackT EnvConfig IO () -> IO () withBuildConfig go@GlobalOpts{..} strat inner = do (manager, lc) <- loadConfigWithOpts go runStackT manager globalLogLevel (lcConfig lc) globalTerminal $ Docker.reexecWithOptionalContainer (lcProjectRoot lc) $ do bconfig <- runStackLoggingT manager globalLogLevel globalTerminal $ lcLoadBuildConfig lc globalResolver strat envConfig <- runStackT manager globalLogLevel bconfig globalTerminal setupEnv runStackT manager globalLogLevel envConfig globalTerminal inner cleanCmd :: () -> GlobalOpts -> IO () cleanCmd () go = withBuildConfig go ThrowException clean -- | Parser for package names readPackageName :: ReadM PackageName readPackageName = do s <- readerAsk case parsePackageNameFromString s of Nothing -> readerError $ "Invalid package name: " ++ s Just x -> return x -- | Parser for package:[-]flag readFlag :: ReadM (Map (Maybe PackageName) (Map FlagName Bool)) readFlag = do s <- readerAsk case break (== ':') s of (pn, ':':mflag) -> do pn' <- case parsePackageNameFromString pn of Nothing | pn == "*" -> return Nothing | otherwise -> readerError $ "Invalid package name: " ++ pn Just x -> return $ Just x let (b, flagS) = case mflag of '-':x -> (False, x) _ -> (True, mflag) flagN <- case parseFlagNameFromString flagS of Nothing -> readerError $ "Invalid flag name: " ++ flagS Just x -> return x return $ Map.singleton pn' $ Map.singleton flagN b _ -> readerError "Must have a colon" -- | Helper for build and install commands buildCmdHelper :: NoBuildConfigStrategy -> FinalAction -> BuildOpts -> GlobalOpts -> IO () buildCmdHelper strat finalAction opts go | boptsFileWatch opts = 
fileWatch inner | otherwise = inner $ const $ return () where inner setLocalFiles = withBuildConfig go strat $ Stack.Build.build setLocalFiles opts { boptsFinalAction = finalAction } -- | Build the project. buildCmd :: FinalAction -> BuildOpts -> GlobalOpts -> IO () buildCmd = buildCmdHelper ThrowException -- | Install installCmd :: BuildOpts -> GlobalOpts -> IO () installCmd opts = buildCmdHelper ExecStrategy DoNothing opts { boptsInstallExes = True } -- | Unpack packages to the filesystem unpackCmd :: [String] -> GlobalOpts -> IO () unpackCmd names go = withConfig go $ do menv <- getMinimalEnvOverride Stack.Fetch.unpackPackages menv "." names -- | Update the package index updateCmd :: () -> GlobalOpts -> IO () updateCmd () go = withConfig go $ getMinimalEnvOverride >>= Stack.PackageIndex.updateAllIndices upgradeCmd :: Bool -> GlobalOpts -> IO () upgradeCmd fromGit go = withConfig go $ upgrade fromGit (globalResolver go) -- | Upload to Hackage uploadCmd :: [String] -> GlobalOpts -> IO () uploadCmd args go = do (manager,lc) <- loadConfigWithOpts go let config = lcConfig lc if null args then error "To upload the current project, please run 'stack upload .'" else liftIO $ do uploader <- Upload.mkUploader config $ Upload.setGetManager (return manager) Upload.defaultUploadSettings mapM_ (Upload.upload uploader) args data ExecOpts = ExecOpts { eoCmd :: !String , eoArgs :: ![String] , eoExtra :: !ExecOptsExtra } data ExecOptsExtra = ExecOptsPlain | ExecOptsEmbellished { eoEnvSettings :: !EnvSettings , eoPackages :: ![String] } execOptsParser :: Maybe String -- ^ command -> Parser ExecOpts execOptsParser mcmd = ExecOpts <$> maybe eoCmdParser pure mcmd <*> eoArgsParser <*> (eoPlainParser <|> ExecOptsEmbellished <$> eoEnvSettingsParser <*> eoPackagesParser) where eoCmdParser :: Parser String eoCmdParser = strArgument (metavar "CMD") eoArgsParser :: Parser [String] eoArgsParser = many (strArgument (metavar "-- ARGS (e.g. stack ghc -- X.hs -o x)")) eoEnvSettingsParser :: Parser EnvSettings eoEnvSettingsParser = EnvSettings <$> pure True <*> boolFlags True "ghc-package-path" "setting the GHC_PACKAGE_PATH variable for the subprocess" idm <*> boolFlags True "stack-exe" "setting the STACK_EXE environment variable to the path for the stack executable" idm eoPackagesParser :: Parser [String] eoPackagesParser = many (strOption (long "package" <> help "Additional packages that must be installed")) eoPlainParser :: Parser ExecOptsExtra eoPlainParser = flag' ExecOptsPlain (long "plain" <> help "Use an unmodified environment (only useful with Docker)") -- | Execute a command. execCmd :: ExecOpts -> GlobalOpts -> IO () execCmd ExecOpts {..} go@GlobalOpts{..} = case eoExtra of ExecOptsPlain -> do (manager,lc) <- liftIO $ loadConfigWithOpts go runStackT manager globalLogLevel (lcConfig lc) globalTerminal $ Docker.execWithOptionalContainer (lcProjectRoot lc) (return (eoCmd, eoArgs, id)) $ runStackT manager globalLogLevel (lcConfig lc) globalTerminal $ exec plainEnvSettings eoCmd eoArgs ExecOptsEmbellished {..} -> withBuildConfig go ExecStrategy $ do let targets = concatMap words eoPackages unless (null targets) $ Stack.Build.build (const $ return ()) defaultBuildOpts { boptsTargets = map T.pack targets } exec eoEnvSettings eoCmd eoArgs -- | Run the REPL in the context of a project. 
replCmd :: ([Text], [String], FilePath, Bool) -> GlobalOpts -> IO () replCmd (targets,args,path,noload) go@GlobalOpts{..} = withBuildConfig go ExecStrategy $ do repl targets args path noload -- | Run ide-backend in the context of a project. ideCmd :: ([Text], [String]) -> GlobalOpts -> IO () ideCmd (targets,args) go@GlobalOpts{..} = withBuildConfig go ExecStrategy $ do ide targets args -- | Pull the current Docker image. dockerPullCmd :: () -> GlobalOpts -> IO () dockerPullCmd _ go@GlobalOpts{..} = do (manager,lc) <- liftIO $ loadConfigWithOpts go runStackT manager globalLogLevel (lcConfig lc) globalTerminal $ Docker.preventInContainer Docker.pull -- | Reset the Docker sandbox. dockerResetCmd :: Bool -> GlobalOpts -> IO () dockerResetCmd keepHome go@GlobalOpts{..} = do (manager,lc) <- liftIO (loadConfigWithOpts go) runStackLoggingT manager globalLogLevel globalTerminal$ Docker.preventInContainer $ Docker.reset (lcProjectRoot lc) keepHome -- | Cleanup Docker images and containers. dockerCleanupCmd :: Docker.CleanupOpts -> GlobalOpts -> IO () dockerCleanupCmd cleanupOpts go@GlobalOpts{..} = do (manager,lc) <- liftIO $ loadConfigWithOpts go runStackT manager globalLogLevel (lcConfig lc) globalTerminal $ Docker.preventInContainer $ Docker.cleanup cleanupOpts -- | Command sum type for conditional arguments. data Command = Build | Test | Haddock deriving (Eq) -- | Parser for build arguments. buildOpts :: Command -> Parser BuildOpts buildOpts cmd = fmap process $ BuildOpts <$> target <*> libProfiling <*> exeProfiling <*> optimize <*> haddock <*> haddockDeps <*> finalAction <*> dryRun <*> ghcOpts <*> flags <*> installExes <*> preFetch <*> testArgs <*> onlySnapshot <*> coverage <*> fileWatch' <*> keepGoing <*> noTests where process bopts = if boptsCoverage bopts then bopts { boptsExeProfile = True , boptsLibProfile = True , boptsGhcOptions = "-fhpc" : boptsGhcOptions bopts} else bopts optimize = maybeBoolFlags "optimizations" "optimizations for TARGETs and all its dependencies" idm target = fmap (map T.pack) (many (strArgument (metavar "TARGET" <> help "If none specified, use all packages defined in current directory"))) libProfiling = boolFlags False "library-profiling" "library profiling for TARGETs and all its dependencies" idm exeProfiling = boolFlags False "executable-profiling" "library profiling for TARGETs and all its dependencies" idm haddock = boolFlags (cmd == Haddock) "haddock" "building Haddocks" idm haddockDeps = if cmd == Haddock then maybeBoolFlags "haddock-deps" "building Haddocks for dependencies" idm else pure Nothing finalAction = pure DoNothing installExes = pure False dryRun = flag False True (long "dry-run" <> help "Don't build anything, just prepare to") ghcOpts = (++) <$> flag [] ["-Wall", "-Werror"] ( long "pedantic" <> help "Turn on -Wall and -Werror (note: option name may change in the future" ) <*> many (fmap T.pack (strOption (long "ghc-options" <> metavar "OPTION" <> help "Additional options passed to GHC"))) flags = fmap (Map.unionsWith Map.union) $ many (option readFlag ( long "flag" <> metavar "PACKAGE:[-]FLAG" <> help "Override flags set in stack.yaml (applies to local packages and extra-deps)" )) preFetch = flag False True (long "prefetch" <> help "Fetch packages necessary for the build immediately, useful with --dry-run") testArgs = fmap (fromMaybe []) (if cmd == Test then optional (argsOption (long "test-arguments" <> metavar "TEST_ARGS" <> help "Arguments passed in to the test suite program")) else pure Nothing) onlySnapshot = flag False True (long 
"only-snapshot" <> help "Only build packages for the snapshot database, not the local database") coverage = if cmd == Test then flag False True (long "coverage" <> help "Generate a code coverage report") else pure False noTests = if cmd == Test then flag False True (long "no-run-tests" <> help "Disable running of tests. (Tests will still be built.)") else pure False fileWatch' = flag False True (long "file-watch" <> help "Watch for changes in local files and automatically rebuild") keepGoing = maybeBoolFlags "keep-going" "continue running after a step fails (default: false for build, true for test/bench)" idm -- | Parser for docker cleanup arguments. dockerCleanupOpts :: Parser Docker.CleanupOpts dockerCleanupOpts = Docker.CleanupOpts <$> (flag' Docker.CleanupInteractive (short 'i' <> long "interactive" <> help "Show cleanup plan in editor and allow changes (default)") <|> flag' Docker.CleanupImmediate (short 'y' <> long "immediate" <> help "Immediately execute cleanup plan") <|> flag' Docker.CleanupDryRun (short 'n' <> long "dry-run" <> help "Display cleanup plan but do not execute") <|> pure Docker.CleanupInteractive) <*> opt (Just 14) "known-images" "LAST-USED" <*> opt Nothing "unknown-images" "CREATED" <*> opt (Just 0) "dangling-images" "CREATED" <*> opt Nothing "stopped-containers" "CREATED" <*> opt Nothing "running-containers" "CREATED" where opt def' name mv = fmap Just (option auto (long name <> metavar (mv ++ "-DAYS-AGO") <> help ("Remove " ++ toDescr name ++ " " ++ map toLower (toDescr mv) ++ " N days ago" ++ case def' of Just n -> " (default " ++ show n ++ ")" Nothing -> ""))) <|> flag' Nothing (long ("no-" ++ name) <> help ("Do not remove " ++ toDescr name ++ case def' of Just _ -> "" Nothing -> " (default)")) <|> pure def' toDescr = map (\c -> if c == '-' then ' ' else c) -- | Parser for global command-line options. globalOpts :: Bool -> Parser GlobalOpts globalOpts defaultTerminal = GlobalOpts <$> logLevelOpt <*> configOptsParser False <*> optional resolverParser <*> flag defaultTerminal False (long "no-terminal" <> help "Override terminal detection in the case of running in a false terminal") <*> (optional (strOption (long "stack-yaml" <> metavar "STACK-YAML" <> help "Override project stack.yaml file (overrides any STACK_YAML environment variable)"))) -- | Parse for a logging level. logLevelOpt :: Parser LogLevel logLevelOpt = fmap parse (strOption (long "verbosity" <> metavar "VERBOSITY" <> help "Verbosity: silent, error, warn, info, debug")) <|> flag defaultLogLevel verboseLevel (short 'v' <> long "verbose" <> help ("Enable verbose mode: verbosity level \"" <> showLevel verboseLevel <> "\"")) where verboseLevel = LevelDebug showLevel l = case l of LevelDebug -> "debug" LevelInfo -> "info" LevelWarn -> "warn" LevelError -> "error" LevelOther x -> T.unpack x parse s = case s of "debug" -> LevelDebug "info" -> LevelInfo "warn" -> LevelWarn "error" -> LevelError _ -> LevelOther (T.pack s) resolverParser :: Parser Resolver resolverParser = option readResolver (long "resolver" <> metavar "RESOLVER" <> help "Override resolver in project file") -- | Default logging level should be something useful but not crazy. defaultLogLevel :: LogLevel defaultLogLevel = LevelInfo -- | Parsed global command-line options. data GlobalOpts = GlobalOpts { globalLogLevel :: LogLevel -- ^ Log level , globalConfigMonoid :: ConfigMonoid -- ^ Config monoid, for passing into 'loadConfig' , globalResolver :: Maybe Resolver -- ^ Resolver override , globalTerminal :: Bool -- ^ We're in a terminal? 
, globalStackYaml :: Maybe FilePath -- ^ Override project stack.yaml } deriving (Show) -- | Load the configuration with a manager. Convenience function used -- throughout this module. loadConfigWithOpts :: GlobalOpts -> IO (Manager,LoadConfig (StackLoggingT IO)) loadConfigWithOpts GlobalOpts{..} = do manager <- newTLSManager mstackYaml <- case globalStackYaml of Nothing -> return Nothing Just fp -> do path <- canonicalizePath fp >>= parseAbsFile return $ Just path lc <- runStackLoggingT manager globalLogLevel globalTerminal (loadConfig globalConfigMonoid mstackYaml) return (manager,lc) -- | Project initialization initCmd :: InitOpts -> GlobalOpts -> IO () initCmd initOpts go = withConfig go $ initProject initOpts -- | Project creation newCmd :: InitOpts -> GlobalOpts -> IO () newCmd initOpts go@GlobalOpts{..} = withConfig go $ do newProject initProject initOpts -- | Fix up extra-deps for a project solverCmd :: Bool -- ^ modify stack.yaml automatically? -> GlobalOpts -> IO () solverCmd fixStackYaml go = withBuildConfig go ThrowException (solveExtraDeps fixStackYaml) -- | Parser for @solverCmd@ solverOptsParser :: Parser Bool solverOptsParser = boolFlags False "modify-stack-yaml" "Automatically modify stack.yaml with the solver's recommendations" idm -- | Visualize dependencies dotCmd :: DotOpts -> GlobalOpts -> IO () dotCmd dotOpts go = withBuildConfig go ThrowException (dot dotOpts)
cocreature/stack
src/main/Main.hs
bsd-3-clause
36,685
0
30
13,998
7,161
3,624
3,537
814
9
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}

module Ivory.Tower.AST.Monitor where

#if MIN_VERSION_mainland_pretty(0,6,0)
import Text.PrettyPrint.Mainland.Class
#endif
import Text.PrettyPrint.Mainland

import Ivory.Tower.Types.Unique
import Ivory.Tower.AST.Handler

data Monitor = Monitor
  { monitor_name     :: Unique
  , monitor_handlers :: [Handler]
  , monitor_external :: MonitorExternal
  } deriving (Eq, Show, Ord)

monitorName :: Monitor -> String
monitorName = showUnique . monitor_name

data MonitorExternal
  = MonitorDefined
  | MonitorExternal
  deriving (Show, Read, Eq, Ord)

instance Pretty Monitor where
  ppr m@(Monitor{..}) = hang 2 $
        text (monitorName m) <+> parens (ppr monitor_external) <> colon
    </> hang 2 ("Handlers:" </> stack (map ppr monitor_handlers))

instance Pretty MonitorExternal where
  ppr MonitorDefined  = "defined"
  ppr MonitorExternal = "external"
GaloisInc/tower
tower/src/Ivory/Tower/AST/Monitor.hs
bsd-3-clause
957
0
12
164
244
138
106
25
1
-- (c) The University of Glasgow, 1992-2006 {-# LANGUAGE CPP #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE DeriveFoldable #-} {-# LANGUAGE DeriveTraversable #-} {-# LANGUAGE FlexibleInstances #-} {-# OPTIONS_GHC -fno-omit-interface-pragmas #-} -- Workaround for Trac #5252 crashes the bootstrap compiler without -O -- When the earliest compiler we want to boostrap with is -- GHC 7.2, we can make RealSrcLoc properly abstract -- | This module contains types that relate to the positions of things -- in source files, and allow tagging of those things with locations module SrcLoc ( -- * SrcLoc RealSrcLoc, -- Abstract SrcLoc(..), -- ** Constructing SrcLoc mkSrcLoc, mkRealSrcLoc, mkGeneralSrcLoc, noSrcLoc, -- "I'm sorry, I haven't a clue" generatedSrcLoc, -- Code generated within the compiler interactiveSrcLoc, -- Code from an interactive session advanceSrcLoc, -- ** Unsafely deconstructing SrcLoc -- These are dubious exports, because they crash on some inputs srcLocFile, -- return the file name part srcLocLine, -- return the line part srcLocCol, -- return the column part -- * SrcSpan RealSrcSpan, -- Abstract SrcSpan(..), -- ** Constructing SrcSpan mkGeneralSrcSpan, mkSrcSpan, mkRealSrcSpan, noSrcSpan, wiredInSrcSpan, -- Something wired into the compiler srcLocSpan, realSrcLocSpan, combineSrcSpans, -- ** Deconstructing SrcSpan srcSpanStart, srcSpanEnd, realSrcSpanStart, realSrcSpanEnd, srcSpanFileName_maybe, showUserSpan, pprUserRealSpan, -- ** Unsafely deconstructing SrcSpan -- These are dubious exports, because they crash on some inputs srcSpanFile, srcSpanStartLine, srcSpanEndLine, srcSpanStartCol, srcSpanEndCol, -- ** Predicates on SrcSpan isGoodSrcSpan, isOneLineSpan, containsSpan, -- * Located Located, RealLocated, GenLocated(..), -- ** Constructing Located noLoc, mkGeneralLocated, -- ** Deconstructing Located getLoc, unLoc, -- ** Combining and comparing Located values eqLocated, cmpLocated, combineLocs, addCLoc, leftmost_smallest, leftmost_largest, rightmost, spans, isSubspanOf, sortLocated ) where import Util import Outputable import FastString #if __GLASGOW_HASKELL__ < 709 import Data.Foldable ( Foldable ) import Data.Traversable ( Traversable ) #endif import Data.Bits import Data.Data import Data.List import Data.Ord {- ************************************************************************ * * \subsection[SrcLoc-SrcLocations]{Source-location information} * * ************************************************************************ We keep information about the {\em definition} point for each entity; this is the obvious stuff: -} -- | Represents a single point within a file data RealSrcLoc = SrcLoc FastString -- A precise location (file name) {-# UNPACK #-} !Int -- line number, begins at 1 {-# UNPACK #-} !Int -- column number, begins at 1 data SrcLoc = RealSrcLoc {-# UNPACK #-}!RealSrcLoc | UnhelpfulLoc FastString -- Just a general indication deriving Show {- ************************************************************************ * * \subsection[SrcLoc-access-fns]{Access functions} * * ************************************************************************ -} mkSrcLoc :: FastString -> Int -> Int -> SrcLoc mkSrcLoc x line col = RealSrcLoc (mkRealSrcLoc x line col) mkRealSrcLoc :: FastString -> Int -> Int -> RealSrcLoc mkRealSrcLoc x line col = SrcLoc x line col -- | Built-in "bad" 'SrcLoc' values for particular locations noSrcLoc, generatedSrcLoc, interactiveSrcLoc :: SrcLoc noSrcLoc = UnhelpfulLoc (fsLit "<no location info>") generatedSrcLoc = 
UnhelpfulLoc (fsLit "<compiler-generated code>") interactiveSrcLoc = UnhelpfulLoc (fsLit "<interactive session>") -- | Creates a "bad" 'SrcLoc' that has no detailed information about its location mkGeneralSrcLoc :: FastString -> SrcLoc mkGeneralSrcLoc = UnhelpfulLoc -- | Gives the filename of the 'RealSrcLoc' srcLocFile :: RealSrcLoc -> FastString srcLocFile (SrcLoc fname _ _) = fname -- | Raises an error when used on a "bad" 'SrcLoc' srcLocLine :: RealSrcLoc -> Int srcLocLine (SrcLoc _ l _) = l -- | Raises an error when used on a "bad" 'SrcLoc' srcLocCol :: RealSrcLoc -> Int srcLocCol (SrcLoc _ _ c) = c -- | Move the 'SrcLoc' down by one line if the character is a newline, -- to the next 8-char tabstop if it is a tab, and across by one -- character in any other case advanceSrcLoc :: RealSrcLoc -> Char -> RealSrcLoc advanceSrcLoc (SrcLoc f l _) '\n' = SrcLoc f (l + 1) 1 advanceSrcLoc (SrcLoc f l c) '\t' = SrcLoc f l (((((c - 1) `shiftR` 3) + 1) `shiftL` 3) + 1) advanceSrcLoc (SrcLoc f l c) _ = SrcLoc f l (c + 1) {- ************************************************************************ * * \subsection[SrcLoc-instances]{Instance declarations for various names} * * ************************************************************************ -} -- SrcLoc is an instance of Ord so that we can sort error messages easily instance Eq SrcLoc where loc1 == loc2 = case loc1 `cmpSrcLoc` loc2 of EQ -> True _other -> False instance Eq RealSrcLoc where loc1 == loc2 = case loc1 `cmpRealSrcLoc` loc2 of EQ -> True _other -> False instance Ord SrcLoc where compare = cmpSrcLoc instance Ord RealSrcLoc where compare = cmpRealSrcLoc sortLocated :: [Located a] -> [Located a] sortLocated things = sortBy (comparing getLoc) things cmpSrcLoc :: SrcLoc -> SrcLoc -> Ordering cmpSrcLoc (UnhelpfulLoc s1) (UnhelpfulLoc s2) = s1 `compare` s2 cmpSrcLoc (UnhelpfulLoc _) (RealSrcLoc _) = GT cmpSrcLoc (RealSrcLoc _) (UnhelpfulLoc _) = LT cmpSrcLoc (RealSrcLoc l1) (RealSrcLoc l2) = (l1 `compare` l2) cmpRealSrcLoc :: RealSrcLoc -> RealSrcLoc -> Ordering cmpRealSrcLoc (SrcLoc s1 l1 c1) (SrcLoc s2 l2 c2) = (s1 `compare` s2) `thenCmp` (l1 `compare` l2) `thenCmp` (c1 `compare` c2) instance Outputable RealSrcLoc where ppr (SrcLoc src_path src_line src_col) = hcat [ pprFastFilePath src_path <> colon , int src_line <> colon , int src_col ] -- I don't know why there is this style-based difference -- if userStyle sty || debugStyle sty then -- hcat [ pprFastFilePath src_path, char ':', -- int src_line, -- char ':', int src_col -- ] -- else -- hcat [text "{-# LINE ", int src_line, space, -- char '\"', pprFastFilePath src_path, text " #-}"] instance Outputable SrcLoc where ppr (RealSrcLoc l) = ppr l ppr (UnhelpfulLoc s) = ftext s instance Data RealSrcSpan where -- don't traverse? toConstr _ = abstractConstr "RealSrcSpan" gunfold _ _ = error "gunfold" dataTypeOf _ = mkNoRepType "RealSrcSpan" instance Data SrcSpan where -- don't traverse? toConstr _ = abstractConstr "SrcSpan" gunfold _ _ = error "gunfold" dataTypeOf _ = mkNoRepType "SrcSpan" {- ************************************************************************ * * \subsection[SrcSpan]{Source Spans} * * ************************************************************************ -} {- | A SrcSpan delimits a portion of a text file. It could be represented by a pair of (line,column) coordinates, but in fact we optimise slightly by using more compact representations for single-line and zero-length spans, both of which are quite common. 
The end position is defined to be the column /after/ the end of the span. That is, a span of (1,1)-(1,2) is one character long, and a span of (1,1)-(1,1) is zero characters long. -} data RealSrcSpan = SrcSpanOneLine -- a common case: a single line { srcSpanFile :: !FastString, srcSpanLine :: {-# UNPACK #-} !Int, srcSpanSCol :: {-# UNPACK #-} !Int, srcSpanECol :: {-# UNPACK #-} !Int } | SrcSpanMultiLine { srcSpanFile :: !FastString, srcSpanSLine :: {-# UNPACK #-} !Int, srcSpanSCol :: {-# UNPACK #-} !Int, srcSpanELine :: {-# UNPACK #-} !Int, srcSpanECol :: {-# UNPACK #-} !Int } | SrcSpanPoint { srcSpanFile :: !FastString, srcSpanLine :: {-# UNPACK #-} !Int, srcSpanCol :: {-# UNPACK #-} !Int } deriving (Eq, Typeable) data SrcSpan = RealSrcSpan !RealSrcSpan | UnhelpfulSpan !FastString -- Just a general indication -- also used to indicate an empty span deriving (Eq, Ord, Typeable, Show) -- Show is used by Lexer.x, because we -- derive Show for Token -- | Built-in "bad" 'SrcSpan's for common sources of location uncertainty noSrcSpan, wiredInSrcSpan :: SrcSpan noSrcSpan = UnhelpfulSpan (fsLit "<no location info>") wiredInSrcSpan = UnhelpfulSpan (fsLit "<wired into compiler>") -- | Create a "bad" 'SrcSpan' that has not location information mkGeneralSrcSpan :: FastString -> SrcSpan mkGeneralSrcSpan = UnhelpfulSpan -- | Create a 'SrcSpan' corresponding to a single point srcLocSpan :: SrcLoc -> SrcSpan srcLocSpan (UnhelpfulLoc str) = UnhelpfulSpan str srcLocSpan (RealSrcLoc l) = RealSrcSpan (realSrcLocSpan l) realSrcLocSpan :: RealSrcLoc -> RealSrcSpan realSrcLocSpan (SrcLoc file line col) = SrcSpanPoint file line col -- | Create a 'SrcSpan' between two points in a file mkRealSrcSpan :: RealSrcLoc -> RealSrcLoc -> RealSrcSpan mkRealSrcSpan loc1 loc2 | line1 == line2 = if col1 == col2 then SrcSpanPoint file line1 col1 else SrcSpanOneLine file line1 col1 col2 | otherwise = SrcSpanMultiLine file line1 col1 line2 col2 where line1 = srcLocLine loc1 line2 = srcLocLine loc2 col1 = srcLocCol loc1 col2 = srcLocCol loc2 file = srcLocFile loc1 -- | Create a 'SrcSpan' between two points in a file mkSrcSpan :: SrcLoc -> SrcLoc -> SrcSpan mkSrcSpan (UnhelpfulLoc str) _ = UnhelpfulSpan str mkSrcSpan _ (UnhelpfulLoc str) = UnhelpfulSpan str mkSrcSpan (RealSrcLoc loc1) (RealSrcLoc loc2) = RealSrcSpan (mkRealSrcSpan loc1 loc2) -- | Combines two 'SrcSpan' into one that spans at least all the characters -- within both spans. Assumes the "file" part is the same in both inputs combineSrcSpans :: SrcSpan -> SrcSpan -> SrcSpan combineSrcSpans (UnhelpfulSpan _) r = r -- this seems more useful combineSrcSpans l (UnhelpfulSpan _) = l combineSrcSpans (RealSrcSpan span1) (RealSrcSpan span2) = RealSrcSpan (combineRealSrcSpans span1 span2) -- | Combines two 'SrcSpan' into one that spans at least all the characters -- within both spans. 
Assumes the "file" part is the same in both inputs combineRealSrcSpans :: RealSrcSpan -> RealSrcSpan -> RealSrcSpan combineRealSrcSpans span1 span2 = if line_start == line_end then if col_start == col_end then SrcSpanPoint file line_start col_start else SrcSpanOneLine file line_start col_start col_end else SrcSpanMultiLine file line_start col_start line_end col_end where (line_start, col_start) = min (srcSpanStartLine span1, srcSpanStartCol span1) (srcSpanStartLine span2, srcSpanStartCol span2) (line_end, col_end) = max (srcSpanEndLine span1, srcSpanEndCol span1) (srcSpanEndLine span2, srcSpanEndCol span2) file = srcSpanFile span1 {- ************************************************************************ * * \subsection[SrcSpan-predicates]{Predicates} * * ************************************************************************ -} -- | Test if a 'SrcSpan' is "good", i.e. has precise location information isGoodSrcSpan :: SrcSpan -> Bool isGoodSrcSpan (RealSrcSpan _) = True isGoodSrcSpan (UnhelpfulSpan _) = False isOneLineSpan :: SrcSpan -> Bool -- ^ True if the span is known to straddle only one line. -- For "bad" 'SrcSpan', it returns False isOneLineSpan (RealSrcSpan s) = srcSpanStartLine s == srcSpanEndLine s isOneLineSpan (UnhelpfulSpan _) = False -- | Tests whether the first span "contains" the other span, meaning -- that it covers at least as much source code. True where spans are equal. containsSpan :: RealSrcSpan -> RealSrcSpan -> Bool containsSpan s1 s2 = srcSpanFile s1 == srcSpanFile s2 && (srcSpanStartLine s1, srcSpanStartCol s1) <= (srcSpanStartLine s2, srcSpanStartCol s2) && (srcSpanEndLine s1, srcSpanEndCol s1) >= (srcSpanEndLine s2, srcSpanEndCol s2) {- %************************************************************************ %* * \subsection[SrcSpan-unsafe-access-fns]{Unsafe access functions} * * ************************************************************************ -} srcSpanStartLine :: RealSrcSpan -> Int srcSpanEndLine :: RealSrcSpan -> Int srcSpanStartCol :: RealSrcSpan -> Int srcSpanEndCol :: RealSrcSpan -> Int srcSpanStartLine SrcSpanOneLine{ srcSpanLine=l } = l srcSpanStartLine SrcSpanMultiLine{ srcSpanSLine=l } = l srcSpanStartLine SrcSpanPoint{ srcSpanLine=l } = l srcSpanEndLine SrcSpanOneLine{ srcSpanLine=l } = l srcSpanEndLine SrcSpanMultiLine{ srcSpanELine=l } = l srcSpanEndLine SrcSpanPoint{ srcSpanLine=l } = l srcSpanStartCol SrcSpanOneLine{ srcSpanSCol=l } = l srcSpanStartCol SrcSpanMultiLine{ srcSpanSCol=l } = l srcSpanStartCol SrcSpanPoint{ srcSpanCol=l } = l srcSpanEndCol SrcSpanOneLine{ srcSpanECol=c } = c srcSpanEndCol SrcSpanMultiLine{ srcSpanECol=c } = c srcSpanEndCol SrcSpanPoint{ srcSpanCol=c } = c {- ************************************************************************ * * \subsection[SrcSpan-access-fns]{Access functions} * * ************************************************************************ -} -- | Returns the location at the start of the 'SrcSpan' or a "bad" 'SrcSpan' if that is unavailable srcSpanStart :: SrcSpan -> SrcLoc srcSpanStart (UnhelpfulSpan str) = UnhelpfulLoc str srcSpanStart (RealSrcSpan s) = RealSrcLoc (realSrcSpanStart s) -- | Returns the location at the end of the 'SrcSpan' or a "bad" 'SrcSpan' if that is unavailable srcSpanEnd :: SrcSpan -> SrcLoc srcSpanEnd (UnhelpfulSpan str) = UnhelpfulLoc str srcSpanEnd (RealSrcSpan s) = RealSrcLoc (realSrcSpanEnd s) realSrcSpanStart :: RealSrcSpan -> RealSrcLoc realSrcSpanStart s = mkRealSrcLoc (srcSpanFile s) (srcSpanStartLine s) (srcSpanStartCol s) realSrcSpanEnd :: RealSrcSpan -> 
RealSrcLoc realSrcSpanEnd s = mkRealSrcLoc (srcSpanFile s) (srcSpanEndLine s) (srcSpanEndCol s) -- | Obtains the filename for a 'SrcSpan' if it is "good" srcSpanFileName_maybe :: SrcSpan -> Maybe FastString srcSpanFileName_maybe (RealSrcSpan s) = Just (srcSpanFile s) srcSpanFileName_maybe (UnhelpfulSpan _) = Nothing {- ************************************************************************ * * \subsection[SrcSpan-instances]{Instances} * * ************************************************************************ -} -- We want to order RealSrcSpans first by the start point, then by the -- end point. instance Ord RealSrcSpan where a `compare` b = (realSrcSpanStart a `compare` realSrcSpanStart b) `thenCmp` (realSrcSpanEnd a `compare` realSrcSpanEnd b) instance Show RealSrcLoc where show (SrcLoc filename row col) = "SrcLoc " ++ show filename ++ " " ++ show row ++ " " ++ show col -- Show is used by Lexer.x, because we derive Show for Token instance Show RealSrcSpan where show (SrcSpanOneLine file l sc ec) = "SrcSpanOneLine " ++ show file ++ " " ++ intercalate " " (map show [l,sc,ec]) show (SrcSpanMultiLine file sl sc el ec) = "SrcSpanMultiLine " ++ show file ++ " " ++ intercalate " " (map show [sl,sc,el,ec]) show (SrcSpanPoint file l c) = "SrcSpanPoint " ++ show file ++ " " ++ intercalate " " (map show [l,c]) instance Outputable RealSrcSpan where ppr span = pprUserRealSpan True span -- I don't know why there is this style-based difference -- = getPprStyle $ \ sty -> -- if userStyle sty || debugStyle sty then -- text (showUserRealSpan True span) -- else -- hcat [text "{-# LINE ", int (srcSpanStartLine span), space, -- char '\"', pprFastFilePath $ srcSpanFile span, text " #-}"] instance Outputable SrcSpan where ppr span = pprUserSpan True span -- I don't know why there is this style-based difference -- = getPprStyle $ \ sty -> -- if userStyle sty || debugStyle sty then -- pprUserSpan True span -- else -- case span of -- UnhelpfulSpan _ -> panic "Outputable UnhelpfulSpan" -- RealSrcSpan s -> ppr s showUserSpan :: Bool -> SrcSpan -> String showUserSpan show_path span = showSDocSimple (pprUserSpan show_path span) pprUserSpan :: Bool -> SrcSpan -> SDoc pprUserSpan _ (UnhelpfulSpan s) = ftext s pprUserSpan show_path (RealSrcSpan s) = pprUserRealSpan show_path s pprUserRealSpan :: Bool -> RealSrcSpan -> SDoc pprUserRealSpan show_path (SrcSpanOneLine src_path line start_col end_col) = hcat [ ppWhen show_path (pprFastFilePath src_path <> colon) , int line <> colon , int start_col , ppUnless (end_col - start_col <= 1) (char '-' <> int (end_col - 1)) ] -- For single-character or point spans, we just -- output the starting column number pprUserRealSpan show_path (SrcSpanMultiLine src_path sline scol eline ecol) = hcat [ ppWhen show_path (pprFastFilePath src_path <> colon) , parens (int sline <> comma <> int scol) , char '-' , parens (int eline <> comma <> int ecol') ] where ecol' = if ecol == 0 then ecol else ecol - 1 pprUserRealSpan show_path (SrcSpanPoint src_path line col) = hcat [ ppWhen show_path (pprFastFilePath src_path <> colon) , int line <> colon , int col ] {- ************************************************************************ * * \subsection[Located]{Attaching SrcSpans to things} * * ************************************************************************ -} -- | We attach SrcSpans to lots of things, so let's have a datatype for it. 
data GenLocated l e = L l e deriving (Eq, Ord, Typeable, Data) deriving instance Foldable (GenLocated l) deriving instance Traversable (GenLocated l) type Located e = GenLocated SrcSpan e type RealLocated e = GenLocated RealSrcSpan e unLoc :: GenLocated l e -> e unLoc (L _ e) = e getLoc :: GenLocated l e -> l getLoc (L l _) = l noLoc :: e -> Located e noLoc e = L noSrcSpan e mkGeneralLocated :: String -> e -> Located e mkGeneralLocated s e = L (mkGeneralSrcSpan (fsLit s)) e combineLocs :: Located a -> Located b -> SrcSpan combineLocs a b = combineSrcSpans (getLoc a) (getLoc b) -- | Combine locations from two 'Located' things and add them to a third thing addCLoc :: Located a -> Located b -> c -> Located c addCLoc a b c = L (combineSrcSpans (getLoc a) (getLoc b)) c -- not clear whether to add a general Eq instance, but this is useful sometimes: -- | Tests whether the two located things are equal eqLocated :: Eq a => Located a -> Located a -> Bool eqLocated a b = unLoc a == unLoc b -- not clear whether to add a general Ord instance, but this is useful sometimes: -- | Tests the ordering of the two located things cmpLocated :: Ord a => Located a -> Located a -> Ordering cmpLocated a b = unLoc a `compare` unLoc b instance Functor (GenLocated l) where fmap f (L l e) = L l (f e) instance (Outputable l, Outputable e) => Outputable (GenLocated l e) where ppr (L l e) = -- TODO: We can't do this since Located was refactored into -- GenLocated: -- Print spans without the file name etc -- ifPprDebug (braces (pprUserSpan False l)) ifPprDebug (braces (ppr l)) $$ ppr e {- ************************************************************************ * * \subsection{Ordering SrcSpans for InteractiveUI} * * ************************************************************************ -} -- | Alternative strategies for ordering 'SrcSpan's leftmost_smallest, leftmost_largest, rightmost :: SrcSpan -> SrcSpan -> Ordering rightmost = flip compare leftmost_smallest = compare leftmost_largest a b = (srcSpanStart a `compare` srcSpanStart b) `thenCmp` (srcSpanEnd b `compare` srcSpanEnd a) -- | Determines whether a span encloses a given line and column index spans :: SrcSpan -> (Int, Int) -> Bool spans (UnhelpfulSpan _) _ = panic "spans UnhelpfulSpan" spans (RealSrcSpan span) (l,c) = realSrcSpanStart span <= loc && loc <= realSrcSpanEnd span where loc = mkRealSrcLoc (srcSpanFile span) l c -- | Determines whether a span is enclosed by another one isSubspanOf :: SrcSpan -- ^ The span that may be enclosed by the other -> SrcSpan -- ^ The span it may be enclosed by -> Bool isSubspanOf src parent | srcSpanFileName_maybe parent /= srcSpanFileName_maybe src = False | otherwise = srcSpanStart parent <= srcSpanStart src && srcSpanEnd parent >= srcSpanEnd src
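-- A hypothetical illustration (not part of GHC): how the span API above
-- composes. It assumes access to GHC's internal 'SrcLoc' and 'FastString'
-- modules (e.g. when working against the GHC API); 'spanDemo' is an
-- illustrative name only.
spanDemo :: Bool
spanDemo = containsSpan big small
        && srcSpanStartLine small == 3
        && srcSpanEndCol small == 9
  where
    file  = fsLit "Example.hs"
    small = mkRealSrcSpan (mkRealSrcLoc file 3 5) (mkRealSrcLoc file 3 9)
    big   = mkRealSrcSpan (mkRealSrcLoc file 1 1) (mkRealSrcLoc file 10 1)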
green-haskell/ghc
compiler/basicTypes/SrcLoc.hs
bsd-3-clause
23,216
0
15
6,462
4,171
2,227
1,944
325
3
module Network.API.Codeship.Types where

newtype CodeshipKey = CodeshipKey { mkApiKey :: String } deriving Show

newtype Endpoint = Endpoint { mkEndpoint :: String } deriving Show

newtype Resource = Resource { mkResource :: String } deriving Show
filib/codeship
src/Network/API/Codeship/Types.hs
mit
254
0
6
45
56
39
17
10
0
-- | encoding for the "1:n" picture hanging problem
-- Section 3 of http://arxiv.org/abs/1203.3602

module CO4.Example.HangStandalone where

import CO4.Prelude
import Data.List (inits,tails)

type Pin = Nat
data Dir = L | R deriving (Show, Eq)
data Turn = Turn Dir Pin deriving (Show, Eq)
type Hang = [ Turn ]

constraint :: Nat -> (Hang, [Pin]) -> Bool
constraint s (h, ps) =
     forall h ( \ t -> case t of Turn d p -> leNat p s )
  && primitive h
  && forall ps ( \ p -> nullable p h )

forall xs p = all p xs

matching :: Turn -> Turn -> Bool
matching (Turn d1 p1) (Turn d2 p2) =
  not (eqDir d1 d2) && eqNat p1 p2

eqDir d1 d2 = case d1 of
  L -> case d2 of L -> True  ; R -> False
  R -> case d2 of L -> False ; R -> True

primitive :: Hang -> Bool
primitive h = not $ or $ zipWith matching h $ tail h

-- * reducibility checking that relies on caching.

-- | does the Hang reduce to [] when pin p is removed?
-- with cache, this is equivalent to CYK parsing,
-- since the second argument is always a substring
-- of the full sequence.
nullable :: Pin -> Hang -> Bool
nullable p h = case h of
  [] -> True
  x:xs -> case xs of
    [] -> case x of Turn d q -> eqNat p q
    y:ys -> ( matching x (last xs) && nullable p (init xs) )
         || or (map (\(l,r) -> nullable p l && nullable p r)
                $ nonempty_splits $ x:xs )

nonempty_splits xs = tail $ init $ splits xs

splits xs = zip (inits xs) $ tails xs
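-- A hypothetical, self-contained illustration (not part of the CO4 encoding
-- above): the same reducibility check with plain 'Int' pins instead of CO4's
-- 'Nat', so it can be run directly in GHCi. For the classic two-pin word
-- [TurnI L 1, TurnI R 2, TurnI R 1, TurnI L 2], removing either pin lets the
-- picture fall:
--
--   nullableI 1 [TurnI L 1, TurnI R 2, TurnI R 1, TurnI L 2] == True
--   nullableI 2 [TurnI L 1, TurnI R 2, TurnI R 1, TurnI L 2] == True
data TurnI = TurnI Dir Int deriving (Show, Eq)

matchingI :: TurnI -> TurnI -> Bool
matchingI (TurnI d1 p1) (TurnI d2 p2) = d1 /= d2 && p1 == p2

nullableI :: Int -> [TurnI] -> Bool
nullableI p h = case h of
  []          -> True
  [TurnI _ q] -> p == q
  x:xs -> ( matchingI x (last xs) && nullableI p (init xs) )
       || or [ nullableI p l && nullableI p r
             | (l, r) <- tail (init (zip (inits h) (tails h))) ]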
apunktbau/co4
test/CO4/Example/HangStandalone.hs
gpl-3.0
1,434
0
21
372
573
298
275
32
4
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ForeignFunctionInterface #-}

module Foreign.CUDA.Cublas.FFI where

import Foreign.CUDA.Cublas.Types
import Foreign.CUDA.Cublas.TH
import Foreign.C.Types

$(doIO $ makeFFIDecs "cublas" cublasFile)
$(doIO $ makeAllFuncs "cublas" cublasFile)
kathawala/symdiff
cublas/Foreign/CUDA/Cublas/FFI.hs
gpl-3.0
287
0
8
31
62
36
26
8
0
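The record above is almost entirely generated: the two splices presumably expand, at compile time, into raw FFI imports plus typed wrappers for the routines listed in `cublasFile`. As a rough, purely illustrative idea of what one such binding looks like, here is a hand-written counterpart for cublasCreate; the Haskell-side name, and the assumption that `Handle` from Foreign.CUDA.Cublas.Types stands in for cublasHandle_t, are guesses rather than the generator's actual output.

import Foreign.C.Types (CInt)
import Foreign.CUDA.Cublas.Types (Handle)
import Foreign.Ptr (Ptr)

-- Hypothetical hand-written counterpart of one generated raw import.
foreign import ccall unsafe "cublasCreate_v2"
  c_cublasCreate :: Ptr Handle -> IO CInt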
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-matches #-} -- Derived from AWS service descriptions, licensed under Apache 2.0. -- | -- Module : Network.AWS.SWF.DeprecateDomain -- Copyright : (c) 2013-2015 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Deprecates the specified domain. After a domain has been deprecated it -- cannot be used to create new workflow executions or register new types. -- However, you can still use visibility actions on this domain. -- Deprecating a domain also deprecates all activity and workflow types -- registered in the domain. Executions that were started before the domain -- was deprecated will continue to run. -- -- This operation is eventually consistent. The results are best effort and -- may not exactly reflect recent updates and changes. -- -- __Access Control__ -- -- You can use IAM policies to control this action\'s access to Amazon SWF -- resources as follows: -- -- - Use a 'Resource' element with the domain name to limit the action to -- only specified domains. -- - Use an 'Action' element to allow or deny permission to call this -- action. -- - You cannot use an IAM policy to constrain this action\'s parameters. -- -- If the caller does not have sufficient permissions to invoke the action, -- or the parameter values fall outside the specified constraints, the -- action fails. The associated event attribute\'s __cause__ parameter will -- be set to OPERATION_NOT_PERMITTED. For details and example IAM policies, -- see -- <http://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html Using IAM to Manage Access to Amazon SWF Workflows>. -- -- /See:/ <http://docs.aws.amazon.com/amazonswf/latest/apireference/API_DeprecateDomain.html AWS API Reference> for DeprecateDomain. module Network.AWS.SWF.DeprecateDomain ( -- * Creating a Request deprecateDomain , DeprecateDomain -- * Request Lenses , dName -- * Destructuring the Response , deprecateDomainResponse , DeprecateDomainResponse ) where import Network.AWS.Prelude import Network.AWS.Request import Network.AWS.Response import Network.AWS.SWF.Types import Network.AWS.SWF.Types.Product -- | /See:/ 'deprecateDomain' smart constructor. newtype DeprecateDomain = DeprecateDomain' { _dName :: Text } deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'DeprecateDomain' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'dName' deprecateDomain :: Text -- ^ 'dName' -> DeprecateDomain deprecateDomain pName_ = DeprecateDomain' { _dName = pName_ } -- | The name of the domain to deprecate. 
dName :: Lens' DeprecateDomain Text dName = lens _dName (\ s a -> s{_dName = a}); instance AWSRequest DeprecateDomain where type Rs DeprecateDomain = DeprecateDomainResponse request = postJSON sWF response = receiveNull DeprecateDomainResponse' instance ToHeaders DeprecateDomain where toHeaders = const (mconcat ["X-Amz-Target" =# ("SimpleWorkflowService.DeprecateDomain" :: ByteString), "Content-Type" =# ("application/x-amz-json-1.0" :: ByteString)]) instance ToJSON DeprecateDomain where toJSON DeprecateDomain'{..} = object (catMaybes [Just ("name" .= _dName)]) instance ToPath DeprecateDomain where toPath = const "/" instance ToQuery DeprecateDomain where toQuery = const mempty -- | /See:/ 'deprecateDomainResponse' smart constructor. data DeprecateDomainResponse = DeprecateDomainResponse' deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'DeprecateDomainResponse' with the minimum fields required to make a request. -- deprecateDomainResponse :: DeprecateDomainResponse deprecateDomainResponse = DeprecateDomainResponse'
olorin/amazonka
amazonka-swf/gen/Network/AWS/SWF/DeprecateDomain.hs
mpl-2.0
4,436
0
12
918
423
265
158
57
1
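A sketch of how this request might be issued with the amazonka runtime of roughly the same vintage; later releases move the Region argument out of newEnv, and exact re-export locations for Region and Credentials vary slightly across versions. The region, credential discovery mode, and domain name are placeholders.

{-# LANGUAGE OverloadedStrings #-}

import Control.Monad.Trans.Resource (runResourceT)
import Network.AWS
import Network.AWS.SWF.DeprecateDomain

-- Deprecate a (placeholder) SWF domain using discovered credentials.
deprecateDemo :: IO DeprecateDomainResponse
deprecateDemo = do
  env <- newEnv Oregon Discover
  runResourceT . runAWS env $ send (deprecateDomain "my-test-domain")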
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-matches #-} -- Derived from AWS service descriptions, licensed under Apache 2.0. -- | -- Module : Network.AWS.Glacier.RemoveTagsFromVault -- Copyright : (c) 2013-2015 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- This operation removes one or more tags from the set of tags attached to -- a vault. For more information about tags, see -- <http://docs.aws.amazon.com/amazonglacier/latest/dev/tagging.html Tagging Amazon Glacier Resources>. -- This operation is idempotent. The operation will be successful, even if -- there are no tags attached to the vault. -- -- /See:/ <http://docs.aws.amazon.com/amazonglacier/latest/dev/api-RemoveTagsFromVault.html AWS API Reference> for RemoveTagsFromVault. module Network.AWS.Glacier.RemoveTagsFromVault ( -- * Creating a Request removeTagsFromVault , RemoveTagsFromVault -- * Request Lenses , rtfvTagKeys , rtfvAccountId , rtfvVaultName -- * Destructuring the Response , removeTagsFromVaultResponse , RemoveTagsFromVaultResponse ) where import Network.AWS.Glacier.Types import Network.AWS.Glacier.Types.Product import Network.AWS.Prelude import Network.AWS.Request import Network.AWS.Response -- | The input value for 'RemoveTagsFromVaultInput'. -- -- /See:/ 'removeTagsFromVault' smart constructor. data RemoveTagsFromVault = RemoveTagsFromVault' { _rtfvTagKeys :: !(Maybe [Text]) , _rtfvAccountId :: !Text , _rtfvVaultName :: !Text } deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'RemoveTagsFromVault' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'rtfvTagKeys' -- -- * 'rtfvAccountId' -- -- * 'rtfvVaultName' removeTagsFromVault :: Text -- ^ 'rtfvAccountId' -> Text -- ^ 'rtfvVaultName' -> RemoveTagsFromVault removeTagsFromVault pAccountId_ pVaultName_ = RemoveTagsFromVault' { _rtfvTagKeys = Nothing , _rtfvAccountId = pAccountId_ , _rtfvVaultName = pVaultName_ } -- | A list of tag keys. Each corresponding tag is removed from the vault. rtfvTagKeys :: Lens' RemoveTagsFromVault [Text] rtfvTagKeys = lens _rtfvTagKeys (\ s a -> s{_rtfvTagKeys = a}) . _Default . _Coerce; -- | The 'AccountId' value is the AWS account ID of the account that owns the -- vault. You can either specify an AWS account ID or optionally a single -- apos'-'apos (hyphen), in which case Amazon Glacier uses the AWS account -- ID associated with the credentials used to sign the request. If you use -- an account ID, do not include any hyphens (apos-apos) in the ID. rtfvAccountId :: Lens' RemoveTagsFromVault Text rtfvAccountId = lens _rtfvAccountId (\ s a -> s{_rtfvAccountId = a}); -- | The name of the vault. 
rtfvVaultName :: Lens' RemoveTagsFromVault Text rtfvVaultName = lens _rtfvVaultName (\ s a -> s{_rtfvVaultName = a}); instance AWSRequest RemoveTagsFromVault where type Rs RemoveTagsFromVault = RemoveTagsFromVaultResponse request = postJSON glacier response = receiveNull RemoveTagsFromVaultResponse' instance ToHeaders RemoveTagsFromVault where toHeaders = const mempty instance ToJSON RemoveTagsFromVault where toJSON RemoveTagsFromVault'{..} = object (catMaybes [("TagKeys" .=) <$> _rtfvTagKeys]) instance ToPath RemoveTagsFromVault where toPath RemoveTagsFromVault'{..} = mconcat ["/", toBS _rtfvAccountId, "/vaults/", toBS _rtfvVaultName, "/tags"] instance ToQuery RemoveTagsFromVault where toQuery = const (mconcat ["operation=remove"]) -- | /See:/ 'removeTagsFromVaultResponse' smart constructor. data RemoveTagsFromVaultResponse = RemoveTagsFromVaultResponse' deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'RemoveTagsFromVaultResponse' with the minimum fields required to make a request. -- removeTagsFromVaultResponse :: RemoveTagsFromVaultResponse removeTagsFromVaultResponse = RemoveTagsFromVaultResponse'
fmapfmapfmap/amazonka
amazonka-glacier/gen/Network/AWS/Glacier/RemoveTagsFromVault.hs
mpl-2.0
4,562
0
12
870
558
336
222
72
1
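The same calling pattern can be sketched for this Glacier request: passing "-" as the account ID lets Glacier use the account of the signing credentials, and the vault name and tag keys are placeholders. As above, this assumes an amazonka runtime of the same era.

{-# LANGUAGE OverloadedStrings #-}

import Control.Lens ((&), (.~))
import Control.Monad.Trans.Resource (runResourceT)
import Network.AWS
import Network.AWS.Glacier.RemoveTagsFromVault

-- Remove two (placeholder) tag keys from a vault, given an existing Env.
removeDemo :: Env -> IO RemoveTagsFromVaultResponse
removeDemo env =
  runResourceT . runAWS env $
    send (removeTagsFromVault "-" "my-vault" & rtfvTagKeys .~ ["team", "stage"])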
{- Copyright 2012 Google Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -} module Main (main) where import Plush.Main -- | This is never actually called. See src/main.c for the actual main function. -- that function also calls 'plushMain', but does so after some preparation of -- the execution state prior to GHC's RTS initialization. -- -- This file is required, as cabal passes a single Haskell file to GHC's -make -- mode, and that needs a "Main" module. The GHC flag -no-hs-main is used to -- ensure that GHC doesn't output a C main function, and ours can be linked in -- without conflict. main :: IO () main = plushMain
kustomzone/plush
src-main/Main.hs
apache-2.0
1,133
0
6
196
38
26
12
4
1
{-# LANGUAGE RecordWildCards #-} -- | -- Module : Criterion -- Copyright : (c) 2009-2014 Bryan O'Sullivan -- -- License : BSD-style -- Maintainer : [email protected] -- Stability : experimental -- Portability : GHC -- -- Core benchmarking code. module Criterion ( -- * Benchmarkable code Benchmarkable -- * Creating a benchmark suite , Benchmark , env , bench , bgroup -- ** Running a benchmark , nf , whnf , nfIO , whnfIO -- * For interactive use , benchmark , benchmarkWith , benchmark' , benchmarkWith' ) where import Control.Monad (void) import Criterion.IO.Printf (note) import Criterion.Internal (runAndAnalyseOne) import Criterion.Main.Options (defaultConfig) import Criterion.Measurement (initializeTime) import Criterion.Monad (withConfig) import Criterion.Types -- | Run a benchmark interactively, and analyse its performance. benchmark :: Benchmarkable -> IO () benchmark bm = void $ benchmark' bm -- | Run a benchmark interactively, analyse its performance, and -- return the analysis. benchmark' :: Benchmarkable -> IO Report benchmark' = benchmarkWith' defaultConfig -- | Run a benchmark interactively, and analyse its performance. benchmarkWith :: Config -> Benchmarkable -> IO () benchmarkWith cfg bm = void $ benchmarkWith' cfg bm -- | Run a benchmark interactively, analyse its performance, and -- return the analysis. benchmarkWith' :: Config -> Benchmarkable -> IO Report benchmarkWith' cfg bm = do initializeTime withConfig cfg $ do _ <- note "benchmarking...\n" Analysed rpt <- runAndAnalyseOne 0 "function" bm return rpt
rrnewton/criterion
Criterion.hs
bsd-2-clause
1,659
0
11
342
291
165
126
36
1
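A minimal interactive session against the API above; `fib` is only a stand-in workload, not part of criterion.

import Criterion

-- Deliberately naive Fibonacci, used purely as something worth timing.
fib :: Int -> Integer
fib n = if n < 2 then fromIntegral n else fib (n - 1) + fib (n - 2)

main :: IO ()
main = do
  benchmark (whnf fib 25)          -- run, analyse, and print a report
  rpt <- benchmark' (nf fib 25)    -- same, but keep the 'Report' for later use
  rpt `seq` putStrLn "analysis captured"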