column               dtype           range of values
------               -----           ---------------
code                 stringlengths   5 to 1.03M
repo_name            stringlengths   5 to 90
path                 stringlengths   4 to 158
license              stringclasses   15 values
size                 int64           5 to 1.03M
n_ast_errors         int64           0 to 53.9k
ast_max_depth        int64           2 to 4.17k
n_whitespaces        int64           0 to 365k
n_ast_nodes          int64           3 to 317k
n_ast_terminals      int64           1 to 171k
n_ast_nonterminals   int64           1 to 146k
loc                  int64           -1 to 37.3k
cycloplexity         int64           -1 to 1.31k
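The column list above doubles as the per-row schema: each row below carries one source file plus its repository, path, license, and parse metrics. As a minimal sketch only (assuming each row is decoded into a plain Haskell record whose fields mirror these columns, and that -1 in loc and cycloplexity marks a value that was not computed, as some of the sample rows suggest), filtering for cleanly parsed files could look like the following; the Row type and cleanRows helper are hypothetical and not part of the dataset itself.

-- Hypothetical record mirroring the columns listed above.
data Row = Row
  { code             :: String
  , repoName         :: String
  , path             :: String
  , license          :: String
  , size             :: Int
  , nAstErrors       :: Int
  , astMaxDepth      :: Int
  , nWhitespaces     :: Int
  , nAstNodes        :: Int
  , nAstTerminals    :: Int
  , nAstNonterminals :: Int
  , loc              :: Int
  , cycloplexity     :: Int
  }

-- Keep only rows whose files parsed without AST errors and that carry a
-- real line count (loc = -1 is treated here as "not computed").
cleanRows :: [Row] -> [Row]
cleanRows = filter (\r -> nAstErrors r == 0 && loc r >= 0)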
module Feature.QueryLimitedSpec where

import Network.Wai (Application)

import Network.HTTP.Types
import Test.Hspec
import Test.Hspec.Wai
import Test.Hspec.Wai.JSON

import Protolude hiding (get)
import SpecHelper

spec :: SpecWith ((), Application)
spec = describe "Requesting many items with server limits(max-rows) enabled" $ do
  it "restricts results" $
    get "/items?order=id"
      `shouldRespondWith` [json| [{"id":1},{"id":2}] |]
      { matchHeaders = ["Content-Range" <:> "0-1/*"] }

  it "respects additional client limiting" $ do
    request methodGet "/items" (rangeHdrs $ ByteRangeFromTo 0 0) ""
      `shouldRespondWith` [json| [{"id":1}] |]
      { matchHeaders = ["Content-Range" <:> "0-0/*"] }

  it "works on all levels" $
    get "/users?select=id,tasks(id)&order=id.asc&tasks.order=id.asc"
      `shouldRespondWith` [json|[{"id":1,"tasks":[{"id":1},{"id":2}]},{"id":2,"tasks":[{"id":5},{"id":6}]}]|]
      { matchHeaders = ["Content-Range" <:> "0-1/*"] }

  it "succeeds in getting parent embeds despite the limit, see #647" $
    get "/tasks?select=id,project:projects(id)&id=gt.5"
      `shouldRespondWith` [json|[{"id":6,"project":{"id":3}},{"id":7,"project":{"id":4}}]|]
      { matchHeaders = ["Content-Range" <:> "0-1/*"] }

  it "can offset the parent embed, being consistent with the other embed types" $
    get "/tasks?select=id,project:projects(id)&id=gt.5&project.offset=1"
      `shouldRespondWith` [json|[{"id":6,"project":null}, {"id":7,"project":null}]|]
      { matchHeaders = ["Content-Range" <:> "0-1/*"] }

  context "count=estimated" $ do
    it "uses the query planner guess when query rows > maxRows" $
      request methodHead "/getallprojects_view" [("Prefer", "count=estimated")] ""
        `shouldRespondWith` ""
        { matchStatus  = 206
        , matchHeaders = ["Content-Range" <:> "0-1/2019"] }

    it "gives exact count when query rows <= maxRows" $
      request methodHead "/getallprojects_view?id=lt.3" [("Prefer", "count=estimated")] ""
        `shouldRespondWith` ""
        { matchHeaders = ["Content-Range" <:> "0-1/2"] }

    it "only uses the query planner guess if it's indeed greater than the exact count" $
      request methodHead "/get_projects_above_view" [("Prefer", "count=estimated")] ""
        `shouldRespondWith` ""
        { matchStatus  = 206
        , matchHeaders = ["Content-Range" <:> "0-1/3"] }
steve-chavez/postgrest
test/Feature/QueryLimitedSpec.hs
mit
2,608
0
15
665
460
262
198
-1
-1
{-# LANGUAGE DeriveFoldable #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE ViewPatterns #-} module Data.FingerTree.Lazy ( FingerTree , Measured (..) , fromFoldable , Split (..) , splitTree , split ) where import Prelude hiding (foldl, foldr) import Data.FingerTree.Internal data Node v a = Node2 v a a | Node3 v a a a deriving (Show, Foldable) node2 :: Measured a v => a -> a -> Node v a node2 a b = Node2 v a b where v = measure [a, b] node3 :: Measured a v => a -> a -> a -> Node v a node3 a b c = Node3 v a b c where v = measure [a, b, c] data FingerTree v a = Empty | Single a | Deep v [a] (FingerTree v (Node v a)) [a] deriving (Show, Foldable) deep :: Measured a v => [a] -> FingerTree v (Node v a) -> [a] -> FingerTree v a deep pr m sf = Deep v pr m sf where v = measure pr <> measure m <> measure sf instance Monoid v => Measured (Node v a) v where measure (Node2 v _ _) = v measure (Node3 v _ _ _) = v instance Measured a v => Measured [a] v where measure = mconcat . map measure instance Measured a v => Measured (FingerTree v a) v where measure Empty = mempty measure (Single a) = measure a measure (Deep v _ _ _) = v deepL :: Measured a v => [a] -> FingerTree v (Node v a) -> [a] -> FingerTree v a deepL [] (viewL -> Nothing) sf = fromFoldable sf deepL [] (viewL -> Just (pr, m)) sf = deep (toList pr) m sf deepL pr m sf = deep pr m sf deepR :: Measured a v => [a] -> FingerTree v (Node v a) -> [a] -> FingerTree v a deepR pr (viewR -> Nothing) [] = fromFoldable pr deepR pr (viewR -> Just (m, sf)) [] = deep pr m (toList sf) deepR pr m sf = deep pr m sf instance Measured a v => Deque (FingerTree v) a where a <| Empty = Single a a <| Single b = deep [a] Empty [b] a <| Deep _ [b, c, d, e] m sf = deep [a, b] (node3 c d e <| m) sf a <| Deep _ pr m sf = deep ([a] ++ pr) m sf Empty |> a = Single a Single b |> a = deep [b] Empty [a] Deep _ pr m [e, d, c, b] |> a = deep pr (m |> node3 e d c) [b, a] Deep _ pr m sf |> a = deep pr m (sf ++ [a]) viewL Empty = fail "" viewL (Single x) = return (x, Empty) viewL (Deep _ pr m sf) = return (head pr, deepL (tail pr) m sf) viewR Empty = fail "" viewR (Single x) = return (Empty, x) viewR (Deep _ pr m sf) = return (deepR pr m (init sf), last sf) fromFoldable :: (Foldable t, Measured a v) => t a -> FingerTree v a fromFoldable = foldr (<|) Empty instance Measured a v => Monoid (FingerTree v a) where mempty = Empty xs `mappend` ys = app3 xs [] ys app3 :: Measured a v => FingerTree v a -> [a] -> FingerTree v a -> FingerTree v a app3 Empty ts xs = foldr (<|) xs ts app3 xs ts Empty = foldl (|>) xs ts app3 (Single x) ts xs = x <| foldr (<|) xs ts app3 xs ts (Single x) = foldl (|>) xs ts |> x app3 (Deep _ pr1 m1 sf1) ts (Deep _ pr2 m2 sf2) = deep pr1 (app3 m1 (nodes (sf1 ++ ts ++ pr2)) m2) sf2 where nodes [a, b] = [node2 a b] nodes [a, b, c] = [node3 a b c] nodes [a, b, c, d] = [node2 a b, node2 c d] nodes (a:b:c:xs) = node3 a b c:nodes xs data Split f a = Split (f a) a (f a) splitList :: Measured a v => (v -> Bool) -> v -> [a] -> Split [] a splitList _ _ [x] = Split [] x [] splitList p i (x:xs) | p i' = Split [] x xs | otherwise = let Split l a r = splitList p i' xs in Split (x:l) a r where i' = i <> measure x splitTree :: Measured a v => (v -> Bool) -> v -> FingerTree v a -> Split (FingerTree v) a splitTree _ _ (Single x) = Split Empty x Empty splitTree p i (Deep _ pr m sf) | p vpr = let Split l x r = splitList p i pr in Split (fromFoldable l) x (deepL r m sf) | p vm = let Split ml xs mr = splitTree p vpr m Split l x r = splitList p 
(vpr <> measure ml) (toList xs) in Split (deepR pr ml l) x (deepL r mr sf) | otherwise = let Split l x r = splitList p vm sf in Split (deepR pr m l) x (fromFoldable r) where vpr = i <> measure pr vm = vpr <> measure m split :: Measured a v => (v -> Bool) -> FingerTree v a -> (FingerTree v a, FingerTree v a) split _ Empty = (Empty, Empty) split p xs | p (measure xs) = (l, x <| r) | otherwise = (xs, Empty) where Split l x r = splitTree p mempty xs
meimisaki/FingerTree
Data/FingerTree/Lazy.hs
mit
4,293
0
13
1,230
2,391
1,200
1,191
102
4
module Oczor.Test.Tests where import Oczor.Parser.Parser import Oczor.Syntax.Syntax import ClassyPrelude import Oczor.Test.TestEngine import Oczor.Test.Files import Oczor.Infer.Infer import Oczor.Converter.Converter import Oczor.Converter.Rewriter as Rewriter import Oczor.CodeGen.CodeGenJs import Oczor.Test.TestCompiler import Control.Arrow import Oczor.Utl import Oczor.Compiler.Compiler import Oczor.Pretty.Pretty a = p >> i >> t >> c >> cc >> g af = a >> f mp = ("parser", (show +++ show) . fmap removeMD . parseExpr) mi = ("infer", Right . either show id . map show . fmap lastType . inferTxt) my = ("pretty", Right . either show id . map prettyShow . inferTxt) mt = ("inferast", (show +++ show) . fmap (removeContext . snd) . inferAllTxt) mc = ("converter", (show +++ show) . convertTxt2) mcc = ("converter-class", Right . either show show . compileJsPartTxt . pack) mr = ("rewriter", (show +++ (pshow . Rewriter.rewrite "js")) . convertTxt2) mg = ("codegen", Right . either show show . compileJsPartTxt . pack) p = checkDir mp refreshp = refreshFile mp refreshpDir = refreshDir mp i = checkDir mi refreshi = refreshFile mi refreshiDir = refreshDir mi y = checkDir my refreshy = refreshFile my refreshyDir = refreshDir my t = checkDir mt refresht = refreshFile mt refreshtDir = refreshDir mt c = checkDir mc refreshc = refreshFile mc refreshcDir = refreshDir mc cc = checkDir mcc refreshcc = refreshFile mcc refreshccDir = refreshDir mcc r = checkDir mr refreshr = refreshFile mr refreshrDir = refreshDir mr g = checkDir mg refreshg = refreshFile mg refreshgDir = refreshDir mg convertTxt2 = inferAllTxt >=> return . uncurry convert2 convertTxt = putStrLn . pack . either show pshow . convertTxt2 inferAstTxt = putStrLn . pack . either show pshow . inferAstTxt2 codegenTxt2 x = do (context, tast) <- inferAllTxt x return $ codeGen $ convert2 context tast codegenTxt, convertTxt, inferAstTxt :: String -> IO () codegenTxt = putStrLn . pack . either show show . codegenTxt2
ptol/oczor
src/Oczor/Test/Tests.hs
mit
2,000
0
12
329
700
377
323
-1
-1
conceito :: Bool -> Bool -> String
conceito _     True  = "O"
conceito True  False = "A"
conceito False False = "R"

main = do
  putStrLn "Entre com a nota: "
  line <- getLine
  let nota = read line
  putStrLn "Entre com limiar: "
  line <- getLine
  let limiar = read line
  putStrLn "Entre com as faltas: "
  line <- getLine
  let faltas = read line
  putStrLn "Entre com max. faltas: "
  line <- getLine
  let maxFaltas = read line
  let c = conceito ((nota :: Float) > (limiar :: Float)) ((faltas :: Int) > (maxFaltas :: Int))
  putStrLn c
folivetti/PI-UFABC
AULA_02/Haskell/NotaConceito.hs
mit
616
0
13
206
207
97
110
19
1
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-} module GHCJS.DOM.JSFFI.Generated.SVGTextPositioningElement (js_getX, getX, js_getY, getY, js_getDx, getDx, js_getDy, getDy, js_getRotate, getRotate, SVGTextPositioningElement, castToSVGTextPositioningElement, gTypeSVGTextPositioningElement, IsSVGTextPositioningElement, toSVGTextPositioningElement) where import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord) import Data.Typeable (Typeable) import GHCJS.Types (JSVal(..), JSString) import GHCJS.Foreign (jsNull) import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..)) import GHCJS.Marshal (ToJSVal(..), FromJSVal(..)) import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..)) import Control.Monad.IO.Class (MonadIO(..)) import Data.Int (Int64) import Data.Word (Word, Word64) import GHCJS.DOM.Types import Control.Applicative ((<$>)) import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName) import GHCJS.DOM.JSFFI.Generated.Enums foreign import javascript unsafe "$1[\"x\"]" js_getX :: SVGTextPositioningElement -> IO (Nullable SVGAnimatedLengthList) -- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTextPositioningElement.x Mozilla SVGTextPositioningElement.x documentation> getX :: (MonadIO m, IsSVGTextPositioningElement self) => self -> m (Maybe SVGAnimatedLengthList) getX self = liftIO (nullableToMaybe <$> (js_getX (toSVGTextPositioningElement self))) foreign import javascript unsafe "$1[\"y\"]" js_getY :: SVGTextPositioningElement -> IO (Nullable SVGAnimatedLengthList) -- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTextPositioningElement.y Mozilla SVGTextPositioningElement.y documentation> getY :: (MonadIO m, IsSVGTextPositioningElement self) => self -> m (Maybe SVGAnimatedLengthList) getY self = liftIO (nullableToMaybe <$> (js_getY (toSVGTextPositioningElement self))) foreign import javascript unsafe "$1[\"dx\"]" js_getDx :: SVGTextPositioningElement -> IO (Nullable SVGAnimatedLengthList) -- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTextPositioningElement.dx Mozilla SVGTextPositioningElement.dx documentation> getDx :: (MonadIO m, IsSVGTextPositioningElement self) => self -> m (Maybe SVGAnimatedLengthList) getDx self = liftIO (nullableToMaybe <$> (js_getDx (toSVGTextPositioningElement self))) foreign import javascript unsafe "$1[\"dy\"]" js_getDy :: SVGTextPositioningElement -> IO (Nullable SVGAnimatedLengthList) -- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTextPositioningElement.dy Mozilla SVGTextPositioningElement.dy documentation> getDy :: (MonadIO m, IsSVGTextPositioningElement self) => self -> m (Maybe SVGAnimatedLengthList) getDy self = liftIO (nullableToMaybe <$> (js_getDy (toSVGTextPositioningElement self))) foreign import javascript unsafe "$1[\"rotate\"]" js_getRotate :: SVGTextPositioningElement -> IO (Nullable SVGAnimatedNumberList) -- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTextPositioningElement.rotate Mozilla SVGTextPositioningElement.rotate documentation> getRotate :: (MonadIO m, IsSVGTextPositioningElement self) => self -> m (Maybe SVGAnimatedNumberList) getRotate self = liftIO (nullableToMaybe <$> (js_getRotate (toSVGTextPositioningElement self)))
manyoo/ghcjs-dom
ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/SVGTextPositioningElement.hs
mit
3,586
30
11
507
778
448
330
61
1
module Graph.Type.DenseList where

type Vertex = Int
type Vertices = [Bool]

data Graph = Graph [Vertices]
    deriving (Eq, Show)

type Clique = [Int]
banacorn/Graphentheoretische-Paralleler-Algorithmus
src/Graph/Type/DenseList.hs
mit
148
0
7
26
54
34
20
5
0
------------------------------------------------------------------------------ -- | A better prelude. module Game.Prelude ( -- * New modules module Prelude -- * Booleans , iff , (?) -- * Composition , owl , dot , swing -- * Functors , map , twimap , (<$$>) , lmap , lcontramap , rmap -- * Debug , traceId , traceF , traceM , traceShowM ) where ------------------------------------------------------------------------------ import Control.Applicative as Prelude import Control.Arrow as Prelude import Control.Category as Prelude import Control.Lens as Prelude hiding (lmap, rmap, (<~)) import Control.Monad as Prelude import Control.Monad.State as Prelude import Data.Bifunctor.Apply as Prelude hiding (first, second, (<<$>>)) import Data.Either as Prelude import Data.Foldable as Prelude (concat, elem, notElem, foldl, foldr) import Data.Functor.Contravariant as Prelude import Data.Int as Prelude import Data.Maybe as Prelude import Data.Monoid as Prelude import Data.Word as Prelude import Debug.Trace as Prelude import Prelude hiding (concat, elem, notElem, foldl, foldr, id, map, (++), (.)) ------------------------------------------------------------------------------ -- | Function form of if-then-else. iff :: Bool -> a -> a -> a iff c t e = if c then t else e ------------------------------------------------------------------------------ -- | Variant of 'iff' with the condition at the end. (?) :: a -> a -> Bool -> a (?) = dot flip flip iff infix 1 ? ------------------------------------------------------------------------------ -- | Owl composition. -- -- > owl f a g b = f a (g b) owl :: Category cat => (a -> cat b c) -> a -> cat d b -> cat d c owl = (.) (.) ------------------------------------------------------------------------------ -- | Dot composition. -- -- > f `dot` g == (f .) . g dot :: Category cat => cat c d -> (a -> cat b c) -> a -> cat b d dot = (.) . (.) ------------------------------------------------------------------------------ -- | Swing composition. swing :: (((a -> b) -> b) -> c -> d) -> c -> a -> d swing = flip . (. flip id) ------------------------------------------------------------------------------ -- | Functor map. map :: Functor f => (a -> b) -> f a -> f b map = fmap ------------------------------------------------------------------------------ -- | Bifunctor map over both elements. twimap :: Bifunctor f => (a -> b) -> f a a -> f b b twimap = join bimap ------------------------------------------------------------------------------ -- | Operator form of twimap. (<$$>) :: Bifunctor f => (a -> b) -> f a a -> f b b (<$$>) = twimap infixl 4 <$$> ------------------------------------------------------------------------------ -- | Bifunctor map over the left element. lmap :: Bifunctor f => (a -> b) -> f a c -> f b c lmap = flip bimap id ------------------------------------------------------------------------------ -- | Profunctor map over the left element. lcontramap :: Profunctor f => (b -> a) -> f a c -> f b c lcontramap = flip dimap id ------------------------------------------------------------------------------ -- | Maps the right element of a Bifunctor or a Profunctor. -- -- Note that this assumes that they have a typical implementation of -- Functor as well. Using this function is more clear than an ordinary map -- on multi-functors, particularly bifunctors (such as functions, or pairs). rmap :: Functor f => (a -> b) -> f a -> f b rmap = map ------------------------------------------------------------------------------ -- | Shows the value and evaluates to it. 
traceId :: Show a => a -> a traceId = join traceShow ------------------------------------------------------------------------------ -- | Traces the result of applying the function to the value, then evaluates -- to the value. traceF :: Show b => (a -> b) -> a -> a traceF f a = traceShow (f a) a ------------------------------------------------------------------------------ -- | Monadic trace. traceM :: Monad m => String -> m () traceM s = trace s $ return () ------------------------------------------------------------------------------ -- | Monadic traceShow. traceShowM :: (Show a, Monad m) => a -> m () traceShowM a = traceShow a $ return ()
zmthy/vicarious
src/Game/Prelude.hs
mit
4,333
0
11
774
976
572
404
67
2
{-# htermination minimum :: (Ord a, Ord k) => [(a,k)] -> (a,k) #-}
ComputationWithBoundedResources/ara-inference
doc/tpdb_trs/Haskell/full_haskell/Prelude_minimum_12.hs
mit
67
0
2
13
3
2
1
1
0
module Board where import Control.Monad import Control.Applicative import Control.Arrow import Data.Maybe import Math.Geometry.Grid.Hexagonal import Math.Geometry.Grid.HexagonalInternal import Math.Geometry.GridInternal import Math.Geometry.GridMap ((!)) import qualified Math.Geometry.GridMap as M import qualified Math.Geometry.GridMap.Lazy as M import Test.QuickCheck.Arbitrary -- to randomly generate values import Test.QuickCheck.Gen -- | Alias to be able to change the type of map quickly. type MapGrid = HexHexGrid -- | Tile coordinate alias. type TileCoord = (Int, Int) -- | Alias for a mapping between tile coordinates and tiles. type TileMap = M.LGridMap MapGrid Tile -- | Basic map tiles. Just contains tile coordinates. -- This is referred by many functions, it's a constant. board :: MapGrid board = hexHexGrid 10 -- | Convenience name for all tile coordinates. tiles :: [TileCoord] tiles = indices board -- | Convenience name for the number of tile coordinates. numTiles :: Int numTiles = tileCount board -- | Definition for the type that contains the information -- needed for a tile. Note that the list of units and cities of a player -- are also generated from this. (from TileMap to be clearer) data Tile = Tile { tileTerrain :: Terrain , tileResource :: Maybe Resource -- | Structurally implies that there cannot be stacked units. , tileUnit :: Maybe Unit , tileImprovement :: Maybe Improvement } deriving (Show, Eq) data Terrain = Desert | Grassland | Hill | Plains | Tundra deriving (Show, Eq, Enum, Bounded) data Resource = -- Strategic resources Horses | Iron | Coal | Aluminum | Oil | Uranium -- Luxury resources | Cotton | Spices | Sugar | Furs | Ivory | Silk -- Bonus resources | Wheat | Cattle deriving (Show, Eq, Enum, Bounded) -- | Definition for the type that contains the information -- needed for a unit and for turns. data Unit = Unit { -- | Kind of unit. (e.g.: Settler) unitKind :: UnitKind -- | # of movements left. If it becomes 0, then the changing -- function should set `unitInLine` to False. , unitMovementLeft :: Int -- | The way to determine if the unit is to be played -- in this turn yet. If there is no movement left, this must be -- set to False. If the user decides to skip the unit, it can be -- set to False even if there is movement left. , unitInLine :: Bool } deriving (Show, Eq) data UnitKind = Settler | Worker | Warrior | Archer deriving (Show, Eq, Enum, Bounded) data Improvement = City | Farm | Pasture | Mine | Well deriving (Show, Eq, Enum, Bounded) instance Arbitrary Terrain where arbitrary = arbitraryBoundedEnum instance Arbitrary Resource where arbitrary = arbitraryBoundedEnum instance Arbitrary Unit where arbitrary = Unit <$> arbitraryBoundedEnum <*> pure 1 <*> pure True instance Arbitrary Improvement where arbitrary = arbitraryBoundedEnum -- | Generates random tile. Should be realistic. -- e.g.: Only a minority of tiles should have resources on them. randomTile :: Gen Tile randomTile = Tile <$> arbitrary <*> frequency [(5, pure Nothing), (2, arbitrary)] <*> pure Nothing <*> pure Nothing -- | Makes a TileMap more realistic. -- Is supposed to output a better map in the future. 
-- e.g.: less desert next to sea, less desert in poles -- less hills/mountain next to sea, more tundra in poles educatedTileMap :: TileMap -> M.LGridMap MapGrid (Gen Tile) educatedTileMap tMap = M.mapWithKey educated tMap where educated :: TileCoord -> Tile -> Gen Tile educated key (Tile t r u i) = do t' <- if length surrounding < 6 -- next to sea then elements [Grassland, Plains] else return t r' <- if r `elem` resources -- same resource in neighbor then elements [r, Nothing] else return r return $ Tile t' r' u i where surrounding = neighbours board key terrains = map (tileTerrain . (tMap !)) surrounding resources = map (tileResource . (tMap !)) surrounding -- | Takes an output of `educatedTileMap`, and puts the entire -- TileMap into the monadic context. Similar to `Control.Monad.sequence`. sequenceMap :: Monad m => M.LGridMap MapGrid (m Tile) -> m TileMap sequenceMap gMap = M.lazyGridMap board `liftM` (mapM snd . M.toList) gMap -- | Generates completely random TileMap. Probably unrealistically. randomTileMap :: IO TileMap randomTileMap = do init <- M.lazyGridMap board `liftM` (generate . infiniteListOf) randomTile (generate . sequenceMap . educatedTileMap) init -------------------------------- -- Tile content change functions -------------------------------- -- | Changes the improvement on a tile coordinate. replaceImprovement :: TileCoord -- ^ Tile coordinate to be changed. -> Maybe Improvement -- ^ The new value. -> TileMap -- ^ The tile map to be changed. -> TileMap -- ^ Resulting tile map with the new improvement. replaceImprovement c i tMap = M.insert c newTile tMap where newTile = (tMap ! c) { tileImprovement = i } replaceUnit :: TileCoord -- ^ Tile coordinate to be changed. -> Maybe Unit -- ^ The new value. -> TileMap -- ^ The tile map to be changed. -> TileMap -- ^ Resulting tile map with the new unit. replaceUnit c u tMap = M.insert c newTile tMap where newTile = (tMap ! c) { tileUnit = u } -- | Moves the unit in a given coordinate to another coordinate. -- Ignores the unit in `to`, overwrites it. Use with caution. -- Always check if `unitExists` in `to`. moveUnit :: TileCoord -- ^ The coordinate the unit is being moved from. -> TileCoord -- ^ The coordinate the unit is being moved to. -> TileMap -- ^ The tile map to be changed. -> TileMap -- ^ Resulting map with the moved unit. moveUnit from to tMap = replaceUnit to unit $ replaceUnit from Nothing tMap where unit = tileUnit (tMap ! from) -- | Moves the unit in a given coordinate to a given direction. -- Ignores the unit in the direction. moveUnitToDirection :: TileCoord -- ^ The coordinate the unit is being moved from. -> HexDirection -- ^ The direction the unit is being moved to. -> TileMap -- ^ The tile map to be changed. -> TileMap -- ^ Resulting map with the moved unit. moveUnitToDirection c dir tMap = case to of Just x -> moveUnit c x tMap Nothing -> tMap where to :: Maybe TileCoord to = neighbour board c dir -- | Gives the coordinate of a neighbor in the given direction. newUnitPosInDirection :: TileCoord -- ^ The coordinate of the main tile. -> HexDirection -- ^ The direction to check the neighbor. -> TileCoord -- ^ The coordinate of the neighbor. newUnitPosInDirection c dir = fromMaybe c to where to = neighbour board c dir -- | Checks if there is a unit in a given coordinate. unitExists :: TileCoord -> TileMap -> Bool unitExists c tMap = (isJust . tileUnit) (tMap ! c) -- | Returns a list of all units and their coordinates. allUnits :: TileMap -> [(TileCoord, Unit)] allUnits = map (second fromJust) . filter (isJust . 
snd) . map (second tileUnit) . M.toList -- | Returns a list of all cities and their coordinates. allCities :: TileMap -> [(TileCoord, Improvement)] allCities = filter ((== City) . snd) . map (second fromJust) . filter (isJust . snd) . map (second tileImprovement) . M.toList -- | Checks if there are any units waiting to be played in this turn. -- Returns if it finds one. If there is none, then it's time to end the turn. findNextUnitInLine :: TileMap -> Maybe TileCoord findNextUnitInLine tMap = case u'' of [] -> Nothing (c,_):_ -> Just c where units = allUnits tMap u' = filter (unitInLine . snd) units u'' = filter ((/= 0) . unitMovementLeft . snd) u' -- checking again -- | Change the current `nextUnitInLine`'s `unitInLine` to False deactivateNextUnitInLine :: TileCoord -> TileMap -> TileMap deactivateNextUnitInLine c tMap = M.insert c newTile tMap where tile = tMap ! c unitish = tileUnit tile :: Maybe Unit newTile = maybe tile (\u -> tile {tileUnit = Just $ u {unitInLine = False}}) unitish
joom/civ
src/Board.hs
mit
8,732
0
14
2,345
1,661
930
731
165
3
-- Copyright (C) 2005 Benedikt Schmidt -- -- This program is free software; you can redistribute it and/or modify -- it under the terms of the GNU General Public License as published by -- the Free Software Foundation; either version 2, or (at your option) -- any later version. -- -- This program is distributed in the hope that it will be useful, -- but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -- GNU General Public License for more details. -- -- You should have received a copy of the GNU General Public License -- along with this program; see the file COPYING. If not, write to -- the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, -- Boston, MA 02110-1301, USA. -- | -- Module : Darcs.Util.CommandLine -- Copyright : 2005 Benedikt Schmidt -- License : GPL -- Maintainer : [email protected] -- Stability : experimental -- Portability : portable -- -- |A parser for commandlines, returns an arg list and expands -- format strings given in a translation table. Additionally -- the commandline can end with "%<" specifying that the command -- expects input on stdin. -- -- Some tests for the parser. -- -- > formatTable = [('s',"<insert subject here>"), -- > ('a',"<insert author here>")] -- > -- > testParser :: (Show a, Eq a) => Parser a -> String -> a -> a -- > testParser p s ok = case parse p "" s of -- > Left e -> error $ "Parser failed with: " ++ (show e) -- > Right res -> if res == ok -- > then res -- > else error $ "Parser failed: got " -- > ++ (show res) ++ ", expected " -- > ++ (show ok) -- > -- > testCases = [("a b",(["a","b"], False)), -- > ("a b %<",(["a","b"], True)), -- > ("a b %< ",(["a","b"], True)), -- > ("\"arg0 contains spaces \\\"quotes\\\"\" b", -- > (["arg0 contains spaces \"quotes\"","b"],False)), -- > ("a %s %<",(["a","<insert subject here>"], True))] -- > -- > runTests = map (uncurry $ testParser (commandline formatTable)) testCases module Darcs.Util.CommandLine ( parseCmd , addUrlencoded ) where import Control.Arrow ( (***) ) import Data.Char ( ord, intToDigit, toUpper ) import Data.List ( find ) import Text.ParserCombinators.Parsec -- | assoc list mapping characters to strings -- eg (c,s) means that %c is replaced by s type FTable = [(Char,String)] commandline :: FTable -> Parser ([String], Bool) commandline ftable = consumeAll $ do l <- sepEndBy1 (arg ftable) (try separator) redir <- formatRedir spaces return (l,redir) arg :: FTable -> Parser String arg ftable = quotedArg ftable <|> unquotedArg ftable unquotedArg :: FTable -> Parser String unquotedArg ftable = try (format ftable) <|> many1 (noneOf " \t\"%") quotedArg :: FTable -> Parser String quotedArg ftable = between quoteChar quoteChar $ quoteContent ftable where quoteChar = char '"' quoteContent :: FTable -> Parser String quoteContent ftable = do s1 <- escape <|> try (format ftable) <|> many1 (noneOf "\"\\%") s2 <- quoteContent ftable return $ s1 ++ s2 <|> return "" formatRedir :: Parser Bool formatRedir = (string "%<" >> return True) <|> return False format :: FTable -> Parser String format ftable = do _ <- char '%' c <- oneOf (map fst ftable) return $ expandFormat ftable c escape :: Parser String escape = do _ <- char '\\' c <- anyChar return [c] consumeAll :: Parser a -> Parser a consumeAll p = do r <- p eof return r separator :: Parser () separator = skipMany1 space expandFormat :: FTable -> Char -> String expandFormat ftable c = case find ((==c) . 
fst) ftable of Just (_,s) -> s Nothing -> error "impossible" -- | parse a commandline returning a list of strings -- (intended to be used as argv) and a bool value which -- specifies if the command expects input on stdin -- format specifiers with a mapping in ftable are accepted -- and replaced by the given strings. E.g. if the ftable is -- [('s',"Some subject")], then "%s" is replaced by "Some subject" parseCmd :: FTable -> String -> Either ParseError ([String],Bool) parseCmd ftable = parse (commandline ftable) "" urlEncode :: String -> String urlEncode = concatMap escapeC where escapeC x = if allowed x then [x] else '%' : intToHex (ord x) intToHex i = map intToDigit [i `div` 16, i `mod` 16] allowed x = x `elem` allowedChars allowedChars = ['a'..'z'] ++ ['A'..'Z'] ++ ['0'..'9'] ++ "!'()*-.~" -- | for every mapping (c,s), add a mapping with uppercase c -- and the urlencoded string s addUrlencoded :: FTable -> FTable addUrlencoded ftable = ftable ++ map (toUpper *** urlEncode) ftable
DavidAlphaFox/darcs
src/Darcs/Util/CommandLine.hs
gpl-2.0
5,153
0
13
1,455
875
474
401
59
2
{- dfsbuild: CD image builder Copyright (c) 2006 John Goerzen Please see COPYRIGHT for more details -} module Bootloader.Grub where import Utils import System.Cmd.Utils import System.Path import System.Posix.Files import System.Posix.Directory import System.Path.Glob import Data.ConfigFile import System.FilePath import Data.List import Actions.ConfigFiles import HSH.Command import Control.Exception; grub_eltorito env = do im "Installing bootloader: Grub raw eltorito (no HD emulation)" grub_generic env return (["-b", "boot/grub/stage2_eltorito", "-no-emul-boot", "-boot-load-size", "1", "-boot-info-table"], (\_ -> return ())) grub_hd env = do im "Installing bootloader: Grub with eltorito HD emulation" grub_generic env safeSystem "cp" ["-r", (targetdir env) ++ "/boot", workbootdir] safeSystem "rm" ["-f", workbootdir ++ "/grub/stage2_eltorito"] bracketCWD (wdir env) $ safeSystem "tar" ["-zcpf", "boot.tar.gz", "boot"] safeSystem "mkbimage" ["-f", workboottar, "-t", "hd", "-s", "ext2", "-d", wdir env] rename "hd.image" $ (targetdir env) ++ "/boot/hd.image" return (["-b", "boot/hd.image", "-hard-disk-boot", "-c", "boot/boot.catalog"], (\_ -> return ())) where workbootdir = (wdir env) ++ "/boot" workboottar = (wdir env) ++ "/boot.tar.gz" grub_generic env = do handle (\_->return()) $ createDirectory (targetdir env ++ "/boot/grub") 0o755 -- since etch (Debian 4.0) grub files are located in /usr/lib instead of /lib grubfiles_pre_etch <- glob "/lib/grub/*/*" grubfiles_since_etch <- glob "/usr/lib/grub/*/*" safeSystem "cp" $ ["-rv"] ++ grubfiles_pre_etch ++ grubfiles_since_etch ++ [targetdir env ++ "/boot/grub/"] menuText <- grubMenu env writeFile (targetdir env ++ "/boot/grub/menu.lst") menuText -- Help text not presently references writeFile (targetdir env ++ "/boot/grub/help.lst") helpText grubMenu env = do newkerns <- glob $ targetdir env ++ "/boot/vmlinu*" kerntext <- mapM kern (reverse . sort $ newkerns) newmemtest <- glob $ targetdir env ++ "/boot/memtest*" let memtext = map mt (reverse . sort $ newmemtest) return $ case get (cp env) (defaultArch env) "grubconfig" of Left _ -> "" Right line -> line ++ "\n" ++ "color cyan/blue blue/light-gray\n" ++ (concat kerntext) ++ (concat memtext) ++ fake "." ++ fake (getidstring env) where fake s = "title " ++ s ++ "\ncolor cyan/blue blue/light-gray\n" kern x = do initrd <- getinitrdname env x rootdev <- getrootdevname env x return $ "title Boot " ++ (snd . splitFileName $ x) ++ "\n" ++ "kernel /boot/" ++ (snd . splitFileName $ x) ++ " root=" ++ rootdev ++ "\n" ++ "initrd /boot/" ++ initrd ++ "\n" ++ "boot\n" mt x = "title Boot " ++ (snd . splitFileName $ x) ++ "\n" ++ "kernel /boot/" ++ (snd . splitFileName $ x) ++ "\nboot\n" helpText = "pager on\n\ \title Basic Booting Info\n\ \cat /opt/dfsruntime/dfs.html/booting.html.txt\n\ \ \n\ \title Selecting CD-ROM device\n\ \cat /opt/dfsruntime/dfs.html/dfsboot-selcd.html.txt\n\ \ \n\ \title About This CD\n\ \cat /opt/dfsruntime/buildinfo\n\ \ \n\ \title .\n\ \color cyan/black blue/light-gray\n\ \ \n\ \title Return to main menu...\n\ \configfile /boot/grub/menu.lst\n"
jgoerzen/dfsbuild
Bootloader/Grub.hs
gpl-2.0
3,793
0
22
1,118
870
439
431
71
2
module Tema_2_Spec (main, spec) where

import Tema_2
import Test.Hspec

main :: IO ()
main = hspec spec

spec :: Spec
spec = do
  describe "doble" $
    it "e1" $ doble 3 `shouldBe` 6
  describe "cuadruple" $
    it "e1" $ cuadruple 3 `shouldBe` 12
  describe "factorial" $
    it "e1" $ factorial 4 `shouldBe` 24
  describe "media" $
    it "e1" $ media [1,5.0,3] `shouldBe` 3
jaalonso/I1M-Cod-Temas
test/Tema_2_Spec.hs
gpl-2.0
407
0
10
119
161
82
79
19
1
{-# LANGUAGE OverloadedStrings #-}

module FuncTorrent.PeerThreadMain
  ( peerThreadMain
  ) where

import Prelude hiding (readFile)

import Control.Concurrent
import Control.Monad hiding (forM, forM_, mapM, mapM_, msum, sequence, sequence_)
import Data.IORef

import FuncTorrent.PeerThreadData

-- Sequence of events in the life of a peer thread
-- 1. Initiate hand-shake and set bit field?
-- 2. Send peer our status (choked/interested)
-- 3. Wait for peer status.
-- 4. If the peer is interested, then do further communication.
--    Else show that we are interested and wait.
-- 5. Send the 'have' message.
-- 6. Recieve the 'have' message.
-- 7. Report the peer status to the main thread.
-- 8. If needed initiate request or seed.

peerThreadMain :: PeerThread -> IO ()
peerThreadMain pt = do
  toDoAction <- getAction
  case toDoAction of
    InitPeerConnection -> do
      response <- doHandShake pt
      setStatus (if not response then PeerCommError else InitDone)

    GetPeerStatus ->
      setStatus PeerReady

    GetPieces _ -> do
      startDownload pt
      setStatus Downloading

    Seed ->
      setStatus Seeding

    StayIdle ->
      setStatus PeerReady

    Stop -> stopDownload pt

  unless (toDoAction == Stop) $ peerThreadMain pt

 where setStatus = putMVar (peerTStatus pt)
       getAction = takeMVar (peerTAction pt)

-- Fork a thread to get pieces from the peer.
-- The incoming requests from this peer will be handled
-- By IncomingConnThread.
--
startDownload :: PeerThread -> IO ()
startDownload pt = do
  tid <- forkIO $ downloadData pt
  writeIORef (downloadThread pt) (Just tid)

stopDownload :: PeerThread -> IO ()
stopDownload pt = putStrLn $ "Stopping peer-thread " ++ show (peer pt)

-- This will do the actual data communication with peer
downloadData :: PeerThread -> IO ()
downloadData _ = undefined

-- Hand-Shake details
-- 1. Verify the Info Hash recieved from the peer.
-- 2. Client connections start out as "choked" and "not interested". In other words:
--
--    am_choking = 1
--    am_interested = 0
--    peer_choking = 1
--    peer_interested = 0
-- 3. Send bit-field message

doHandShake :: PeerThread -> IO Bool
doHandShake pt = do
  putStrLn $ "HandShake with " ++ show (peer pt)
  return True
  -- timeout (10*1000*1000) handShake
harshavardhana/functorrent
src/FuncTorrent/PeerThreadMain.hs
gpl-3.0
2,294
0
15
476
432
226
206
40
7
module PutJSON where

import Data.List (intercalate)

import SimpleJSON

renderJValue :: JValue -> String
renderJValue (JString s)   = show s
renderJValue (JNumber n)   = show n
renderJValue (JBool True)  = "true"
renderJValue (JBool False) = "false"
renderJValue JNull         = "null"

renderJValue (JObject o) = "{" ++ pairs o ++ "}"
  where pairs [] = ""
        pairs ps = intercalate ", " (map renderPair ps)
        renderPair (k,v) = show k ++ ": " ++ renderJValue v

renderJValue (JArray a) = "[" ++ values a ++ "]"
  where values [] = ""
        values vs = intercalate ", " (map renderJValue vs)

putJValue :: JValue -> IO ()
putJValue v = putStrLn (renderJValue v)
jtwool/haskell-sandbox
PutJSON.hs
gpl-3.0
713
1
10
192
266
133
133
-1
-1
module Helpers where

import Data.Monoid ((<>))

hello :: String -> String
hello s = "Hello .jhkjhf.. " <> s
dominicusin/gasta
src/Utils/Helpers.hs
gpl-3.0
107
0
5
19
37
22
15
4
1
module Test.Examples where -- generic programming subroutine example generic_ex1 :: Intly Agent generic_ex1 = do u1 <- primary user p <- current program ctxs <- current context checkIf (hasIntentFor ctxs) p u1 $ (\i -> (case i of Notify msg -> do dAll <- every $ deviceWhere (can $ notify u1) notifyWith d1 msg u1 Inform msg -> do d1 <- some $ deviceWhere (can $ inform u1) informWith d1 msg u1 Suggest act -> do d1 <- some $ deviceWhere (can $ inform u1) d2 <- some $ deviceWhere (can $ performActionWith act u1) informWith d1 ("Please use " ++ (show d2) ++ " to perform " ++ (show act)) u1 promise d2 act u1 Command act -> do d1 <- some $ deviceWhere (can $ notify u1) d2 <- some $ deviceWhere (can $ performActionWith act u1) notifyWith d1 ("Please use " ++ (show d2) ++ " to perform action, now") u1 waitFor d2 act u1 Query q -> do d1 <- some $ deviceWhere (can $ ask u1) response <- (askWith d1 q u1) improveContext u1 response Focus b -> do d1 <- some $ deviceWhere (can $ makeContact b) improveContext (Relate $ Personal d1 b) dElse <- every $ deviceWhere (cant $ makeContact b) improveContext $ remove (Relate $ dElse b) Constrain phy do d1 <- some $ deviceWhere $ (withCertainty High) (affectedBy phy) d2 <- some $ deviceWhere (can $ inform u1) informWith d2 ((show d1) ++ " is impared by constraint: " ++ (show phy)) u1 un $ prioritize d1 Aware instinct -> case instinct of Busy task -> do dAll <- every $ deviceWhere (can $ helpWith task) prioritize dAll _ -> learn u1 instinct Relate social -> dAll <- every $ deviceWhere (can $ disrupt social) un $ prioritize dAll _ -> learn nothing ) ) return program
mpahrens/Intentionally
Test/Example.hs
gpl-3.0
2,407
15
14
1,097
724
339
385
-1
-1
module Language.SMTLib2.Internals.Expression where import Language.SMTLib2.Internals.Type hiding (Field) import qualified Language.SMTLib2.Internals.Type as Type import Language.SMTLib2.Internals.Type.Nat import Language.SMTLib2.Internals.Type.List (List(..),CList(..)) import qualified Language.SMTLib2.Internals.Type.List as List import Data.Typeable import Text.Show import Data.GADT.Compare import Data.GADT.Show import Data.Functor.Identity import Data.Ratio import qualified GHC.TypeLits as TL type family AllEq (tp :: Type) (n :: Nat) :: [Type] where AllEq tp Z = '[] AllEq tp (S n) = tp ': (AllEq tp n) allEqToList :: Natural n -> List a (AllEq tp n) -> [a tp] allEqToList Zero Nil = [] allEqToList (Succ n) (x ::: xs) = x:allEqToList n xs allEqFromList :: [a tp] -> (forall n. Natural n -> List a (AllEq tp n) -> r) -> r allEqFromList [] f = f Zero Nil allEqFromList (x:xs) f = allEqFromList xs (\n arg -> f (Succ n) (x ::: arg)) allEqOf :: Repr tp -> Natural n -> List Repr (AllEq tp n) allEqOf tp Zero = Nil allEqOf tp (Succ n) = tp ::: allEqOf tp n mapAllEq :: Monad m => (e1 tp -> m (e2 tp)) -> Natural n -> List e1 (AllEq tp n) -> m (List e2 (AllEq tp n)) mapAllEq f Zero Nil = return Nil mapAllEq f (Succ n) (x ::: xs) = do x' <- f x xs' <- mapAllEq f n xs return (x' ::: xs') data Function (fun :: ([Type],Type) -> *) (sig :: ([Type],Type)) where Fun :: !(fun '(arg,res)) -> Function fun '(arg,res) Eq :: !(Repr tp) -> !(Natural n) -> Function fun '(AllEq tp n,BoolType) Distinct :: !(Repr tp) -> !(Natural n) -> Function fun '(AllEq tp n,BoolType) Map :: !(List Repr idx) -> !(Function fun '(arg,res)) -> Function fun '(Lifted arg idx,ArrayType idx res) Ord :: !(NumRepr tp) -> !OrdOp -> Function fun '([tp,tp],BoolType) Arith :: !(NumRepr tp) -> !ArithOp -> !(Natural n) -> Function fun '(AllEq tp n,tp) ArithIntBin :: !ArithOpInt -> Function fun '([IntType,IntType],IntType) Divide :: Function fun '([RealType,RealType],RealType) Abs :: !(NumRepr tp) -> Function fun '( '[tp],tp) Not :: Function fun '( '[BoolType],BoolType) Logic :: !LogicOp -> !(Natural n) -> Function fun '(AllEq BoolType n,BoolType) ToReal :: Function fun '( '[IntType],RealType) ToInt :: Function fun '( '[RealType],IntType) ITE :: !(Repr a) -> Function fun '([BoolType,a,a],a) BVComp :: !BVCompOp -> !(BitWidth a) -> Function fun '([BitVecType a,BitVecType a],BoolType) BVBin :: !BVBinOp -> !(BitWidth a) -> Function fun '([BitVecType a,BitVecType a],BitVecType a) BVUn :: !BVUnOp -> !(BitWidth a) -> Function fun '( '[BitVecType a],BitVecType a) Select :: !(List Repr idx) -> !(Repr val) -> Function fun '(ArrayType idx val ': idx,val) Store :: !(List Repr idx) -> !(Repr val) -> Function fun '(ArrayType idx val ': val ': idx,ArrayType idx val) ConstArray :: !(List Repr idx) -> !(Repr val) -> Function fun '( '[val],ArrayType idx val) Concat :: !(BitWidth n1) -> !(BitWidth n2) -> Function fun '([BitVecType n1,BitVecType n2],BitVecType (n1 TL.+ n2)) Extract :: !(BitWidth bw) -> !(BitWidth start) -> BitWidth len -> Function fun '( '[BitVecType bw],BitVecType len) Constructor :: (IsDatatype dt,List.Length par ~ Parameters dt) => Datatype dt -> !(List Repr par) -> !(Constr dt sig) -> Function fun '(Instantiated sig par,DataType dt par) Test :: (IsDatatype dt,List.Length par ~ Parameters dt) => Datatype dt -> !(List Repr par) -> !(Constr dt sig) -> Function fun '( '[DataType dt par],BoolType) Field :: (IsDatatype dt,List.Length par ~ Parameters dt) => Datatype dt -> !(List Repr par) -> !(Type.Field dt t) -> Function fun '( '[DataType dt par],CType t 
par) Divisible :: !Integer -> Function fun '( '[IntType],BoolType) PseudoBoolean :: !PBOp -> !(CList Integer n) -> !Integer -> Function fun '(AllEq BoolType n,BoolType) data AnyFunction (fun :: ([Type],Type) -> *) where AnyFunction :: Function fun '(arg,t) -> AnyFunction fun data OrdOp = Ge | Gt | Le | Lt deriving (Eq,Ord,Show) data ArithOp = Plus | Mult | Minus deriving (Eq,Ord,Show) data ArithOpInt = Div | Mod | Rem | Exp deriving (Eq,Ord,Show) data LogicOp = And | Or | XOr | Implies | AtLeast !Integer | AtMost !Integer deriving (Eq,Ord,Show) data BVCompOp = BVULE | BVULT | BVUGE | BVUGT | BVSLE | BVSLT | BVSGE | BVSGT deriving (Eq,Ord,Show) data BVBinOp = BVAdd | BVSub | BVMul | BVURem | BVSRem | BVUDiv | BVSDiv | BVSHL | BVLSHR | BVASHR | BVXor | BVAnd | BVOr deriving (Eq,Ord,Show) data BVUnOp = BVNot | BVNeg deriving (Eq,Ord,Show) data PBOp = PBEq | PBLe | PBGe deriving (Eq,Ord,Show) data LetBinding (v :: Type -> *) (e :: Type -> *) = forall (t::Type). LetBinding { letVar :: v t , letExpr :: e t } data Quantifier = Forall | Exists deriving (Typeable,Eq,Ord,Show) data Expression (v :: Type -> *) (qv :: Type -> *) (fun :: ([Type],Type) -> *) (fv :: Type -> *) (lv :: Type -> *) (e :: Type -> *) (res :: Type) where #if __GLASGOW_HASKELL__>=712 -- | Free variable. #endif Var :: !(v res) -> Expression v qv fun fv lv e res #if __GLASGOW_HASKELL__>=712 -- | Quantified variable, i.e. a variable that's bound by a forall/exists quantor. #endif QVar :: !(qv res) -> Expression v qv fun fv lv e res #if __GLASGOW_HASKELL__>=712 -- | A function argument variable. Only used in function bodies. #endif FVar :: !(fv res) -> Expression v qv fun fv lv e res #if __GLASGOW_HASKELL__>=712 -- | A variable bound by a let binding. #endif LVar :: !(lv res) -> Expression v qv fun fv lv e res #if __GLASGOW_HASKELL__>=712 -- | Function application #endif App :: !(Function fun '(arg,res)) -> List e arg -> Expression v qv fun fv lv e res #if __GLASGOW_HASKELL__>=712 -- | Constant #endif Const :: !(Value a) -> Expression v qv fun fv lv e a #if __GLASGOW_HASKELL__>=712 -- | AsArray converts a function into an array by using the function -- arguments as array indices and the return type as array element. #endif AsArray :: !(Function fun '(arg,res)) -> Expression v qv fun fv lv e (ArrayType arg res) #if __GLASGOW_HASKELL__>=712 -- | Bind variables using a forall or exists quantor. #endif Quantification :: !Quantifier -> List qv arg -> e BoolType -> Expression v qv fun fv lv e BoolType #if __GLASGOW_HASKELL__>=712 -- | Bind variables to expressions. 
#endif Let :: [LetBinding lv e] -> e res -> Expression v qv fun fv lv e res instance GEq fun => Eq (Function fun sig) where (==) = defaultEq class SMTOrd (t :: Type) where lt :: Function fun '( '[t,t],BoolType) le :: Function fun '( '[t,t],BoolType) gt :: Function fun '( '[t,t],BoolType) ge :: Function fun '( '[t,t],BoolType) instance SMTOrd IntType where lt = Ord NumInt Lt le = Ord NumInt Le gt = Ord NumInt Gt ge = Ord NumInt Ge instance SMTOrd RealType where lt = Ord NumReal Lt le = Ord NumReal Le gt = Ord NumReal Gt ge = Ord NumReal Ge class SMTArith t where arithFromInteger :: Integer -> Value t arith :: ArithOp -> Natural n -> Function fun '(AllEq t n,t) plus :: Natural n -> Function fun '(AllEq t n,t) minus :: Natural n -> Function fun '(AllEq t n,t) mult :: Natural n -> Function fun '(AllEq t n,t) abs' :: Function fun '( '[t],t) instance SMTArith IntType where arithFromInteger n = IntValue n arith = Arith NumInt plus = Arith NumInt Plus minus = Arith NumInt Minus mult = Arith NumInt Mult abs' = Abs NumInt instance SMTArith RealType where arithFromInteger n = RealValue (fromInteger n) arith = Arith NumReal plus = Arith NumReal Plus minus = Arith NumReal Minus mult = Arith NumReal Mult abs' = Abs NumReal functionType :: Monad m => (forall arg t. fun '(arg,t) -> m (List Repr arg,Repr t)) -> Function fun '(arg,res) -> m (List Repr arg,Repr res) functionType f (Fun fun) = f fun functionType _ (Eq tp n) = return (allEqOf tp n,BoolRepr) functionType _ (Distinct tp n) = return (allEqOf tp n,BoolRepr) functionType f (Map idx fun) = do (arg,res) <- functionType f fun return (liftType arg idx,ArrayRepr idx res) functionType _ (Ord tp _) = return (numRepr tp ::: numRepr tp ::: Nil,BoolRepr) functionType _ (Arith tp _ n) = return (allEqOf (numRepr tp) n,numRepr tp) functionType _ (ArithIntBin _) = return (IntRepr ::: IntRepr ::: Nil,IntRepr) functionType _ Divide = return (RealRepr ::: RealRepr ::: Nil,RealRepr) functionType _ (Abs tp) = return (numRepr tp ::: Nil,numRepr tp) functionType _ Not = return (BoolRepr ::: Nil,BoolRepr) functionType _ (Logic op n) = return (allEqOf BoolRepr n,BoolRepr) functionType _ ToReal = return (IntRepr ::: Nil,RealRepr) functionType _ ToInt = return (RealRepr ::: Nil,IntRepr) functionType _ (ITE tp) = return (BoolRepr ::: tp ::: tp ::: Nil,tp) functionType _ (BVComp _ n) = return (BitVecRepr n ::: BitVecRepr n ::: Nil,BoolRepr) functionType _ (BVBin _ n) = return (BitVecRepr n ::: BitVecRepr n ::: Nil,BitVecRepr n) functionType _ (BVUn _ n) = return (BitVecRepr n ::: Nil,BitVecRepr n) functionType _ (Select idx el) = return (ArrayRepr idx el ::: idx,el) functionType _ (Store idx el) = return (ArrayRepr idx el ::: el ::: idx,ArrayRepr idx el) functionType _ (ConstArray idx el) = return (el ::: Nil,ArrayRepr idx el) functionType _ (Concat bw1 bw2) = return (BitVecRepr bw1 ::: BitVecRepr bw2 ::: Nil, BitVecRepr (bwAdd bw1 bw2)) functionType _ (Extract bw start len) = return (BitVecRepr bw ::: Nil,BitVecRepr len) functionType _ (Constructor dt par con) = case instantiate (constrSig con) par of (res,Refl) -> return (res,DataRepr dt par) functionType _ (Test dt par con) = return (DataRepr dt par ::: Nil,BoolRepr) functionType _ (Field dt par field) = return (DataRepr dt par ::: Nil,ctype (fieldType field) par) functionType _ (Divisible _) = return (IntRepr ::: Nil,BoolRepr) functionType _ (PseudoBoolean _ coeff _) = return (allEqOf bool (List.lengthC coeff),bool) expressionType :: (Monad m,Functor m) => (forall t. v t -> m (Repr t)) -> (forall t. 
qv t -> m (Repr t)) -> (forall arg t. fun '(arg,t) -> m (List Repr arg,Repr t)) -> (forall t. fv t -> m (Repr t)) -> (forall t. lv t -> m (Repr t)) -> (forall t. e t -> m (Repr t)) -> Expression v qv fun fv lv e res -> m (Repr res) expressionType f _ _ _ _ _ (Var v) = f v expressionType _ f _ _ _ _ (QVar v) = f v expressionType _ _ _ f _ _ (FVar v) = f v expressionType _ _ _ _ f _ (LVar v) = f v expressionType _ _ f _ _ _ (App fun arg) = fmap snd $ functionType f fun expressionType _ _ _ _ _ _ (Const v) = return $ valueType v expressionType _ _ f _ _ _ (AsArray fun) = do (arg,res) <- functionType f fun return $ ArrayRepr arg res expressionType _ _ _ _ _ _ (Quantification _ _ _) = return BoolRepr expressionType _ _ _ _ _ f (Let _ body) = f body mapExpr :: (Functor m,Applicative m) => (forall t. v1 t -> m (v2 t)) -- ^ How to translate variables -> (forall t. qv1 t -> m (qv2 t)) -- ^ How to translate quantified variables -> (forall arg t. fun1 '(arg,t) -> m (fun2 '(arg,t))) -- ^ How to translate functions -> (forall t. fv1 t -> m (fv2 t)) -- ^ How to translate function variables -> (forall t. lv1 t -> m (lv2 t)) -- ^ How to translate let variables -> (forall t. e1 t -> m (e2 t)) -- ^ How to translate sub-expressions -> Expression v1 qv1 fun1 fv1 lv1 e1 r -- ^ The expression to translate -> m (Expression v2 qv2 fun2 fv2 lv2 e2 r) mapExpr f _ _ _ _ _ (Var v) = fmap Var (f v) mapExpr _ f _ _ _ _ (QVar v) = fmap QVar (f v) mapExpr _ _ _ f _ _ (FVar v) = fmap FVar (f v) mapExpr _ _ _ _ f _ (LVar v) = fmap LVar (f v) mapExpr _ _ f _ _ i (App fun args) = App <$> mapFunction f fun <*> List.mapM i args mapExpr _ _ _ _ _ _ (Const val) = pure (Const val) mapExpr _ _ f _ _ _ (AsArray fun) = fmap AsArray (mapFunction f fun) mapExpr _ f _ _ _ g (Quantification q args body) = Quantification q <$> List.mapM f args <*> g body mapExpr _ _ _ _ f g (Let args body) = Let <$> traverse (\(LetBinding v e) -> LetBinding <$> f v <*> g e ) args <*> g body mapFunction :: (Functor m,Applicative m) => (forall arg t. 
fun1 '(arg,t) -> m (fun2 '(arg,t))) -> Function fun1 '(arg,res) -> m (Function fun2 '(arg,res)) mapFunction f (Fun x) = fmap Fun (f x) mapFunction _ (Eq tp n) = pure (Eq tp n) mapFunction _ (Distinct tp n) = pure (Distinct tp n) mapFunction f (Map idx x) = fmap (Map idx) (mapFunction f x) mapFunction _ (Ord tp op) = pure (Ord tp op) mapFunction _ (Arith tp op n) = pure (Arith tp op n) mapFunction _ (ArithIntBin op) = pure (ArithIntBin op) mapFunction _ Divide = pure Divide mapFunction _ (Abs tp) = pure (Abs tp) mapFunction _ Not = pure Not mapFunction _ (Logic op n) = pure (Logic op n) mapFunction _ ToReal = pure ToReal mapFunction _ ToInt = pure ToInt mapFunction _ (ITE tp) = pure (ITE tp) mapFunction _ (BVComp op bw) = pure (BVComp op bw) mapFunction _ (BVBin op bw) = pure (BVBin op bw) mapFunction _ (BVUn op bw) = pure (BVUn op bw) mapFunction _ (Select idx el) = pure (Select idx el) mapFunction _ (Store idx el) = pure (Store idx el) mapFunction _ (ConstArray idx el) = pure (ConstArray idx el) mapFunction _ (Concat bw1 bw2) = pure (Concat bw1 bw2) mapFunction _ (Extract bw start len) = pure (Extract bw start len) mapFunction _ (Constructor dt par con) = pure (Constructor dt par con) mapFunction _ (Test dt par con) = pure (Test dt par con) mapFunction _ (Field dt par x) = pure (Field dt par x) mapFunction _ (Divisible x) = pure (Divisible x) mapFunction _ (PseudoBoolean op coeff res) = pure (PseudoBoolean op coeff res) instance (GShow v,GShow qv,GShow fun,GShow fv,GShow lv,GShow e) => Show (Expression v qv fun fv lv e r) where showsPrec p (Var v) = showParen (p>10) $ showString "Var " . gshowsPrec 11 v showsPrec p (QVar v) = showParen (p>10) $ showString "QVar " . gshowsPrec 11 v showsPrec p (FVar v) = showParen (p>10) $ showString "FVar " . gshowsPrec 11 v showsPrec p (LVar v) = showParen (p>10) $ showString "LVar " . gshowsPrec 11 v showsPrec p (App fun args) = showParen (p>10) $ showString "App " . showsPrec 11 fun . showChar ' ' . showsPrec 11 args showsPrec p (Const val) = showsPrec p val showsPrec p (AsArray fun) = showParen (p>10) $ showString "AsArray " . showsPrec 11 fun showsPrec p (Quantification q args body) = showParen (p>10) $ showsPrec 11 q . showChar ' ' . showsPrec 11 args . showChar ' ' . gshowsPrec 11 body showsPrec p (Let args body) = showParen (p>10) $ showString "Let " . showListWith (\(LetBinding v e) -> (gshowsPrec 10 v) . showChar '=' . (gshowsPrec 10 e) ) args . showChar ' ' . gshowsPrec 10 body instance (GShow v,GShow qv,GShow fun,GShow fv,GShow lv,GShow e) => GShow (Expression v qv fun fv lv e) where gshowsPrec = showsPrec instance (GShow fun) => Show (Function fun sig) where showsPrec p (Fun x) = gshowsPrec p x showsPrec _ (Eq _ _) = showString "Eq" showsPrec _ (Distinct _ _) = showString "Distinct" showsPrec p (Map _ x) = showParen (p>10) $ showString "Map " . showsPrec 11 x showsPrec p (Ord tp op) = showParen (p>10) $ showString "Ord " . showsPrec 11 tp . showChar ' ' . showsPrec 11 op showsPrec p (Arith tp op _) = showParen (p>10) $ showString "Arith " . showsPrec 11 tp . showChar ' ' . showsPrec 11 op showsPrec p (ArithIntBin op) = showParen (p>10) $ showString "ArithIntBin " . showsPrec 11 op showsPrec p Divide = showString "Divide" showsPrec p (Abs tp) = showParen (p>10) $ showString "Abs " . showsPrec 11 tp showsPrec _ Not = showString "Not" showsPrec p (Logic op _) = showParen (p>10) $ showString "Logic " . 
showsPrec 11 op showsPrec _ ToReal = showString "ToReal" showsPrec _ ToInt = showString "ToInt" showsPrec _ (ITE _) = showString "ITE" showsPrec p (BVComp op _) = showParen (p>10) $ showString "BVComp " . showsPrec 11 op showsPrec p (BVBin op _) = showParen (p>10) $ showString "BVBin " . showsPrec 11 op showsPrec p (BVUn op _) = showParen (p>10) $ showString "BVUn " . showsPrec 11 op showsPrec _ (Select _ _) = showString "Select" showsPrec _ (Store _ _) = showString "Store" showsPrec _ (ConstArray _ _) = showString "ConstArray" showsPrec _ (Concat _ _) = showString "Concat" showsPrec p (Extract bw start len) = showParen (p>10) $ showString "Extract " . showsPrec 11 bw . showChar ' ' . showsPrec 11 start . showChar ' ' . showsPrec 11 len showsPrec p (Constructor _ _ con) = showParen (p>10) $ showString "Constructor " . showString (constrName con) showsPrec p (Test _ _ con) = showParen (p>10) $ showString "Test " . showString (constrName con) showsPrec p (Field _ _ x) = showParen (p>10) $ showString "Field " . showString (fieldName x) showsPrec p (Divisible x) = showParen (p>10) $ showString "Divisible " . showsPrec 11 x showsPrec p (PseudoBoolean op coeff res) = showParen (p>10) $ showString "PseudoBoolean " . showsPrec 11 op . showChar ' ' . showsPrec 11 (List.toListC coeff) . showChar ' ' . showsPrec 11 res data RenderMode = SMTRendering deriving (Eq,Ord,Show) renderExprDefault :: (GetType qv,GShow v,GShow qv,GShow fun,GShow fv,GShow lv,GShow e) => RenderMode -> Expression v qv fun fv lv e tp -> ShowS renderExprDefault m = renderExpr m (gshowsPrec 11) (gshowsPrec 11) (gshowsPrec 11) (gshowsPrec 11) (gshowsPrec 11) (gshowsPrec 11) renderExpr :: (GetType qv) => RenderMode -> (forall tp. v tp -> ShowS) -> (forall tp. qv tp -> ShowS) -> (forall arg res. fun '(arg,res) -> ShowS) -> (forall tp. fv tp -> ShowS) -> (forall tp. lv tp -> ShowS) -> (forall tp. e tp -> ShowS) -> Expression v qv fun fv lv e tp -> ShowS renderExpr _ f _ _ _ _ _ (Var x) = f x renderExpr _ _ f _ _ _ _ (QVar x) = f x renderExpr _ _ _ _ f _ _ (FVar x) = f x renderExpr _ _ _ _ _ f _ (LVar x) = f x renderExpr SMTRendering _ _ f _ _ i (App fun args) = showChar '(' . renderFunction SMTRendering f fun . renderArgs i args . showChar ')' where renderArgs :: (forall tp. e tp -> ShowS) -> List e tps -> ShowS renderArgs f Nil = id renderArgs f (x ::: xs) = showChar ' ' . f x . renderArgs f xs renderExpr m _ _ _ _ _ _ (Const val) = renderValue m val renderExpr SMTRendering _ _ f _ _ _ (AsArray fun) = showString "(_ as-array " . renderFunction SMTRendering f fun . showChar ')' renderExpr SMTRendering _ f _ _ _ g (Quantification q args body) = showChar '(' . showString (case q of Forall -> "forall" Exists -> "exists") . showString " (" . renderArgs f args . showString ") " . g body . showChar ')' where renderArgs :: GetType qv => (forall tp. qv tp -> ShowS) -> List qv tps -> ShowS renderArgs _ Nil = id renderArgs f (x ::: xs) = showChar '(' . f x . showChar ' ' . renderType SMTRendering (getType x) . showChar ')' . (case xs of Nil -> id _ -> showChar ' ' . renderArgs f xs) renderExpr SMTRendering _ _ _ _ f g (Let args body) = showString "(let (" . renderArgs f g args . showString ") " . g body . showChar ')' where renderArgs :: (forall tp. lv tp -> ShowS) -> (forall tp. e tp -> ShowS) -> [LetBinding lv e] -> ShowS renderArgs _ _ [] = id renderArgs f g (LetBinding v e:xs) = showChar '(' . f v . showChar ' ' . g e . showChar ')' . (case xs of [] -> id _ -> showChar ' ' . 
renderArgs f g xs) renderValue :: RenderMode -> Value tp -> ShowS renderValue SMTRendering (BoolValue v) = if v then showString "true" else showString "false" renderValue SMTRendering (IntValue v) = if v>=0 then showsPrec 0 v else showString "(- " . showsPrec 0 (negate v) . showChar ')' renderValue SMTRendering (RealValue v) = showString "(/ " . n . showChar ' ' . d . showChar ')' where n = if numerator v >= 0 then showsPrec 0 (numerator v) else showString "(- " . showsPrec 0 (negate $ numerator v) . showChar ')' d = showsPrec 0 (denominator v) renderValue SMTRendering (BitVecValue n bw) = showString "(_ bv" . showsPrec 0 n . showChar ' ' . showsPrec 0 (bwSize bw) . showChar ')' renderValue SMTRendering (ConstrValue par con Nil) = showString (constrName con) renderValue SMTRendering (ConstrValue par con xs) = showChar '(' . showString (constrName con) . renderValues xs . showChar ')' where renderValues :: List Value arg -> ShowS renderValues Nil = id renderValues (x ::: xs) = showChar ' ' . renderValue SMTRendering x . renderValues xs renderFunction :: RenderMode -> (forall arg res. fun '(arg,res) -> ShowS) -> Function fun '(arg,res) -> ShowS renderFunction _ f (Fun x) = f x renderFunction SMTRendering _ (Eq _ _) = showChar '=' renderFunction SMTRendering _ (Distinct _ _) = showString "distinct" renderFunction SMTRendering f (Map _ fun) = showString "(map " . renderFunction SMTRendering f fun . showChar ')' renderFunction SMTRendering _ (Ord _ Ge) = showString ">=" renderFunction SMTRendering _ (Ord _ Gt) = showChar '>' renderFunction SMTRendering _ (Ord _ Le) = showString "<=" renderFunction SMTRendering _ (Ord _ Lt) = showString "<" renderFunction SMTRendering _ (Arith _ Plus _) = showChar '+' renderFunction SMTRendering _ (Arith _ Mult _) = showChar '*' renderFunction SMTRendering _ (Arith _ Minus _) = showChar '-' renderFunction SMTRendering _ (ArithIntBin Div) = showString "div" renderFunction SMTRendering _ (ArithIntBin Mod) = showString "mod" renderFunction SMTRendering _ (ArithIntBin Rem) = showString "rem" renderFunction SMTRendering _ (ArithIntBin Exp) = showString "^" renderFunction SMTRendering _ Divide = showChar '/' renderFunction SMTRendering _ (Abs _) = showString "abs" renderFunction SMTRendering _ Not = showString "not" renderFunction SMTRendering _ (Logic And _) = showString "and" renderFunction SMTRendering _ (Logic Or _) = showString "or" renderFunction SMTRendering _ (Logic XOr _) = showString "xor" renderFunction SMTRendering _ (Logic Implies _) = showString "=>" renderFunction SMTRendering _ (Logic (AtLeast n) _) = showString "(_ at-least " . showsPrec 11 n . showChar ')' renderFunction SMTRendering _ (Logic (AtMost n) _) = showString "(_ at-most " . showsPrec 11 n . 
showChar ')' renderFunction SMTRendering _ ToReal = showString "to_real" renderFunction SMTRendering _ ToInt = showString "to_int" renderFunction SMTRendering _ (ITE _) = showString "ite" renderFunction SMTRendering _ (BVComp op _) = showString $ case op of BVULE -> "bvule" BVULT -> "bvult" BVUGE -> "bvuge" BVUGT -> "bvugt" BVSLE -> "bvsle" BVSLT -> "bvslt" BVSGE -> "bvsge" BVSGT -> "bvsgt" renderFunction SMTRendering _ (BVBin op _) = showString $ case op of BVAdd -> "bvadd" BVSub -> "bvsub" BVMul -> "bvmul" BVURem -> "bvurem" BVSRem -> "bvsrem" BVUDiv -> "bvudiv" BVSDiv -> "bvsdiv" BVSHL -> "bvshl" BVLSHR -> "bvlshr" BVASHR -> "bvashr" BVXor -> "bvxor" BVAnd -> "bvand" BVOr -> "bvor" renderFunction SMTRendering _ (BVUn op _) = showString $ case op of BVNot -> "bvnot" BVNeg -> "bvneg" renderFunction SMTRendering _ (Select _ _) = showString "select" renderFunction SMTRendering _ (Store _ _) = showString "store" renderFunction SMTRendering _ (ConstArray idx el) = showString "(as const " . renderType SMTRendering (ArrayRepr idx el) . showChar ')' renderFunction SMTRendering _ (Concat _ _) = showString "concat" renderFunction SMTRendering _ (Extract _ start len) = showString "(_ extract " . showString (show $ start'+len'-1) . showChar ' ' . showString (show start') . showChar ')' where start' = bwSize start len' = bwSize len renderFunction SMTRendering _ (Constructor dt par con) | determines dt con = showString (constrName con) | otherwise = showString "(as " . showString (constrName con) . renderType SMTRendering (DataRepr dt par) . showChar ')' renderFunction SMTRendering _ (Test _ _ con) = showString "is-" . showString (constrName con) renderFunction SMTRendering _ (Field _ _ field) = showString (fieldName field) renderFunction SMTRendering _ (Divisible n) = showString "(_ divisible " . showsPrec 10 n . showChar ')' renderFunction SMTRendering _ (PseudoBoolean op coeff res) = showString (case op of PBEq -> "(pbeq " PBGe -> "(pbge " PBLe -> "(pble ") . showsPrec 11 res . foldr (.) id (fmap (\n -> showsPrec 11 n . showChar ' ') (List.toListC coeff)) . showChar ')' renderType :: RenderMode -> Repr tp -> ShowS renderType SMTRendering BoolRepr = showString "Bool" renderType SMTRendering IntRepr = showString "Int" renderType SMTRendering RealRepr = showString "Real" renderType SMTRendering (BitVecRepr bw) = showString "(BitVec " . showString (show $ bwSize bw) . showChar ')' renderType SMTRendering (ArrayRepr idx el) = showString "(Array (" . renderTypes idx . showString ") " . renderType SMTRendering el . showChar ')' renderType _ (DataRepr dt Nil) = showString (datatypeName dt) renderType SMTRendering (DataRepr dt par) = showChar '(' . showString (datatypeName dt) . showChar ' ' . renderTypes par . showChar ')' renderTypes :: List Repr tps -> ShowS renderTypes Nil = id renderTypes (tp ::: Nil) = renderType SMTRendering tp renderTypes (tp ::: tps) = renderType SMTRendering tp . showChar ' ' . 
renderTypes tps instance GShow fun => GShow (Function fun) where gshowsPrec = showsPrec instance (GEq v,GEq e) => Eq (LetBinding v e) where (==) (LetBinding v1 e1) (LetBinding v2 e2) = case geq v1 v2 of Just Refl -> case geq e1 e2 of Just Refl -> True Nothing -> False Nothing -> False instance (GCompare v,GCompare e) => Ord (LetBinding v e) where compare (LetBinding v1 e1) (LetBinding v2 e2) = case gcompare v1 v2 of GEQ -> case gcompare e1 e2 of GEQ -> EQ GLT -> LT GGT -> GT GLT -> LT GGT -> GT instance (GEq v,GEq qv,GEq fun,GEq fv,GEq lv,GEq e) => GEq (Expression v qv fun fv lv e) where geq (Var v1) (Var v2) = geq v1 v2 geq (QVar v1) (QVar v2) = geq v1 v2 geq (FVar v1) (FVar v2) = geq v1 v2 geq (LVar v1) (LVar v2) = geq v1 v2 geq (App f1 arg1) (App f2 arg2) = do Refl <- geq f1 f2 Refl <- geq arg1 arg2 return Refl geq (Const x) (Const y) = geq x y geq (AsArray f1) (AsArray f2) = do Refl <- geq f1 f2 return Refl geq (Quantification q1 arg1 body1) (Quantification q2 arg2 body2) | q1==q2 = do Refl <- geq arg1 arg2 geq body1 body2 | otherwise = Nothing geq (Let bnd1 body1) (Let bnd2 body2) = if bnd1==bnd2 then geq body1 body2 else Nothing geq _ _ = Nothing instance (GEq v,GEq qv,GEq fun,GEq fv,GEq lv,GEq e) => Eq (Expression v qv fun fv lv e t) where (==) = defaultEq instance (GCompare v,GCompare qv,GCompare fun,GCompare fv,GCompare lv,GCompare e) => GCompare (Expression v qv fun fv lv e) where gcompare (Var v1) (Var v2) = gcompare v1 v2 gcompare (Var _) _ = GLT gcompare _ (Var _) = GGT gcompare (QVar v1) (QVar v2) = gcompare v1 v2 gcompare (QVar _) _ = GLT gcompare _ (QVar _) = GGT gcompare (FVar v1) (FVar v2) = gcompare v1 v2 gcompare (FVar _) _ = GLT gcompare _ (FVar _) = GGT gcompare (LVar v1) (LVar v2) = gcompare v1 v2 gcompare (LVar _) _ = GLT gcompare _ (LVar _) = GGT gcompare (App f1 arg1) (App f2 arg2) = case gcompare f1 f2 of GEQ -> case gcompare arg1 arg2 of GEQ -> GEQ GLT -> GLT GGT -> GGT GLT -> GLT GGT -> GGT gcompare (App _ _) _ = GLT gcompare _ (App _ _) = GGT gcompare (Const v1) (Const v2) = gcompare v1 v2 gcompare (Const _) _ = GLT gcompare _ (Const _) = GGT gcompare (AsArray f1) (AsArray f2) = case gcompare f1 f2 of GEQ -> GEQ GLT -> GLT GGT -> GGT gcompare (AsArray _) _ = GLT gcompare _ (AsArray _) = GGT gcompare (Quantification q1 arg1 body1) (Quantification q2 arg2 body2) = case compare q1 q2 of LT -> GLT GT -> GGT EQ -> case gcompare arg1 arg2 of GEQ -> gcompare body1 body2 GLT -> GLT GGT -> GGT gcompare (Quantification _ _ _) _ = GLT gcompare _ (Quantification _ _ _) = GGT gcompare (Let bnd1 body1) (Let bnd2 body2) = case compare bnd1 bnd2 of EQ -> gcompare body1 body2 LT -> GLT GT -> GGT instance (GCompare v,GCompare qv,GCompare fun,GCompare fv,GCompare lv,GCompare e) => Ord (Expression v qv fun fv lv e t) where compare = defaultCompare instance GEq fun => GEq (Function fun) where geq (Fun f1) (Fun f2) = geq f1 f2 geq (Eq tp1 n1) (Eq tp2 n2) = do Refl <- geq tp1 tp2 Refl <- geq n1 n2 return Refl geq (Distinct tp1 n1) (Distinct tp2 n2) = do Refl <- geq tp1 tp2 Refl <- geq n1 n2 return Refl geq (Map i1 f1) (Map i2 f2) = do Refl <- geq f1 f2 Refl <- geq i1 i2 return Refl geq (Ord tp1 o1) (Ord tp2 o2) = do Refl <- geq tp1 tp2 if o1==o2 then return Refl else Nothing geq (Arith tp1 o1 n1) (Arith tp2 o2 n2) = do Refl <- geq tp1 tp2 if o1==o2 then do Refl <- geq n1 n2 return Refl else Nothing geq (ArithIntBin o1) (ArithIntBin o2) = if o1==o2 then Just Refl else Nothing geq Divide Divide = Just Refl geq (Abs tp1) (Abs tp2) = do Refl <- geq tp1 tp2 return Refl geq Not Not = Just 
Refl geq (Logic o1 n1) (Logic o2 n2) = if o1==o2 then do Refl <- geq n1 n2 return Refl else Nothing geq ToReal ToReal = Just Refl geq ToInt ToInt = Just Refl geq (ITE t1) (ITE t2) = do Refl <- geq t1 t2 return Refl geq (BVComp o1 bw1) (BVComp o2 bw2) = if o1==o2 then do Refl <- geq bw1 bw2 return Refl else Nothing geq (BVBin o1 bw1) (BVBin o2 bw2) = if o1==o2 then do Refl <- geq bw1 bw2 return Refl else Nothing geq (BVUn o1 bw1) (BVUn o2 bw2) = if o1==o2 then do Refl <- geq bw1 bw2 return Refl else Nothing geq (Select i1 e1) (Select i2 e2) = do Refl <- geq i1 i2 Refl <- geq e1 e2 return Refl geq (Store i1 e1) (Store i2 e2) = do Refl <- geq i1 i2 Refl <- geq e1 e2 return Refl geq (ConstArray i1 e1) (ConstArray i2 e2) = do Refl <- geq i1 i2 Refl <- geq e1 e2 return Refl geq (Concat a1 b1) (Concat a2 b2) = do Refl <- geq a1 a2 Refl <- geq b1 b2 return Refl geq (Extract bw1 start1 len1) (Extract bw2 start2 len2) = do Refl <- geq bw1 bw2 Refl <- geq start1 start2 Refl <- geq len1 len2 return Refl geq (Constructor d1 par1 (c1 :: Constr dt1 sig1)) (Constructor d2 par2 (c2 :: Constr dt2 sig2)) = do Refl <- datatypeEq d1 d2 Refl <- geq par1 par2 Refl <- geq c1 c2 return Refl geq (Test d1 par1 (c1 :: Constr dt1 sig1)) (Test d2 par2 (c2 :: Constr dt2 sig2)) = do Refl <- datatypeEq d1 d2 Refl <- geq par1 par2 Refl <- geq c1 c2 return Refl geq (Field d1 par1 (f1 :: Type.Field dt1 tp1)) (Field d2 par2 (f2 :: Type.Field dt2 tp2)) = do Refl <- datatypeEq d1 d2 Refl <- geq par1 par2 Refl <- geq f1 f2 return Refl geq (Divisible n1) (Divisible n2) = if n1==n2 then Just Refl else Nothing geq (PseudoBoolean op1 coeff1 res1) (PseudoBoolean op2 coeff2 res2) = if op1==op2 then do Refl <- geq coeff1 coeff2 if res1==res2 then return Refl else Nothing else Nothing geq _ _ = Nothing instance GCompare fun => GCompare (Function fun) where gcompare (Fun x) (Fun y) = gcompare x y gcompare (Fun _) _ = GLT gcompare _ (Fun _) = GGT gcompare (Eq t1 n1) (Eq t2 n2) = case gcompare t1 t2 of GEQ -> case gcompare n1 n2 of GEQ -> GEQ GLT -> GLT GGT -> GGT GLT -> GLT GGT -> GGT gcompare (Eq _ _) _ = GLT gcompare _ (Eq _ _) = GGT gcompare (Distinct t1 n1) (Distinct t2 n2) = case gcompare t1 t2 of GEQ -> case gcompare n1 n2 of GEQ -> GEQ GLT -> GLT GGT -> GGT GLT -> GLT GGT -> GGT gcompare (Distinct _ _) _ = GLT gcompare _ (Distinct _ _) = GGT gcompare (Map i1 f1) (Map i2 f2) = case gcompare f1 f2 of GEQ -> case gcompare i1 i2 of GEQ -> GEQ GLT -> GLT GGT -> GGT GLT -> GLT GGT -> GGT gcompare (Map _ _) _ = GLT gcompare _ (Map _ _) = GGT gcompare (Ord tp1 o1) (Ord tp2 o2) = case gcompare tp1 tp2 of GEQ -> case compare o1 o2 of EQ -> GEQ LT -> GLT GT -> GGT GLT -> GLT GGT -> GGT gcompare (Ord _ _) _ = GLT gcompare _ (Ord _ _) = GGT gcompare (Arith tp1 o1 n1) (Arith tp2 o2 n2) = case gcompare tp1 tp2 of GEQ -> case compare o1 o2 of EQ -> case gcompare n1 n2 of GEQ -> GEQ GLT -> GLT GGT -> GGT LT -> GLT GT -> GGT GLT -> GLT GGT -> GGT gcompare (Arith _ _ _) _ = GLT gcompare _ (Arith _ _ _) = GGT gcompare (ArithIntBin o1) (ArithIntBin o2) = case compare o1 o2 of EQ -> GEQ LT -> GLT GT -> GGT gcompare (ArithIntBin _) _ = GLT gcompare _ (ArithIntBin _) = GGT gcompare Divide Divide = GEQ gcompare Divide _ = GLT gcompare _ Divide = GGT gcompare (Abs tp1) (Abs tp2) = case gcompare tp1 tp2 of GEQ -> GEQ GLT -> GLT GGT -> GGT gcompare (Abs _) _ = GLT gcompare _ (Abs _) = GGT gcompare Not Not = GEQ gcompare Not _ = GLT gcompare _ Not = GGT gcompare (Logic o1 n1) (Logic o2 n2) = case compare o1 o2 of EQ -> case gcompare n1 n2 of GEQ -> GEQ GLT -> 
GLT GGT -> GGT LT -> GLT GT -> GGT gcompare (Logic _ _) _ = GLT gcompare _ (Logic _ _) = GGT gcompare ToReal ToReal = GEQ gcompare ToReal _ = GLT gcompare _ ToReal = GGT gcompare ToInt ToInt = GEQ gcompare ToInt _ = GLT gcompare _ ToInt = GGT gcompare (ITE t1) (ITE t2) = case gcompare t1 t2 of GEQ -> GEQ GLT -> GLT GGT -> GGT gcompare (ITE _) _ = GLT gcompare _ (ITE _) = GGT gcompare (BVComp o1 bw1) (BVComp o2 bw2) = case compare o1 o2 of EQ -> case gcompare bw1 bw2 of GEQ -> GEQ GLT -> GLT GGT -> GGT LT -> GLT GT -> GGT gcompare (BVComp _ _) _ = GLT gcompare _ (BVComp _ _) = GGT gcompare (BVBin o1 bw1) (BVBin o2 bw2) = case compare o1 o2 of EQ -> case gcompare bw1 bw2 of GEQ -> GEQ GLT -> GLT GGT -> GGT LT -> GLT GT -> GGT gcompare (BVBin _ _) _ = GLT gcompare _ (BVBin _ _) = GGT gcompare (BVUn o1 bw1) (BVUn o2 bw2) = case compare o1 o2 of EQ -> case gcompare bw1 bw2 of GEQ -> GEQ GLT -> GLT GGT -> GGT LT -> GLT GT -> GGT gcompare (BVUn _ _) _ = GLT gcompare _ (BVUn _ _) = GGT gcompare (Select i1 e1) (Select i2 e2) = case gcompare i1 i2 of GEQ -> case gcompare e1 e2 of GEQ -> GEQ GLT -> GLT GGT -> GGT GLT -> GLT GGT -> GGT gcompare (Select _ _) _ = GLT gcompare _ (Select _ _) = GGT gcompare (Store i1 e1) (Store i2 e2) = case gcompare i1 i2 of GEQ -> case gcompare e1 e2 of GEQ -> GEQ GLT -> GLT GGT -> GGT GLT -> GLT GGT -> GGT gcompare (Store _ _) _ = GLT gcompare _ (Store _ _) = GGT gcompare (ConstArray i1 e1) (ConstArray i2 e2) = case gcompare i1 i2 of GEQ -> case gcompare e1 e2 of GEQ -> GEQ GLT -> GLT GGT -> GGT GLT -> GLT GGT -> GGT gcompare (ConstArray _ _) _ = GLT gcompare _ (ConstArray _ _) = GGT gcompare (Concat a1 b1) (Concat a2 b2) = case gcompare a1 a2 of GEQ -> case gcompare b1 b2 of GEQ -> GEQ GLT -> GLT GGT -> GGT GLT -> GLT GGT -> GGT gcompare (Concat _ _) _ = GLT gcompare _ (Concat _ _) = GGT gcompare (Extract bw1 start1 len1) (Extract bw2 start2 len2) = case gcompare bw1 bw2 of GEQ -> case gcompare start1 start2 of GEQ -> case gcompare len1 len2 of GEQ -> GEQ GLT -> GLT GGT -> GGT GLT -> GLT GGT -> GGT GLT -> GLT GGT -> GGT gcompare (Extract _ _ _) _ = GLT gcompare _ (Extract _ _ _) = GGT gcompare (Constructor d1 par1 c1) (Constructor d2 par2 c2) = case datatypeCompare d1 d2 of GEQ -> case gcompare par1 par2 of GEQ -> case gcompare c1 c2 of GEQ -> GEQ GLT -> GLT GGT -> GGT GLT -> GLT GGT -> GGT GLT -> GLT GGT -> GGT gcompare (Constructor _ _ _) _ = GLT gcompare _ (Constructor _ _ _) = GGT gcompare (Test d1 par1 c1) (Test d2 par2 c2) = case datatypeCompare d1 d2 of GEQ -> case gcompare par1 par2 of GEQ -> case gcompare c1 c2 of GEQ -> GEQ GLT -> GLT GGT -> GGT GLT -> GLT GGT -> GGT GLT -> GLT GGT -> GGT gcompare (Test _ _ _) _ = GLT gcompare _ (Test _ _ _) = GGT gcompare (Field d1 par1 f1) (Field d2 par2 f2) = case datatypeCompare d1 d2 of GEQ -> case gcompare par1 par2 of GEQ -> case gcompare f1 f2 of GEQ -> GEQ GLT -> GLT GGT -> GGT GLT -> GLT GGT -> GGT GLT -> GLT GGT -> GGT gcompare (Field _ _ _) _ = GLT gcompare _ (Field _ _ _) = GGT gcompare (Divisible n1) (Divisible n2) = case compare n1 n2 of EQ -> GEQ LT -> GLT GT -> GGT gcompare (Divisible _) _ = GLT gcompare _ (Divisible _) = GGT gcompare (PseudoBoolean op1 coeff1 res1) (PseudoBoolean op2 coeff2 res2) = case compare op1 op2 of EQ -> case gcompare coeff1 coeff2 of GEQ -> case compare res1 res2 of EQ -> GEQ LT -> GLT GT -> GGT GLT -> GLT GGT -> GGT LT -> GLT GT -> GGT data NoVar (t::Type) = NoVar' data NoFun (sig::([Type],Type)) = NoFun' data NoCon (sig::([Type],*)) = NoCon' data NoField (sig::(*,Type)) = NoField' 
instance GEq NoVar where geq _ _ = error "geq for NoVar" instance GEq NoFun where geq _ _ = error "geq for NoFun" instance GEq NoCon where geq _ _ = error "geq for NoCon" instance GEq NoField where geq _ _ = error "geq for NoField" instance GCompare NoVar where gcompare _ _ = error "gcompare for NoVar" instance GCompare NoFun where gcompare _ _ = error "gcompare for NoFun" instance GCompare NoCon where gcompare _ _ = error "gcompare for NoCon" instance GCompare NoField where gcompare _ _ = error "gcompare for NoField" instance Eq (NoVar t) where (==) _ _ = error "== for NoVar" instance Eq (NoFun t) where (==) _ _ = error "== for NoFun" instance Eq (NoCon t) where (==) _ _ = error "== for NoCon" instance Eq (NoField t) where (==) _ _ = error "== for NoField" instance Ord (NoVar t) where compare _ _ = error "compare for NoVar" instance Ord (NoFun t) where compare _ _ = error "compare for NoFun" instance Ord (NoCon t) where compare _ _ = error "compare for NoCon" instance Ord (NoField t) where compare _ _ = error "compare for NoField" instance Show (NoVar t) where showsPrec _ _ = showString "NoVar" instance GShow NoVar where gshowsPrec = showsPrec instance Show (NoFun t) where showsPrec _ _ = showString "NoFun" instance GShow NoFun where gshowsPrec = showsPrec instance Show (NoCon t) where showsPrec _ _ = showString "NoCon" instance GShow NoCon where gshowsPrec = showsPrec instance Show (NoField t) where showsPrec _ _ = showString "NoField" instance GShow NoField where gshowsPrec = showsPrec instance GetType NoVar where getType _ = error "getType called on NoVar." instance GetFunType NoFun where getFunType _ = error "getFunType called on NoFun." instance (GetType v,GetType qv,GetFunType fun,GetType fv,GetType lv,GetType e) => GetType (Expression v qv fun fv lv e) where getType = runIdentity . expressionType (return.getType) (return.getType) (return.getFunType) (return.getType) (return.getType) (return.getType) instance (GetFunType fun) => GetFunType (Function fun) where getFunType = runIdentity . functionType (return.getFunType)
hguenther/smtlib2
Language/SMTLib2/Internals/Expression.hs
gpl-3.0
42,677
0
17
12,708
17,987
8,912
9,075
-1
-1
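-- A tiny rendering sketch (editorial example, not from the repo): it assumes
-- the Value constructors (BoolValue, IntValue) from the library's Type module
-- are in scope alongside the renderValue/SMTRendering definitions above.
demoRender :: (String, String)
demoRender = ( renderValue SMTRendering (BoolValue True) ""  -- "true"
             , renderValue SMTRendering (IntValue (-3)) ""   -- "(- 3)"
             )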
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- Module : Network.AWS.EC2.AttachInternetGateway -- Copyright : (c) 2013-2014 Brendan Hay <[email protected]> -- License : This Source Code Form is subject to the terms of -- the Mozilla Public License, v. 2.0. -- A copy of the MPL can be found in the LICENSE file or -- you can obtain it at http://mozilla.org/MPL/2.0/. -- Maintainer : Brendan Hay <[email protected]> -- Stability : experimental -- Portability : non-portable (GHC extensions) -- -- Derived from AWS service descriptions, licensed under Apache 2.0. -- | Attaches an Internet gateway to a VPC, enabling connectivity between the -- Internet and the VPC. For more information about your VPC and Internet -- gateway, see the <http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/ Amazon Virtual Private Cloud User Guide>. -- -- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-AttachInternetGateway.html> module Network.AWS.EC2.AttachInternetGateway ( -- * Request AttachInternetGateway -- ** Request constructor , attachInternetGateway -- ** Request lenses , aigDryRun , aigInternetGatewayId , aigVpcId -- * Response , AttachInternetGatewayResponse -- ** Response constructor , attachInternetGatewayResponse ) where import Network.AWS.Prelude import Network.AWS.Request.Query import Network.AWS.EC2.Types import qualified GHC.Exts data AttachInternetGateway = AttachInternetGateway { _aigDryRun :: Maybe Bool , _aigInternetGatewayId :: Text , _aigVpcId :: Text } deriving (Eq, Ord, Read, Show) -- | 'AttachInternetGateway' constructor. -- -- The fields accessible through corresponding lenses are: -- -- * 'aigDryRun' @::@ 'Maybe' 'Bool' -- -- * 'aigInternetGatewayId' @::@ 'Text' -- -- * 'aigVpcId' @::@ 'Text' -- attachInternetGateway :: Text -- ^ 'aigInternetGatewayId' -> Text -- ^ 'aigVpcId' -> AttachInternetGateway attachInternetGateway p1 p2 = AttachInternetGateway { _aigInternetGatewayId = p1 , _aigVpcId = p2 , _aigDryRun = Nothing } -- | Checks whether you have the required permissions for the action, without -- actually making the request, and provides an error response. If you have the -- required permissions, the error response is 'DryRunOperation'. Otherwise, it is 'UnauthorizedOperation'. aigDryRun :: Lens' AttachInternetGateway (Maybe Bool) aigDryRun = lens _aigDryRun (\s a -> s { _aigDryRun = a }) -- | The ID of the Internet gateway. aigInternetGatewayId :: Lens' AttachInternetGateway Text aigInternetGatewayId = lens _aigInternetGatewayId (\s a -> s { _aigInternetGatewayId = a }) -- | The ID of the VPC. aigVpcId :: Lens' AttachInternetGateway Text aigVpcId = lens _aigVpcId (\s a -> s { _aigVpcId = a }) data AttachInternetGatewayResponse = AttachInternetGatewayResponse deriving (Eq, Ord, Read, Show, Generic) -- | 'AttachInternetGatewayResponse' constructor. attachInternetGatewayResponse :: AttachInternetGatewayResponse attachInternetGatewayResponse = AttachInternetGatewayResponse instance ToPath AttachInternetGateway where toPath = const "/" instance ToQuery AttachInternetGateway where toQuery AttachInternetGateway{..} = mconcat [ "DryRun" =? _aigDryRun , "InternetGatewayId" =? _aigInternetGatewayId , "VpcId" =? 
_aigVpcId ] instance ToHeaders AttachInternetGateway instance AWSRequest AttachInternetGateway where type Sv AttachInternetGateway = EC2 type Rs AttachInternetGateway = AttachInternetGatewayResponse request = post "AttachInternetGateway" response = nullResponse AttachInternetGatewayResponse
romanb/amazonka
amazonka-ec2/gen/Network/AWS/EC2/AttachInternetGateway.hs
mpl-2.0
4,240
0
9
923
469
286
183
59
1
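-- A hypothetical call-site sketch (the resource IDs are placeholders): build
-- the request with the smart constructor and flip the optional DryRun flag via
-- its lens. Actually sending the request is left to the amazonka runtime.
import Control.Lens ((&), (?~))
import Network.AWS.EC2.AttachInternetGateway

exampleReq :: AttachInternetGateway
exampleReq = attachInternetGateway "igw-1a2b3c4d" "vpc-1a2b3c4d"
               & aigDryRun ?~ True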
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.Plus.Comments.Get -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Get a comment. -- -- /See:/ <https://developers.google.com/+/api/ Google+ API Reference> for @plus.comments.get@. module Network.Google.Resource.Plus.Comments.Get ( -- * REST Resource CommentsGetResource -- * Creating a Request , commentsGet , CommentsGet -- * Request Lenses , cgCommentId ) where import Network.Google.Plus.Types import Network.Google.Prelude -- | A resource alias for @plus.comments.get@ method which the -- 'CommentsGet' request conforms to. type CommentsGetResource = "plus" :> "v1" :> "comments" :> Capture "commentId" Text :> QueryParam "alt" AltJSON :> Get '[JSON] Comment -- | Get a comment. -- -- /See:/ 'commentsGet' smart constructor. newtype CommentsGet = CommentsGet' { _cgCommentId :: Text } deriving (Eq,Show,Data,Typeable,Generic) -- | Creates a value of 'CommentsGet' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'cgCommentId' commentsGet :: Text -- ^ 'cgCommentId' -> CommentsGet commentsGet pCgCommentId_ = CommentsGet' { _cgCommentId = pCgCommentId_ } -- | The ID of the comment to get. cgCommentId :: Lens' CommentsGet Text cgCommentId = lens _cgCommentId (\ s a -> s{_cgCommentId = a}) instance GoogleRequest CommentsGet where type Rs CommentsGet = Comment type Scopes CommentsGet = '["https://www.googleapis.com/auth/plus.login", "https://www.googleapis.com/auth/plus.me"] requestClient CommentsGet'{..} = go _cgCommentId (Just AltJSON) plusService where go = buildClient (Proxy :: Proxy CommentsGetResource) mempty
rueshyna/gogol
gogol-plus/gen/Network/Google/Resource/Plus/Comments/Get.hs
mpl-2.0
2,563
0
12
601
302
186
116
48
1
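-- A small sketch (the comment ID is a placeholder): the request has a single
-- field, set via the smart constructor and readable through its lens; running
-- it would go through the gogol runtime's send machinery.
import Control.Lens ((^.))
import Data.Text (Text)
import Network.Google.Resource.Plus.Comments.Get

exampleCommentId :: Text
exampleCommentId = commentsGet "some-comment-id" ^. cgCommentId  -- "some-comment-id"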
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.IAM.IAMPolicies.LintPolicy -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Lints, or validates, an IAM policy. Currently checks the -- google.iam.v1.Binding.condition field, which contains a condition -- expression for a role binding. Successful calls to this method always -- return an HTTP \`200 OK\` status code, even if the linter detects an -- issue in the IAM policy. -- -- /See:/ <https://cloud.google.com/iam/ Identity and Access Management (IAM) API Reference> for @iam.iamPolicies.lintPolicy@. module Network.Google.Resource.IAM.IAMPolicies.LintPolicy ( -- * REST Resource IAMPoliciesLintPolicyResource -- * Creating a Request , iamPoliciesLintPolicy , IAMPoliciesLintPolicy -- * Request Lenses , iplpXgafv , iplpUploadProtocol , iplpAccessToken , iplpUploadType , iplpPayload , iplpCallback ) where import Network.Google.IAM.Types import Network.Google.Prelude -- | A resource alias for @iam.iamPolicies.lintPolicy@ method which the -- 'IAMPoliciesLintPolicy' request conforms to. type IAMPoliciesLintPolicyResource = "v1" :> "iamPolicies:lintPolicy" :> QueryParam "$.xgafv" Xgafv :> QueryParam "upload_protocol" Text :> QueryParam "access_token" Text :> QueryParam "uploadType" Text :> QueryParam "callback" Text :> QueryParam "alt" AltJSON :> ReqBody '[JSON] LintPolicyRequest :> Post '[JSON] LintPolicyResponse -- | Lints, or validates, an IAM policy. Currently checks the -- google.iam.v1.Binding.condition field, which contains a condition -- expression for a role binding. Successful calls to this method always -- return an HTTP \`200 OK\` status code, even if the linter detects an -- issue in the IAM policy. -- -- /See:/ 'iamPoliciesLintPolicy' smart constructor. data IAMPoliciesLintPolicy = IAMPoliciesLintPolicy' { _iplpXgafv :: !(Maybe Xgafv) , _iplpUploadProtocol :: !(Maybe Text) , _iplpAccessToken :: !(Maybe Text) , _iplpUploadType :: !(Maybe Text) , _iplpPayload :: !LintPolicyRequest , _iplpCallback :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'IAMPoliciesLintPolicy' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'iplpXgafv' -- -- * 'iplpUploadProtocol' -- -- * 'iplpAccessToken' -- -- * 'iplpUploadType' -- -- * 'iplpPayload' -- -- * 'iplpCallback' iamPoliciesLintPolicy :: LintPolicyRequest -- ^ 'iplpPayload' -> IAMPoliciesLintPolicy iamPoliciesLintPolicy pIplpPayload_ = IAMPoliciesLintPolicy' { _iplpXgafv = Nothing , _iplpUploadProtocol = Nothing , _iplpAccessToken = Nothing , _iplpUploadType = Nothing , _iplpPayload = pIplpPayload_ , _iplpCallback = Nothing } -- | V1 error format. iplpXgafv :: Lens' IAMPoliciesLintPolicy (Maybe Xgafv) iplpXgafv = lens _iplpXgafv (\ s a -> s{_iplpXgafv = a}) -- | Upload protocol for media (e.g. \"raw\", \"multipart\"). 
iplpUploadProtocol :: Lens' IAMPoliciesLintPolicy (Maybe Text) iplpUploadProtocol = lens _iplpUploadProtocol (\ s a -> s{_iplpUploadProtocol = a}) -- | OAuth access token. iplpAccessToken :: Lens' IAMPoliciesLintPolicy (Maybe Text) iplpAccessToken = lens _iplpAccessToken (\ s a -> s{_iplpAccessToken = a}) -- | Legacy upload protocol for media (e.g. \"media\", \"multipart\"). iplpUploadType :: Lens' IAMPoliciesLintPolicy (Maybe Text) iplpUploadType = lens _iplpUploadType (\ s a -> s{_iplpUploadType = a}) -- | Multipart request metadata. iplpPayload :: Lens' IAMPoliciesLintPolicy LintPolicyRequest iplpPayload = lens _iplpPayload (\ s a -> s{_iplpPayload = a}) -- | JSONP iplpCallback :: Lens' IAMPoliciesLintPolicy (Maybe Text) iplpCallback = lens _iplpCallback (\ s a -> s{_iplpCallback = a}) instance GoogleRequest IAMPoliciesLintPolicy where type Rs IAMPoliciesLintPolicy = LintPolicyResponse type Scopes IAMPoliciesLintPolicy = '["https://www.googleapis.com/auth/cloud-platform"] requestClient IAMPoliciesLintPolicy'{..} = go _iplpXgafv _iplpUploadProtocol _iplpAccessToken _iplpUploadType _iplpCallback (Just AltJSON) _iplpPayload iAMService where go = buildClient (Proxy :: Proxy IAMPoliciesLintPolicyResource) mempty
brendanhay/gogol
gogol-iam/gen/Network/Google/Resource/IAM/IAMPolicies/LintPolicy.hs
mpl-2.0
5,260
0
16
1,158
711
418
293
104
1
{-# LANGUAGE DeriveDataTypeable #-}

module LambdaQ (Expr(..), lambda) where

import Text.Parsec
import Text.Parsec.String (Parser)
import Text.Parsec.Language (emptyDef)
import qualified Text.Parsec.Token as Tok

import Data.Data (Data, Typeable)
import Language.Haskell.TH (loc_filename, location)
import Language.Haskell.TH.Quote

data Expr = Var String
          | Lam String Expr
          | App Expr Expr
          deriving (Data, Typeable, Show)

lexer :: Tok.TokenParser ()
lexer = Tok.makeTokenParser emptyDef

symbol = Tok.symbol lexer
parens = Tok.parens lexer
identifier = Tok.identifier lexer
whiteSpace = Tok.whiteSpace lexer

var :: Parser Expr
var = Var <$> identifier

lam :: Parser Expr
lam = do
  symbol "\\"
  x <- identifier
  symbol "->"
  e <- expr
  return $ Lam x e

app :: Parser Expr
app = term `chainl1` return App

term :: Parser Expr
term = var <|> parens expr <?> "term"

expr :: Parser Expr
expr = lam <|> app <?> "expr"

lambda :: QuasiQuoter
lambda = QuasiQuoter
  { quoteExp = \str -> do
      -- record current position for parse error reporting
      filename <- loc_filename <$> location
      case parse (whiteSpace *> expr <* eof) filename str of
        Left err -> error (show err)
        Right e  -> dataToExpQ (const Nothing) e
  , quotePat = undefined
  , quoteDec = undefined
  , quoteType = undefined
  }

{- Usage:

{-# LANGUAGE QuasiQuotes # }

import LambdaQ

e :: Expr
e = [lambda| \x -> \y -> x y |]

-}
scturtle/fun.hs
LambdaQ.hs
unlicense
1,450
5
16
315
461
255
206
-1
-1
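-- A quick check of what the quasiquoter above produces (QuasiQuotes must be
-- enabled at the use site, as in the Usage note):
{-# LANGUAGE QuasiQuotes #-}
import LambdaQ

applyTwice :: Expr
applyTwice = [lambda| \f -> \x -> f (f x) |]
-- show applyTwice
--   ==> Lam "f" (Lam "x" (App (Var "f") (App (Var "f") (Var "x"))))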
{-# LANGUAGE OverloadedStrings #-} -- Copyright 2014 (c) Diego Souza <[email protected]> -- -- Licensed under the Apache License, Version 2.0 (the "License"); -- you may not use this file except in compliance with the License. -- You may obtain a copy of the License at -- -- http://www.apache.org/licenses/LICENSE-2.0 -- -- Unless required by applicable law or agreed to in writing, software -- distributed under the License is distributed on an "AS IS" BASIS, -- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -- See the License for the specific language governing permissions and -- limitations under the License. module Leela.Data.LQL ( LQL (..) , Grep (..) , Using (..) , AttrEvent (..) , GraphEvent (..) , grep , grepID , isStat , targetUser , lqlDescr , groupLQL ) where import Data.Maybe import Crypto.Hash import qualified Data.Vector as V import qualified Data.Sequence as S import Data.Foldable (toList) import Data.Serialize import qualified Data.Map.Strict as M import Leela.Data.Time import Text.Regex.TDFA import Leela.Data.Types import Leela.Data.Pipeline import Control.Applicative import qualified Data.ByteString.Lazy as L import Control.Monad.Identity targetUser :: Using -> User targetUser u = fromMaybe (uUser u) (uAsUser u) data Using = Using { uUser :: User , uTree :: Tree , uAsUser :: Maybe User } deriving (Eq) data Grep = GrepTAttr (Maybe GUID) Attr | GrepKAttr (Maybe GUID) Attr | GrepMakeLink (Maybe GUID) Label (Maybe GUID) | GrepKillLink (Maybe GUID) Label (Maybe GUID) | GrepMakeVertex Kind Node | GrepKillVertex (Maybe GUID) data LQL = StatStmt | PathStmt (Matcher, [(Bool, GUID -> Matcher)]) | KAttrGetStmt GUID Attr [Pipeline Identity (V.Vector (Time, Double))] | TAttrGetStmt GUID Attr TimeRange [Pipeline Identity (V.Vector (Time, Double))] | KAttrListStmt GUID (Mode Attr) | TAttrListStmt GUID (Mode Attr) | TAttrLastStmt (Maybe GUID) Attr | NameStmt Using (S.Seq GUID) | GUIDStmt Using (S.Seq (Kind, Node)) | AlterStmt (S.Seq Journal) | GrepStmt Using Grep data GraphEvent = MakeVertexEvent User Tree Kind Node GUID | KillVertexEvent GUID | MakeLinkEvent GUID Label GUID | KillLinkEvent GUID Label (Maybe GUID) deriving (Show, Eq) data AttrEvent = TAttrPutEvent GUID Attr Time Value [Option] | KAttrPutEvent GUID Attr Value [Option] | TAttrDelEvent GUID Attr (Maybe Time) | KAttrDelEvent GUID Attr deriving (Show, Eq) hashlazy' :: L.ByteString -> L.ByteString hashlazy' = L.fromStrict . digestToHexByteString . 
hashAlgo where hashAlgo :: L.ByteString -> Digest SHA1 hashAlgo = hashlazy grepID :: Grep -> GUID grepID grepExpr = case grepExpr of GrepKillVertex guidQ -> GUID $ hashlazy' $ L.intercalate "0" [ "kill" , maybe "*" (\(GUID g) -> g) guidQ ] GrepTAttr guidQ (Attr attrQ) -> GUID $ hashlazy' $ L.intercalate "0" ["t-attr" , maybe "*" (\(GUID g) -> g) guidQ , attrQ ] GrepKAttr guidQ (Attr attrQ) -> GUID $ hashlazy' $ L.intercalate "0" ["k-attr" , maybe "*" (\(GUID g) -> g) guidQ , attrQ ] GrepMakeLink aguidQ (Label labelQ) bguidQ -> GUID $ hashlazy' $ L.intercalate "0" ["link" , maybe "*" (\(GUID g) -> g) aguidQ , maybe "*" (\(GUID g) -> g) bguidQ , labelQ ] GrepKillLink aguidQ (Label labelQ) bguidQ -> GUID $ hashlazy' $ L.intercalate "0" [ "unlink" , maybe "*" (\(GUID g) -> g) aguidQ , maybe "*" (\(GUID g) -> g) bguidQ , labelQ ] GrepMakeVertex (Kind kindQ) (Node nodeQ) -> GUID $ hashlazy' $ L.intercalate "0" [ "make" , kindQ , nodeQ ] grep :: Grep -> Either (GraphEvent -> Bool) (AttrEvent -> Bool) grep (GrepTAttr guidQ (Attr exprQ)) = let regex = makeRegex exprQ :: Regex in Right $ \e -> case e of TAttrPutEvent guid (Attr attr) _ _ _ -> and [ maybe True (== guid) guidQ , match regex attr ] _ -> False grep (GrepKAttr guidQ (Attr exprQ)) = let regex = makeRegex exprQ :: Regex in Right $ \e -> case e of KAttrPutEvent guid (Attr attr) _ _ -> and [ maybe True (== guid) guidQ , match regex attr ] _ -> False grep (GrepMakeLink aguidQ (Label labelQ) bguidQ) = let regex = makeRegex labelQ :: Regex in Left $ \e -> case e of MakeLinkEvent aguid (Label labelV) bguid -> and [ maybe True (aguid ==) aguidQ , maybe True (bguid ==) bguidQ , match regex labelV ] _ -> False grep (GrepKillLink aguidQ (Label labelQ) bguidQ) = let regex = makeRegex labelQ :: Regex in Left $ \e -> case e of KillLinkEvent aguid (Label labelV) bguid -> and [ maybe True (aguid ==) aguidQ , maybe True ((bguid ==) . Just) bguidQ , match regex labelV ] _ -> False grep (GrepMakeVertex (Kind kindQ) (Node nodeQ)) = let kRegex = makeRegex kindQ :: Regex nRegex = makeRegex nodeQ :: Regex in Left $ \e -> case e of MakeVertexEvent _ _ (Kind kind) (Node node) _ -> and [ match kRegex kind , match nRegex node ] _ -> False grep (GrepKillVertex guidQ) = Left $ \e -> case e of KillVertexEvent guid -> maybe True (guid ==) guidQ _ -> False isStat :: LQL -> Bool isStat StatStmt = True isStat _ = False lqlDescr :: [LQL] -> String lqlDescr = show . 
go M.empty where go :: M.Map String Int -> [LQL] -> [(String, Int)] go acc [] = M.toList acc go acc (StatStmt : xs) = go (M.insertWith (+) "stat" 1 acc) xs go acc (PathStmt _ : xs) = go (M.insertWith (+) "path" 1 acc) xs go acc (KAttrGetStmt {} : xs) = go (M.insertWith (+) "attr get(k)" 1 acc) xs go acc (KAttrListStmt {} : xs) = go (M.insertWith (+) "attr kls" 1 acc) xs go acc (TAttrListStmt {} : xs) = go (M.insertWith (+) "attr tls" 1 acc) xs go acc (TAttrGetStmt {} : xs) = go (M.insertWith (+) "attr get(t)" 1 acc) xs go acc (NameStmt {} : xs) = go (M.insertWith (+) "name" 1 acc) xs go acc (GUIDStmt {} : xs) = go (M.insertWith (+) "guid" 1 acc) xs go acc (TAttrLastStmt {} : xs) = go (M.insertWith (+) "attr last" 1 acc) xs go acc (GrepStmt {} : xs) = go (M.insertWith (+) "grep" 1 acc) xs go acc (AlterStmt j : xs) = go (jDescr acc (toList j)) xs jDescr acc [] = acc jDescr acc (PutLink {} : xs) = jDescr (M.insertWith (+) "make(l)" 1 acc) xs jDescr acc (PutLabel {} : xs) = jDescr (M.insertWith (+) "make(l)" 1 acc) xs jDescr acc (PutNode {} : xs) = jDescr (M.insertWith (+) "make(n)" 1 acc) xs jDescr acc (DelLink {} : xs) = jDescr (M.insertWith (+) "kill(l)" 1 acc) xs jDescr acc (DelNode {} : xs) = jDescr (M.insertWith (+) "kill(n)" 1 acc) xs jDescr acc (DelKAttr {} : xs) = jDescr (M.insertWith (+) "attr kill(k)" 1 acc) xs jDescr acc (DelTAttr {} : xs) = jDescr (M.insertWith (+) "attr kill(t)" 1 acc) xs jDescr acc (PutKAttr {} : xs) = jDescr (M.insertWith (+) "attr put(k)" 1 acc) xs jDescr acc (PutTAttr {} : xs) = jDescr (M.insertWith (+) "attr put(t)" 1 acc) xs lqlMerge :: LQL -> LQL -> Either LQL (LQL, LQL) lqlMerge (NameStmt u xs) (NameStmt _ ys) = Left (NameStmt u (xs S.>< ys)) lqlMerge (GUIDStmt u xs) (GUIDStmt _ ys) = Left (GUIDStmt u (xs S.>< ys)) lqlMerge (AlterStmt xs) (AlterStmt ys) = Left (AlterStmt (xs S.>< ys)) lqlMerge a b = Right (a, b) groupLQL :: [LQL] -> [LQL] groupLQL = go where go (a : b : xs) = case (lqlMerge a b) of Left c -> go (c : xs) Right (c, d) -> c : go (d : xs) go xs = xs getGraphEvent_v0 :: Get GraphEvent getGraphEvent_v0 = getWord8 >>= \ty -> case ty of 0 -> MakeVertexEvent <$> get <*> get <*> get <*> get <*> get 1 -> KillVertexEvent <$> get 2 -> MakeLinkEvent <$> get <*> get <*> get 3 -> KillLinkEvent <$> get <*> get <*> get _ -> mzero putGraphEvent_v0 :: GraphEvent -> Put putGraphEvent_v0 (MakeVertexEvent u t k n g) = do putWord8 0 sequence_ [put u, put t, put k, put n, put g] putGraphEvent_v0 (KillVertexEvent g) = do putWord8 1 put g putGraphEvent_v0 (MakeLinkEvent ga l gb) = do putWord8 2 sequence_ [put ga, put l, put gb] putGraphEvent_v0 (KillLinkEvent ga l mgb) = do putWord8 3 sequence_ [put ga, put l, put mgb] putAttrEvent_v0 :: AttrEvent -> Put putAttrEvent_v0 (TAttrPutEvent g a t v opts) = do putWord8 0 sequence_ [put g, put a, put t, put v, put opts] putAttrEvent_v0 (KAttrPutEvent g a v opts) = do putWord8 1 sequence_ [put g, put a, put v, put opts] putAttrEvent_v0 (TAttrDelEvent g a mt) = do putWord8 2 sequence_ [put g, put a, put mt] putAttrEvent_v0 (KAttrDelEvent g a) = do putWord8 3 sequence_ [put g, put a] getAttrEvent_v0 :: Get AttrEvent getAttrEvent_v0 = getWord8 >>= \ty -> case ty of 0 -> TAttrPutEvent <$> get <*> get <*> get <*> get <*> get 1 -> KAttrPutEvent <$> get <*> get <*> get <*> get 2 -> TAttrDelEvent <$> get <*> get <*> get 3 -> KAttrDelEvent <$> get <*> get _ -> mzero putGrep_v0 :: Grep -> Put putGrep_v0 (GrepTAttr g a) = putWord8 0 >> sequence_ [put g, put a] putGrep_v0 (GrepKAttr g a) = putWord8 1 >> sequence_ [put g, put a] 
putGrep_v0 (GrepMakeLink a l b) = putWord8 2 >> sequence_ [put a, put l, put b] putGrep_v0 (GrepKillLink a l b) = putWord8 3 >> sequence_ [put a, put l, put b] putGrep_v0 (GrepMakeVertex k n) = putWord8 4 >> sequence_ [put k, put n] putGrep_v0 (GrepKillVertex n) = putWord8 5 >> put n getGrep_v0 :: Get Grep getGrep_v0 = getWord8 >>= \code -> case code of 0 -> GrepTAttr <$> get <*> get 1 -> GrepKAttr <$> get <*> get 2 -> GrepMakeLink <$> get <*> get <*> get 3 -> GrepKillLink <$> get <*> get <*> get 4 -> GrepMakeVertex <$> get <*> get 5 -> GrepKillVertex <$> get _ -> mzero instance Serialize Grep where get = getWord8 >>= \ver -> case ver of 0 -> getGrep_v0 _ -> mzero put e = putWord8 0 >> putGrep_v0 e instance Serialize GraphEvent where get = getWord8 >>= \ver -> case ver of 0 -> getGraphEvent_v0 _ -> mzero put e = putWord8 0 >> putGraphEvent_v0 e instance Serialize AttrEvent where get = getWord8 >>= \ver -> case ver of 0 -> getAttrEvent_v0 _ -> mzero put e = putWord8 0 >> putAttrEvent_v0 e
locaweb/leela
src/warpdrive/src/Leela/Data/LQL.hs
apache-2.0
13,303
0
17
5,572
4,155
2,157
1,998
246
21
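-- A usage sketch for the grep machinery above (editorial example): a
-- GrepKillVertex query with no GUID filter matches every kill-vertex event.
-- GUID comes from Leela.Data.Types, which the module above already imports.
import Leela.Data.LQL
import Leela.Data.Types (GUID)

matchesKill :: GUID -> Bool
matchesKill g =
  case grep (GrepKillVertex Nothing) of
    Left onGraph -> onGraph (KillVertexEvent g)  -- True for any g
    Right _      -> False                        -- kill-vertex is a graph event, so this branch is unused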
module Coins.A265400Spec (main, spec) where

import Test.Hspec
import Coins.A265400 (a265400)

main :: IO ()
main = hspec spec

spec :: Spec
spec = describe "A265400" $
  it "correctly computes the first 20 elements" $
    take 20 (map a265400 [1..]) `shouldBe` expectedValue
  where
    expectedValue = [0,0,0,1,0,1,0,1,2,0,2,3,0,3,4,5,0,5,6,7]
peterokagey/haskellOEIS
test/Coins/A265400Spec.hs
apache-2.0
345
0
10
59
160
95
65
10
1
-- Copyright 2015 Peter Harpending
--
-- Licensed under the Apache License, Version 2.0 (the "License"); you
-- may not use this file except in compliance with the License. You
-- may obtain a copy of the License at
--
--     http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-- implied. See the License for the specific language governing
-- permissions and limitations under the License.

-- |
-- Module      : Skel
-- Description : Short description
-- Copyright   : Copyright 2015 Peter Harpending
-- License     : Apache-2.0
-- Maintainer  : Peter Harpending <[email protected]>
-- Stability   : experimental
-- Portability : POSIX
--

module Skel where
pharpend/editor-open
skel/Skel.hs
apache-2.0
874
0
2
164
26
25
1
1
0
{-# LANGUAGE Arrows #-}

{- |
   An experiment in using Haskell Arrows, see <http://www.haskell.org/arrows/>,
   to automagically compute Lipschitz constants of maps. Something like this
   can also be used to compute derivatives of functions.

   Here we use floats, but later we intend to replace them with exact reals.
-}

module Lipschitz where

import Control.Category
import Control.Arrow

-- | Locally Lipschitz maps from @a@ to @b@
data Lipschitz a b = Lipschitz { scaling :: a -> Float, apply :: a -> b }

instance Category Lipschitz where
  id = Lipschitz { scaling = const 1.0, apply = (\x -> x) }
  g . f = Lipschitz { scaling = (\x -> scaling f x * scaling g (apply f x)),
                      apply = (\x -> apply g (apply f x)) }

instance Arrow Lipschitz where
  arr f = Lipschitz { scaling = const infinity, apply = f }
    where infinity = 1.0 / 0.0
  first f = Lipschitz { scaling = (\(x, _) -> scaling f x),
                        apply = (\(x,y) -> (apply f x, y)) }
  second f = Lipschitz { scaling = (\(_, y) -> scaling f y),
                         apply = (\(x,y) -> (x, apply f y)) }
  f *** g = Lipschitz { scaling = (\(x,y) -> max (scaling f x) (scaling g y)),
                        apply = (\(x,y) -> (apply f x, apply g y)) }
  f &&& g = Lipschitz { scaling = (\x -> max (scaling f x) (scaling g x)),
                        apply = (\x -> (apply f x, apply g x)) }
andrejbauer/marshall
etc/haskell/Lipschitz.hs
bsd-2-clause
1,452
0
13
460
506
287
219
20
0
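-- A small sketch (not in the repo): compose two affine maps with the Category
-- instance above and read off the composite's local Lipschitz constant.
import Control.Category ((<<<))
import Lipschitz

double, shift :: Lipschitz Float Float
double = Lipschitz { scaling = const 2.0, apply = (* 2.0) }
shift  = Lipschitz { scaling = const 1.0, apply = (+ 1.0) }

-- Scalings multiply along a composition, so this evaluates to (2.0, 7.0).
composite :: (Float, Float)
composite = (scaling (shift <<< double) 3.0, apply (shift <<< double) 3.0)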
{-# LANGUAGE DeriveDataTypeable #-} -- | -- Module : Statistics.Distribution.ChiSquared -- Copyright : (c) 2010 Alexey Khudyakov -- License : BSD3 -- -- Maintainer : [email protected] -- Stability : experimental -- Portability : portable -- -- The chi-squared distribution. This is a continuous probability -- distribution of sum of squares of k independent standard normal -- distributions. It's commonly used in statistical tests module Statistics.Distribution.ChiSquared ( ChiSquared -- Constructors , chiSquared , chiSquaredNDF ) where import Data.Typeable (Typeable) import Numeric.SpecFunctions (incompleteGamma,invIncompleteGamma,logGamma) import qualified Statistics.Distribution as D import qualified System.Random.MWC.Distributions as MWC -- | Chi-squared distribution newtype ChiSquared = ChiSquared Int deriving (Show,Typeable) -- | Get number of degrees of freedom chiSquaredNDF :: ChiSquared -> Int chiSquaredNDF (ChiSquared ndf) = ndf {-# INLINE chiSquaredNDF #-} -- | Construct chi-squared distribution. Number of degrees of freedom -- must be positive. chiSquared :: Int -> ChiSquared chiSquared n | n <= 0 = error $ "Statistics.Distribution.ChiSquared.chiSquared: N.D.F. must be positive. Got " ++ show n | otherwise = ChiSquared n {-# INLINE chiSquared #-} instance D.Distribution ChiSquared where cumulative = cumulative instance D.ContDistr ChiSquared where density = density quantile = quantile instance D.Mean ChiSquared where mean (ChiSquared ndf) = fromIntegral ndf {-# INLINE mean #-} instance D.Variance ChiSquared where variance (ChiSquared ndf) = fromIntegral (2*ndf) {-# INLINE variance #-} instance D.MaybeMean ChiSquared where maybeMean = Just . D.mean instance D.MaybeVariance ChiSquared where maybeStdDev = Just . D.stdDev maybeVariance = Just . D.variance instance D.ContGen ChiSquared where genContVar (ChiSquared n) = MWC.chiSquare n cumulative :: ChiSquared -> Double -> Double cumulative chi x | x <= 0 = 0 | otherwise = incompleteGamma (ndf/2) (x/2) where ndf = fromIntegral $ chiSquaredNDF chi {-# INLINE cumulative #-} density :: ChiSquared -> Double -> Double density chi x | x <= 0 = 0 | otherwise = exp $ log x * (ndf2 - 1) - x2 - logGamma ndf2 - log 2 * ndf2 where ndf = fromIntegral $ chiSquaredNDF chi ndf2 = ndf/2 x2 = x/2 {-# INLINE density #-} quantile :: ChiSquared -> Double -> Double quantile (ChiSquared ndf) p | p == 0 = 0 | p == 1 = 1/0 | p > 0 && p < 1 = 2 * invIncompleteGamma (fromIntegral ndf / 2) p | otherwise = error $ "Statistics.Distribution.ChiSquared.quantile: p must be in [0,1] range. Got: "++show p {-# INLINE quantile #-}
00tau/statistics
Statistics/Distribution/ChiSquared.hs
bsd-2-clause
2,825
0
12
625
662
352
310
60
1
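-- A quick probe of the distribution above through the statistics package's
-- type classes (imported qualified as D, matching the module's own imports).
-- With 2 degrees of freedom the CDF is 1 - exp(-x/2), so the first component
-- is about 0.632; the mean equals the number of degrees of freedom.
import qualified Statistics.Distribution as D
import Statistics.Distribution.ChiSquared (chiSquared)

probe :: (Double, Double)
probe = (D.cumulative (chiSquared 2) 2, D.mean (chiSquared 2))  -- (~0.632, 2.0)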
{-# OPTIONS_HADDOCK ignore-exports #-}

-- | This module contains some utility functions and wrappers around Gloss for
-- dealing with drawing stuff on the screen.
module Game.Glow.Render (
  render, drawText
) where

import Graphics.Gloss.Data.Color (Color, makeColor)
import Graphics.Gloss.Data.Picture (
  Picture (Pictures), color, scale, text, translate
  )

import Game.Glow.World (World, drawWorld, debugWorld)

-- | Render the world to a picture we can draw.
render :: World -> Picture
render w =
  let dbg = translate (-500) 360 $ scale 0.1 0.1 $ devColor $
              drawText 150 $ debugWorld w
      wp = drawWorld w
  in  Pictures [wp, dbg]
  -- dev output
  -- devColor . scale 0.2 0.2 . translate (- 512) 0 . drawText 150 . show

-- | This is the color for use by dev overlays. It is yellow with an alpha of
-- 0.8.
devColor :: Picture -> Picture
devColor = color $ makeColor 1 0.8 0 0.8

-- | Properly draw multiline text. Needs the correct line offset to work
-- properly. Line height is relative, with 100 being the character size from
-- the baseline. 150 gives a nice spacing without the possibility of lines
-- colliding.
drawText :: Float -> String -> Picture
drawText oss s =
  let l = zip (lines s) [0..]
  in  Pictures $ map applyOffset l
  where
    applyOffset :: (String, Float) -> Picture
    applyOffset (l,os) = translate 0 (- os * oss) $ text l
sulami/glow
lib/Game/Glow/Render.hs
bsd-3-clause
1,449
0
15
358
305
172
133
19
1
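-- A tiny sketch of drawText (editorial example): a two-line overlay scaled the
-- same way `render` scales its debug output above.
import Graphics.Gloss.Data.Picture (Picture, scale)
import Game.Glow.Render (drawText)

overlay :: Picture
overlay = scale 0.1 0.1 (drawText 150 "fps: 60\nentities: 12")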
-- TODO: There is quite a bit of duplication between the various .Mirror.* -- modules and Distribution.Client. This should be removed where possible. -- (One symptom of this is the frequent need to use explicit import or hiding -- lists for imports of Distribution.Client in the Mirror.* modules.) {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE RankNTypes #-} module Distribution.Client.Mirror.Session ( -- * MirrorSession MirrorSession -- Opaque , runMirrorSession , mirrorError , mirrorAskHttpLib , liftCont , Unlift(..) , askUnlift , mirrorFinally -- * Errors , MirrorError(..) , Entity(..) , ErrorState(..) , GetError(..) , ErrorResponse(..) -- ** Utility , mkErrorResponse , formatMirrorError , formatEntity , formatIOError , formatGetError , formatErrorResponse -- * Events , MirrorEvent(..) , notifyResponse -- * HTTP session , HttpSession -- ** Specific HttpSession actions , downloadFile , downloadFile' , requestGET , requestPUT -- ** Embedding HTTP session into mirror session , browserAction , browserActions -- * Logging , warn -- * Re-exports , throwError , liftIO ) where -- stdlib import Control.Applicative import Control.Exception import Control.Monad.Cont import Control.Monad.Except import Control.Monad.Reader import Control.Monad.State import Data.ByteString.Lazy (ByteString) import Data.IORef import Data.List (isPrefixOf) import Data.Set (Set) import Data.Typeable (Typeable, cast) import Network.Browser import Network.HTTP import Network.URI (URI) import System.Directory import System.FilePath import System.IO.Error import qualified Data.ByteString.Lazy as BS import qualified Data.Set as Set -- Cabal import Distribution.Package import Distribution.Text import Distribution.Verbosity import Distribution.Simple.Utils (wrapText) -- hackage import Distribution.Server.Util.Parse (unpackUTF8) -- hackage-security import qualified Hackage.Security.Client as Sec import qualified Hackage.Security.Client.Repository.HttpLib as Sec import qualified Hackage.Security.Client.Repository.HttpLib.HTTP as Sec.HTTP import qualified Hackage.Security.Util.Pretty as Sec {------------------------------------------------------------------------------- MirrorSession monad ERROR HANDLING STRATEGY There's two general classes of errors we have to consider: - systematic errors that apply to all packages - errors that only apply to individual packages For the first class we want to fail and not continue mirroring. For the second class we want to keep going and try mirroring other packages. In addition we want to remember the packages involved persistently so that we don't keep trying again and again to mirror packages that simply cannot be mirrored. We want the mirroring to be robust. So we don't want to have temporary or individual package errors make us fail overall. To add further complication, it can be hard to distinguish a systematic error from an individual per-package error. Our strategy may have to be complex and based on counting various events to make best guesses. Since we expect our error handling stategy we try to separate it from the main mirroring logic. We make a new monad to hold the plumbing for our strategy. We make it to be a state monad so that we can accumulating info about non-fatal errors in individual packages. We make it an error monad to deal with fatal errors. We layer state and error on top of the HTTP Browser monad. The layering order of the state vs error is such that errors do not roll back the changed state. 
This is because we want (at least the option) to keep the info about individual packages when we encounter a systematic error. So that's the plumbing. The actual policy/strategy is implemented by an action in the monad where we inform it of the interesting events, which is basically all the gets and puts, the packages and their http response codes. This policy action behaves like a state machine action, updating the monad state and if it decides it has to fail hard, by making use of error monad. -------------------------------------------------------------------------------} data MirrorInternalEnv = MirrorInternalEnv { mirrorVerbosity :: Verbosity , mirrorKeepGoing :: Bool , mirrorErrorRef :: IORef ErrorState , mirrorBrowser :: Sec.HTTP.Browser , mirrorHttpLib :: Sec.HttpLib } newtype MirrorSession a = MirrorSession { unMirror :: ReaderT MirrorInternalEnv IO a } deriving ( Functor , Applicative , Monad , MonadIO ) instance MonadReader (Verbosity, Bool) MirrorSession where ask = MirrorSession $ do MirrorInternalEnv{..} <- ask return (mirrorVerbosity, mirrorKeepGoing) local f m = MirrorSession (local f' (unMirror m)) where f' :: MirrorInternalEnv -> MirrorInternalEnv f' MirrorInternalEnv{..} = MirrorInternalEnv{ mirrorVerbosity = verbosity' , mirrorKeepGoing = keepGoing' , .. } where (verbosity', keepGoing') = f (mirrorVerbosity, mirrorKeepGoing) instance MonadState ErrorState MirrorSession where get = MirrorSession $ do MirrorInternalEnv{..} <- ask liftIO $ readIORef mirrorErrorRef put x = MirrorSession $ do MirrorInternalEnv{..} <- ask liftIO $ writeIORef mirrorErrorRef x runMirrorSession :: Verbosity -> Bool -> ErrorState -> MirrorSession a -> IO (Either MirrorError a, ErrorState) runMirrorSession verbosity keepGoing st (MirrorSession m) = do stRef <- newIORef st Sec.HTTP.withClient $ \browser httpLib -> do let internalEnv = MirrorInternalEnv { mirrorVerbosity = verbosity , mirrorKeepGoing = keepGoing , mirrorErrorRef = stRef , mirrorBrowser = browser , mirrorHttpLib = httpLib } result <- catches (Right <$> runReaderT m internalEnv) [ Handler $ \(ex :: MirrorError) -> return $ Left ex , Handler $ \(ex :: IOError) -> return $ Left (MirrorIOError ex) , Handler $ \(ex :: AsyncException) -> case ex of UserInterrupt -> return $ Left Interrupted _otherwise -> throwIO ex ] st' <- readIORef stRef return (result, st') mirrorError :: MirrorError -> MirrorSession a mirrorError = liftIO . throwIO newtype Unlift = Unlift { unlift :: forall a. MirrorSession a -> IO a } -- | Unlifting from MirrorSession to IO (@monad-unlift@ style) askUnlift :: MirrorSession Unlift askUnlift = MirrorSession $ ReaderT $ \env -> return $ Unlift $ \act -> runReaderT (unMirror act) env -- | Lift a continuation in IO to a continuation in MirrorSession -- -- This is just a convenience wrapper around 'askRun'. 
-- -- Another way to think about this type is -- -- > liftCont :: ContT r IO a -> ContT r MirrorSession a liftCont :: ((a -> IO b) -> IO b) -> ((a -> MirrorSession b) -> MirrorSession b) liftCont f g = do run <- askUnlift liftIO $ f $ \a -> unlift run (g a) mirrorFinally :: MirrorSession a -> MirrorSession b -> MirrorSession a mirrorFinally a b = do run <- askUnlift liftIO $ unlift run a `finally` unlift run b mirrorAskHttpLib :: MirrorSession Sec.HttpLib mirrorAskHttpLib = MirrorSession $ do MirrorInternalEnv{..} <- ask return mirrorHttpLib {------------------------------------------------------------------------------- Errors -------------------------------------------------------------------------------} data MirrorError = MirrorIOError IOError | GetEntityError Entity GetError | ParseEntityError Entity String String | PutPackageError PackageId ErrorResponse | Interrupted | InvalidOption String deriving (Typeable,Show) instance Exception MirrorError data Entity = EntityIndex | EntityLog | EntityPackage PackageId deriving Show data ErrorState = ErrorState { es_missing :: Set PackageId, es_unmirrorable :: Set PackageId } deriving Show data GetError = GetError ErrorResponse | GetVerificationError Sec.VerificationError | GetRemoteError Sec.SomeRemoteError | GetInvalidPackage Sec.InvalidPackageException deriving Show data ErrorResponse = ErrorResponse URI ResponseCode String (Maybe String) deriving Show mkErrorResponse :: URI -> Response ByteString -> ErrorResponse mkErrorResponse uri rsp = ErrorResponse uri (rspCode rsp) (rspReason rsp) mBody where mBody = case lookupHeader HdrContentType (rspHeaders rsp) of Just mimetype | "text/plain" `isPrefixOf` mimetype -> Just (unpackUTF8 (rspBody rsp)) _ -> Nothing formatMirrorError :: MirrorError -> String formatMirrorError (MirrorIOError ioe) = formatIOError ioe formatMirrorError (GetEntityError entity rsp) = "Failed to download " ++ formatEntity entity ++ ",\n " ++ formatGetError rsp formatMirrorError (ParseEntityError entity uri theError) = "Error parsing " ++ formatEntity entity ++ " at " ++ uri ++ ": " ++ theError formatMirrorError (PutPackageError pkgid rsp) = "Failed to upload package " ++ display pkgid ++ ",\n " ++ formatErrorResponse rsp formatMirrorError Interrupted = error "formatMirrorError: Interrupted" formatMirrorError (InvalidOption theError) = "Invalid option: " ++ theError formatEntity :: Entity -> String formatEntity EntityIndex = "the package index" formatEntity EntityLog = "the package upload log" formatEntity (EntityPackage pkgid) = "package " ++ display pkgid formatIOError :: IOError -> String formatIOError ioe | isUserError ioe = file ++ location ++ detail | otherwise = show ioe where file = case ioeGetFileName ioe of Nothing -> "" Just path -> path ++ ": " location = case ioeGetLocation ioe of "" -> "" loc -> loc ++ ": " detail = ioeGetErrorString ioe formatGetError :: GetError -> String formatGetError (GetError theError) = formatErrorResponse theError formatGetError (GetVerificationError theError) = Sec.pretty theError formatGetError (GetRemoteError theError) = Sec.pretty theError formatGetError (GetInvalidPackage theError) = Sec.pretty theError formatErrorResponse :: ErrorResponse -> String formatErrorResponse (ErrorResponse uri (a,b,c) reason mBody) = "HTTP error code " ++ show a ++ show b ++ show c ++ ", " ++ reason ++ "\n " ++ show uri ++ maybe "" (('\n':) . unlines . map (" "++) . lines . 
wrapText) mBody {------------------------------------------------------------------------------- Events -------------------------------------------------------------------------------} data MirrorEvent = GetIndexOk | GetPackageOk | GetPackageFailed GetError PackageId | PutPackageOk | PutPackageFailed ErrorResponse PackageId notifyResponse :: MirrorEvent -> MirrorSession () notifyResponse e = do st <- get (verbosity, keepGoing) <- ask st' <- handleEvent verbosity keepGoing st put st' where handleEvent _ False st = case e of GetIndexOk -> return st GetPackageOk -> return st PutPackageOk -> return st GetPackageFailed rsp pkgid -> mirrorError (GetEntityError (EntityPackage pkgid) rsp) PutPackageFailed rsp pkgid -> mirrorError (PutPackageError pkgid rsp) handleEvent verbosity True st = case e of GetIndexOk -> return st GetPackageOk -> return st PutPackageOk -> return st GetPackageFailed rsp pkgid -> if getFailedPermanent rsp then do liftIO $ warn verbosity $ formatMirrorError (GetEntityError (EntityPackage pkgid) rsp) return st { es_missing = Set.insert pkgid (es_missing st) } else mirrorError (GetEntityError (EntityPackage pkgid) rsp) PutPackageFailed rsp pkgid -> if putFailedPermanent rsp then do liftIO $ warn verbosity $ formatMirrorError (PutPackageError pkgid rsp) return st { es_unmirrorable = Set.insert pkgid (es_unmirrorable st) } else mirrorError (PutPackageError pkgid rsp) -- Was this error when downloading a package permanent, or might it have -- been some kind of transient error and should we try again? -- -- NOTE: Verification errors can be transient (for instance, if a timestamp -- was updated too late). getFailedPermanent :: GetError -> Bool getFailedPermanent (GetError resp) = case resp of ErrorResponse _ (4,0,4) _ _ -> True ErrorResponse _ _otherCode _ _ -> False getFailedPermanent (GetRemoteError (Sec.SomeRemoteError theError)) = case cast theError of Just (Sec.HTTP.UnexpectedResponse _ (4,0,4)) -> True _otherwise -> False getFailedPermanent (GetVerificationError _) = False getFailedPermanent (GetInvalidPackage _) = True putFailedPermanent :: ErrorResponse -> Bool putFailedPermanent (ErrorResponse _ (4,0,0) _ _) = True putFailedPermanent (ErrorResponse _ (4,0,4) _ _) = True putFailedPermanent (ErrorResponse _ _otherCode _ _) = False {------------------------------------------------------------------------------- HttpSession and actions -------------------------------------------------------------------------------} type HttpSession a = BrowserAction (HandleStream ByteString) a downloadFile :: URI -> FilePath -> HttpSession (Maybe GetError) downloadFile uri file = do out $ "downloading " ++ show uri ++ " to " ++ file metag <- liftIO $ getETag file case metag of Just etag -> do let headers = [mkHeader HdrIfNoneMatch (quote etag)] (_, rsp) <- request (Request uri GET headers BS.empty) case rspCode rsp of (3,0,4) -> do out $ file ++ " unchanged with ETag " ++ etag return Nothing (2,0,0) -> do liftIO $ writeDowloadedFileAndEtag rsp return Nothing _ -> return (Just (GetError (mkErrorResponse uri rsp))) Nothing -> do (_, rsp) <- request (Request uri GET [] BS.empty) case rspCode rsp of (2,0,0) -> do liftIO $ writeDowloadedFileAndEtag rsp return Nothing _ -> return (Just (GetError (mkErrorResponse uri rsp))) where writeDowloadedFileAndEtag rsp = do BS.writeFile file (rspBody rsp) setETag file (unquote <$> findHeader HdrETag rsp) downloadFile' :: URI -> FilePath -> HttpSession (Maybe GetError) downloadFile' uri file = do out $ "downloading " ++ show uri ++ " to " ++ file rsp <- 
requestGET uri case rsp of Left theError -> return (Just (GetError theError)) Right content -> do liftIO $ BS.writeFile file content --TODO: check we wrote the expected length. return Nothing requestGET :: URI -> HttpSession (Either ErrorResponse ByteString) requestGET uri = do (_, rsp) <- request (Request uri GET headers BS.empty) case rspCode rsp of (2,0,0) -> return (Right (rspBody rsp)) _ -> return (Left (mkErrorResponse uri rsp)) where headers = [] requestPUT :: URI -> String -> ByteString -> HttpSession (Maybe ErrorResponse) requestPUT uri mimetype body = do (_, rsp) <- request (Request uri PUT headers body) case rspCode rsp of (2,_,_) -> return Nothing _ -> return (Just (mkErrorResponse uri rsp)) where headers = [ Header HdrContentLength (show (BS.length body)) , Header HdrContentType mimetype ] {------------------------------------------------------------------------------- Auxiliary functions used by HttpSession actions -------------------------------------------------------------------------------} getETag :: FilePath -> IO (Maybe String) getETag file = catchJustDoesNotExistError (Just <$> readFile (file <.> "etag")) (\_ -> return Nothing) setETag :: FilePath -> Maybe String -> IO () setETag file Nothing = catchJustDoesNotExistError (removeFile (file <.> "etag")) (\_ -> return ()) setETag file (Just etag) = writeFile (file <.> "etag") etag catchJustDoesNotExistError :: IO a -> (IOError -> IO a) -> IO a catchJustDoesNotExistError = catchJust (\e -> if isDoesNotExistError e then Just e else Nothing) quote :: String -> String quote s = '"' : s ++ ['"'] unquote :: String -> String unquote ('"':s) = go s where go [] = [] go ('"':[]) = [] go (c:cs) = c : go cs unquote s = s {------------------------------------------------------------------------------- Embedding HTTP session into mirror session -------------------------------------------------------------------------------} browserAction :: HttpSession a -> MirrorSession a browserAction act = MirrorSession $ do MirrorInternalEnv{..} <- ask liftIO $ Sec.HTTP.withBrowser mirrorBrowser act browserActions :: [HttpSession (Maybe err)] -> MirrorSession (Maybe err) browserActions = foldr1 maybeThen . map browserAction where -- Bind for the strange not-quite-monad where errors are returned as -- (Just err) and success is returned as Nothing maybeThen :: Monad m => m (Maybe err) -> m (Maybe err) -> m (Maybe err) maybeThen p q = do res <- p case res of Just theError -> return (Just theError) Nothing -> q {------------------------------------------------------------------------------- Logging -------------------------------------------------------------------------------} warn :: Verbosity -> String -> IO () warn verbosity msg = when (verbosity >= normal) $ putStrLn ("Warning: " ++ msg)
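-- Editor's sketch (not part of the original module): how the pieces above fit
-- together.  With keep-going set to True, notifyResponse records permanent
-- failures in the ErrorState instead of aborting the session; everything used
-- here (runMirrorSession, ErrorState, notifyResponse, normal, Set) is already
-- in scope in this file.
demoSession :: IO ()
demoSession = do
    let initialState = ErrorState Set.empty Set.empty
    (result, finalState) <- runMirrorSession normal True initialState $
        notifyResponse GetIndexOk
    either (putStrLn . formatMirrorError) (const (return ())) result
    print (es_missing finalState, es_unmirrorable finalState)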
ocharles/hackage-server
Distribution/Client/Mirror/Session.hs
bsd-3-clause
18,326
0
21
4,484
4,121
2,142
1,979
-1
-1
module Utils.NumRomans where

import Data.Char

toRomanUpper :: Int -> String
toRomanUpper n = concat $ map convert $ zip ns [tns-1, tns-2 .. 0]
    where (ns, tns) = let ns = show n in (map digitToInt ns, length ns)
          convert (nchg, np) =
            case np of
              0 -> ["", "I", "II", "III", "IV", "V", "VI", "VII", "VIII", "IX"] !! nchg
              1 -> ["", "X", "XX", "XXX", "XL", "L", "LX", "LXX", "LXXX", "XC"] !! nchg
              2 -> ["", "C", "CC", "CCC", "CD", "D", "DC", "DCC", "DCCC", "CM"] !! nchg
              _ -> if nchg == 0 then "" else take (nchg * 10 ^ np `div` 1000) $ repeat 'M'

toRomanLower :: Int -> String
toRomanLower n = concat $ map convert $ zip ns [tns-1, tns-2 .. 0]
    where (ns, tns) = let ns = show n in (map digitToInt ns, length ns)
          convert (nchg, np) =
            case np of
              0 -> ["", "i", "ii", "iii", "iv", "v", "vi", "vii", "viii", "ix"] !! nchg
              1 -> ["", "x", "xx", "xxx", "xl", "l", "lx", "lxx", "lxxx", "xc"] !! nchg
              2 -> ["", "c", "cc", "ccc", "cd", "d", "dc", "dcc", "dccc", "cm"] !! nchg
              _ -> if nchg == 0 then "" else take (nchg * 10 ^ np `div` 1000) $ repeat 'm'
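-- Editor's sketch (not part of the original module): the two converters above
-- applied to a couple of literals.
demoRomans :: IO ()
demoRomans = do
    putStrLn (toRomanUpper 1987) -- "MCMLXXXVII"
    putStrLn (toRomanLower 49)   -- "xlix"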
carliros/Simple-San-Simon-Functional-Web-Browser
src/Utils/NumRomans.hs
bsd-3-clause
1,406
0
16
579
561
319
242
20
5
-- Module:      Instruments.Utils.InterestRate
-- Copyright:   (c) Johan Astborg, Andreas Bock
-- License:     BSD-3
-- Maintainer:  Johan Astborg <[email protected]>
-- Portability: portable
--
-- Types and functions for working with interest rates,
-- compounding and discounting.
module Instruments.Utils.InterestRate where

import Control.Applicative
import Data.Tuple
import Data.Maybe
import qualified Data.Map as M

type Rate = Double
type Maturity = Double
type DiscountFactor = Double
type CompoundFactor = Double
type Offset = Double

-- | Represents the type of compounding
--
-- For discrete exponential compounding, use Exponential.
-- For discrete simple compounding use Simple.
-- Continuous compounding is the default used in HQL internally.
data Compounding = Continuous
                 | Exponential
                 | Linear
                 deriving (Show)

-- | Represents the compounding frequency
--
-- An interest rate can have different compounding frequencies.
-- For other frequencies than specified, use Other.
data Frequency = Annually
               | SemiAnnually
               | Quarterly
               | Monthly
               | Daily
               | Other Int
               deriving (Show, Eq)

convertFreq :: Frequency -> Double
convertFreq Annually     = 1
convertFreq SemiAnnually = 2
convertFreq Quarterly    = 4
convertFreq Monthly      = 12
convertFreq Daily        = 365
convertFreq (Other d)    = fromIntegral d

newtype ContinuousRate = ContinuousRate Rate deriving (Show)
newtype SimpleRate     = SimpleRate Rate deriving (Show)
data ExponentialRate   = ExponentialRate Rate Frequency deriving (Show)

class InterestRate a where
    -- | Returns the corresponding continuously compounded rate
    continuousRate :: a -> ContinuousRate

    -- | Returns the discount factor at an offset
    discountFactor :: a -> Offset -> DiscountFactor

    -- | Returns the compound factor at an offset
    compoundFactor :: a -> Offset -> CompoundFactor

    -- | Get the intrinsic rate
    rate :: a -> Rate

instance InterestRate ContinuousRate where
    continuousRate = id
    discountFactor (ContinuousRate r) offset = exp (-rr*offset)
      where rr = r / 100.0
    compoundFactor rate offset = 1 / discountFactor rate offset
    rate (ContinuousRate r) = r

instance InterestRate ExponentialRate where
    continuousRate (ExponentialRate r n) = ContinuousRate $ (exp(r/(100*nn)) - 1)*nn*100
      where nn = convertFreq n
    discountFactor (ExponentialRate r n) offset = 1/((1+r/(100.0*nn))**(offset/nn))
      where nn = convertFreq n
    compoundFactor rate offset = 1 / discountFactor rate offset
    rate (ExponentialRate r _) = r

instance InterestRate SimpleRate where
    continuousRate (SimpleRate r) = ContinuousRate $ (exp (r/100.0) - 1) * 100.0
    discountFactor (SimpleRate r) = const $ 1/(1+r/100.0)
    compoundFactor rate offset = 1 / discountFactor rate offset
    rate (SimpleRate r) = r
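-- Editor's sketch (not part of the original module): the class above in use.
-- Rates are quoted in percent here, so a 5% continuously compounded rate
-- discounted over one time unit gives exp (-0.05), roughly 0.9512.
demoRates :: IO ()
demoRates = do
    print (discountFactor (ContinuousRate 5.0) 1.0)
    print (rate (continuousRate (SimpleRate 5.0)))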
HIPERFIT/HQL
src/Instruments/Utils/InterestRate.hs
bsd-3-clause
2,895
5
16
618
641
359
282
53
1
{-# LANGUAGE OverloadedStrings, TemplateHaskell #-}
module Text.Printf.Safe.CombinatorsSpec (main, spec) where
import Text.Printf.Safe (Printf (..), printf)
import Text.Printf.Safe.Combinators
import Test.Hspec
import Test.Hspec.QuickCheck (prop)

main :: IO ()
main = hspec spec

_b :: Bool -> String
_b = show

_u :: () -> String
_u = show

spec :: Spec
spec = do
  describe "(+++)" $ do
    prop "commutes with printf (for plain text)" $ \str str' ->
      let f1 = str :<> EOS
          f2 = str' :<> EOS
      in printf (f1 +++ f2) == printf f1 ++ printf f2
    it "accumulates args in correct order" $
      let f1 = "john has " % d >< " dogs: " % _b
          f2 = "jane loves " % _u >< "."
      in printf (f1 +++ f2) (42 :: Int) False ()
           == printf f1 42 False ++ printf f2 ()
  describe "numeric formatters" $ do
    it "works properly" $ do
      printf ("dec: " % d) (12 :: Int) `shouldBe` "dec: 12"
      printf ("pad: " % d' ' ' 4) (12 :: Int) `shouldBe` "pad:   12"
      printf ("pad: " % d' '0' 4) (12 :: Int) `shouldBe` "pad: 0012"
      printf ("oct: " % o) (12 :: Int) `shouldBe` "oct: 14"
      printf ("pad: " % o' ' ' 4) (12 :: Int) `shouldBe` "pad:   14"
      printf ("pad: " % o' '0' 4) (12 :: Int) `shouldBe` "pad: 0014"
      printf ("bin: " % b) (12 :: Int) `shouldBe` "bin: 1100"
      printf ("pad: " % b' ' ' 5) (12 :: Int) `shouldBe` "pad:  1100"
      printf ("pad: " % b' '0' 5) (12 :: Int) `shouldBe` "pad: 01100"
      printf ("hex: " % h) (12 :: Int) `shouldBe` "hex: c"
      printf ("pad: " % h' ' ' 4) (12 :: Int) `shouldBe` "pad:    c"
      printf ("pad: " % h' '0' 4) (12 :: Int) `shouldBe` "pad: 000c"
  describe "show formatters" $ do
    it "works properly" $ do
      printf ("w: " % _S) () `shouldBe` "w: ()"
      printf ("w: " % _S) True `shouldBe` "w: True"
  describe "string formatters" $ do
    it "works properly" $ do
      printf ("w: " % s) "hey!" `shouldBe` "w: hey!"
konn/safe-printf
test/Text/Printf/Safe/CombinatorsSpec.hs
bsd-3-clause
1,947
0
18
541
772
405
367
44
1
{-# LANGUAGE OverloadedStrings #-} module Text.XmlHtml.XML.Render where import Blaze.ByteString.Builder import Data.Char import Data.Maybe import Data.Monoid import Text.XmlHtml.Common import Data.Text (Text) import qualified Data.Text as T ------------------------------------------------------------------------------ render :: Encoding -> Maybe DocType -> [Node] -> Builder render e dt ns = byteOrder `mappend` xmlDecl e `mappend` docTypeDecl e dt `mappend` nodes where byteOrder | isUTF16 e = fromText e "\xFEFF" -- byte order mark | otherwise = mempty nodes | null ns = mempty | otherwise = firstNode e (head ns) `mappend` (mconcat $ map (node e) (tail ns)) ------------------------------------------------------------------------------ -- | Function for rendering XML nodes without the overhead of creating a -- Document structure. renderXmlFragment :: Encoding -> [Node] -> Builder renderXmlFragment _ [] = mempty renderXmlFragment e (n:ns) = firstNode e n `mappend` (mconcat $ map (node e) ns) ------------------------------------------------------------------------------ xmlDecl :: Encoding -> Builder xmlDecl e = fromText e "<?xml version=\"1.0\" encoding=\"" `mappend` fromText e (encodingName e) `mappend` fromText e "\"?>\n" ------------------------------------------------------------------------------ docTypeDecl :: Encoding -> Maybe DocType -> Builder docTypeDecl _ Nothing = mempty docTypeDecl e (Just (DocType tag ext int)) = fromText e "<!DOCTYPE " `mappend` fromText e tag `mappend` externalID e ext `mappend` internalSubset e int `mappend` fromText e ">\n" ------------------------------------------------------------------------------ externalID :: Encoding -> ExternalID -> Builder externalID _ NoExternalID = mempty externalID e (System sid) = fromText e " SYSTEM " `mappend` sysID e sid externalID e (Public pid sid) = fromText e " PUBLIC " `mappend` pubID e pid `mappend` fromText e " " `mappend` sysID e sid ------------------------------------------------------------------------------ internalSubset :: Encoding -> InternalSubset -> Builder internalSubset _ NoInternalSubset = mempty internalSubset e (InternalText t) = fromText e " " `mappend` fromText e t ------------------------------------------------------------------------------ sysID :: Encoding -> Text -> Builder sysID e sid | not ("\'" `T.isInfixOf` sid) = fromText e "\'" `mappend` fromText e sid `mappend` fromText e "\'" | not ("\"" `T.isInfixOf` sid) = fromText e "\"" `mappend` fromText e sid `mappend` fromText e "\"" | otherwise = error "SYSTEM id is invalid" ------------------------------------------------------------------------------ pubID :: Encoding -> Text -> Builder pubID e sid | not ("\"" `T.isInfixOf` sid) = fromText e "\"" `mappend` fromText e sid `mappend` fromText e "\"" | otherwise = error "PUBLIC id is invalid" ------------------------------------------------------------------------------ node :: Encoding -> Node -> Builder node e (TextNode t) = escaped "<>&" e t node e (Comment t) | "--" `T.isInfixOf` t = error "Invalid comment" | "-" `T.isSuffixOf` t = error "Invalid comment" | otherwise = fromText e "<!--" `mappend` fromText e t `mappend` fromText e "-->" node e (Element t a c) = element e t a c ------------------------------------------------------------------------------ -- | Process the first node differently to encode leading whitespace. This -- lets us be sure that @parseXML@ is a left inverse to @render@. 
firstNode :: Encoding -> Node -> Builder firstNode e (Comment t) = node e (Comment t) firstNode e (Element t a c) = node e (Element t a c) firstNode _ (TextNode "") = mempty firstNode e (TextNode t) = let (c,t') = fromJust $ T.uncons t in escaped "<>& \t\r\n" e (T.singleton c) `mappend` node e (TextNode t') ------------------------------------------------------------------------------ escaped :: [Char] -> Encoding -> Text -> Builder escaped _ _ "" = mempty escaped bad e t = let (p,s) = T.break (`elem` bad) t r = T.uncons s in fromText e p `mappend` case r of Nothing -> mempty Just (c,ss) -> entity e c `mappend` escaped bad e ss ------------------------------------------------------------------------------ entity :: Encoding -> Char -> Builder entity e '&' = fromText e "&amp;" entity e '<' = fromText e "&lt;" entity e '>' = fromText e "&gt;" entity e '\"' = fromText e "&quot;" entity e c = fromText e "&#" `mappend` fromText e (T.pack (show (ord c))) `mappend` fromText e ";" ------------------------------------------------------------------------------ element :: Encoding -> Text -> [(Text, Text)] -> [Node] -> Builder element e t a [] = fromText e "<" `mappend` fromText e t `mappend` (mconcat $ map (attribute e) a) `mappend` fromText e "/>" element e t a c = fromText e "<" `mappend` fromText e t `mappend` (mconcat $ map (attribute e) a) `mappend` fromText e ">" `mappend` (mconcat $ map (node e) c) `mappend` fromText e "</" `mappend` fromText e t `mappend` fromText e ">" ------------------------------------------------------------------------------ attribute :: Encoding -> (Text, Text) -> Builder attribute e (n,v) | not ("\'" `T.isInfixOf` v) = fromText e " " `mappend` fromText e n `mappend` fromText e "=\'" `mappend` escaped "<&" e v `mappend` fromText e "\'" | otherwise = fromText e " " `mappend` fromText e n `mappend` fromText e "=\"" `mappend` escaped "<&\"" e v `mappend` fromText e "\""
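-- Editor's sketch (not part of the original module): rendering a single element
-- with the functions above.  UTF8 is assumed to be one of the Encoding
-- constructors exported by Text.XmlHtml.Common; toByteString is blaze-builder's
-- finaliser, already imported at the top of this file.
demoRender :: IO ()
demoRender =
    print . toByteString $
        render UTF8 Nothing [Element "greeting" [("lang", "en")] [TextNode "hi & bye"]]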
23Skidoo/xmlhtml
src/Text/XmlHtml/XML/Render.hs
bsd-3-clause
7,043
0
15
2,437
1,810
944
866
112
2
{-# OPTIONS_HADDOCK hide #-}
module Rosterium.Dealer where

import qualified Data.Map.Strict as Map
import Data.Vector (Vector)
import qualified Data.Vector as V
import Data.Word
import System.Random.MWC

allocateN :: Int -> [p] -> [p] -> GenIO -> IO ([p],[p])
allocateN count leftover bench gen = do
    list <- shuffle gen bench
    let avail = leftover ++ list
    let width = length avail
    if count < width
        then do
            return (splitAt count avail)
        else do
            (list',remains) <- allocateN (count - width) [] bench gen
            return (avail ++ list', remains)

--
-- Generate a random array, use those values as keys to insert list elements
-- into a Map, then read the map out in key order to result in a shuffled list.
--
shuffle :: GenIO -> [p] -> IO [p]
shuffle gen values =
  let
    width = length values
  in do
    variates <- uniformVector gen width :: IO (Vector Word64)
    let numbers = V.toList variates
    let pairs = zip numbers values
    return $ Map.elems . Map.fromList $ pairs
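-- Editor's sketch (not part of the original module): drawing a shuffled list
-- with an mwc-random generator.  createSystemRandom (from System.Random.MWC,
-- already imported above) seeds a GenIO from the system entropy source.
demoShuffle :: IO ()
demoShuffle = do
    gen <- createSystemRandom
    xs  <- shuffle gen ([1 .. 10] :: [Int])
    print xs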
afcowie/rosterium
lib/Rosterium/Dealer.hs
bsd-3-clause
1,046
0
13
266
335
175
160
27
2
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}

module Response where

import Lib.Prelude
import Servant (Handler, (:~>)(NT))
import qualified Database as Db (Connections)

type Response = ReaderT Env Handler

newtype Env = Env
  { dbConnections :: Db.Connections }

responseToHandler :: Env -> Response :~> Handler
responseToHandler env = NT $ \rt -> runReaderT rt env
Unisay/dancher
src/Response.hs
bsd-3-clause
384
0
7
65
100
62
38
10
1
module Parameters
  ( getParameters
  ) where

import Data.Monoid ((<>))
import Options.Applicative

import Types

getParameters :: ParserInfo Parameters
getParameters = info p idm
  where
    p = hsubparser
        ( command "run" runHelper
       <> command "test" testHelper )
    runHelper  = info (RunParams <$> getConfig)
        ( fullDesc <> header "Run Cloud Haskell" )
    testHelper = info (TestParams <$> getConfig)
        ( fullDesc <> header "Test Cloud Haskell" )

getConfig :: Parser Config
getConfig = Config
  <$> option auto
      ( long "send-for"
     <> metavar "SEC"
     <> help "Sending period, in seconds" )
  <*> option auto
      ( long "wait-for"
     <> metavar "SEC"
     <> help "Grace period, in seconds" )
  <*> option auto
      ( long "with-seed"
     <> metavar "INT"
     <> help "Seed value for PRNG" )
  <*> strOption
      ( long "config"
     <> metavar "FILE"
     <> help "Configuration file with nodes" )
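-- Editor's sketch (not part of the original module): a minimal driver showing
-- that getParameters plugs straight into optparse-applicative's execParser;
-- what the caller does with the resulting Parameters value is up to the
-- surrounding application.
demoParameters :: IO Parameters
demoParameters = execParser getParameters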
vonavi/hcloud
src/Parameters.hs
bsd-3-clause
1,232
0
13
540
255
127
128
28
1
import Control.Applicative
import Foreign.Ptr
import Foreign.StorableMonad

data Foo = Foo Int Char

instance Storable Foo where
  alignment _ = alignment (undefined::Ptr ())
  sizeOf _ = sizeOf (undefined::Int) + sizeOf (undefined::Char)
  peek = runStorable (Foo <$> peekS <*> peekS)
  poke ptr (Foo x y) = runStorable (pokeS x >> pokeS y) ptr
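-- Editor's sketch (not part of the original example): round-tripping a Foo
-- through raw memory with the instance above.  alloca comes from base's
-- Foreign.Marshal.Alloc, which this example file does not import, so the
-- sketch is left as a comment.
--
-- import Foreign.Marshal.Alloc (alloca)
--
-- main :: IO ()
-- main = alloca $ \ptr -> do
--   poke ptr (Foo 42 'x')
--   Foo n c <- peek ptr
--   print (n, c)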
acowley/StorableMonad
examples/Example.hs
bsd-3-clause
345
1
9
60
145
73
72
9
0
{-# LANGUAGE ViewPatterns, PatternGuards #-} {-# LANGUAGE PackageImports, GeneralizedNewtypeDeriving #-} {-# OPTIONS_GHC -fno-warn-overlapping-patterns #-} {-# LANGUAGE FlexibleInstances, TypeSynonymInstances, MultiParamTypeClasses #-} module Reductions ( Reduceron, reduce, noenv , whnf, whnftw , whnf1, whnf2, whnf3, whnf4, whnf5, hnf1, hnf2 ) where import Ast import Operations -- import Primitives import Control.Monad import "transformers" Data.Functor.Identity import "transformers" Control.Monad.Trans.State ( StateT(..) ) import "transformers" Control.Monad.Trans.List import "monads-fd" Control.Monad.State import Control.Applicative -- normalizations {{{ whnf beta t@(ast -> App f a) = whnf beta f >>= \f' -> case ast f' of Lam _ _ -> pure (app f' a) <|> whnf beta (beta f' a) _ -> pure (app f' a) whnf beta t@(ast -> Let x e m) = flip (leet x) <$> (pushM x e *> whnf beta m) <*> popM whnf beta t@(ast -> Lam _ _) = pure t whnf beta t@(ast -> Var x) = resolveM x >>= ( maybe (pure t) $ \e' -> whnf beta e' >>= \t' -> assertM x t' *> pure t' ) -- whnf beta t@(ast -> Prim _) = pure t -- whnftw' b e t = desu b e t id (const (:[])) -- desu :: (Term -> Term -> Term) -> Env -> Term -> (Term -> a) -> (Env -> Term -> [a]) -> [a] -- -- desu bt en t@(ast -> Lam _ _) k1 k2 = k2 en t -- -- desu bt en t@(ast -> App f a) k1 k2 = -- desu bt en f (k1 . flip app a) $ \en' f' -> -- case ast f' of -- Lam _ _ -> k1 (app f' a) : desu bt en' (bt f' a) k1 k2 -- _ -> k2 en' (app f' a) -- -- desu bt en t@(ast -> Let x e m) k1 k2 = -- desu bt (push x e en) m (k1 . leet x e) $ -- \((_, e') : en') m' -> k2 en' (leet x e' m') -- -- desu bt en t@(ast -> Var x@(resolve en -> Just e)) k1 k2 = -- desu bt en e k1 $ \en' e' -> -- k2 (assert x e' en') e' -- -- desu bt en t@(ast -> Var _) k1 k2 = k2 en t whnftw' b e t = desu b (\_ e t _ k -> k e t) e t (const id) (const (:[])) hnftw' b e t = desu b drop e t (const id) (const (:[])) where drop c e (ast -> Lam x m) k1 k2 = c e m (\e' m' -> k1 e' (lam x m')) (\e' m' -> k2 e' (lam x m')) -- desu :: (Term -> Term -> Term) -> Env -> Term -> (Env -> Term -> a) -> (Env -> Term -> [a]) -> [a] -- desu bt en t@(ast -> Lam _ _) k1 k2 = k2 en t desu bt lm en t@(ast -> Lam _ _) k1 k2 = lm (desu bt lm) en t k1 k2 -- desu bt en t@(ast -> App f a) k1 k2 = -- desu bt en f (\en' f' -> k1 en' (app f' a)) $ \en' f' -> -- case ast f' of -- -- Lam _ _ -> k1 en' (app f' a) : desu bt en' (bt f' a) k1 k2 -- Lam _ _ -> k1 en' (markS "app" $ app f' a) -- : desu bt en' (bt f' a) k1 k2 -- _ -> k2 en' (app f' a) desu bt lm en t@(ast -> App f a) k1 k2 = desu bt lm en f (\en' f' -> k1 en' (app f' a)) $ \en' f' -> case ast f' of -- Lam _ _ -> k1 en' (app f' a) : desu bt en' (bt f' a) k1 k2 Lam _ _ -> k1 en' (markS "app" $ app f' a) : desu bt lm en' (bt f' a) k1 k2 _ -> k2 en' (app f' a) -- desu bt en t@(ast -> Let x e m) k1 k2 = -- let reco k ((_, e'):en') m' = k en' (leet x e' m') in -- desu bt (push x e en) m (reco k1) (reco k2) desu bt lm en t@(ast -> Let x e m) k1 k2 = let reco k ((_, e'):en') m' = k en' (leet x e' m') in desu bt lm (push x e en) m (reco k1) (reco k2) -- desu bt en t@(ast -> Var x@(resolve en -> Just e)) k1 k2 = -- -- desu bt en e (\en' e' -> k1 (assert x e' en') t) -- -- (\en' e' -> k2 (assert x e' en') e') -- k1 en (markS ("res " ++ x) t) : -- desu bt en e (\en' e' -> k1 (assert x e' en') t) -- (\en' e' -> k2 (assert x e' en') e') desu bt lm en t@(ast -> Var x@(resolve en -> Just e)) k1 k2 = -- desu bt en e (\en' e' -> k1 (assert x e' en') t) -- (\en' e' -> k2 (assert x e' en') e') k1 en 
(markS ("res " ++ show x) t) : desu bt lm en e (\en' e' -> k1 (assert x e' en') t) (\en' e' -> k2 (assert x e' en') e') desu bt lm en t@(ast -> Var _) k1 k2 = k2 en t whnftw b t = odesu b t pure pure odesu :: (Term -> Term -> Term) -> Term -> (Term -> Reduceron a) -> (Term -> Reduceron a) -> Reduceron a odesu bt t@(ast -> Lam _ _) k1 k2 = k2 t odesu bt t@(ast -> App f a) k1 k2 = odesu bt f (k1 . flip app a) $ \f' -> case ast f' of Lam _ _ -> k1 (app f' a) <|> odesu bt (bt f' a) k1 k2 _ -> k2 (app f' a) odesu bt t@(ast -> Let x e m) k1 k2 = let reco k m' = popM >>= \e' -> k (leet x e' m') in pushM x e *> odesu bt m (reco k1) (reco k2) odesu bt t@(ast -> Var x) k1 k2 = resolveM x >>= maybe (k2 t) ( \e' -> let reco k e'' = assertM x e'' *> k e'' in odesu bt e' (reco k1) (reco k2) ) -- odesu :: (Term -> Term -> Term) -> Term -> (Term -> Reduceron a) -> (Term -> Reduceron a) -> Reduceron a -- -- odesu bt t@(ast -> Lam _ _) k1 k2 = k2 t -- -- odesu bt t@(ast -> App f a) k1 k2 = -- odesu bt f (k1 . flip app a) $ \f' -> -- case ast f' of -- Lam _ _ -> k1 (app f' a) <|> odesu bt (bt f' a) k1 k2 -- _ -> k2 (app f' a) -- -- odesu bt t@(ast -> Let x e m) k1 k2 = -- pushM x e *> odesu bt m (k1 . leet x e) -- ( \m' -> popM >>= \e' -> k2 (leet x e' m') ) -- -- odesu bt t@(ast -> Var x) k1 k2 = -- resolveM x >>= maybe (k2 t) ( \e' -> -- odesu bt e' k1 $ \e'' -> assertM x e'' *> k2 e'' ) -- -- }}} type Env = [(Ident, Term)] noenv = [] :: Env push :: Ident -> Term -> Env -> Env push x e en = (x, e) : en pop :: Env -> Maybe ((Ident, Term), Env) pop [] = Nothing pop (xe:en) = Just (xe, en) assert :: Ident -> Term -> Env -> Env assert x e [] = error "no." assert x e (ye@(y, _) : en) | x == y = (x, e) : en | otherwise = ye : assert x e en resolve :: Env -> Ident -> Maybe Term resolve = flip lookup -- THE REDUCERON {{{ newtype Reduceron a = Red { reduce_ :: Env -> [(a, Env)] } instance Functor Reduceron where fmap f (Red g) = Red (\e -> [ (f a, e') | (a, e') <- g e ]) instance Monad Reduceron where return a = Red (\e -> [(a, e)]) Red m >>= f = Red $ \e -> [ be | (a, e') <- m e, be <- reduce_ (f a) e' ] instance MonadState Env Reduceron where get = Red (\s -> [(s, s)]) put s = Red (\_ -> [((), s)]) instance Applicative Reduceron where pure = return (<*>) = ap instance Alternative Reduceron where empty = Red (\_ -> []) Red a <|> Red b = Red (\s -> a s ++ b s) -- newtype Reduceron a = Red { reduce_ :: StateT Env (ListT Identity) a } -- deriving (Functor, Monad, MonadState Env, Applicative, Alternative) -- -- -- -- reduce :: Reduceron a -> Env -> [a] -- -- reduce r e = runIdentity $ runListT (reduce_ r `evalStateT` e) reduce :: Reduceron a -> [a] -- reduce r = runIdentity $ runListT (reduce_ r `evalStateT` noenv) reduce r = map fst $ r `reduce_` noenv resolveM :: Ident -> Reduceron (Maybe Term) resolveM x = gets (lookup x) pushM :: Ident -> Term -> Reduceron () pushM x t = modify ((x, t) :) popM :: Reduceron Term popM = gets (snd . head) <* modify tail assertM :: Ident -> Term -> Reduceron () assertM x t = modify (assert x t) -- }}} -- whnf1, whnf2 :: Term -> [Term] -- whnf1 t = whnf splicingBeta t `reduce` noenv -- whnf2 t = whnf lazyBeta t `reduce` noenv whnf1, whnf2 :: Term -> [Term] whnf1 = seqlist . reduce . whnf splicingBeta whnf2 = seqlist . reduce . whnf lazyBeta whnf3 = seqlist . reduce . whnftw lazyBeta whnf4 = seqlist . whnftw' lazyBeta noenv whnf5 = seqlist . whnftw' splicingBeta noenv hnf1 = seqlist . hnftw' splicingBeta noenv hnf2 = seqlist . 
hnftw' lazyBeta noenv seqlist [] = [] seqlist (x:xs) = x `seq` (x : seqlist xs) -- vim:set fdm=marker:
pqwy/redex
src/Reductions.hs
bsd-3-clause
7,932
0
14
2,493
2,497
1,326
1,171
111
2
{-# LANGUAGE QuasiQuotes, OverloadedStrings #-} module TestPrograms where import NeatInterpolation import Text.Regex import Data.Cmm.QuasiQuoter import Data.Cmm.AST in1, in2, in3, in4, in5, in6, in7, in8, in9, in10, in11, in12, in13, in14 :: String in1 = [string| x := a * b; if (a * b) > (20 * c) then y := 20 + a; else y := 30 + c; z := 20 * 30; a := 20; u := a * b;|] in2 = [string| x := a * b; while (20 * c) > (a * b) do { a := 20 + a; c := c - 1; } z := 20 * 30; a := 20; u := a * b;|] in3 = [string| x := 2 < 10; if x then if false then y := true; else { y := false; z := 42 * 42; } else y := 100 - (10 * 10);|] in4 = [string| x := 2 < 10; if x then if false then output y; else output y * ((10 + 2) - 1); else y := 100 - (10 * 10);|] in5 = [string| skip; skip; x := 2 * 20; if x < 41 then { output x; i := 10; while i > 0 do output x; skip; } else { w := false; skip; } skip; output 10; skip; output 2; |] in6 = [string| x := a * b; while false do { a := 20 + a; c := c - 1; } z := 20 * 30; a := 20; u := a * b;|] in7 = [string| x := 2; if true then { while false do { output x; skip; } skip; } else y := 42; output y;|] in8 = [string| x := 3; if true then y := 10; else { y := 42; output y; } output y;|] in9 = [string| x := 10; y := 11; u := x * (10 - y); v := x * input; if x < 10 then z := 12; else z := 24; output z;|] in10 = [string| x := 0; y := 2; while x < 10 do { x := x + 1; y := y * y; } output y;|] in11 = [string| x := 0; c := x < 10; if c then output x; else output c;|] in12 = [string| a := (20 * 3) - 1; b := a + 9; c := 0; while b < 68 do { output a * b; if (10 + a) > 41 then output c; else { c := c + 1; b := a; } } output c + a; |] in13 = [string| x := input; while x > 1 do { y := x * 2; if y > 3 then x := x - y; else skip; z := x - 4; if z > 0 then x := x * 2; else skip; z := z - 1; } output x; |] in14 = [string| x := input; y := 0; while x > 0 do { x := x - 1; y := y + x; } while y < 10 do { output y; y := y + 1; } |] testPrograms :: [(String, String)] testPrograms = let progs = [ ("in1", in1), ("in2", in2), ("in3", in3), ("in4", in4), ("in5", in5) , ("in6", in6), ("in7", in7), ("in8", in8), ("in9", in9), ("in10", in10) , ("in11", in11), ("in12", in12), ("in13", in13), ("in14", in14) ] rgx = mkRegex "(\r\n)|\r|\n" in map (\(n,p) -> (n, subRegex rgx p "\n")) progs p10 :: Program p10 = [cmm| x := 0; y := 2; while x < 10 do { x := x + 1; y := y * y; } output y; |]
adamschoenemann/asa-analysis
test/TestPrograms.hs
bsd-3-clause
3,232
0
11
1,456
392
277
115
45
1
-- |
-- Module      : Core.Collection.List
-- License     : BSD-style
-- Maintainer  : Vincent Hanquez <[email protected]>
-- Stability   : experimental
-- Portability : portable
--
module Core.Collection.List
    ( wordsWhen
    ) where

import qualified Data.List
import Core.Internal.Base

-- | Simple helper to split a list repeatedly whenever the predicate matches
wordsWhen :: (x -> Bool) -> [x] -> [[x]]
wordsWhen _ [] = []
wordsWhen p is = loop is
  where
    loop s =
        let (w, s') = Data.List.break p s
         in case s' of
                []   -> [w]
                _:xs -> w : loop xs
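-- Editor's sketch (not part of the original module): expected behaviour,
-- including the empty field kept between two adjacent separators.
--
-- >>> wordsWhen (== ',') "a,b,,c"
-- ["a","b","","c"]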
vincenthz/hs-xyz-test
Core/Collection/List.hs
bsd-3-clause
608
0
13
186
156
88
68
12
2
module HDevs.Composition where import HDevs.Atomic import Data.These compose :: Model input intermediate -> LastEventTime -> NextEventTime -> Model intermediate output -> LastEventTime -> NextEventTime -> Model input output compose model1 tL1 tN1 model2 tL2 tN2 = Atomic deltaInt' deltaExt' deltaCon' ta' lambda' where tL = max tL1 tL2 deltaInt' | tN1 < tN2 = let model1' = deltaInt model1 tL1' = tN1 tN1' = tN1 + ta model1' model2' = case lambda model1 of Nothing -> model2 Just y1 -> deltaExt model2 (tN1 - tL2) y1 tL2' = tN1 tN2' = tN1 + ta model2' in compose model1' tL1' tN1' model2' tL2' tN2' | tN1 == tN2 = let model1' = deltaInt model1 tL1' = tN1 tN1' = tN1 + ta model1 model2' = case lambda model1 of Nothing -> deltaInt model2 Just y1 -> deltaCon model2 y1 tL2' = tN1 tN2' = tN1 + ta model2' in compose model1' tL1' tN1' model2' tL2' tN2' | otherwise = let model2' = deltaInt model2 tL2' = tN2 tN2' = tN2 + ta model2' in compose model1 tL1 tN1 model2' tL2' tN2' deltaExt' e x = compose model1' tL1' tN1' model2 tL2 tN2 where model1' = deltaExt model1 e1 x e1 = if tL1 > tL2 then e else e + (tL2 - tL1) tL1' = tL + e1 tN1' = tL1' + ta model1' deltaCon' x | tN1 < tN2 = let model1' = deltaCon model1 x tL1' = tN1 tN1' = tN1 + ta model1' model2' = case lambda model1 of Nothing -> model2 Just y1 -> deltaExt model2 (tN1 - tL2) y1 tL2' = tN1 tN2' = tN1 + ta model2' in compose model1' tL1' tN1' model2' tL2' tN2' | tN1 == tN2 = let model1' = deltaCon model1 x tL1' = tN1 tN1' = tN1 + ta model1 model2' = case lambda model1 of Nothing -> deltaInt model2 Just y1 -> deltaCon model2 y1 tL2' = tN1 tN2' = tN1 + ta model2' in compose model1' tL1' tN1' model2' tL2' tN2' | otherwise = let model1' = deltaExt model1 (tN1 - tL1) x tL1' = tN1 tN1' = tN1 + ta model1 model2' = deltaInt model2 tL2' = tN2 tN2' = tN2 + ta model2' in compose model1' tL1' tN1' model2' tL2' tN2' ta' = min (tN1 - tL) (tN2 - tL) lambda' | tN2 <= tN1 = lambda model2 | otherwise = Nothing parallel :: Model input1 output1 -> LastEventTime -> NextEventTime -> Model input2 output2 -> LastEventTime -> NextEventTime -> Model (These input1 input2) (These output1 output2) parallel model1 tL1 tN1 model2 tL2 tN2 = Atomic deltaInt' deltaExt' deltaCon' ta' lambda' where tL = max tL1 tL2 deltaInt' | tN1 < tN2 = let model1' = deltaInt model1 tL1' = tN1 tN1' = tN1 + ta model1' in parallel model1' tL1' tN1' model2 tL2 tN2 | tN1 == tN2 = let model1' = deltaInt model1 tL1' = tN1 tN1' = tN1 + ta model1' model2' = deltaInt model2 tL2' = tN1 tN2' = tN1 + ta model2' in parallel model1' tL1' tN1' model2' tL2' tN2' | otherwise = let model2' = deltaInt model2 tL2' = tN2 tN2' = tN2 + ta model2' in parallel model1 tL1 tN1 model2' tL2' tN2' deltaExt' e (This x1) = parallel model1' tL1' tN1' model2 tL2 tN2 where model1' = deltaExt model1 e1 x1 e1 = if tL1 > tL2 then e else e + (tL2 - tL1) tL1' = tL + e1 tN1' = tL1' + ta model1' deltaExt' e (That x2) = parallel model1 tL1 tN1 model2' tL2' tN2' where model2' = deltaExt model2 e2 x2 e2 = if tL2 > tL1 then e else e + (tL1 - tL2) tL2' = tL2 + e2 tN2' = tL2' + ta model2' deltaExt' e (These x1 x2) = parallel model1' tL1' tN1' model2' tL2' tN2' where model1' = deltaExt model1 e1 x1 e1 = if tL1 > tL2 then e else e + (tL2 - tL1) tL1' = tL1 + e1 tN1' = tL1' + ta model1' model2' = deltaExt model2 e2 x2 e2 = if tL2 > tL1 then e else e + (tL1 - tL2) tL2' = tL2 + e2 tN2' = tL2' + ta model2' deltaCon' (This x1) | tN1 < tN2 = -- confluent model1, no transition model2 let model1' = deltaCon model1 x1 tL1' = tN1 tN1' = tN1 + ta model1' in 
parallel model1' tL1' tN1' model2 tL2 tN2 | tN1 == tN2 = -- confluent model1, internal model2 let model1' = deltaCon model1 x1 tL1' = tN1 tN1' = tN1 + ta model1' model2' = deltaInt model2 tL2' = tN2 tN2' = tN2 + ta model2' in parallel model1' tL1' tN1' model2' tL2' tN2' | otherwise = -- external model1, internal model2 let model1' = deltaExt model1 e1 x1 e1 = tN2 - tL1 tL1' = tL + e1 tN1' = tL1' + ta model1' model2' = deltaInt model2 tL2' = tN2 tN2' = tN2 + ta model2' in parallel model1' tL1' tN1' model2' tL2' tN2' deltaCon' (That x2) | tN2 < tN1 = -- no transition model1, confluent model2 let model2' = deltaCon model2 x2 tL2' = tN2 tN2' = tN2 + ta model2' in parallel model1 tL1 tN1 model2' tL2' tN2' | tN1 == tN2 = -- internal model1, confluent model2 let model2' = deltaCon model2 x2 tL2' = tN2 tN2' = tN2 + ta model2 model1' = deltaInt model1 tL1' = tN1 tN1' = tN1 + ta model2' in parallel model1' tL1' tN1' model2' tL2' tN2' | otherwise = -- internal model1, external model2 let model2' = deltaExt model2 e2 x2 e2 = tN1 - tL2 tL2' = tL + e2 tN2' = tL2' + ta model2' model1' = deltaInt model1 tL1' = tN1 tN1' = tN1 + ta model1' in parallel model1' tL1' tN1' model2' tL2' tN2' deltaCon' (These x1 x2) | tN1 < tN2 = -- confluent model1, external model2 let model1' = deltaCon model1 x1 tL1' = tN1 tN1' = tN1 + ta model1' model2' = deltaExt model2 e2 x2 e2 = tN1 - tL2 tL2' = tL + e2 tN2' = tL2' + ta model2' in parallel model1' tL1' tN1' model2' tL2' tN2' | tN1 == tN2 = -- confluent model1, confluent model2 let model1' = deltaCon model1 x1 tL1' = tN1 tN1' = tN1 + ta model1' model2' = deltaCon model2 x2 tL2' = tN2 tN2' = tN2 + ta model2' in parallel model1' tL1' tN1' model2' tL2' tN2' | otherwise = -- external model1, confluent model2 let model1' = deltaExt model1 e1 x1 e1 = tN2 - tL1 tL1' = tL + e1 tN1' = tL1' + ta model1' model2' = deltaCon model2 x2 tL2' = tN2 tN2' = tN2 + ta model2' in parallel model1' tL1' tN1' model2' tL2' tN2' ta' = min (tN1 - tL) (tN2 - tL) lambda' | tN1 < tN2 = This `fmap` (lambda model1) | tN2 < tN1 = That `fmap` (lambda model2) | otherwise = let merge (Just y1) (Just y2) = Just $ These y1 y2 merge (Just y1) Nothing = Just $ This y1 merge Nothing (Just y2) = Just $ That y2 merge Nothing Nothing = Nothing in merge (lambda model1) (lambda model2) feedback :: Model (These input signal) (These output signal) -> Model input output feedback model = Atomic deltaInt' deltaExt' deltaCon' ta' lambda' where deltaInt' = undefined deltaExt' = undefined deltaCon' = undefined ta' = undefined lambda' = undefined
sglumac/HDevs
src/HDevs/Composition.hs
bsd-3-clause
9,358
0
17
4,494
2,553
1,266
1,287
226
12
{-#LANGUAGE RecordWildCards #-} module FileServer where import Network hiding (accept, sClose) import Network.Socket hiding (send, recv, sendTo, recvFrom, Broadcast) import Network.Socket.ByteString import Data.ByteString.Char8 (pack, unpack) import System.Environment import System.IO import Control.Concurrent import Control.Concurrent.STM import Control.Exception import Control.Monad (forever, when, join) import Data.List.Split import Data.Word import Text.Printf (printf) import System.Directory --Server data type allows me to pass address and port details easily data FileServer = FileServer { address :: String, port :: String } --Constructor newFileServer :: String -> String -> IO FileServer newFileServer address port = atomically $ do FileServer <$> return address <*> return port --4 is easy for testing the pooling maxnumThreads = 4 serverport :: String serverport = "7007" serverhost :: String serverhost = "localhost" run:: IO () run = withSocketsDo $ do --Command line arguments for port and address --args <- getArgs createDirectoryIfMissing True "/distserver/" setCurrentDirectory "/distserver/" server <- newFileServer serverhost serverport --sock <- listenOn (PortNumber (fromIntegral serverport)) addrinfos <- getAddrInfo (Just (defaultHints {addrFlags = [AI_PASSIVE]})) Nothing (Just serverport) let serveraddr = head addrinfos sock <- socket (addrFamily serveraddr) Stream defaultProtocol bindSocket sock (addrAddress serveraddr) listen sock 5 _ <- printf "Listening on port %s\n" serverport --Listen on port from command line argument --New Abstract FIFO Channel chan <- newChan --Tvars are variables Stored in memory, this way we can access the numThreads from any method numThreads <- atomically $ newTVar 0 --Spawns a new thread to handle the clientconnectHandler method, passes socket, channel, numThreads and server forkIO $ clientconnectHandler sock chan numThreads server --Calls the mainHandler which will monitor the FIFO channel mainHandler sock chan mainHandler :: Socket -> Chan String -> IO () mainHandler sock chan = do --Read current message on the FIFO channel chanMsg <- readChan chan --If KILL_SERVICE, stop mainHandler running, If anything else, call mainHandler again, keeping the service running case (chanMsg) of ("KILL_SERVICE") -> putStrLn "Terminating the Service!" _ -> mainHandler sock chan clientconnectHandler :: Socket -> Chan String -> TVar Int -> FileServer -> IO () clientconnectHandler sock chan numThreads server = do --Accept the socket which returns a handle, host and port --(handle, host, port) <- accept sock (s,a) <- accept sock --handle <- socketToHandle s ReadWriteMode --Read numThreads from memory and print it on server console count <- atomically $ readTVar numThreads putStrLn $ "numThreads = " ++ show count --If there are still threads remaining create new thread and increment (exception if thread is lost -> decrement), else tell user capacity has been reached if (count < maxnumThreads) then do forkFinally (clientHandler s chan server) (\_ -> atomically $ decrementTVar numThreads) atomically $ incrementTVar numThreads else do send s (pack ("Maximum number of threads in use. try again soon"++"\n\n")) sClose s clientconnectHandler sock chan numThreads server clientHandler :: Socket -> Chan String -> FileServer -> IO () clientHandler sock chan server@FileServer{..} = forever $ do message <- recv sock 1024 let msg = unpack message print $ msg ++ "!ENDLINE!" 
    let cmd = head $ words $ head $ splitOn ":" msg
    print cmd
    case cmd of
      ("HELO")         -> heloCommand sock server $ (words msg) !! 1
      ("KILL_SERVICE") -> killCommand chan sock
      ("DOWNLOAD")     -> downloadCommand sock server msg
      _                -> do send sock (pack ("Unknown Command - " ++ msg ++ "\n\n")); return ()

--Function called when HELO text command received
heloCommand :: Socket -> FileServer -> String -> IO ()
heloCommand sock FileServer{..} msg = do
  send sock $ pack $ "HELO " ++ msg ++ "\n" ++
                     "IP:" ++ "192.168.6.129" ++ "\n" ++
                     "Port:" ++ port ++ "\n" ++
                     "StudentID:12306421\n\n"
  return ()

killCommand :: Chan String -> Socket -> IO ()
killCommand chan sock = do
  send sock $ pack $ "Service is now terminating!"
  writeChan chan "KILL_SERVICE"

downloadCommand :: Socket -> FileServer -> String -> IO ()
downloadCommand sock server@FileServer{..} command = do
  let clines   = splitOn "\\n" command
      filename = (splitOn ":" $ clines !! 0) !! 1
  exists <- doesFileExist filename
  if exists
    then do
      fdata <- readFile filename
      send sock $ pack $ "DOWNLOAD: " ++ filename ++ "\n" ++ "DATA: " ++ show fdata ++ "\n\n"
    else
      send sock $ pack $ "DOWNLOAD: " ++ filename ++ "\n" ++ "DATA: " ++ "File not Found!!" ++ "\n\n"
  return ()

--Increment Tvar stored in memory i.e. numThreads
incrementTVar :: TVar Int -> STM ()
incrementTVar tv = modifyTVar tv ((+) 1)

--Decrement Tvar stored in memory i.e. numThreads
decrementTVar :: TVar Int -> STM ()
decrementTVar tv = modifyTVar tv (subtract 1)
Garygunn94/DFS
.stack-work/intero/intero26493An0.hs
bsd-3-clause
5,400
96
15
1,237
1,256
665
591
98
4
{-# LANGUAGE OverloadedStrings #-} -- | -- Available conduit combinators to process data from *.osm file. -- For the best performance, use any of conduitNodes/Ways/Relations/NWR. -- Example: -- -- > import qualified Data.Conduit.List as CL -- > import Text.XML.Stream.Parse (parseFile, def) -- > printNodes filepath = parseFile def filepath =$ conduitNodes $$ CL.mapM_ print -- module Data.Conduit.OSM ( sourceFileOSM , conduitNWR , conduitNodes , conduitWays , conduitRelations , conduitOSM ) where import Data.Conduit (Consumer, Conduit, Source, ConduitM, (=$)) import Data.Text (Text, unpack, toLower) import Data.XML.Types (Event, Name) import Control.Monad.Catch (MonadThrow, throwM) import Control.Monad.Trans.Resource (MonadResource) import Control.Exception (ErrorCall(..)) import Text.Read (readMaybe) import Text.XML.Stream.Parse (AttrParser, tagName, requireAttr, attr , ignoreAttrs, many, many', manyYield, manyYield' , parseFile, def, choose, tagIgnoreAttrs) import Data.Conduit.OSM.Types sourceFileOSM :: MonadResource m => FilePath -> Source m OSM sourceFileOSM path = parseFile def path =$ conduitOSM conduitOSM :: MonadThrow m => Conduit Event m OSM conduitOSM = manyYield parseOSM conduitNodes :: MonadThrow m => Conduit Event m Node conduitNodes = loopConduit $ tagIgnoreAttrs "osm" $ manyYield' parseNode conduitWays :: MonadThrow m => Conduit Event m Way conduitWays = loopConduit $ tagIgnoreAttrs "osm" $ manyYield' parseWay conduitRelations :: MonadThrow m => Conduit Event m Relation conduitRelations = loopConduit $ tagIgnoreAttrs "osm" $ manyYield' parseRelation conduitNWR :: MonadThrow m => Conduit Event m NWRWrap conduitNWR = loopConduit $ tagIgnoreAttrs "osm" $ manyYield' parseNWR -- | Keep yielding output if parser can still parse anything remaining loopConduit :: Monad m => ConduitM i o m (Maybe ()) -> Conduit i m o loopConduit cond = loop where loop = cond >>= maybe (return ()) (const loop) parseOSM :: MonadThrow m => Consumer Event m (Maybe OSM) parseOSM = tagName "osm" tagParser $ \cont -> cont <$> parseBounds <*> many parseNode <*> many parseWay <*> many' parseRelation where tagParser = OSM <$> requireAttrRead "version" <*> attr "generator" <* ignoreAttrs -- | Wrap nodes, ways and relations parseNWR :: MonadThrow m => Consumer Event m (Maybe NWRWrap) parseNWR = choose [ fmap N <$> parseNode , fmap W <$> parseWay , fmap R <$> parseRelation ] parseNode :: MonadThrow m => Consumer Event m (Maybe Node) parseNode = tagName "node" tagParser $ \cont -> cont <$> many' parseTag where tagParser = (\f latitude longitude tagz -> Node latitude longitude (f tagz)) <$> nwrCommonParser <*> requireAttrRead "lat" <*> requireAttrRead "lon" <* ignoreAttrs parseWay :: MonadThrow m => Consumer Event m (Maybe Way) parseWay = tagName "way" (nwrCommonParser <* ignoreAttrs) $ \cont -> Way <$> many parseNd <*> (cont <$> many' parseTag) parseRelation :: MonadThrow m => Consumer Event m (Maybe Relation) parseRelation = tagName "relation" (nwrCommonParser <* ignoreAttrs) $ \cont -> Relation <$> many parseMember <*> (cont <$> many' parseTag) parseMember :: MonadThrow m => Consumer Event m (Maybe Member) parseMember = tagName "member" tagParser return where tagParser = Member <$> (requireAttr "type" >>= readNWRType) <*> requireAttr "ref" <*> attr "role" <* ignoreAttrs parseNd :: MonadThrow m => Consumer Event m (Maybe Nd) parseNd = tagName "nd" (Nd <$> requireAttr "ref" <* ignoreAttrs) return parseTag :: MonadThrow m => Consumer Event m (Maybe Tag) parseTag = tagName "tag" tagParser (return . 
Tag) where tagParser = (,) <$> requireAttr "k" <*> requireAttr "v" <* ignoreAttrs parseBounds :: MonadThrow m => Consumer Event m (Maybe Bounds) parseBounds = tagName "bounds" tagParser return where tagParser = Bounds <$> requireAttrRead "minlat" <*> requireAttrRead "minlon" <*> requireAttrRead "maxlat" <*> requireAttrRead "maxlon" nwrCommonParser :: AttrParser ([Tag] -> NWRCommon) nwrCommonParser = NWRCommon <$> requireAttr "id" <*> fmap (>>= readBool) (attr "visible") <*> attr "chageset" <*> attr "timestamp" <*> attr "user" readNWRType :: Text -> AttrParser NWR readNWRType a = case toLower a of "node" -> return NWRn "relation" -> return NWRr "way" -> return NWRw _ -> throwM $ ErrorCall "unknown type in <member>" fromStr :: Read a => Text -> Maybe a fromStr = readMaybe . unpack requireAttrRead :: Read a => Name -> AttrParser a requireAttrRead str = requireAttr str >>= maybe (throwM $ ErrorCall "Could not parse attribute value") return . fromStr readBool :: Text -> Maybe Bool readBool a | toLower a == "true" = Just True | toLower a == "false" = Just False | otherwise = Nothing
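-- Editor's sketch (not part of the original module): a complete driver for
-- 'sourceFileOSM', in the same old-style conduit notation as the example in the
-- module header.  It assumes the types in Data.Conduit.OSM.Types have Show
-- instances and that the indicated imports are added.
--
-- import Control.Monad.IO.Class (liftIO)
-- import Control.Monad.Trans.Resource (runResourceT)
-- import Data.Conduit (($$))
-- import qualified Data.Conduit.List as CL
--
-- printOSM :: FilePath -> IO ()
-- printOSM path = runResourceT $ sourceFileOSM path $$ CL.mapM_ (liftIO . print)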
przembot/osm-conduit
src/Data/Conduit/OSM.hs
bsd-3-clause
5,281
0
15
1,343
1,454
741
713
95
4
module Language.Modelica.Parser.ClassDefinition where import Language.Modelica.Syntax.Modelica import Language.Modelica.Parser.Parser (Parser) import Language.Modelica.Parser.Lexer import Language.Modelica.Parser.Expression import Language.Modelica.Parser.Modification import Language.Modelica.Parser.ComponentClause import Language.Modelica.Parser.Equation import Language.Modelica.Parser.Basic import Language.Modelica.Parser.Option import Language.Modelica.Parser.Utility (eitherOr, (~*)) import Control.Applicative (liftA, liftA2, liftA3, (*>), (<*), (<*>), (<$>)) import Text.ParserCombinators.Parsec ((<|>), optionMaybe, try, many, getState) import qualified Data.Set as Set import_clause :: Parser ImportClause import_clause = liftA2 ImportClause (import_ *> (try import_clause1 <|> import_clause2)) comment import_clause1 :: Parser Import import_clause1 = liftA2 Assign ident (assign *> name) import_clause2 :: Parser Import import_clause2 = liftA2 IList name (optionMaybe $ (symbol "." *> (star `eitherOr` braces import_list))) import_list :: Parser ImportList import_list = liftA2 ImportList ident (commaList ident) element :: Parser Element element = liftA ElementImportClause (try import_clause) <|> liftA ElementExtendsClause (try extends_clause) <|> element_help element_help :: Parser Element element_help = element_options >>= \opts -> liftA (Element opts) classDefOrCompClause <|> liftA2 (ElementReplaceable opts) (replaceable_ *> classDefOrCompClause) (optionMaybe (liftA2 (,) constraining_clause comment)) element_options :: Parser ElementOptions element_options = ElementOptions <$> optionMaybe redeclare_ <*> optionMaybe final_ <*> optionMaybe inner_ <*> optionMaybe outer_ classDefOrCompClause :: Parser (Either ClassDefinition ComponentClause) classDefOrCompClause = liftA Left class_definition <|> liftA Right component_clause element_list :: Parser ElementList element_list = liftA (ElementList . map fst) (element ~* semicolon) external_function_call :: Parser ExternalFunctionCall external_function_call = liftA3 ExternalFunctionCall (optionMaybe (try $ component_reference <* assign)) ident (parens (optionMaybe expression_list)) language_specification :: Parser LanguageSpecification language_specification = unicode_string composition_list :: Parser [CompositionList] composition_list = many $ liftA PublicElementList (public_ *> element_list) <|> liftA ProtectedElementList (protected_ *> element_list) <|> liftA ESec (try equation_section) <|> liftA ASec (try algorithm_section) composition_external :: Parser CompositionExternal composition_external = liftA3 CompositionExternal (external_ *> optionMaybe language_specification) (optionMaybe external_function_call) (optionMaybe annotation <* semicolon) -- | This parser behaves according to the specification. composition_annotation_last :: Parser Composition composition_annotation_last = Composition <$> element_list <*> composition_list <*> (optionMaybe composition_external) <*> (optionMaybe (annotation <* semicolon)) -- | In order to succeed, this parser needs the /annotation/ keyword first. 
composition_annotation_first :: Parser Composition composition_annotation_first = let comp ann el cl ce = Composition el cl ce ann in comp <$> (liftA Just (annotation <* semicolon)) <*> element_list <*> composition_list <*> (optionMaybe composition_external) composition :: Parser Composition composition = do opt <- getState if Set.member PermitAnnotationFirst opt then composition_annotation_first <|> composition_annotation_last else composition_annotation_last class_specifier :: Parser ClassSpecifier class_specifier = class_specifier_extends <|> try class_specifier_a <|> try class_specifier_enum <|> try class_specifier_der <|> class_specifier_end class_specifier_a :: Parser ClassSpecifier class_specifier_a = ClassSpecifierA <$> ident <*> (assign *> base_prefix) <*> name <*> (optionMaybe array_subscripts) <*> (optionMaybe class_modification) <*> comment class_specifier_enum :: Parser ClassSpecifier class_specifier_enum = liftA3 ClassSpecifierEnum ident (assign *> enumeration_ *> enum_list_or_colon) comment class_specifier_der :: Parser ClassSpecifier class_specifier_der = liftA3 ClassSpecifierDer ident (assign *> der_ *> parens p) comment where p = liftA3 (,,) name (comma *> ident) (commaList ident) class_specifier_end :: Parser ClassSpecifier class_specifier_end = ClassSpecifierEnd <$> ident <*> string_comment <*> composition <*> (end_ *> ident) class_specifier_extends :: Parser ClassSpecifier class_specifier_extends = ClassSpecifierExtends <$> (extends_ *> ident) <*> (optionMaybe class_modification) <*> string_comment <*> composition <*> (end_ *> ident) class_definition :: Parser ClassDefinition class_definition = liftA3 ClassDefinition (optionMaybe encapsulated_) class_prefixes class_specifier -- | Main entry point for parsing stored_definition :: Parser StoredDefinition stored_definition = liftA2 StoredDefinition (optionMaybe (within_ *> optionMaybe name <* semicolon)) (many p) where p = liftA2 (,) (optionMaybe final_) (class_definition <* semicolon)
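-- Editor's sketch (not part of the original module): driving the entry point
-- above directly with Parsec's runParser.  The user state is the option set
-- consulted via getState (Set.empty for defaults, or e.g.
-- Set.fromList [PermitAnnotationFirst]); the concrete state type and any
-- wrapper helpers in Language.Modelica.Parser.Parser are assumptions here.
--
-- import Text.ParserCombinators.Parsec (runParser, ParseError)
--
-- parseModelica :: String -> Either ParseError StoredDefinition
-- parseModelica = runParser stored_definition Set.empty "<modelica source>"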
xie-dongping/modelicaparser
src/Language/Modelica/Parser/ClassDefinition.hs
bsd-3-clause
5,298
0
14
755
1,282
685
597
138
2
module Problem64 where

import ContinuedFraction

main :: IO ()
-- if period is odd then total terms in continued fraction representation will be even
main = print . length
     . filter (even . length . continuedFractionSqrt)
     . filter (\x -> let root = floor . sqrt . fromIntegral $ x in root * root /= x)
     $ [1 .. 10000]
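-- Editor's note (not part of the original solution): a concrete check, assuming
-- continuedFractionSqrt returns the leading term followed by one period, as the
-- comment above implies.  sqrt 23 = [4; (1,3,1,8)] has period 4, giving 5 terms
-- (odd), so 23 is not counted; sqrt 13 = [3; (1,1,1,1,6)] has period 5, giving
-- 6 terms (even), so 13 is counted.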
adityagupta1089/Project-Euler-Haskell
src/problems/Problem64.hs
bsd-3-clause
353
0
16
98
101
54
47
9
1
{-# LANGUAGE RecordWildCards #-} {-# LANGUAGE OverloadedStrings #-} import Control.Monad.Catch import Control.Monad (when, unless, forever, replicateM_) import Control.Monad.IO.Class import Control.Error hiding (err) import Data.Foldable import qualified Data.Map.Strict as M import Data.Time.Clock import Data.Time.Format.Human import System.Exit import System.IO (stderr, stdout) import Data.Aeson as Aeson import qualified Data.ByteString.Lazy.Char8 as BS.L import qualified Text.PrettyPrint.ANSI.Leijen as T.PP import Text.PrettyPrint.ANSI.Leijen (Doc, (<+>), (<$$>)) import qualified Network.Transport.TCP as TCP import Network.Socket (ServiceName, HostName) import Network.BSD (getHostName) import Options.Applicative hiding (action, header) import qualified Options.Applicative as O import Control.Concurrent (threadDelay) import Control.Distributed.Process import Control.Distributed.Process.Internal.Types (NodeId(..)) import Control.Distributed.Process.Node import qualified Text.Trifecta as TT import Pipes import qualified Pipes.Concurrent as P.C import qualified Pipes.Prelude as P.P import TPar.Rpc import TPar.ProcessPipe hiding (runProcess) import TPar.Server import TPar.SubPubStream as SubPub import TPar.Server.Types import TPar.JobMatch import TPar.Types portOption :: Mod OptionFields ServiceName -> Parser ServiceName portOption m = option str ( short 'p' <> long "port" <> value "5757" <> m ) hostOption :: Parser HostName hostOption = strOption ( short 'H' <> long "host" <> value "localhost" <> help "server host name" ) jobMatchArg :: Parser JobMatch jobMatchArg = argument (liftTrifecta parseJobMatch) (help "job match expression") type Mode = IO () tpar :: ParserInfo Mode tpar = O.info (helper <*> tparParser) $ O.fullDesc <> O.progDesc "Start queues, add workers, and enqueue tasks" <> O.header "tpar - simple distributed task queuing" tparParser :: Parser Mode tparParser = subparser $ command "server" ( info modeServer $ fullDesc <> progDesc "Start a server") <> command "worker" ( info modeWorker $ fullDesc <> progDesc "Start a worker") <> command "enqueue" ( info modeEnqueue $ fullDesc <> progDesc "Enqueue a job") <> command "status" ( info modeStatus $ fullDesc <> progDesc "Show queue status") <> command "kill" ( info modeKill $ fullDesc <> progDesc "Kill or dequeue a job") <> command "rerun" ( info modeRerun $ fullDesc <> progDesc "Restart a failed job") <> command "watch" ( info modeWatch $ fullDesc <> progDesc "Watch the output of a set of jobs") <> command "dump" ( info modeDump $ fullDesc <> progDesc "Dump details about a set of jobs in JSON") withServer' :: HostName -> ServiceName -> (ServerIface -> Process a) -> Process a withServer' host port action = do let nid :: NodeId nid = NodeId (TCP.encodeEndPointAddress host port 0) -- request server interface mref <- monitorNode nid (sq, rq) <- newChan :: Process (SendPort ServerIface, ReceivePort ServerIface) nsendRemote nid "tpar" sq iface <- receiveWait [ matchIf (\(NodeMonitorNotification ref _ _) -> ref == mref) $ \(NodeMonitorNotification _ _ reason) -> case reason of DiedDisconnect -> fail "Failed to connect. Are you sure there is a server running?" 
_ -> fail $ show reason , matchChan rq pure ] unless (protocolVersion iface == currentProtocolVersion) $ fail $ "tpar protocol version mismatch: server speaks "++show (protocolVersion iface) ++ ", client speaks "++show currentProtocolVersion -- request server interface link (serverPid iface) unlinkNode nid r <- action iface unlink (serverPid iface) return r withServer :: HostName -> ServiceName -> (ServerIface -> Process ()) -> IO () withServer host port action = do hostname <- getHostName Right transport <- TCP.createTransport hostname "0" TCP.defaultTCPParameters node <- newLocalNode transport initRemoteTable runProcess node $ withServer' host port action modeWorker :: Parser Mode modeWorker = run <$> option (auto >>= checkNWorkers) ( short 'N' <> long "workers" <> value 1 <> help "number of local workers to start" ) <*> hostOption <*> portOption (help "server port number") <*> option (Just <$> (auto <|> pure 10)) ( short 'r' <> long "reconnect" <> metavar "SECONDS" <> value Nothing <> help "attempt to reconnect when server vanishes (with optional retry period); otherwise terminates on server vanishing" ) <* helper where checkNWorkers n | n >= 1 = return n | otherwise = fail "Worker count (-N) should be at least one" run nWorkers serverHost serverPort reconnectPeriod = perhapsRepeat $ withServer serverHost serverPort $ \serverIface -> do replicateM_ nWorkers $ spawnLocal $ runRemoteWorker serverIface liftIO $ forever threadDelay maxBound where perhapsRepeat action | Just period <- reconnectPeriod = forever $ do handleAll (liftIO . print) action liftIO (threadDelay $ 1000*1000*period) | otherwise = action modeServer :: Parser Mode modeServer = run <$> option str (short 'H' <> long "host" <> value "localhost" <> help "interface address to listen on" ) <*> portOption (help "port to listen on") <*> option auto ( short 'N' <> long "workers" <> value 0 <> help "number of local workers to start" ) <* helper where run serverHost serverPort nLocalWorkers = do Right transport <- TCP.createTransport serverHost serverPort TCP.defaultTCPParameters node <- newLocalNode transport initRemoteTable runProcess node $ do iface <- runServer replicateM_ nLocalWorkers $ spawnLocal $ runRemoteWorker iface liftIO $ forever $ threadDelay maxBound modeEnqueue :: Parser Mode modeEnqueue = run <$> hostOption <*> portOption (help "server port number") <*> sinkType <*> switch (short 'w' <> long "watch" <> help "Watch output of task") <*> option (JobName <$> str) (short 'n' <> long "name" <> value (JobName "unnamed-job") <> help "Set the job's name") <*> option str (short 'd' <> long "directory" <> value "." 
<> help "Set the directory the job will be launched from (relative to the working directory of the worker who runs it)") <*> option (Priority <$> auto) (short 'P' <> long "priority" <> value (Priority 0) <> help "Set the job's priority") <*> some (argument str idm) <* helper where run :: HostName -> ServiceName -> OutputStreams (Maybe FilePath) -> Bool -> JobName -> FilePath -> Priority -> [String] -> IO () run serverHost serverPort sink watch name dir priority (cmd:args) = withServer serverHost serverPort $ \iface -> do let jobReq = JobRequest { jobName = name , jobPriority = priority , jobCommand = cmd , jobArgs = args , jobSinks = sink , jobCwd = dir , jobEnv = Nothing } if watch then do prod <- enqueueAndFollow iface jobReq code <- runEffect $ prod >-> P.P.mapM_ (processOutputToHandles $ OutputStreams stdout stderr) case code of ExitSuccess -> return () ExitFailure n -> liftIO $ putStrLn $ "exited with code "++show n else do Right _ <- callRpc (enqueueJob iface) (jobReq, Nothing) return () run _ _ _ _ _ _ _ _ = fail "Expected command line" sinkType :: Parser (OutputStreams (Maybe FilePath)) sinkType = OutputStreams <$> option (Just <$> str) (short 'o' <> long "output" <> metavar "FILE" <> help "remote file to log standard output to" <> value Nothing) <*> option (Just <$> str) (short 'e' <> long "error" <> metavar "FILE" <> help "remote file to log standard error to" <> value Nothing) liftTrifecta :: TT.Parser a -> ReadM a liftTrifecta parser = do s <- str case TT.parseString parser mempty s of TT.Success a -> return a TT.Failure err -> fail $ show $ TT._errDoc err modeWatch :: Parser Mode modeWatch = run <$> hostOption <*> portOption (help "server port number") <*> jobMatchArg <* helper where run serverHost serverPort jobMatch = withServer serverHost serverPort $ \iface -> do inputs <- subscribeToJobs iface jobMatch let input = foldMap fold inputs failed = M.keys $ M.filter isNothing inputs unless (null failed) $ liftIO $ print $ T.PP.red "warning: failed to attach to" <+> prettyShow (length failed) <+> "jobs" <$$> T.PP.nest 4 (T.PP.vcat $ map prettyShow failed) runEffect $ P.C.fromInput input >-> P.P.mapM_ (processOutputToHandles $ OutputStreams stdout stderr) subscribeToJobs :: ServerIface -> JobMatch -> Process (M.Map JobId (Maybe (P.C.Input ProcessOutput))) subscribeToJobs iface jobMatch = do Right jobs <- callRpc (getQueueStatus iface) jobMatch prods <- traverse SubPub.subscribe $ M.unions [ M.singleton jobId jobMonitor | Job {..} <- jobs , Running {..} <- pure jobState ] traverse (traverse producerToInput) prods where producerToInput :: Producer ProcessOutput Process ExitCode -> Process (P.C.Input ProcessOutput) producerToInput prod = do (output, input) <- liftIO $ P.C.spawn (P.C.bounded 1) void $ spawnLocal $ runEffect $ void prod >-> P.C.toOutput output return input modeStatus :: Parser Mode modeStatus = run <$> hostOption <*> portOption (help "server port number") <*> switch (short 'v' <> long "verbose" <> help "verbose queue status") <*> (jobMatchArg <|> pure (NegMatch NoMatch)) <* helper where run serverHost serverPort verbose jobMatch = withServer serverHost serverPort $ \iface -> do Right jobs <- callRpc (getQueueStatus iface) jobMatch time <- liftIO getCurrentTime let prettyTime = T.PP.text . 
humanReadableTime' time liftIO $ T.PP.putDoc $ T.PP.vcat $ map (prettyJob verbose prettyTime) jobs ++ [mempty] modeDump :: Parser Mode modeDump = run <$> hostOption <*> portOption (help "server port number") <*> (jobMatchArg <|> pure (NegMatch NoMatch)) <* helper where run serverHost serverPort jobMatch = withServer serverHost serverPort $ \iface -> do Right jobs <- callRpc (getQueueStatus iface) jobMatch liftIO $ BS.L.putStrLn $ Aeson.encode $ map jobToJson jobs jobToJson :: Job -> Value jobToJson (Job {jobRequest = JobRequest{..}, ..}) = object [ "id" .= fromEnum jobId , "name" .= case jobName of JobName s -> s , "priority" .= fromEnum jobPriority , "command" .= jobCommand , "arguments" .= jobArgs , "working_dir" .= jobCwd , "remote_logs" .= object [ "stdout" .= stdOut jobSinks , "stderr" .= stdErr jobSinks ] , "environment" .= jobEnv , "state" .= state ] where stateObj s rest = object $ ("state" .= (s :: String)) : rest state = case jobState of Queued {..} -> stateObj "queued" [ "queued_at" .= jobQueueTime ] Starting {..} -> stateObj "starting" [ "queued_at" .= jobQueueTime , "starting_since" .= jobStartingTime , "running_on" .= show jobProcessId ] Running {..} -> stateObj "running" [ "queued_at" .= jobQueueTime , "started_at" .= jobStartTime , "running_on" .= show jobProcessId ] Finished {..} -> stateObj "finished" [ "queued_at" .= jobQueueTime , "started_at" .= jobStartTime , "finished_at" .= jobFinishTime , "ran_on" .= show jobWorkerNode , "exit_code" .= getExitCode jobExitCode ] Failed {..} -> stateObj "finished" [ "queued_at" .= jobQueueTime , "started_at" .= jobStartTime , "failed_at" .= jobFailedTime , "ran_on" .= show jobWorkerNode , "error" .= jobErrorMsg ] Killed {..} -> stateObj "killed" [ "queued_at" .= jobQueueTime , "started_at" .= jobKilledStartTime , "killed_at" .= jobKilledTime , "ran_on" .= show jobKilledWorkerNode ] prettyJob :: Bool -> (UTCTime -> Doc) -> Job -> Doc prettyJob verbose prettyTime (Job {..}) = T.PP.vcat $ [header] ++ (if verbose then [details] else []) where JobRequest {..} = jobRequest twoCols :: Int -> [(Doc, Doc)] -> Doc twoCols width = T.PP.vcat . 
map (\(a,b) -> T.PP.fillBreak width a <+> T.PP.align b) header = T.PP.fillBreak 5 (prettyJobId jobId) <+> T.PP.fillBreak 50 (prettyJobName jobName) <+> prettyJobState jobState details = T.PP.indent 4 $ twoCols 15 [ ("priority:", prettyPriority jobPriority) , ("queued:", prettyTime (jobQueueTime jobState)) , ("command:", T.PP.text jobCommand) , ("arguments:", T.PP.hsep $ map T.PP.text jobArgs) , ("logging:", T.PP.vcat $ toList $ (\x y -> x<>":"<+>y) <$> OutputStreams "stdout" "stderr" <*> fmap (maybe "none" T.PP.text) jobSinks) , ("status:", prettyDetailedState jobState) ] <$$> mempty prettyDetailedState Queued{..} = "waiting to run" <+> T.PP.parens ("since" <+> prettyTime jobQueueTime) prettyDetailedState Starting{..} = "starting on" <+> prettyShow jobProcessId <+> T.PP.parens ("since" <+> prettyTime jobStartingTime) prettyDetailedState Running{..} = "running on" <+> prettyShow jobProcessId <+> T.PP.parens ("since" <+> prettyTime jobStartTime) prettyDetailedState Finished{..} = "finished with exit code" <+> prettyShow (getExitCode jobExitCode) <+> T.PP.parens (prettyTime jobFinishTime) <$$> "started" <+> prettyTime jobStartTime <$$> "ran on" <+> prettyShow jobWorkerNode prettyDetailedState Failed{..} = "failed with error" <+> T.PP.parens (prettyTime jobFailedTime) <$$> "started" <+> prettyTime jobStartTime <$$> T.PP.indent 4 (T.PP.text jobErrorMsg) prettyDetailedState Killed{..} = "killed at user request" <+> T.PP.parens (prettyTime jobKilledTime) <$$> maybe "never started" (\t -> "started" <+> prettyTime t) jobKilledStartTime prettyJobState Queued{} = T.PP.blue "queued" prettyJobState Starting{} = T.PP.blue "starting" prettyJobState Running{} = T.PP.cyan "running" prettyJobState Finished{..} | ExitFailure _ <- jobExitCode = T.PP.yellow $ "failed" | otherwise = T.PP.green "finished" prettyJobState Failed{} = T.PP.red "job failed" prettyJobState Killed{} = T.PP.yellow "killed" prettyJobId (JobId n) = T.PP.int n prettyJobName (JobName name) = T.PP.bold $ T.PP.text name prettyPriority (Priority p) = T.PP.int p getExitCode :: ExitCode -> Int getExitCode ExitSuccess = 0 getExitCode (ExitFailure c) = c prettyShow :: Show a => a -> Doc prettyShow = T.PP.text . show modeKill :: Parser Mode modeKill = run <$> hostOption <*> portOption (help "server port number") <*> jobMatchArg <* helper where run serverHost serverPort jobMatch = withServer serverHost serverPort $ \iface -> do Right jobs <- callRpc (killJobs iface) jobMatch liftIO $ T.PP.putDoc $ T.PP.vcat $ map (prettyJob False prettyShow) jobs ++ [mempty] liftIO $ when (null jobs) $ exitWith $ ExitFailure 1 modeRerun :: Parser Mode modeRerun = run <$> hostOption <*> portOption (help "server port number") <*> jobMatchArg <* helper where run serverHost serverPort jobMatch = withServer serverHost serverPort $ \iface -> do Right jobs <- callRpc (rerunJobs iface) jobMatch liftIO $ T.PP.putDoc $ T.PP.vcat $ map (prettyJob False prettyShow) jobs ++ [mempty] liftIO $ when (null jobs) $ exitWith $ ExitFailure 1 main :: IO () main = do run <- execParser tpar res <- runExceptT $ tryIO run case res of Left err -> putStrLn $ "error: "++show err Right () -> return ()
bgamari/tpar
Main.hs
bsd-3-clause
18,217
168
17
5,988
5,172
2,544
2,628
389
12
; ; HSP help manager—p HELPƒ\[ƒXƒtƒ@ƒCƒ‹ ; (æ“ª‚ªu;v‚̍s‚̓Rƒƒ“ƒg‚Æ‚µ‚ďˆ—‚³‚ê‚Ü‚·) ; %type Šg’£–½—ß %ver 3.3 %note llmod3.hsp‚ðƒCƒ“ƒNƒ‹[ƒh‚·‚éB•K—v‚ɉž‚¶‚Älistview.hsp,progbox.hsp,trackbox.hsp,treebox.hsp,udbtn.hsp‚ðƒCƒ“ƒNƒ‹[ƒh‚·‚é %date 2009/08/01 %author tom %dll llmod3 %url http://www5b.biglobe.ne.jp/~diamond/hsp/hsp2file.htm %index listview ƒŠƒXƒgƒrƒ…[‚̍쐬 %group ƒIƒuƒWƒFƒNƒg§Œä–½—ß %prm n1,n2,n3 n1 : • n2 : ‚‚³ n3 : ƒXƒ^ƒCƒ‹ %inst ƒŠƒXƒgƒrƒ…[‚ðì¬‚µ‚Ü‚·B ƒŠƒXƒgƒrƒ…[‚́AExplorer‚̉E‘¤‚É‚ ‚é—l‚ȃIƒuƒWƒFƒNƒg‚Å‚·B ‚±‚Ì–½—ß‚ðŒÄ‚яo‚µ‚½ŒãAstat‚ÉƒŠƒXƒgƒrƒ…[‚ÌID‚ª‘ã“ü‚³‚ê‚Ü‚·B ^p ƒXƒ^ƒCƒ‹ ƒXƒ^ƒCƒ‹‚͈ȉº‚Ì’l‚ð‘«‚µ‚āA•¡”‚̃Xƒ^ƒCƒ‹‚ðŽw’è‚·‚邱‚Æ‚ª‚Å‚«‚Ü‚·B 0 ‘å‚«‚¢ƒAƒCƒRƒ“•\ަ 1 Ú×•\ަ 2 ¬‚³‚¢ƒAƒCƒRƒ“•\ަ 3 ƒŠƒXƒg•\ަ 4 •¡”‘I‘ð‚Å‚«‚È‚¢‚悤‚É‚·‚é 8 listview‚ª‘I‘ð‚³‚ê‚Ä‚¢‚È‚¢‚Æ‚«‚Å‚àA‘I‘ð‚³‚ê‚Ä‚¢‚éƒAƒCƒeƒ€‚̐F‚ð•Ï‚¦‚é $10 ¸‡‚ŃAƒCƒeƒ€‚ðƒ\[ƒg‚·‚é $20 ~‡‚ŃAƒCƒeƒ€‚ðƒ\[ƒg‚·‚é $40 ƒZƒbƒg‚µ‚½ƒCƒ[ƒWƒŠƒXƒg‚ðlistviewÁ‹ŽŽž‚ɏÁ‹Ž‚µ‚È‚¢(•¡”‚Ìlistview‚Å“¯‚¶ƒCƒ[ƒW‚ðŽg‚¤ê‡‚ÉŽw’è‚·‚é) $80 ƒAƒCƒRƒ“•\ަ‚̂Ƃ«AƒAƒCƒeƒ€‚̃eƒLƒXƒg‚ðÜ‚è•Ô‚³‚È‚¢(ƒfƒtƒHƒ‹ƒg‚ł̓eƒLƒXƒg‚Í’·‚¢‚Ɛ܂è•Ô‚³‚ê‚é) $100 ? $800 ? $2000 ƒXƒNƒ[ƒ‹ƒo[‚ª‚‚©‚È‚¢ $4000 —“‚Ì–¼‘O‚ð•\ަ‚·‚é•”•ª‚ð•t‚¯‚È‚¢ ^ ˆÈ‰º‚̃Xƒ^ƒCƒ‹‚Ícomctl32.dll‚̃o[ƒWƒ‡ƒ“‚É‚æ‚Á‚Ă͗LŒø‚ɂȂç‚È‚¢ê‡‚ª‚ ‚è‚Ü‚·B $10000 ƒŠƒXƒg•\Ž¦Žž‚ÉŒrü‚ð•t‚¯‚é $40000 ƒ`ƒFƒbƒNƒ{ƒbƒNƒX‚ð•t‚¯‚é $100000 —“‚Ì–¼‘O‚ð•\ަ‚·‚鏊‚ðƒhƒ‰ƒbƒO&ƒhƒƒbƒv‚Å‚«‚é $200000 ƒŠƒXƒg•\Ž¦Žž‚ɃAƒCƒeƒ€‚ª‘I‘ð‚³‚ê‚½‚Æ‚«ƒTƒuƒAƒCƒeƒ€‚àF‚ð•Ï‚¦‚é $400000 ƒAƒCƒeƒ€‚̏ã‚Ƀ}ƒEƒXƒ|ƒCƒ“ƒ^‚ª—ˆ‚½‚Æ‚«ƒJ[ƒ\ƒ‹‚ðŽw‚ÌŒ`‚É‚·‚é $800000 ƒAƒCƒeƒ€ƒNƒŠƒbƒN‚³‚ꂽ‚Æ‚«ƒJ[ƒ\ƒ‹‚ðŽw‚ÌŒ`‚É‚·‚é $1000000 ƒXƒNƒ[ƒ‹ƒo[‚𕽂ç‚ȃXƒ^ƒCƒ‹‚ɕς¦‚é $8000000 $400000,$800000‚̃Xƒ^ƒCƒ‹‚ðŽw’肵‚½‚Æ‚«AƒAƒCƒeƒ€‚̏ã‚Ƀ}ƒEƒXƒ|ƒCƒ“ƒ^‚ª—ˆ‚邯‰ºü‚ª‚‚­ $10000000 $400000,$800000‚̃Xƒ^ƒCƒ‹‚ðŽw’肵‚½‚Æ‚«A‰ºü‚ª‚‚­ ^p ^ ¦ƒ`ƒFƒbƒNƒ{ƒbƒNƒX‚ð•t‚¯‚½‚Æ‚«‚̃Xƒ^ƒCƒ‹‚ŁAƒAƒCƒeƒ€‚ªƒ`ƒFƒbƒN‚³‚ê‚Ä‚¢‚é‚©‚ð’²‚×‚é‚Æ‚«‚͈ȉº‚̂悤‚É‚µ‚Ä‚­‚¾‚³‚¢ ^ ;list_id : listview‚ÌID ;idx : ƒAƒCƒeƒ€‚̃Cƒ“ƒfƒbƒNƒX prm=list_id,4140,idx,61440 sendmsg prm chk=stat>>12-1 if chk : dialog ""+idx+"”Ԗڂ̃AƒCƒeƒ€‚̓`ƒFƒbƒN‚³‚ê‚Ä‚¢‚Ü‚·" ^ ƒ`ƒFƒbƒN‚ð‚Í‚¸‚µ‚½‚èA‚‚¯‚½‚肵‚½‚¢‚Æ‚«‚͈ȉº‚̂悤‚É‚µ‚Ü‚·B ^ dim lvi,9 idx = 0 ;idx”Ԗڂ̃AƒCƒeƒ€ if uncheck != 0 { lvi = 8,0,0,1<<12,$f000 } else { lvi = 8,0,0,2<<12,$f000 } prm = list_id,4139,idx : getptr prm.3,lvi : sendmsg prm ^ ¦listview‚ÍHSP•W€–½—ß‚Ìcls‚âscreen–½—߂ł͏Á‹Ž‚³‚ê‚Ü‚¹‚ñB_cls‚Ü‚½‚Í_clrobj‚ðŽg—p‚µ‚Ä‚­‚¾‚³‚¢B %href listadd listdel listaddcl listdelcl listsel listget listmax listicon listhit sel_listview %index listadd ƒŠƒXƒgƒrƒ…[‚ɃAƒCƒeƒ€‚ð’ljÁ %group ƒIƒuƒWƒFƒNƒg§Œä–½—ß %prm n1,"s2",n3,n4,n5,n6 n1 : ƒAƒCƒeƒ€‚̈ʒu s2 : ƒAƒCƒeƒ€‚̃eƒLƒXƒg n3 : —“‚̔ԍ† n4 : ƒAƒCƒRƒ“”ԍ† n5 : ƒAƒCƒeƒ€‚̏ó‘Ô n6 : ƒAƒCƒeƒ€‚ÉŽ‚½‚¹‚鐔’l %inst ƒŠƒXƒgƒrƒ…[‚Ìn1”Ԗڂ̈ʒu‚ɃAƒCƒeƒ€‚ð‚‚¯‰Á‚¦‚Ü‚·B n3‚ɂ͉½”Ô–Ú‚Ì—“‚ɒljÁ‚·‚é‚©‚ðŽw’è‚·‚éƒCƒ“ƒfƒbƒNƒXAn4‚ɂ̓AƒCƒRƒ“”ԍ†An5‚ɂ̓AƒCƒeƒ€‚̏ó‘Ô‚ð“ü‚ê‚Ü‚·B ^ —“‚ւ̒ljÁ‚́Alistaddcl‚Å—“‚ð’ljÁ‚µ‚Ä‚¢‚邯‚«‚̂ݗLŒø‚Å‚·B ‚Ü‚½ƒAƒCƒRƒ“”ԍ†‚́Alisticon‚ŃAƒCƒRƒ“‚ð’ljÁ‚µ‚Ä‚¢‚éê‡‚̂ݗLŒø‚ɂȂèA—“‚֒ljÁ‚·‚邯‚«‚̓AƒCƒRƒ“”ԍ†‚Í–³Ž‹‚³‚ê‚Ü‚·B ^ n6‚É’l‚ð“ü‚ê‚ăAƒCƒeƒ€‚É‚»‚̐”’l‚ðŽ‚½‚¹‚邱‚Æ‚ª‚Å‚«‚Ü‚·B ‚±‚Ì–½—ß‚ðŒÄ‚яo‚µ‚½ŒãAstat‚ɒljÁ‚µ‚½ƒAƒCƒeƒ€‚̃Cƒ“ƒfƒbƒNƒX‚ª‘ã“ü‚³‚ê‚Ü‚·B(-1‚È‚çƒGƒ‰[) ^ ƒAƒCƒeƒ€‚̏ó‘Ô listview‚̃AƒCƒeƒ€‚̏ó‘Ԃɂ́AˆÈ‰º‚Ì‚à‚Ì‚ª‚ ‚è‚Ü‚·B ^p ó‘Ô ”’l ƒtƒH[ƒJƒX‚ª‚ ‚Á‚Ä‚¢‚é 1 ‘I‘ð‚³‚ê‚Ä‚¢‚é 2 Ø‚èŽæ‚ç‚ꂽó‘Ô 4 ƒnƒCƒ‰ƒCƒg 8 ^p ¦ƒnƒCƒ‰ƒCƒg‚Ílistsel‚Å‘I‘ðó‘Ô‚ð‰ðœ‚µ‚È‚¢‚ÆŒ³‚É–ß‚è‚Ü‚¹‚ñB %href listdel listsel listaddcl listdelcl listview %sample listadd 3,"new item",0,15 ^ listadd 0,"new sub item",1,0,1 %index listdel ƒŠƒXƒgƒrƒ…[‚̃AƒCƒeƒ€‚ðÁ‹Ž %group ƒIƒuƒWƒFƒNƒg§Œä–½—ß %prm n1 n1 : ƒAƒCƒeƒ€‚̃Cƒ“ƒfƒbƒNƒX %inst 
n1”Ԗڂ̃AƒCƒeƒ€‚ðÁ‹Ž‚µ‚Ü‚·B n1‚ð-1‚É‚·‚邯‘S‚ẴAƒCƒeƒ€‚ðÁ‹Ž‚µ‚Ü‚·B %href listadd listview %index listaddcl ƒŠƒXƒgƒrƒ…[‚É—“‚ð’ljÁ %group ƒIƒuƒWƒFƒNƒg§Œä–½—ß %prm n1,"s2",n3,n4 n1 : —“‚̃Cƒ“ƒfƒbƒNƒX s2 : —“‚Ì–¼‘O n3 : —“‚Ì• n4 : —“‚Ì–¼‘O‚̈ʒu %inst —“‚ð’ljÁ‚µ‚Ü‚·B n3‚ðÈ—ª‚µ‚½‚Æ‚«‚̃fƒtƒHƒ‹ƒg’l‚Í100‚Å‚·B ^ ^p n4‚Ì’l 0 ¶Šñ‚¹ 1 ‰EŠñ‚¹ 2 ’†‰› %href listdelcl listview %index listdelcl —“‚ðÁ‹Ž %group ƒIƒuƒWƒFƒNƒg§Œä–½—ß %prm n1 n1 : —“‚̃Cƒ“ƒfƒbƒNƒX %inst n1”Ô–Ú‚Ì—“‚ðÁ‹Ž‚µ‚Ü‚·B %href listaddcl listview %index listsel ƒŠƒXƒgƒrƒ…[‚̃AƒCƒeƒ€‚ð‘I‘ð %group ƒIƒuƒWƒFƒNƒg§Œä–½—ß %prm n1,n2 n1 : ƒAƒCƒeƒ€‚̃Cƒ“ƒfƒbƒNƒX n2 : ‘I‘ðó‘Ԃ̃^ƒCƒv %inst n1”Ԗڂ̃AƒCƒeƒ€‚ð‘I‘ð‚µ‚Ü‚·B n2‚ð-1‚É‚·‚邯‘I‘ðó‘Ô‚ð‰ðœ‚µ‚Ü‚·B n2‚ðÈ—ª‚·‚邯ƒtƒH[ƒJƒX‚ð‚ ‚í‚¹‚āA‘I‘ðó‘Ô‚É‚µ‚Ü‚·B ^ ^p ó‘Ô ”’l ƒtƒH[ƒJƒX‚ª‚ ‚Á‚Ä‚¢‚é 1 ‘I‘ð‚³‚ê‚Ä‚¢‚é 2 Ø‚èŽæ‚ç‚ꂽó‘Ô 4 ƒnƒCƒ‰ƒCƒg 8 ^p %href listadd listview %index listget ƒŠƒXƒgƒrƒ…[‚̃AƒCƒeƒ€‚ðŽæ“¾ %group ƒIƒuƒWƒFƒNƒg§Œä–½—ß %prm v1,n2,n3,n4 v1 : Žæ“¾‚µ‚½‚à‚Ì‚ð“ü‚ê‚é•ϐ” n2 : ƒAƒCƒeƒ€‚̃Cƒ“ƒfƒbƒNƒX n3 : Žæ“¾‚·‚é‚à‚Ì n4 : Žæ“¾‚·‚éƒAƒCƒeƒ€‚̏ó‘Ô %inst Žå‚ɃAƒCƒeƒ€‚̃Cƒ“ƒfƒbƒNƒX‚ðŽæ“¾‚µ‚Ü‚·B n3‚ňȉº‚Ì’l‚ð‘ã“ü‚·‚邯޿“¾‚·‚é‚à‚Ì‚ª•Ï‚í‚Á‚Ä‚«‚Ü‚·B ^ ^p n3‚Ì’l n4‚Ì’l 1 n2‚̏ã‚̃AƒCƒeƒ€‚Ìindex‚ðŽæ“¾ 0 or Žæ“¾‚·‚éƒAƒCƒeƒ€‚̏ó‘Ô 2 n2‚̉º‚̃AƒCƒeƒ€‚Ìindex‚ðŽæ“¾ 0 or Žæ“¾‚·‚éƒAƒCƒeƒ€‚̏ó‘Ô 3 n2‚̍¶‚̃AƒCƒeƒ€‚Ìindex‚ðŽæ“¾ 0 or Žæ“¾‚·‚éƒAƒCƒeƒ€‚̏ó‘Ô 4 n2‚̉E‚̃AƒCƒeƒ€‚Ìindex‚ðŽæ“¾ 0 or Žæ“¾‚·‚éƒAƒCƒeƒ€‚̏ó‘Ô 5 n2‚̃eƒLƒXƒgŽæ“¾ •ϐ”‚̃TƒCƒY or -1 6 n2‚̃AƒCƒeƒ€‚ÉŽ‚½‚¹‚½’l‚ðŽæ“¾ 0 or 1 ^p ^ n3‚ð5‚É‚µ‚½‚Æ‚«‚Ístat‚ɃAƒCƒeƒ€‚ÉŽ‚½‚¹‚½’l‚ª“ü‚è‚Ü‚·B n3‚ð5‚É‚µ‚½‚Æ‚«n4‚ð-1‚É‚·‚邯Av1‚É“ü‚Á‚Ä‚¢‚é•¶Žš—ñ‚ðŽg‚Á‚ăAƒCƒeƒ€‚̃eƒLƒXƒg‚ðÄÝ’è‚Å‚«‚Ü‚·B n3‚ð6‚É‚µ‚½‚Æ‚«n4‚ð1‚É‚·‚邯Av1‚É“ü‚Á‚Ä‚¢‚é’l‚ðŽg‚Á‚ăAƒCƒeƒ€‚ÉŽ‚½‚¹‚½’l‚ðÄÝ’è‚Å‚«‚Ü‚·B ^ n4‚ðˆÈ‰º‚Ì’l‚É‚·‚邯n4‚ÅŽw’肵‚½ó‘Ԃ̃AƒCƒeƒ€‚ð(n2+1)”Ԗڂ̃AƒCƒeƒ€‚©‚çn3‚ÅŽw’肵‚½•ûŒü‚ÉŒü‚©‚Á‚ÄŒŸõ‚µ‚Ü‚·B(n3‚ð0‚É‚·‚邯•ûŒü‚ÍŽw’肵‚Ü‚¹‚ñ) n4‚̏ó‘Ԃ̃AƒCƒeƒ€‚ªŒ©‚‚©‚ê‚Îv1‚ɃAƒCƒeƒ€‚̃Cƒ“ƒfƒbƒNƒX‚ª‘ã“ü‚³‚ê‚Ü‚·B Œ©‚‚©‚ç‚È‚©‚Á‚½ê‡‚Ív1‚É-1‚ª‘ã“ü‚³‚ê‚Ü‚·B n2‚ð-1‚É‚·‚邯æ“ª‚̃AƒCƒeƒ€‚©‚猟õ‚µ‚Ü‚·B ^ ^p n4‚Ì’l 0 ó‘Ô‚ÍŽw’肵‚È‚¢ 1 ƒtƒH[ƒJƒX‚ª‚ ‚Á‚Ä‚¢‚é 2 ‘I‘ð‚³‚ê‚Ä‚¢‚é 4 Ø‚èŽæ‚ç‚ꂽó‘Ô 8 ƒnƒCƒ‰ƒCƒg ^p %href listadd listview %sample #include "llmod3.hsp" #include "listview.hsp" listview winx-6,winy/2,1 lv=stat listaddcl 0,"name" repeat 30 : listadd cnt,"ƒAƒCƒeƒ€ No."+cnt : loop button "check",*check alloc bf,1024*32 : bf="" mesbox bf,winx-6,winy-csry-26 _objsel lv stop *check bf="" : notesel bf : start=-1 ;æ“ª‚©‚猟õ repeat -1 listget chk,start,0,2 ;‘I‘ðó‘Ԃ̃AƒCƒeƒ€‚ð’T‚· if chk=-1 : break ;Œ©‚‚©‚ç‚È‚¢ê‡-1‚ª‘ã“ü‚³‚ê‚é listget s,chk,5 ;Œ©‚‚©‚Á‚½ƒAƒCƒeƒ€‚̃eƒLƒXƒg‚ðŽæ“¾ noteadd s+"‚Í‘I‘ð‚³‚ê‚Ä‚¢‚Ü‚·" start=chk loop objprm 1,bf stop %index listicon ƒŠƒXƒgƒrƒ…[‚̃AƒCƒRƒ“‚ðÝ’è %group ƒIƒuƒWƒFƒNƒg§Œä–½—ß %prm v1,n2,n3 v1 : ƒAƒCƒRƒ“‚ª“ü‚Á‚½”’l•ϐ”(get_icon‚Ŏ擾‚µ‚½‚à‚Ì) n2 : ƒAƒCƒRƒ“‚̐” n3 : ƒZƒbƒg‚·‚éƒAƒCƒRƒ“‚̑召 %inst ƒŠƒXƒgƒrƒ…[‚̃AƒCƒeƒ€‚̃AƒCƒRƒ“‚ðÝ’肵‚Ü‚·B ƒAƒCƒRƒ“‚ðƒZƒbƒg‚·‚é‚ɂ́Aimgƒ‚ƒWƒ…[ƒ‹‚ª•K—v‚Å‚·B ^ ¦•K‚¸Alistview.hsp‚æ‚èæ‚Éimg.hsp‚ðƒCƒ“ƒNƒ‹[ƒh‚µ‚Ä‚­‚¾‚³‚¢B ^ n3‚ð0‚É‚·‚邯‘å‚«‚¢ƒAƒCƒRƒ“‚ð•\ަ‚·‚邯‚«‚̃AƒCƒRƒ“‚ðƒZƒbƒg‚µAn3‚ð1‚É‚·‚邯¬‚³‚¢ƒAƒCƒRƒ“‚ð•\ަ‚·‚邯‚«‚̃AƒCƒRƒ“‚ðƒZƒbƒg‚µ‚Ü‚·B ^ ‚±‚Ì–½—ß‚ðŒÄ‚яo‚µ‚½ŒãA‚Ìstat‚ɃZƒbƒg‚µ‚½ƒAƒCƒRƒ“‚ɂ‚¢‚Ă̏î•ñ‚ª“ü‚Á‚½“Á•ʂȒl‚ª‘ã“ü‚³‚ê‚Ü‚·B(ƒCƒ[ƒWƒŠƒXƒg‚ƌĂ΂ê‚é‚à‚̂ł·B) ^ listview‚Ítreebox‚ƈá‚Á‚āA_clrobj‚ȂǂÅlistview‚ªÁ‹Ž‚³‚ê‚邯AƒAƒCƒRƒ“‚ðƒZƒbƒg‚µ‚ďÁ”‚½ƒƒ‚ƒŠ‚ðŽ©“®“I‚ÉŠJ•ú‚·‚é‚̂ŁA destroy_imglist 'listiconŽÀsŒã‚Éstat‚É‘ã“ü‚³‚ꂽ’l' ‚ðŽÀs‚·‚é•K—v‚Í‚ ‚è‚Ü‚¹‚ñB 
‚½‚¾‚µAlistview‚̃Xƒ^ƒCƒ‹‚É'ƒZƒbƒg‚µ‚½ƒCƒ[ƒWƒŠƒXƒg‚ðlistviewÁ‹ŽŽž‚ɏÁ‹Ž‚µ‚È‚¢'‚ðŽw’肵‚Ä‚¢‚éê‡‚́Aƒƒ‚ƒŠ‚ÍŽ©“®“I‚ÉŠJ•ú‚³‚ê‚Ü‚¹‚ñB‚±‚̂Ƃ«‚́A destroy_imglist 'listiconŽÀsŒã‚Éstat‚É‘ã“ü‚³‚ꂽ’l' ‚ðŽÀs‚µ‚Ä‚­‚¾‚³‚¢B %href listadd listview %sample #include "llmod3.hsp" #include "img.hsp" ;listicon‚ðŽg‚¤Žž‚Í•K—v(listview.hsp‚æ‚è‘O‚É) #include "listview.hsp" ;‚Ü‚¸ƒŠƒXƒgƒrƒ…[‚ðì¬ listview 300,200,1|2|4|$20 ^ ;shell32.dll‚Ì3”Ô–Ú‚Æ4”Ԗڂ̃AƒCƒRƒ“(¬‚³‚¢•û)‚ðŽg‚¤ ;(*get_icon‚ɂ‚¢‚Ä‚Íimg.hsp‚Ìà–¾‚ð“Ç‚ñ‚Å‚­‚¾‚³‚¢B) filename="shell32.dll" get_icon icons.0,filename,3,1 get_icon icons.1,filename,4,1 ^ listicon icons,2 img_id=stat ;ƒZƒbƒg‚µ‚½ƒAƒCƒRƒ“‚ɂ‚¢‚Ă̏î•ñ‚ª“ü‚Á‚½“Á•ʂȒl ^ ;ƒAƒCƒeƒ€‚ð’ljÁ ;0s–Ú‚Ì0”Ô–Ú‚Ì—“‚ɃAƒCƒRƒ“”ԍ†1‚̃AƒCƒRƒ“‚ð•t‚¯‚½ƒAƒCƒeƒ€‚ð’ljÁ listadd 0,"folder 1",0,1 itm1=stat %index listmax ƒŠƒXƒgƒrƒ…[‚̃AƒCƒeƒ€‚̐”‚ðŽæ“¾ %group ƒIƒuƒWƒFƒNƒg§Œä–½—ß %prm v1,n2 v1 : ƒŠƒXƒgƒrƒ…[“à‚̃AƒCƒeƒ€‚̐”‚ð“ü‚ê‚鐔’l•ϐ” n2 : ƒtƒ‰ƒO %inst ƒŠƒXƒgƒrƒ…[“à‚̃AƒCƒeƒ€‚̐”‚ðŽæ“¾‚µ‚Ü‚·B n2‚ð1‚É‚·‚邯ƒŠƒXƒgƒrƒ…[“à‚ÉŠ®‘S‚ÉŒ©‚¦‚Ä‚¢‚éƒAƒCƒeƒ€‚̐”‚ðŽæ“¾‚µ‚Ü‚·B %href listview %index listhit ƒ}ƒEƒX‚̉º‚É‚ ‚éƒAƒCƒeƒ€‚ðŽæ“¾ %group ƒIƒuƒWƒFƒNƒg§Œä–½—ß %inst ƒ}ƒEƒX‚̉º‚É‚ ‚éƒAƒCƒeƒ€‚̃Cƒ“ƒfƒbƒNƒX‚ðŽæ“¾‚µ‚Ü‚·B ^ stat‚Ƀ}ƒEƒX‚̉º‚É‚ ‚éƒAƒCƒeƒ€‚̃Cƒ“ƒfƒbƒNƒX‚ª‘ã“ü‚³‚ê‚Ü‚·B ƒ}ƒEƒX‚̉º‚ɃAƒCƒeƒ€‚ª–³‚¯‚ê‚Î-1‚ª‘ã“ü‚³‚ê‚Ü‚·B %href listadd listview %index sel_listview ‘€ì‚·‚郊ƒXƒgƒrƒ…[‚ð‘I‘ð %group ƒIƒuƒWƒFƒNƒg§Œä–½—ß %prm n1 n1 : ƒŠƒXƒgƒrƒ…[‚ÌID %inst ‚±‚Ì–½—ߎg—pŒã‚Ín1‚ÅŽw’肵‚½ƒŠƒXƒgƒrƒ…[‚ɑ΂µ‚ÄŠe‘€ì‚ðs‚¢‚Ü‚·B ƒŠƒXƒgƒrƒ…[V‹K‚ɍ쐬‚·‚邯AŽ©“®“I‚É‚»‚ÌƒŠƒXƒgƒrƒ…[‚ªŠe‘€ì‚̑ΏۂɂȂè‚Ü‚·B •¡”‚ÌƒŠƒXƒgƒrƒ…[‚ðì¬‚µ‚½‚Æ‚«‚́A‚±‚Ì–½—߂ő€ì‚·‚郊ƒXƒgƒrƒ…[‚ð‘I‘ð‚µ‚Ä‚­‚¾‚³‚¢B %href listview %index progbox ƒvƒƒOƒŒƒXƒ{ƒbƒNƒX‚̍쐬 %group Šg’£“üo—͐§Œä–½—ß %prm n1,n2,n3 n1 : • n2 : ‚‚³ n3 : ƒXƒ^ƒCƒ‹ %inst ƒvƒƒOƒŒƒXƒ{ƒbƒNƒX‚ðì¬‚µ‚Ü‚·B ƒvƒƒOƒŒƒXƒ{ƒbƒNƒX‚́Ai’»ó‹µ‚ð•\ަ‚·‚é‚̂ɕ֗˜‚Å‚·B ^ ƒXƒ^ƒCƒ‹(comctl32.dll‚̃o[ƒWƒ‡ƒ“‚ª4.70ˆÈã‚łȂ¯‚ê‚Ζ³Œø‚Å‚·) ^p n4‚Ì’l 1 i‚Ý•û‚ªƒXƒ€[ƒY‚ɂȂè‚Ü‚·B 4 ‚’¼‚ɖڐ·‚肪i‚݂܂·B ^p ‚±‚Ì–½—ß‚ðŒÄ‚яo‚µ‚½ŒãAstat‚ɃvƒƒOƒŒƒXƒ{ƒbƒNƒX‚ÌID‚ª‘ã“ü‚³‚ê‚Ü‚·B ^ ¦progbox‚ÍHSP•W€–½—ß‚Ìcls‚âscreen–½—߂ł͏Á‹Ž‚³‚ê‚Ü‚¹‚ñB_cls‚Ü‚½‚Í_clrobj‚ðŽg—p‚µ‚Ä‚­‚¾‚³‚¢B %href progbox progrng progset sel_progbox %index sel_progbox ‘€ì‚·‚éƒvƒƒOƒŒƒXƒ{ƒbƒNƒX‚ð‘I‘ð %group ƒIƒuƒWƒFƒNƒg§Œä–½—ß %prm n1 n1 : ƒvƒƒOƒŒƒXƒ{ƒbƒNƒX‚ÌID %inst ‚±‚Ì–½—ߎg—pŒã‚Ín1‚ÅŽw’肵‚½ƒvƒƒOƒŒƒXƒ{ƒbƒNƒX‚ɑ΂µ‚ÄŠe‘€ì‚ðs‚¢‚Ü‚·B ƒvƒƒOƒŒƒXƒ{ƒbƒNƒXV‹K‚ɍ쐬‚·‚邯AŽ©“®“I‚É‚»‚̃vƒƒOƒŒƒXƒ{ƒbƒNƒX‚ªŠe‘€ì‚̑ΏۂɂȂè‚Ü‚·B •¡”‚̃vƒƒOƒŒƒXƒ{ƒbƒNƒX‚ðì¬‚µ‚½‚Æ‚«‚́A‚±‚Ì–½—߂ő€ì‚·‚éƒvƒƒOƒŒƒXƒ{ƒbƒNƒX‚ð‘I‘ð‚µ‚Ä‚­‚¾‚³‚¢B %href progbox %index progrng ƒvƒƒOƒŒƒXƒ{ƒbƒNƒX‚͈̔͂ðÝ’è %group ƒIƒuƒWƒFƒNƒg§Œä–½—ß %prm n1,n2,n3 n1 : Å¬’l n2 : Å‘å’l n3 : Å‘å’l‚ÌŠg’£ƒtƒ‰ƒO %inst ƒvƒƒOƒŒƒXƒ{ƒbƒNƒX‚͈̔͂̐ݒè‚ð‚µ‚Ü‚·B Ý’è‚Å‚«‚é’l‚́AÅ¬’l‚ª0ˆÈãAÅ‘å’l‚Í65535ˆÈ‰º‚Å‚·B n3‚É1‚ð‘ã“ü‚·‚邯Å‘å’l‚ª$ffffffff‚܂łɂȂè‚Ü‚·B‚½‚¾‚µ‚±‚̐ݒè‚Ícomctl32.dll‚̃o[ƒWƒ‡ƒ“‚ª4.70ˆÈã‚łȂ¯‚ê‚Ζ³Œø‚Å‚·B ^ ‚±‚Ì–½—ß‚ðŒÄ‚яo‚µ‚½Œã‚Ìstat‚Ì’l ˆÈ‘OƒZƒbƒg‚µ‚Ä‚¢‚½”ÍˆÍ [ Å¬’l | (Å‘å’l<<16)]‚ÌŒ`Ž® (n3=0‚̂Ƃ«‚Ì‚Ý) %href progbox %index progset ƒvƒƒOƒŒƒXƒ{ƒbƒNƒX‚ÌŠeÝ’è %group ƒIƒuƒWƒFƒNƒg§Œä–½—ß %prm n1,n2 n1 : ƒZƒbƒg‚·‚é’l n2 : ƒZƒbƒg‚·‚éƒ^ƒCƒv %inst ƒvƒƒOƒŒƒXƒ{ƒbƒNƒX‚ÌŠeÝ’è‚ðs‚¢‚Ü‚·B ƒZƒbƒg‚·‚éƒ^ƒCƒv‚͈ȉº‚Ì’Ê‚è‚Å‚·B ^p –Ú“I n1‚Ì’l n2‚Ì’l progsetŽÀsŒã‚Ìstat‚Ì’l i‚Ü‚¹‚é 0 0 i‚Ü‚¹‚é‘O‚̈ʒu i‚Ü‚¹‚é—ʂ̃Zƒbƒg i‚Ü‚¹‚é—Ê 0 ˆÈ‘OƒZƒbƒg‚µ‚½i‚Ü‚¹‚é—Ê ˆÊ’u‚ðƒZƒbƒg ƒZƒbƒg‚·‚éˆÊ’u 1 ƒZƒbƒg‚·‚é‘O‚̈ʒu “Á’è‚̗ʂ𑝉Á‚³‚¹‚é ‘‰Á‚³‚¹‚é—Ê 2 ‘‰Á‚³‚¹‚é‘O‚̈ʒu ^ (ˆÈ‰º‚Ì‚à‚Ì‚Ícomctl32.dll‚̃o[ƒWƒ‡ƒ“‚ª4.70ˆÈã‚łȂ¯‚ê‚Ζ³Œø‚Å‚·) –ڐ·‚è‚̐F F 3 ƒZƒbƒg‚·‚é‘O‚̐F –ڐ·‚è‚ÌŒã‚ë‚̐F F 4 
ƒZƒbƒg‚·‚é‘O‚̐F (F‚Í r|(g<<8)|(g<<16) ‚ÌŒ`Ž®) (’l‚ª 0 ‚ɂȂÁ‚Ä‚¢‚鏊‚͏ȗª‚Å‚«‚é) ^p stat‚Ì’l‚ª'ƒZƒbƒg‚·‚é‘O‚̐F'‚ƂȂÁ‚Ä‚¢‚鏊‚́AƒZƒbƒg‚·‚é‘O‚̐F‚ªƒVƒXƒeƒ€‚̃fƒtƒHƒ‹ƒg‚̐F‚¾‚Á‚½ê‡ -16777216($ff000000)‚ªstat‚É‘ã“ü‚³‚ê‚Ü‚·B‚Ü‚½AF‚ðƒZƒbƒg‚·‚邯‚«‚É$ff000000‚ðŽg‚¤‚ƃVƒXƒeƒ€‚̃fƒtƒHƒ‹ƒg‚̐F‚ɐݒè‚Å‚«‚Ü‚·B %href progrng progbox %sample progbox 300,20 ;•300,‚‚³20‚̃vƒƒOƒŒƒXƒ{ƒbƒNƒX‚ðì‚é progrng 0,200 ;”͈͂Í0‚©‚ç200 ^ *lp progset ;–ڐ·‚è‚ði‚Ü‚¹‚é ; ; goto lp %index trackbox ƒgƒ‰ƒbƒNƒ{ƒbƒNƒX‚̍쐬 %group –½—ߊT—v %prm n1,n2,n3 n1 : • n2 : ‚‚³ n3 : ƒXƒ^ƒCƒ‹ %inst ƒgƒ‰ƒbƒNƒ{ƒbƒNƒX‚ðì¬‚µ‚Ü‚·B ƒgƒ‰ƒbƒNƒ{ƒbƒNƒX‚Æ‚Í‚Â‚Ü‚Ý‚ðŽ‚Á‚½ƒIƒuƒWƒFƒNƒg‚Å‚·B Œˆ‚Ü‚Á‚½‘I‘ð”͈͂̒†‚©‚琔’l‚𓾂½‚¢‚Æ‚«‚ȂǂɎg‚¤‚̂ɕ֗˜‚Å‚·B ^ ‚±‚Ì–½—ß‚ðŒÄ‚яo‚µ‚½ŒãAstat‚Ƀgƒ‰ƒbƒNƒ{ƒbƒNƒX‚ÌID‚ª‘ã“ü‚³‚ê‚Ü‚·B ^ trackbox‚̃Xƒ^ƒCƒ‹(n4)‚͈ȉº‚Ì’l‚ð‘g‚ݍ‡‚킹‚ÄŽ‚‚±‚Æ‚ª‚Å‚«‚Ü‚·B —Ⴆ‚΁A n4=2|8|$100 ;‚Ü‚½‚Í n4=2+8+$100 trackbox 200,70,n4 ‚Ȃ琂’¼‚Å—¼‘¤‚ɖڐ·‚肪•t‚«Aƒc[ƒ‹ƒ`ƒbƒv‚ðŽ‚ÂƒXƒ^ƒCƒ‹‚ɂȂè‚Ü‚·B ^p n4‚Ì’l 0 …•½ 1 Ž©“®‚Ŗڐ·‚è‚ð•t‚¯‚é 2 ‚’¼ 4 '…•½'‚̂Ƃ« –ڐ·‚è‚ðã‘¤‚É•t‚¯‚é '‚’¼'‚̂Ƃ« –ڐ·‚è‚ð¶‘¤‚É•t‚¯‚é 8 –ڐ·‚è‚𗼑¤‚É•t‚¯‚é $10 –ڐ·‚è‚ð•t‚¯‚È‚¢ $20 ‚‚܂݂̕”•ª‚É‘I‘ð—̈æ‚ð•t‚¯‚é $40 $20 ‚̃Xƒ^ƒCƒ‹‚̂Ƃ«A‚‚܂݂ª“®‚¢‚Ä‚à‘I‘ð—̈æ‚ðŒÅ’肵‚½‚܂܂ɂ·‚é $80 ‚‚܂݂Ȃµ $100 ‚‚܂݂ªƒNƒŠƒbƒN‚³‚ê‚Ä‚¢‚邯‚«ƒc[ƒ‹ƒ`ƒbƒv‚ð•t‚¯‚é(‚±‚̃Xƒ^ƒCƒ‹‚Í comctl32.dll‚̃o[ƒWƒ‡ƒ“‚É‚æ‚Á‚Ă͌ø‰Ê‚ª‚È‚¢ê‡‚ª‚ ‚è‚Ü‚·B) ^p ^ ¦trackbox‚ÍHSP•W€–½—ß‚Ìcls‚âscreen–½—߂ł͏Á‹Ž‚³‚ê‚Ü‚¹‚ñB_cls‚Ü‚½‚Í_clrobj‚ðŽg—p‚µ‚Ä‚­‚¾‚³‚¢B %href trackpos trackrng trackmrk tracksel sel_trackbox %index sel_trackbox ‘€ì‚·‚éƒgƒ‰ƒbƒNƒ{ƒbƒNƒX‚ð‘I‘ð %group –½—ߊT—v %prm n1 n1 : ƒgƒ‰ƒbƒNƒ{ƒbƒNƒX‚ÌID %inst ‚±‚Ì–½—ߎg—pŒã‚Ín1‚ÅŽw’肵‚½ƒgƒ‰ƒbƒNƒ{ƒbƒNƒX‚ɑ΂µ‚ÄŠe‘€ì‚ðs‚¢‚Ü‚·B ƒgƒ‰ƒbƒNƒ{ƒbƒNƒXV‹K‚ɍ쐬‚·‚邯AŽ©“®“I‚É‚»‚̃gƒ‰ƒbƒNƒ{ƒbƒNƒX‚ªŠe‘€ì‚̑ΏۂɂȂè‚Ü‚·B •¡”‚̃gƒ‰ƒbƒNƒ{ƒbƒNƒX‚ðì¬‚µ‚½‚Æ‚«‚́A‚±‚Ì–½—߂ő€ì‚·‚éƒgƒ‰ƒbƒNƒ{ƒbƒNƒX‚ð‘I‘ð‚µ‚Ä‚­‚¾‚³‚¢B %href trackbox %index trackpos ƒgƒ‰ƒbƒNƒ{ƒbƒNƒX‚̂‚܂݂̈ʒu‚ÌÝ’è‚ÆŽæ“¾ %group –½—ߊT—v %prm n1,n2 n1 : Ý’è‚·‚éˆÊ’u n2 : ˆÊ’u‚ðŽæ“¾‚·‚éƒtƒ‰ƒO %inst ƒgƒ‰ƒbƒNƒ{ƒbƒNƒX‚̂‚܂݂̈ʒu‚ðÝ’肵‚Ü‚·B n2‚ð1‚É‚·‚邯‚‚܂݂̈ʒu‚ðŽæ“¾‚µ‚Ü‚·B ^ ‚±‚Ì–½—ß‚ðŒÄ‚яo‚µ‚½Œã‚Ìstat‚Ì’l n2‚ð1‚É‚·‚邯stat‚ɂ‚܂݂̈ʒu‚ª‘ã“ü‚³‚ê‚Ü‚·B ‚»‚êˆÈŠO‚Ístat‚Í0‚ɂȂè‚Ü‚·B %href trackrng trackbox %index trackrng ƒgƒ‰ƒbƒNƒ{ƒbƒNƒX‚Ì”ÍˆÍ‚ÌÝ’è‚ÆŽæ“¾ %group –½—ߊT—v %prm n1,n2,n3 n1 : Å¬’l n2 : Å‘å’l n3 : ”ÍˆÍ‚ðŽæ“¾‚·‚éƒtƒ‰ƒO %inst ƒgƒ‰ƒbƒNƒ{ƒbƒNƒX‚͈̔͂ðÝ’èAŽæ“¾‚µ‚Ü‚·B n3‚ð1‚É‚·‚邯Ý’è”͈͂̒l‚ðŽæ“¾‚µ‚Ü‚·B ^ ‚±‚Ì–½—ß‚ðŒÄ‚яo‚µ‚½Œã‚Ìstat‚Ì’l n3‚ð1‚É‚µ‚āAn1‚ð1‚É‚·‚邯trackbox‚̍ŏ¬ˆÊ’uAn2‚ð1‚É‚·‚邯Å‘åˆÊ’u‚ªstat‚É‘ã“ü‚³‚ê‚Ü‚·B ‚»‚êˆÈŠO‚Ístat‚Í0‚ɂȂè‚Ü‚·B %href trackpos trackbox %index trackmrk ƒgƒ‰ƒbƒNƒ{ƒbƒNƒX‚̖ڐ·‚è‚ÌÝ’è‚ÆŽæ“¾ %group –½—ߊT—v %prm n1,n2 n1 : –ڐ·‚è‚ð‚‚¯‚éˆÊ’u n2 : ƒtƒ‰ƒO %inst ƒgƒ‰ƒbƒNƒ{ƒbƒNƒX‚̖ڐ·‚è‚̈ʒu‚ðÝ’èAŽæ“¾‚µ‚Ü‚·B ^ n2‚Ì’l ‹@”\ 0 n1‚ÅŽw’肵‚½ˆÊ’u‚ɖڐ·‚è‚ð•t‚¯‚Ü‚·B 1 •t‚¯‚ç‚ê‚Ä‚¢‚é–ڐ·‚è‚̐”‚ðŽæ“¾‚µ‚Ü‚·B 2 ŠÔŠu‚ðn1‚ÅŽw’肵‚½’l‚É‚µ‚Ėڐ·‚è•t‚¯‚Ü‚·B‚½‚¾‚µAƒgƒ‰ƒbƒNƒ{ƒbƒNƒX‚̃Xƒ^ƒCƒ‹‚ð'Ž©“®‚Ŗڐ·‚è‚ð•t‚¯‚é'(1)‚É‚µ‚½‚Æ‚«‚̂ݗLŒø‚Å‚·B 3 n1”Ԗڂ̖ڐ·‚è‚̈ʒu‚ðŽæ“¾‚µ‚Ü‚·B 4 •t‚¯‚ç‚ê‚Ä‚¢‚é–ڐ·‚è‚ð‚·‚×‚ÄƒNƒŠƒA‚µ‚Ü‚·B %href trackbox %index tracksel ƒgƒ‰ƒbƒNƒ{ƒbƒNƒX‚Ì‘I‘ð”ÍˆÍ‚ÌÝ’è‚ÆŽæ“¾ %group –½—ߊT—v %prm n1,n2,n3 n1 : Å¬’l n2 : Å‘å’l n3 : ƒtƒ‰ƒO %inst ƒgƒ‰ƒbƒNƒ{ƒbƒNƒX‚Ì‘I‘ð”͈͂ðÝ’èAŽæ“¾‚µ‚Ü‚·B ‚½‚¾‚µAƒgƒ‰ƒbƒNƒ{ƒbƒNƒXì¬Žž‚É'‚‚܂݂̕”•ª‚É‘I‘ð—̈æ‚ð•t‚¯‚é'($20)‚Æ‚¢‚¤ ƒXƒ^ƒCƒ‹‚ðŽw’肵‚Ä‚¢‚éê‡‚ÉŒÀ‚è‚Ü‚·B ^p n3‚ð0‚É‚·‚邯n1-n2‚͈̔͂ð‘I‘ð‚µ‚Ü‚·B n3‚ð1‚É‚·‚邯ŠJŽnˆÊ’u(n1‚Ì’l)‚݂̂ðƒZƒbƒg‚µ‚Ü‚·B n3‚ð2‚É‚·‚邯I—¹ˆÊ’u(n2‚Ì’l)‚݂̂ðƒZƒbƒg‚µ‚Ü‚·B n3‚ð3‚É‚·‚邯‘I‘ð”͈͂ðƒNƒŠƒA‚µ‚Ü‚·B ^p ‚±‚Ì–½—ß‚ðŒÄ‚яo‚µ‚½Œã‚Ìstat‚Ì’l n1An2‚ð‚Æ‚à‚É0‚·‚邯‘I‘ðŠJŽnˆÊ’u‚ªstat‚É‘ã“ü‚³‚ê‚Ü‚·B 
n1An2‚ð‚Æ‚à‚É1‚·‚邯‘I‘ðI—¹ˆÊ’u‚ªstat‚É‘ã“ü‚³‚ê‚Ü‚·B %href trackbox %index treemax ƒcƒŠ[ƒ{ƒbƒNƒX‚̃AƒCƒeƒ€‚̐”‚ðŽæ“¾ %group ƒIƒuƒWƒFƒNƒg§Œä–½—ß %prm v1,n2 v1 : ƒcƒŠ[ƒ{ƒbƒNƒX“à‚̃AƒCƒeƒ€‚̐”‚ð“ü‚ê‚鐔’l•ϐ” n2 : ƒtƒ‰ƒO %inst ƒcƒŠ[ƒ{ƒbƒNƒX“à‚̃AƒCƒeƒ€‚̐”‚ðŽæ“¾‚µ‚Ü‚·B n2‚ð1‚É‚·‚邯ƒcƒŠ[ƒ{ƒbƒNƒX“à‚ÉŠ®‘S‚ÉŒ©‚¦‚Ä‚¢‚éƒAƒCƒeƒ€‚̐”‚ðŽæ“¾‚µ‚Ü‚·B %href treebox %index treesel ƒcƒŠ[ƒ{ƒbƒNƒX‚̃AƒCƒeƒ€‚ð‘I‘ð %group ƒIƒuƒWƒFƒNƒg§Œä–½—ß %prm n1,n2 n1 : ƒAƒCƒeƒ€‚ÌID n2 : ‘I‘ðó‘Ԃ̃^ƒCƒv %inst n1‚ÅŽw’肵‚½ƒAƒCƒeƒ€‚ð‘I‘ð‚µ‚Ü‚·B n2‚É$10‚ð‘«‚·‚ƃAƒCƒeƒ€‚ªŽq‚ð‚à‚Á‚Ä‚¢‚邯‚«A‚»‚̃AƒCƒeƒ€‚ðŽûk‚µ‚Ü‚·B n2‚É$20‚ð‘«‚·‚ƃAƒCƒeƒ€‚ªŽq‚ð‚à‚Á‚Ä‚¢‚邯‚«A‚»‚̃AƒCƒeƒ€‚ð“WŠJ‚µ‚Ü‚·B ^ ^p n2‚Ì’l ‘I‘ðó‘Ô 0 •’ʂ̃^ƒCƒv 1 DragDrop‚Ìtarget‚̂悤‚ȃ^ƒCƒv 2 item‚ðæ“ª‚ÉŽ‚Á‚Ä‚¢‚Á‚Ä‘I‘ð(‰Æ‚̊‹«‚ł͂Ȃç‚È‚¢‚悤‚È... tom) +$10 “WŠJ +$20 Žûk +$30 ‹t‚̏ó‘Ô‚É‚·‚é ^p %href treebox treeadd %index treedel ƒcƒŠ[ƒ{ƒbƒNƒX‚̃AƒCƒeƒ€‚ðÁ‹Ž %group ƒIƒuƒWƒFƒNƒg§Œä–½—ß %prm n1 n1 : ƒAƒCƒeƒ€‚ÌID %inst n1‚ÅŽw’肵‚½ƒAƒCƒeƒ€‚ðÁ‹Ž‚µ‚Ü‚·B %href treeadd treebox %index treesort ƒcƒŠ[ƒ{ƒbƒNƒX‚̃AƒCƒeƒ€‚ðƒ\[ƒg %group ƒIƒuƒWƒFƒNƒg§Œä–½—ß %prm n1 n1 : ƒAƒCƒeƒ€‚ÌID %inst n1‚ÅŽw’肵‚½ƒAƒCƒeƒ€‚ÌŽq‚ðƒ\[ƒg‚µ‚Ü‚·B n1‚ðÈ—ª‚·‚邯ƒgƒbƒv‚̃AƒCƒeƒ€‚ðƒ\[ƒg‚µ‚Ü‚·B %href treeadd treebox %index treeicon ƒcƒŠ[ƒ{ƒbƒNƒX‚̃AƒCƒRƒ“‚ðÝ’è %group ƒIƒuƒWƒFƒNƒg§Œä–½—ß %prm v1,n2 v1 : ƒAƒCƒRƒ“‚ª“ü‚Á‚½”’l•ϐ”(get_icon‚Ŏ擾‚µ‚½‚à‚Ì) n2 : ƒAƒCƒRƒ“‚̐” %inst ƒcƒŠ[ƒ{ƒbƒNƒX‚̃AƒCƒeƒ€‚̃AƒCƒRƒ“‚ðÝ’肵‚Ü‚·B ƒAƒCƒRƒ“‚ðƒZƒbƒg‚·‚é‚ɂ́Aimgƒ‚ƒWƒ…[ƒ‹‚ª•K—v‚Å‚·B ^ ¦•K‚¸Atreebox.hsp‚æ‚èæ‚Éimg.hsp‚ðƒCƒ“ƒNƒ‹[ƒh‚µ‚Ä‚­‚¾‚³‚¢B ^ ‚±‚Ì–½—ߎÀsŒã‚Éstat‚ɃZƒbƒg‚µ‚½ƒAƒCƒRƒ“‚ɂ‚¢‚Ă̏î•ñ‚ª“ü‚Á‚½“Á•ʂȒl‚ª•Ô‚³‚ê‚Ü‚·B(ƒCƒ[ƒWƒŠƒXƒg‚ƌĂ΂ê‚é‚à‚̂ł·B) treebox‚ª‚¢‚ç‚È‚­‚È‚Á‚Ätreebox‚ðÁ‹Ž‚µ‚½‚Æ‚«‚͂Ȃé‚ׂ­ destroy_imglist 'treeiconŽÀsŒã‚Éstat‚É‘ã“ü‚³‚ꂽ’l' ‚ðŽÀs‚µ‚Ä‚­‚¾‚³‚¢B (ƒAƒCƒRƒ“‚ðƒZƒbƒg‚·‚邯ƒƒ‚ƒŠ‚ðÁ”‚Ü‚·Bdestroy_imglist‚ðŽÀs‚·‚邱‚ƂŃZƒbƒg‚µ‚ďÁ”‚½ƒƒ‚ƒŠ‚ðŠJ•ú‚µ‚Ü‚·B) treebox‚ª‚Ü‚¾‚ ‚邤‚¿‚Édestroy_imglist‚ðŽÀs‚·‚邯AƒZƒbƒg‚³‚ꂽƒAƒCƒRƒ“‚Í–³Œø‚ɂȂè‚Ü‚·B %href treeadd treebox %sample #include "llmod3.hsp" #include "img.hsp" ;treeicon‚ðŽg‚¤Žž‚Í•K—v(treebox.hsp‚æ‚è‘O‚É) #include "treebox.hsp" ^ treebox 300,200,1|2|4|$20 ^ filename="shell32.dll" get_icon icons.0,filename,3,1 get_icon icons.1,filename,4,1 treeicon icons,2 img_id=stat ;ƒZƒbƒg‚µ‚½ƒAƒCƒRƒ“‚ɂ‚¢‚Ă̏î•ñ‚ª“ü‚Á‚½“Á•ʂȒl ^ treeadd 0,"folder 1",,0,1 itm1=stat %index treehit ƒ}ƒEƒX‚̉º‚É‚ ‚éƒAƒCƒeƒ€‚ðŽæ“¾ %group ƒIƒuƒWƒFƒNƒg§Œä–½—ß %inst ƒ}ƒEƒX‚̉º‚É‚ ‚éƒAƒCƒeƒ€‚ÌID‚ðŽæ“¾‚µ‚Ü‚·B ^ stat‚Ƀ}ƒEƒX‚̉º‚É‚ ‚éƒAƒCƒeƒ€‚ÌID‚ª‘ã“ü‚³‚ê‚Ü‚·B ƒ}ƒEƒX‚̉º‚ɃAƒCƒeƒ€‚ª–³‚¯‚ê‚Î0‚ª‘ã“ü‚³‚ê‚Ü‚·B %href treebox %index sel_treebox ‘€ì‚·‚éƒcƒŠ[ƒ{ƒbƒNƒX‚ð‘I‘ð %group ƒIƒuƒWƒFƒNƒg§Œä–½—ß %prm n1 n1 : ƒcƒŠ[ƒ{ƒbƒNƒX‚ÌID %inst ‚±‚Ì–½—ߎg—pŒã‚Ín1‚ÅŽw’肵‚½ƒcƒŠ[ƒ{ƒbƒNƒX‚ɑ΂µ‚ÄŠe‘€ì‚ðs‚¢‚Ü‚·B V‹K‚ɃcƒŠ[ƒ{ƒbƒNƒX‚ðì¬‚·‚邯AŽ©“®“I‚É‚»‚̃cƒŠ[ƒ{ƒbƒNƒX‚ªŠe‘€ì‚̑ΏۂɂȂè‚Ü‚·B •¡”‚̃cƒŠ[ƒ{ƒbƒNƒX‚ðì¬‚µ‚½‚Æ‚«‚́A‚±‚Ì–½—߂ő€ì‚·‚éƒcƒŠ[ƒ{ƒbƒNƒX‚ð‘I‘ð‚µ‚Ä‚­‚¾‚³‚¢B %href treebox %index treeget ƒcƒŠ[ƒ{ƒbƒNƒX‚ÌŠeî•ñ‚ðŽæ“¾ %group ƒIƒuƒWƒFƒNƒg§Œä–½—ß %prm v1,n2,n3,n4 v1 : Žæ“¾‚µ‚½‚à‚Ì‚ð“ü‚ê‚é•ϐ” n2 : Žæ“¾‚·‚éƒ^ƒCƒv n3 : n2‚É‚æ‚Á‚ĕςí‚é n4 : n2‚É‚æ‚Á‚ĕςí‚é %inst Žå‚ɃAƒCƒeƒ€‚ÌID‚ðŽæ“¾‚µ‚Ü‚·B n2‚ňȉº‚Ì’l‚ð‘ã“ü‚·‚邯޿“¾‚·‚é‚à‚Ì‚ª•Ï‚í‚Á‚Ä‚«‚Ü‚·B ^ ^p n2‚Ì’l n3‚Ì’l n4‚Ì’l 0 ‘I‘ð‚³‚ê‚Ä‚¢‚é±²ÃтÌID‚ðŽæ“¾ 0 0 1 ŽŸ‚̱²ÃтÌID(“¯‚¶ƒŒƒxƒ‹)‚ðŽæ“¾ ƒAƒCƒeƒ€‚ÌID 0 2 ‘O‚̱²ÃтÌID(“¯‚¶ƒŒƒxƒ‹)‚ðŽæ“¾ ƒAƒCƒeƒ€‚ÌID 0 3 e±²ÃтÌID‚ðŽæ“¾ ƒAƒCƒeƒ€‚ÌID 0 4 ˆê”Ô–Ú‚ÌŽq‚ǂ౲ÃтÌID‚ðŽæ“¾ ƒAƒCƒeƒ€‚ÌID 0 5 Å‰‚ÉŒ©‚¦‚Ä‚¢‚é±²ÃтÌID‚ðŽæ“¾ ƒAƒCƒeƒ€‚ÌID 0 6 ƒAƒCƒeƒ€‚̃eƒLƒXƒgŽæ“¾ ƒAƒCƒeƒ€‚ÌID •ϐ”‚̃TƒCƒY or -1 7 ƒAƒCƒeƒ€‚ÉŽ‚½‚¹‚½’l‚ðŽæ“¾ ƒAƒCƒeƒ€‚ÌID 0 or 1 -1 ƒ‹[ƒg‚ðŽæ“¾ 0 
0 ^p ^ ’l‚ª0‚ÌŠ‚ÍÈ—ª‚Å‚«‚Ü‚·B n2‚ª6‚̏ꍇAn4‚ɉ½•¶Žš‚܂Ŏ擾‚·‚é‚©Žw’肵‚Ü‚·B63•¶ŽšˆÈ“à‚È‚çÈ—ª‚Å‚«‚Ü‚·B n2‚ª6‚̂Ƃ«‚Ístat‚Étreeadd‚ŃAƒCƒeƒ€‚ÉŽ‚½‚¹‚½’l‚ª‘ã“ü‚³‚ê‚Ü‚·B ^ n2‚ð6‚É‚µ‚½‚Æ‚«n4‚ð-1‚É‚·‚邯Av1‚É“ü‚Á‚Ä‚¢‚é•¶Žš—ñ‚ðŽg‚Á‚ăAƒCƒeƒ€‚̃eƒLƒXƒg‚ðÄÝ’è‚Å‚«‚Ü‚·B n2‚ð7‚É‚µ‚½‚Æ‚«n4‚ð1‚É‚·‚邯Av1‚É“ü‚Á‚Ä‚¢‚é’l‚ðŽg‚Á‚ăAƒCƒeƒ€‚ÉŽ‚½‚¹‚½’l‚ðÄÝ’è‚Å‚«‚Ü‚·B %href treeadd treebox %index treeadd treebox‚ɃAƒCƒeƒ€‚ð’ljÁ %group ƒIƒuƒWƒFƒNƒg§Œä–½—ß %prm n1,"s2",n3,n4,n5,n6 n1 : eƒAƒCƒeƒ€‚ÌID s2 : ƒAƒCƒeƒ€‚̃eƒLƒXƒg n3 : ƒAƒCƒeƒ€‚̒ljÁ‚ÌŽd•û n4 : ƒAƒCƒeƒ€‚ªŽûk‚µ‚Ä‚¢‚鎞‚̃AƒCƒRƒ“”ԍ† n5 : ƒAƒCƒeƒ€‚ª“WŠJ‚µ‚Ä‚¢‚鎞‚̃AƒCƒRƒ“”ԍ† n6 : ƒAƒCƒeƒ€‚ÉŽ‚½‚¹‚鐔’l %inst ƒcƒŠ[ƒ{ƒbƒNƒX‚Én1‚ðe‚É‚µ‚ăAƒCƒeƒ€‚ð‰Á‚¦‚Ü‚·B n1‚É0‚ð‘ã“ü‚·‚邯ˆê”Ԗڂ̃AƒCƒeƒ€‚ɂȂè‚Ü‚·B n3‚ɂ͂ǂ̂悤‚ɃAƒCƒeƒ€‚ð’ljÁ‚·‚é‚©An4‚ɂ͎ûk‚µ‚Ä‚¢‚鎞‚̃AƒCƒRƒ“”ԍ†(0‚©‚ç)An5‚ɂ͓WŠJ‚µ‚Ä‚¢‚邯‚«‚̃AƒCƒRƒ“”ԍ†(0‚©‚ç)‚ð“ü‚ê‚Ü‚·B ^ ‚ǂ̂悤‚ɒljÁ‚·‚é‚©‚́A1‚Ő擪‚ÌŽq‚Æ‚µ‚ĒljÁA0‚Ü‚½‚Í2‚ňê”ԍŌã‚ÌŽq‚Æ‚µ‚ĒljÁA3‚Ń\[ƒg‚µ‚ĒljÁA‚ƂȂè‚Ü‚·B ƒAƒCƒRƒ“‚Ítreeicon‚ŃZƒbƒg‚µ‚Ü‚·B ^ n6‚É’l‚ð“ü‚ê‚ăAƒCƒeƒ€‚É‚»‚̐”’l‚ðŽ‚½‚¹‚邱‚Æ‚ª‚Å‚«‚Ü‚·B n6‚Őݒ肵‚½”’l‚Ítreeget‚Ŏ擾‚Å‚«‚Ü‚·B ^ ‚±‚Ì–½—ß‚ðŒÄ‚яo‚µ‚½ŒãAstat‚ɒljÁ‚µ‚½ƒAƒCƒeƒ€‚ÌID‚ª‘ã“ü‚³‚ê‚Ü‚·B(0‚È‚çƒGƒ‰[) %href treedel treeicon treeget treesel treebox %sample treeadd itm3,"new item",3,0,1 new_itm_id=stat %index treebox treebox‚̍쐬 %group ƒIƒuƒWƒFƒNƒg§Œä–½—ß %prm n1,n2,n3 n1 : • n2 : ‚‚³ n3 : ƒXƒ^ƒCƒ‹ %inst ƒcƒŠ[ƒ{ƒbƒNƒX‚ðì¬‚µ‚Ü‚·B ƒcƒŠ[ƒ{ƒbƒNƒX‚Ƃ̓GƒNƒXƒvƒ[ƒ‰[‚̍¶‘¤‚É‚ ‚邿‚¤‚È‚à‚̂ł·B ƒXƒ^ƒCƒ‹‚͈ȉº‚Ì’l‚ð‘g‚ݍ‡‚킹‚ÄŽg‚¤‚±‚Æ‚ª‚Å‚«‚Ü‚·B ^p n3‚Ì’l 1 ƒAƒCƒeƒ€‚ªŽq‚ðŽ‚Â‚Æ‚«'+','-'‚̃}[ƒN‚ª•t‚­ 2 ƒAƒCƒeƒ€‚ƃAƒCƒeƒ€‚ðŒq‚®ü‚ª‚‚­ 4 Å‰‚̃AƒCƒeƒ€‚ɐü‚ª•t‚­(2‚ªŽw’肳‚ê‚Ä‚¢‚邯‚«‚Ì‚Ý) 8 ¦‚±‚̐”Žš‚ÍŽg‚í‚È‚¢‚ʼnº‚³‚¢B $10 ¦‚±‚̐”Žš‚ÍŽg‚í‚È‚¢‚ʼnº‚³‚¢B $20 ƒcƒŠ[ƒ{ƒbƒNƒX‚ª‘I‘ð‚³‚ê‚Ä‚¢‚È‚¢‚Æ‚«‚É‚à‘I‘ð‚³‚ê‚Ä‚¢‚éƒAƒCƒeƒ€‚ª•ª‚©‚邿‚¤‚ɐF‚ª•t‚­ $40 ¦‚æ‚­•ª‚©‚ç‚È‚¢(tom) $80 ƒAƒCƒeƒ€‚ªƒcƒŠ[ƒ{ƒbƒNƒX‚©‚ç‚͂ݏo‚Ä‚¢‚Ä‚àƒc[ƒ‹ƒ`ƒbƒv‚ª‚‚©‚È‚¢ ¦ˆÈ‰º‚̃Xƒ^ƒCƒ‹‚Ícomctl32.dll‚̃o[ƒWƒ‡ƒ“‚É‚æ‚Á‚Ă͎g—p‚Å‚«‚È‚¢‚à‚Ì‚à‚ ‚è‚Ü‚·B $100 €–ڂɃ`ƒFƒbƒNƒ{ƒbƒNƒX‚ª•t‚­ $200 ƒ}ƒEƒX‚ªƒAƒCƒeƒ€‚̏ã‚É—ˆ‚邯‰ºü‚ª•t‚­ $400 ƒ}ƒEƒX‚ŃNƒŠƒbƒN‚³‚ꂽƒAƒCƒeƒ€‚݂̂ª“WŠJ‚³‚ê‚é $800 ¦‚æ‚­•ª‚©‚ç‚È‚¢(tom) $1000 ƒXƒ^ƒCƒ‹2 ‚ªŽw’肳‚ê‚Ä‚¢‚È‚¢‚Æ‚«A‘I‘ð‚³‚ê‚½ƒAƒCƒeƒ€‚Ì”wŒi‚ª•Ï‚í‚é $2000 ƒAƒCƒeƒ€‚ªƒcƒŠ[ƒ{ƒbƒNƒX“à‚ÉŽû‚Ü‚ç‚È‚¢Žž‚Å‚àAƒXƒNƒ[ƒ‹ƒo[‚ª•t‚©‚È‚¢ $4000 ¦‚æ‚­•ª‚©‚ç‚È‚¢(tom) $8000 …•½ƒXƒNƒ[ƒ‹ƒo[‚ª•t‚©‚È‚¢ ^p ^ ‚±‚Ì–½—ß‚ðŒÄ‚яo‚µ‚½ŒãAstat‚ɃcƒŠ[ƒ{ƒbƒNƒX‚ÌID‚ª‘ã“ü‚³‚ê‚Ü‚·B ^ ¦treebox‚ÍHSP•W€–½—ß‚Ìcls‚âscreen–½—߂ł͏Á‹Ž‚³‚ê‚Ü‚¹‚ñB_cls‚Ü‚½‚Í_clrobj‚ðŽg—p‚µ‚Ä‚­‚¾‚³‚¢B %href treeadd treedel treesel treeget treemax treesort treeicon treehit sel_treebox %index udbtn ƒAƒbƒvƒ_ƒEƒ“ƒ{ƒ^ƒ“‚̍쐬 %group –½—ߊT—v %prm n1,n2,n3,n4 n1 : • n2 : ‚‚³ n3 : ƒXƒ^ƒCƒ‹ n4 : ƒAƒbƒvƒ_ƒEƒ“ƒ{ƒ^ƒ“‚ð•t‚¯‚éƒIƒuƒWƒFƒNƒg‚ÌID %inst ƒAƒbƒvƒ_ƒEƒ“ƒ{ƒ^ƒ“‚ðì¬‚µ‚Ü‚·B ƒAƒbƒvƒ_ƒEƒ“ƒ{ƒ^ƒ“‚Ƃ͖îˆóƒ{ƒ^ƒ“‚ðŽ‚Á‚½ƒIƒuƒWƒFƒNƒg‚Å‚·B input“™‚Ì“ü—̓{ƒbƒNƒX‚©‚琔’l‚𓾂½‚¢‚Æ‚«‚ȂǂɎg‚¤‚ƕ֗˜‚Å‚·B (ˆê‰žbutton“™‚É‚à•t‚¯‚邱‚Ƃ͂ł«‚Ü‚·B) n4‚É‚Íbutton‚Ȃǂ̃IƒuƒWƒFƒNƒg‚ÌID‚ð“ü‚ê‚Ü‚·B ^ ‚±‚Ì–½—ß‚ðŒÄ‚яo‚µ‚½ŒãAstat‚ɃAƒbƒvƒ_ƒEƒ“ƒ{ƒ^ƒ“‚ÌID‚ª‘ã“ü‚³‚ê‚Ü‚·B udbtn‚̃Xƒ^ƒCƒ‹(n4)‚͈ȉº‚Ì’l‚ð‘g‚ݍ‡‚킹‚ÄŽ‚‚±‚Æ‚ª‚Å‚«‚Ü‚·B ^p n4‚Ì’l ƒXƒ^ƒCƒ‹ 1 Å¬AÅ‘å’l‚É’B‚µ‚½‚Æ‚«‚ɐ܂è•Ô‚· 4 ƒIƒuƒWƒFƒNƒg‚̉E‘¤‚É•t‚¯‚é 8 ƒIƒuƒWƒFƒNƒg‚̍¶‘¤‚É•t‚¯‚é $20 •ûŒüƒL[‚Ő”’l‚ð•Ï‚¦‚ç‚ê‚邿‚¤‚É‚·‚é $40 …•½ƒ^ƒCƒv $80 ”Žš‚ðƒJƒ“ƒ}‚Å‹æØ‚ç‚È‚¢ ^ ¦udbtn‚ÍHSP•W€–½—ß‚Ìcls‚âscreen–½—߂ł͏Á‹Ž‚³‚ê‚Ü‚¹‚ñB_cls‚Ü‚½‚Í_clrobj‚ðŽg—p‚µ‚Ä‚­‚¾‚³‚¢B %href udset udget sel_udbtn %index sel_udbtn ‘€ì‚·‚éƒAƒbƒvƒ_ƒEƒ“ƒ{ƒ^ƒ“‚Ì‘I‘ð %group –½—ߊT—v %prm n1 n1 : ƒAƒbƒvƒ_ƒEƒ“ƒ{ƒ^ƒ“‚ÌID %inst ‚±‚Ì–½—ߎg—pŒã‚Ín1‚ÅŽw’肵‚½ƒAƒbƒvƒ_ƒEƒ“ƒ{ƒ^ƒ“‚ɑ΂µ‚ÄŠe‘€ì‚ðs‚¢‚Ü‚·B 
ƒAƒbƒvƒ_ƒEƒ“ƒ{ƒ^ƒ“V‹K‚ɍ쐬‚·‚邯AŽ©“®“I‚É‚»‚̃Aƒbƒvƒ_ƒEƒ“ƒ{ƒ^ƒ“‚ªŠe‘€ì‚̑ΏۂɂȂè‚Ü‚·B •¡”‚̃Aƒbƒvƒ_ƒEƒ“ƒ{ƒ^ƒ“‚ðì¬‚µ‚½‚Æ‚«‚́A‚±‚Ì–½—߂ő€ì‚·‚éƒAƒbƒvƒ_ƒEƒ“ƒ{ƒbƒNƒX‚ð‘I‘ð‚µ‚Ä‚­‚¾‚³‚¢B %href udbtn %index udset ƒAƒbƒvƒ_ƒEƒ“ƒ{ƒ^ƒ“‚̐ݒè %group –½—ߊT—v %prm n1,n2,n3 n1 : ƒZƒbƒg‚·‚éƒ^ƒCƒv n2 : n1‚É‚æ‚Á‚ĕςí‚é n3 : n1‚É‚æ‚Á‚ĕςí‚é %inst ƒAƒbƒvƒ_ƒEƒ“ƒ{ƒ^ƒ“‚̐ݒè‚ð‚µ‚Ü‚·B Ý’è‚·‚é‚à‚Ì‚Ín1‚Ì’l‚É‚æ‚Á‚ĈقȂè‚Ü‚·B ^p n1‚Ì’l 0 ƒAƒbƒvƒ_ƒEƒ“ƒ{ƒ^ƒ“‚Ì‘ŠŽè‚ðÝ’肵‚Ü‚·B n2‚ɃIƒuƒWƒFƒNƒg‚ÌID‚ðŽw’肵‚Ü‚·B ^ 1 ƒAƒbƒvƒ_ƒEƒ“ƒ{ƒ^ƒ“‚͈̔͂ðÝ’肵‚Ü‚·B n2‚ɍŏ¬’l(0‚©‚ç$7FFF‚Ü‚Å),n3‚ɍőå’l(0‚©‚ç$7FFF‚Ü‚Å)‚ðŽw’肵‚Ü‚·B ^ 2 ƒAƒbƒvƒ_ƒEƒ“ƒ{ƒ^ƒ“‚̈ʒu‚ðÝ’肵‚Ü‚·B n2‚ɃAƒbƒvƒ_ƒEƒ“ƒ{ƒ^ƒ“‚̈ʒu‚ðŽw’肵‚Ü‚·B ^ 3 ƒAƒbƒvƒ_ƒEƒ“ƒ{ƒ^ƒ“‚̐”’l‚ð16i‚Å•\ަ‚µ‚Ü‚·B n2‚ðÈ—ª‚·‚邯16i‚ɂȂèA0ˆÈŠO‚Ì’l‚ð“ü‚ê‚邯10i‚É–ß‚è‚Ü‚·B ^ 4 ƒAƒbƒvƒ_ƒEƒ“ƒ{ƒ^ƒ“‚Ì‘‰Á—Ê‚ð•ω»‚³‚¹‚鎞ŠÔ‚ðÝ’肵‚Ü‚·B n2‚ɐݒ肷‚鐔An3‚ɐݒ肷‚鎞ŠÔA‘‰Á—Ê‚ª“ü‚Á‚½”’l•ϐ”‚̃|ƒCƒ“ƒ^‚ðŽw’肵‚Ü‚·B (—á) udbtn 100,100 ;Å‰‚Í1‚¸‚A3•bŒã‚É10‚¸‚A6•bŒã‚É50‚¸‚‘‰Á‚³‚¹‚邿‚¤‚É‚·‚é a=0,1, 3,10, 6,50 getptr p,a udset 4,3,p ;Ý’è‚·‚鎞ŠÔ‚Í3‚ ^p ‚±‚Ì–½—ß‚ðŒÄ‚яo‚µ‚½Œã‚Ìstat‚Ì’l n1‚Ì’l stat‚Ì’l 0 ƒZƒbƒg‚·‚é‘O‚Ì‘ŠŽè‚ÌID 1 0 2 ƒZƒbƒg‚·‚é‘O‚̈ʒu 3 ƒZƒbƒg‚·‚é‘O‚Ì•\ަƒ^ƒCƒv 4 ƒZƒbƒg‚ª¬Œ÷‚µ‚½ê‡‚Í1Aޏ”s‚µ‚½ê‡‚Í0 %href udget udbtn %index udget ƒAƒbƒvƒ_ƒEƒ“ƒ{ƒ^ƒ“‚ÌŠeŽíî•ñ‚̎擾 %group –½—ߊT—v %prm n1 n1 : Žæ“¾‚·‚éƒ^ƒCƒv %inst ƒAƒbƒvƒ_ƒEƒ“ƒ{ƒ^ƒ“‚ÌŠeŽíî•ñ‚ðŽæ“¾‚µ‚Ü‚·B Žæ“¾‚·‚é‚à‚Ì‚Ín1‚Ì’l‚É‚æ‚Á‚ĈقȂè‚Ü‚·B Šeî•ñ‚Ístat‚É‘ã“ü‚³‚ê‚Ü‚·B ^p n1‚Ì’l 0 ƒAƒbƒvƒ_ƒEƒ“ƒ{ƒ^ƒ“‚Ì‘ŠŽè‚ðŽæ“¾‚µ‚Ü‚·B 1 ƒAƒbƒvƒ_ƒEƒ“ƒ{ƒ^ƒ“‚Ì”ÍˆÍ‚ðŽæ“¾‚µ‚Ü‚·B stat‚Ì’l‚Í ' Å¬’l | (Å‘å’l<<16) ' ‚ÌŒ`Ž®‚ɂȂÁ‚Ä‚¢‚Ü‚·B 2 ƒAƒbƒvƒ_ƒEƒ“ƒ{ƒ^ƒ“‚̈ʒu‚ðŽæ“¾‚µ‚Ü‚·B 3 ƒAƒbƒvƒ_ƒEƒ“ƒ{ƒ^ƒ“‚̐”’l‚Ì•\ަ‚ÌŽd•û‚ðŽæ“¾‚µ‚Ü‚·B ^p %href udset udbtn
zakki/openhsp
package/hsphelp/llmod3_objects.hs
bsd-3-clause
26,454
8,414
17
3,994
15,088
10,802
4,286
-1
-1
{-# LANGUAGE TemplateHaskell #-} import System.IO (hFlush, stdout) import System.Environment (getArgs) import Control.Monad (unless, forever) import Control.Monad.IO.Class (liftIO) import Control.Distributed.Process (Process, expect) import Control.Distributed.Process.Closure (remotable, mkClosure) import Control.Distributed.Process.Backend.Azure echoRemote :: () -> Backend -> Process () echoRemote () _backend = forever $ do str <- expect remoteSend (str :: String) remotable ['echoRemote] echoLocal :: LocalProcess () echoLocal = do str <- liftIO $ putStr "# " >> hFlush stdout >> getLine unless (null str) $ do localSend str liftIO $ putStr "Echo: " >> hFlush stdout echo <- localExpect liftIO $ putStrLn echo echoLocal main :: IO () main = do args <- getArgs case args of "onvm":args' -> -- Pass execution to 'onVmMain' if we are running on the VM -- ('callOnVM' will provide the right arguments) onVmMain __remoteTable args' sid:x509:pkey:user:cloudService:virtualMachine:port:_ -> do -- Initialize the Azure backend params <- defaultAzureParameters sid x509 pkey let params' = params { azureSshUserName = user } backend <- initializeBackend params' cloudService -- Find the specified virtual machine Just vm <- findNamedVM backend virtualMachine -- Run the echo client proper callOnVM backend vm port $ ProcessPair ($(mkClosure 'echoRemote) ()) echoLocal
haskell-distributed/distributed-process-azure
demos/Echo.hs
bsd-3-clause
1,506
1
18
329
439
218
221
36
2
{-# LANGUAGE QuasiQuotes #-} -- small test for playing around with exports. import LiquidHaskell import Language.Haskell.Liquid.Prelude data Goo = G Int [lq| data Goo = G (x :: {v:Int | v > 0}) |] [lq| f :: Goo -> Goo |] f (G n) | n > 0 = G (n + 1) | otherwise = liquidError "ad"
spinda/liquidhaskell
tests/gsoc15/unknown/pos/exp0.hs
bsd-3-clause
302
2
8
80
83
45
38
9
1
{-# LANGUAGE MultiParamTypeClasses, TypeFamilies, FlexibleContexts #-} {-# LANGUAGE FlexibleInstances, UndecidableInstances #-} {-# LANGUAGE TemplateHaskell #-} {-# OPTIONS -Wall #-} module RegisterMachine.Machine ( RM(RM), IsHalt(isHaltInstr), RM1 ) where import Basic.Types (Trans, EndCond(endcond), GetValue(readMem), Language) import Basic.Features (HasState(..), HasTransition(..), HasProgram(..), HasStatic(..), HasQ(..), IsAtProgramEnd(..), IsHalt(..)) import Control.Lens ((^.), makeLenses) -------------------------------------------------------------------------- ---------------------------- Generic Register Machine -------------------------------------------------------------------------- -- Machine Definition -- instr - type of language of instructions -- c - container type for sequence of instructions (aka program) -- st - type of states data RM prg st = RM { _prog :: prg , _stat :: st , _trans :: Trans prg st} makeLenses ''RM instance HasState (RM prg st) where type State (RM prg st) = st state = stat instance HasTransition (RM prg st) where transition = trans instance HasProgram (RM prg st) where type Program (RM prg st) = prg program = prog instance HasStatic (RM prg st) where type Static (RM prg st) = prg static = prog -------------------------------------------------------------------------- ----------------------------------generic machine operations -------------------------------------------------------------------------- instance (IsAtProgramEnd (Q st) l, GetValue (Q st) l instr, Language instr, IsHalt instr, HasQ st) => EndCond (RM (l instr) st) where endcond mach = isAtProgramEnd addr p || isHaltInstr (readMem addr p) where p = mach^.program addr = mach^.state^.q type RM1 instr c st = RM (c instr) st
davidzhulijun/TAM
RegisterMachine/Machine.hs
bsd-3-clause
1,809
2
9
276
461
269
192
37
0
module TwoStepsForward (solve) where import Data.List (intersect, foldl', maximumBy) import Data.Ord (comparing) import Data.Maybe (fromJust, mapMaybe) import Data.Bits ((.&.), shiftR) import Data.Word (Word8) import qualified Data.ByteString as B import Data.ByteString.Char8 (pack) import Crypto.Hash.MD5 (hash) import qualified Data.OrdPSQ as PSQ data Direction = U | D | L | R deriving (Eq, Ord, Show) type GridDimensions = (Int, Int) type Position = (Int, Int) inBounds :: GridDimensions -> Position -> Bool inBounds (m, n) (i, j) = i >= 0 && i < m && j >= 0 && j < n neighbors :: GridDimensions -> Position -> [Direction] neighbors gd (i, j) = map fst . filter (inBounds gd . snd) . zip [U, D, L, R] $ adjacents where adjacents = [(i - 1, j), (i + 1, j), (i, j - 1), (i, j + 1)] nibbles :: Word8 -> (Word8, Word8) nibbles b = ((b .&. 0xF0) `shiftR` 4, b .&. 0x0F) openDoors :: String -> [Direction] -> [Direction] openDoors prefix path = map fst . filter ((> 10) . snd) $ zip [U, D, L, R] [up, down, left, right] where h = hash . pack $ prefix ++ concatMap show path (up, down) = nibbles (h `B.index` 0) (left, right) = nibbles (h `B.index` 1) adj :: Position -> Direction -> Position adj (i, j) U = (i - 1, j) adj (i, j) D = (i + 1, j) adj (i, j) L = (i, j - 1) adj (i, j) R = (i, j + 1) stepOnce :: GridDimensions -> String -> (Position, [Direction]) -> [(Position, [Direction])] stepOnce gd prefix (pos, ds) = map (\dir -> (adj pos dir, ds ++ [dir])) validDoors where validDoors = neighbors gd pos `intersect` openDoors prefix ds search :: GridDimensions -> String -> Position -> PSQ.OrdPSQ [Direction] Int Position -> [Direction] search gd prefix dst q | minV == dst = minK | otherwise = search gd prefix dst newQ where (minK, minP, minV) = fromJust . PSQ.findMin $ q neighbors = stepOnce gd prefix (minV, minK) updateQ q (pos, dirs) = PSQ.insert dirs (minP + 1) pos q newQ = foldl' updateQ (PSQ.deleteMin q) neighbors shortestPath :: GridDimensions -> String -> [Direction] shortestPath gd@(m, n) prefix = search gd prefix (m - 1, n - 1) (PSQ.singleton [] 0 (0, 0)) longestPath' :: GridDimensions -> String -> (Position, [Direction]) -> Position -> Maybe [Direction] longestPath' gd prefix src@(pos, path) dst | pos == dst = Just path | null adjacentPaths = Nothing | otherwise = Just $ maximumBy (comparing length) adjacentPaths where neighbors = stepOnce gd prefix src adjacentPaths = mapMaybe (\neighbor -> longestPath' gd prefix neighbor dst) neighbors longestPath :: GridDimensions -> String -> [Direction] longestPath gd@(m, n) prefix = fromJust $ longestPath' gd prefix ((0, 0), []) (m - 1, n - 1) solve :: String -> IO () solve input = do let passcode = head . lines $ input let gridDimensions = (4, 4) let directionList = shortestPath gridDimensions passcode putStrLn . concatMap show $ directionList let path = longestPath gridDimensions passcode print . length $ path
cjlarose/advent-2016
src/TwoStepsForward.hs
bsd-3-clause
3,080
0
11
705
1,382
763
619
58
1
{-# Language OverloadedStrings #-} {-| Module : Client.Hooks Description : Available hooks Copyright : (c) Dan Doel, 2016 License : ISC Maintainer : [email protected] The collection of all hooks available in the client. -} module Client.Hooks ( messageHooks ) where import Data.Text import Data.HashMap.Strict import Client.Hook import Client.Hook.DroneBLRelay import Client.Hook.Matterbridge import Client.Hook.Snotice import Client.Hook.Znc.Buffextras -- | All the available message hooks. messageHooks :: HashMap Text ([Text] -> Maybe MessageHook) messageHooks = fromList [ ("snotice", \_ -> Just snoticeHook) , ("droneblrelay", droneblRelayHook) , ("buffextras", buffextrasHook) , ("matterbridge", matterbridgeHook) ]
glguy/irc-core
src/Client/Hooks.hs
isc
756
0
9
121
130
81
49
16
1
module FloatUnaryMinus2 where main :: Int main = case -.2.0 of 3.0 -> 0 2.0 -> 1 1.0 -> 2 _ -> 3
roberth/uu-helium
test/correct/FloatUnaryMinus2.hs
gpl-3.0
140
1
7
66
46
25
21
-1
-1
module Distribution.Simple.GHCJS ( configure, getInstalledPackages, getPackageDBContents, buildLib, buildExe, replLib, replExe, startInterpreter, installLib, installExe, libAbiHash, hcPkgInfo, registerPackage, componentGhcOptions, getLibDir, isDynamic, getGlobalPackageDB, runCmd ) where import Distribution.Simple.GHC.ImplInfo ( getImplInfo, ghcjsVersionImplInfo ) import qualified Distribution.Simple.GHC.Internal as Internal import Distribution.PackageDescription as PD ( PackageDescription(..), BuildInfo(..), Executable(..) , Library(..), libModules, exeModules , hcOptions, hcProfOptions, hcSharedOptions , allExtensions ) import Distribution.InstalledPackageInfo ( InstalledPackageInfo ) import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo ( InstalledPackageInfo(..) ) import Distribution.Package ( ComponentId(..), getHSLibraryName ) import Distribution.Simple.PackageIndex ( InstalledPackageIndex ) import qualified Distribution.Simple.PackageIndex as PackageIndex import Distribution.Simple.LocalBuildInfo ( LocalBuildInfo(..), ComponentLocalBuildInfo(..) ) import qualified Distribution.Simple.Hpc as Hpc import Distribution.Simple.InstallDirs hiding ( absoluteInstallDirs ) import Distribution.Simple.BuildPaths import Distribution.Simple.Utils import Distribution.Simple.Program ( Program(..), ConfiguredProgram(..), ProgramConfiguration , ProgramSearchPath , rawSystemProgramConf , rawSystemProgramStdout, rawSystemProgramStdoutConf , getProgramInvocationOutput , requireProgramVersion, requireProgram , userMaybeSpecifyPath, programPath , lookupProgram, addKnownPrograms , ghcjsProgram, ghcjsPkgProgram, c2hsProgram, hsc2hsProgram , ldProgram, haddockProgram, stripProgram ) import qualified Distribution.Simple.Program.HcPkg as HcPkg import qualified Distribution.Simple.Program.Ar as Ar import qualified Distribution.Simple.Program.Ld as Ld import qualified Distribution.Simple.Program.Strip as Strip import Distribution.Simple.Program.GHC import Distribution.Simple.Setup ( toFlag, fromFlag, configCoverage, configDistPref ) import qualified Distribution.Simple.Setup as Cabal ( Flag(..) ) import Distribution.Simple.Compiler ( CompilerFlavor(..), CompilerId(..), Compiler(..) , PackageDB(..), PackageDBStack, AbiTag(..) ) import Distribution.Version ( Version(..), anyVersion, orLaterVersion ) import Distribution.System ( Platform(..) ) import Distribution.Verbosity import Distribution.Utils.NubList ( overNubListR, toNubListR ) import Distribution.Text ( display ) import Language.Haskell.Extension ( Extension(..) , KnownExtension(..)) import Control.Monad ( unless, when ) import Data.Char ( isSpace ) import qualified Data.Map as M ( fromList ) import Data.Monoid as Mon ( Monoid(..) 
) import System.Directory ( doesFileExist ) import System.FilePath ( (</>), (<.>), takeExtension, takeDirectory, replaceExtension, splitExtension ) configure :: Verbosity -> Maybe FilePath -> Maybe FilePath -> ProgramConfiguration -> IO (Compiler, Maybe Platform, ProgramConfiguration) configure verbosity hcPath hcPkgPath conf0 = do (ghcjsProg, ghcjsVersion, conf1) <- requireProgramVersion verbosity ghcjsProgram (orLaterVersion (Version [0,1] [])) (userMaybeSpecifyPath "ghcjs" hcPath conf0) Just ghcjsGhcVersion <- findGhcjsGhcVersion verbosity (programPath ghcjsProg) let implInfo = ghcjsVersionImplInfo ghcjsVersion ghcjsGhcVersion -- This is slightly tricky, we have to configure ghcjs first, then we use the -- location of ghcjs to help find ghcjs-pkg in the case that the user did not -- specify the location of ghc-pkg directly: (ghcjsPkgProg, ghcjsPkgVersion, conf2) <- requireProgramVersion verbosity ghcjsPkgProgram { programFindLocation = guessGhcjsPkgFromGhcjsPath ghcjsProg } anyVersion (userMaybeSpecifyPath "ghcjs-pkg" hcPkgPath conf1) Just ghcjsPkgGhcjsVersion <- findGhcjsPkgGhcjsVersion verbosity (programPath ghcjsPkgProg) when (ghcjsVersion /= ghcjsPkgGhcjsVersion) $ die $ "Version mismatch between ghcjs and ghcjs-pkg: " ++ programPath ghcjsProg ++ " is version " ++ display ghcjsVersion ++ " " ++ programPath ghcjsPkgProg ++ " is version " ++ display ghcjsPkgGhcjsVersion when (ghcjsGhcVersion /= ghcjsPkgVersion) $ die $ "Version mismatch between ghcjs and ghcjs-pkg: " ++ programPath ghcjsProg ++ " was built with GHC version " ++ display ghcjsGhcVersion ++ " " ++ programPath ghcjsPkgProg ++ " was built with GHC version " ++ display ghcjsPkgVersion -- be sure to use our versions of hsc2hs, c2hs, haddock and ghc let hsc2hsProgram' = hsc2hsProgram { programFindLocation = guessHsc2hsFromGhcjsPath ghcjsProg } c2hsProgram' = c2hsProgram { programFindLocation = guessC2hsFromGhcjsPath ghcjsProg } haddockProgram' = haddockProgram { programFindLocation = guessHaddockFromGhcjsPath ghcjsProg } conf3 = addKnownPrograms [ hsc2hsProgram', c2hsProgram', haddockProgram' ] conf2 languages <- Internal.getLanguages verbosity implInfo ghcjsProg extensions <- Internal.getExtensions verbosity implInfo ghcjsProg ghcInfo <- Internal.getGhcInfo verbosity implInfo ghcjsProg let ghcInfoMap = M.fromList ghcInfo let comp = Compiler { compilerId = CompilerId GHCJS ghcjsVersion, compilerAbiTag = AbiTag $ "ghc" ++ intercalate "_" (map show . 
versionBranch $ ghcjsGhcVersion), compilerCompat = [CompilerId GHC ghcjsGhcVersion], compilerLanguages = languages, compilerExtensions = extensions, compilerProperties = ghcInfoMap } compPlatform = Internal.targetPlatform ghcInfo -- configure gcc and ld let conf4 = if ghcjsNativeToo comp then Internal.configureToolchain implInfo ghcjsProg ghcInfoMap conf3 else conf3 return (comp, compPlatform, conf4) ghcjsNativeToo :: Compiler -> Bool ghcjsNativeToo = Internal.ghcLookupProperty "Native Too" guessGhcjsPkgFromGhcjsPath :: ConfiguredProgram -> Verbosity -> ProgramSearchPath -> IO (Maybe FilePath) guessGhcjsPkgFromGhcjsPath = guessToolFromGhcjsPath ghcjsPkgProgram guessHsc2hsFromGhcjsPath :: ConfiguredProgram -> Verbosity -> ProgramSearchPath -> IO (Maybe FilePath) guessHsc2hsFromGhcjsPath = guessToolFromGhcjsPath hsc2hsProgram guessC2hsFromGhcjsPath :: ConfiguredProgram -> Verbosity -> ProgramSearchPath -> IO (Maybe FilePath) guessC2hsFromGhcjsPath = guessToolFromGhcjsPath c2hsProgram guessHaddockFromGhcjsPath :: ConfiguredProgram -> Verbosity -> ProgramSearchPath -> IO (Maybe FilePath) guessHaddockFromGhcjsPath = guessToolFromGhcjsPath haddockProgram guessToolFromGhcjsPath :: Program -> ConfiguredProgram -> Verbosity -> ProgramSearchPath -> IO (Maybe FilePath) guessToolFromGhcjsPath tool ghcjsProg verbosity searchpath = do let toolname = programName tool path = programPath ghcjsProg dir = takeDirectory path versionSuffix = takeVersionSuffix (dropExeExtension path) guessNormal = dir </> toolname <.> exeExtension guessGhcjsVersioned = dir </> (toolname ++ "-ghcjs" ++ versionSuffix) <.> exeExtension guessGhcjs = dir </> (toolname ++ "-ghcjs") <.> exeExtension guessVersioned = dir </> (toolname ++ versionSuffix) <.> exeExtension guesses | null versionSuffix = [guessGhcjs, guessNormal] | otherwise = [guessGhcjsVersioned, guessGhcjs, guessVersioned, guessNormal] info verbosity $ "looking for tool " ++ toolname ++ " near compiler in " ++ dir exists <- mapM doesFileExist guesses case [ file | (file, True) <- zip guesses exists ] of -- If we can't find it near ghc, fall back to the usual -- method. [] -> programFindLocation tool verbosity searchpath (fp:_) -> do info verbosity $ "found " ++ toolname ++ " in " ++ fp return (Just fp) where takeVersionSuffix :: FilePath -> String takeVersionSuffix = reverse . takeWhile (`elem ` "0123456789.-") . reverse dropExeExtension :: FilePath -> FilePath dropExeExtension filepath = case splitExtension filepath of (filepath', extension) | extension == exeExtension -> filepath' | otherwise -> filepath -- | Given a single package DB, return all installed packages. getPackageDBContents :: Verbosity -> PackageDB -> ProgramConfiguration -> IO InstalledPackageIndex getPackageDBContents verbosity packagedb conf = do pkgss <- getInstalledPackages' verbosity [packagedb] conf toPackageIndex verbosity pkgss conf -- | Given a package DB stack, return all installed packages. getInstalledPackages :: Verbosity -> PackageDBStack -> ProgramConfiguration -> IO InstalledPackageIndex getInstalledPackages verbosity packagedbs conf = do checkPackageDbEnvVar checkPackageDbStack packagedbs pkgss <- getInstalledPackages' verbosity packagedbs conf index <- toPackageIndex verbosity pkgss conf return $! index toPackageIndex :: Verbosity -> [(PackageDB, [InstalledPackageInfo])] -> ProgramConfiguration -> IO InstalledPackageIndex toPackageIndex verbosity pkgss conf = do -- On Windows, various fields have $topdir/foo rather than full -- paths. 
We need to substitute the right value in so that when -- we, for example, call gcc, we have proper paths to give it. topDir <- getLibDir' verbosity ghcjsProg let indices = [ PackageIndex.fromList (map (Internal.substTopDir topDir) pkgs) | (_, pkgs) <- pkgss ] return $! (mconcat indices) where Just ghcjsProg = lookupProgram ghcjsProgram conf checkPackageDbEnvVar :: IO () checkPackageDbEnvVar = Internal.checkPackageDbEnvVar "GHCJS" "GHCJS_PACKAGE_PATH" checkPackageDbStack :: PackageDBStack -> IO () checkPackageDbStack (GlobalPackageDB:rest) | GlobalPackageDB `notElem` rest = return () checkPackageDbStack rest | GlobalPackageDB `notElem` rest = die $ "With current ghc versions the global package db is always used " ++ "and must be listed first. This ghc limitation may be lifted in " ++ "future, see http://hackage.haskell.org/trac/ghc/ticket/5977" checkPackageDbStack _ = die $ "If the global package db is specified, it must be " ++ "specified first and cannot be specified multiple times" getInstalledPackages' :: Verbosity -> [PackageDB] -> ProgramConfiguration -> IO [(PackageDB, [InstalledPackageInfo])] getInstalledPackages' verbosity packagedbs conf = sequence [ do pkgs <- HcPkg.dump (hcPkgInfo conf) verbosity packagedb return (packagedb, pkgs) | packagedb <- packagedbs ] getLibDir :: Verbosity -> LocalBuildInfo -> IO FilePath getLibDir verbosity lbi = (reverse . dropWhile isSpace . reverse) `fmap` rawSystemProgramStdoutConf verbosity ghcjsProgram (withPrograms lbi) ["--print-libdir"] getLibDir' :: Verbosity -> ConfiguredProgram -> IO FilePath getLibDir' verbosity ghcjsProg = (reverse . dropWhile isSpace . reverse) `fmap` rawSystemProgramStdout verbosity ghcjsProg ["--print-libdir"] -- | Return the 'FilePath' to the global GHC package database. getGlobalPackageDB :: Verbosity -> ConfiguredProgram -> IO FilePath getGlobalPackageDB verbosity ghcjsProg = (reverse . dropWhile isSpace . 
reverse) `fmap` rawSystemProgramStdout verbosity ghcjsProg ["--print-global-package-db"] toJSLibName :: String -> String toJSLibName lib | takeExtension lib `elem` [".dll",".dylib",".so"] = replaceExtension lib "js_so" | takeExtension lib == ".a" = replaceExtension lib "js_a" | otherwise = lib <.> "js_a" buildLib, replLib :: Verbosity -> Cabal.Flag (Maybe Int) -> PackageDescription -> LocalBuildInfo -> Library -> ComponentLocalBuildInfo -> IO () buildLib = buildOrReplLib False replLib = buildOrReplLib True buildOrReplLib :: Bool -> Verbosity -> Cabal.Flag (Maybe Int) -> PackageDescription -> LocalBuildInfo -> Library -> ComponentLocalBuildInfo -> IO () buildOrReplLib forRepl verbosity numJobs _pkg_descr lbi lib clbi = do let libName@(ComponentId cname) = componentId clbi libTargetDir = buildDir lbi whenVanillaLib forceVanilla = when (not forRepl && (forceVanilla || withVanillaLib lbi)) whenProfLib = when (not forRepl && withProfLib lbi) whenSharedLib forceShared = when (not forRepl && (forceShared || withSharedLib lbi)) whenGHCiLib = when (not forRepl && withGHCiLib lbi && withVanillaLib lbi) ifReplLib = when forRepl comp = compiler lbi implInfo = getImplInfo comp hole_insts = map (\(k,(p,n)) -> (k,(InstalledPackageInfo.installedComponentId p,n))) (instantiatedWith lbi) nativeToo = ghcjsNativeToo comp (ghcjsProg, _) <- requireProgram verbosity ghcjsProgram (withPrograms lbi) let runGhcjsProg = runGHC verbosity ghcjsProg comp libBi = libBuildInfo lib isGhcjsDynamic = isDynamic comp dynamicTooSupported = supportsDynamicToo comp doingTH = EnableExtension TemplateHaskell `elem` allExtensions libBi forceVanillaLib = doingTH && not isGhcjsDynamic forceSharedLib = doingTH && isGhcjsDynamic -- TH always needs default libs, even when building for profiling -- Determine if program coverage should be enabled and if so, what -- '-hpcdir' should be. let isCoverageEnabled = fromFlag $ configCoverage $ configFlags lbi distPref = fromFlag $ configDistPref $ configFlags lbi hpcdir way | isCoverageEnabled = toFlag $ Hpc.mixDir distPref way cname | otherwise = Mon.mempty createDirectoryIfMissingVerbose verbosity True libTargetDir -- TODO: do we need to put hs-boot files into place for mutually recursive -- modules? 
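    -- Gather the C object file names and JavaScript sources, then assemble the GhcOptions
    -- sets (vanilla, profiling, shared, repl) used by the compilations below.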
let cObjs = map (`replaceExtension` objExtension) (cSources libBi) jsSrcs = jsSources libBi baseOpts = componentGhcOptions verbosity lbi libBi clbi libTargetDir linkJsLibOpts = mempty { ghcOptExtra = toNubListR $ [ "-link-js-lib" , getHSLibraryName libName , "-js-lib-outputdir", libTargetDir ] ++ concatMap (\x -> ["-js-lib-src",x]) jsSrcs } vanillaOptsNoJsLib = baseOpts `mappend` mempty { ghcOptMode = toFlag GhcModeMake, ghcOptNumJobs = numJobs, ghcOptSigOf = hole_insts, ghcOptInputModules = toNubListR $ libModules lib, ghcOptHPCDir = hpcdir Hpc.Vanilla } vanillaOpts = vanillaOptsNoJsLib `mappend` linkJsLibOpts profOpts = adjustExts "p_hi" "p_o" vanillaOpts `mappend` mempty { ghcOptProfilingMode = toFlag True, ghcOptExtra = toNubListR $ ghcjsProfOptions libBi, ghcOptHPCDir = hpcdir Hpc.Prof } sharedOpts = adjustExts "dyn_hi" "dyn_o" vanillaOpts `mappend` mempty { ghcOptDynLinkMode = toFlag GhcDynamicOnly, ghcOptFPic = toFlag True, ghcOptExtra = toNubListR $ ghcjsSharedOptions libBi, ghcOptHPCDir = hpcdir Hpc.Dyn } linkerOpts = mempty { ghcOptLinkOptions = toNubListR $ PD.ldOptions libBi, ghcOptLinkLibs = toNubListR $ extraLibs libBi, ghcOptLinkLibPath = toNubListR $ extraLibDirs libBi, ghcOptLinkFrameworks = toNubListR $ PD.frameworks libBi, ghcOptInputFiles = toNubListR $ [libTargetDir </> x | x <- cObjs] ++ jsSrcs } replOpts = vanillaOptsNoJsLib { ghcOptExtra = overNubListR Internal.filterGhciFlags (ghcOptExtra vanillaOpts), ghcOptNumJobs = mempty } `mappend` linkerOpts `mappend` mempty { ghcOptMode = toFlag GhcModeInteractive, ghcOptOptimisation = toFlag GhcNoOptimisation } vanillaSharedOpts = vanillaOpts `mappend` mempty { ghcOptDynLinkMode = toFlag GhcStaticAndDynamic, ghcOptDynHiSuffix = toFlag "dyn_hi", ghcOptDynObjSuffix = toFlag "dyn_o", ghcOptHPCDir = hpcdir Hpc.Dyn } unless (forRepl || (null (libModules lib) && null jsSrcs && null cObjs)) $ do let vanilla = whenVanillaLib forceVanillaLib (runGhcjsProg vanillaOpts) shared = whenSharedLib forceSharedLib (runGhcjsProg sharedOpts) useDynToo = dynamicTooSupported && (forceVanillaLib || withVanillaLib lbi) && (forceSharedLib || withSharedLib lbi) && null (ghcjsSharedOptions libBi) if useDynToo then do runGhcjsProg vanillaSharedOpts case (hpcdir Hpc.Dyn, hpcdir Hpc.Vanilla) of (Cabal.Flag dynDir, Cabal.Flag vanillaDir) -> do -- When the vanilla and shared library builds are done -- in one pass, only one set of HPC module interfaces -- are generated. This set should suffice for both -- static and dynamically linked executables. We copy -- the modules interfaces so they are available under -- both ways. copyDirectoryRecursive verbosity dynDir vanillaDir _ -> return () else if isGhcjsDynamic then do shared; vanilla else do vanilla; shared whenProfLib (runGhcjsProg profOpts) -- build any C sources unless (null (cSources libBi) || not nativeToo) $ do info verbosity "Building C Sources..." 
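      -- each C source is compiled with the vanilla options; the shared and profiling
      -- object variants are only produced when those library ways are enabled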
sequence_ [ do let vanillaCcOpts = (Internal.componentCcGhcOptions verbosity implInfo lbi libBi clbi libTargetDir filename) profCcOpts = vanillaCcOpts `mappend` mempty { ghcOptProfilingMode = toFlag True, ghcOptObjSuffix = toFlag "p_o" } sharedCcOpts = vanillaCcOpts `mappend` mempty { ghcOptFPic = toFlag True, ghcOptDynLinkMode = toFlag GhcDynamicOnly, ghcOptObjSuffix = toFlag "dyn_o" } odir = fromFlag (ghcOptObjDir vanillaCcOpts) createDirectoryIfMissingVerbose verbosity True odir runGhcjsProg vanillaCcOpts whenSharedLib forceSharedLib (runGhcjsProg sharedCcOpts) whenProfLib (runGhcjsProg profCcOpts) | filename <- cSources libBi] -- TODO: problem here is we need the .c files built first, so we can load them -- with ghci, but .c files can depend on .h files generated by ghc by ffi -- exports. unless (null (libModules lib)) $ ifReplLib (runGhcjsProg replOpts) -- link: when (nativeToo && not forRepl) $ do info verbosity "Linking..." let cProfObjs = map (`replaceExtension` ("p_" ++ objExtension)) (cSources libBi) cSharedObjs = map (`replaceExtension` ("dyn_" ++ objExtension)) (cSources libBi) cid = compilerId (compiler lbi) vanillaLibFilePath = libTargetDir </> mkLibName libName profileLibFilePath = libTargetDir </> mkProfLibName libName sharedLibFilePath = libTargetDir </> mkSharedLibName cid libName ghciLibFilePath = libTargetDir </> Internal.mkGHCiLibName libName hObjs <- Internal.getHaskellObjects implInfo lib lbi libTargetDir objExtension True hProfObjs <- if (withProfLib lbi) then Internal.getHaskellObjects implInfo lib lbi libTargetDir ("p_" ++ objExtension) True else return [] hSharedObjs <- if (withSharedLib lbi) then Internal.getHaskellObjects implInfo lib lbi libTargetDir ("dyn_" ++ objExtension) False else return [] unless (null hObjs && null cObjs) $ do let staticObjectFiles = hObjs ++ map (libTargetDir </>) cObjs profObjectFiles = hProfObjs ++ map (libTargetDir </>) cProfObjs ghciObjFiles = hObjs ++ map (libTargetDir </>) cObjs dynamicObjectFiles = hSharedObjs ++ map (libTargetDir </>) cSharedObjs -- After the relocation lib is created we invoke ghc -shared -- with the dependencies spelled out as -package arguments -- and ghc invokes the linker with the proper library paths ghcSharedLinkArgs = mempty { ghcOptShared = toFlag True, ghcOptDynLinkMode = toFlag GhcDynamicOnly, ghcOptInputFiles = toNubListR dynamicObjectFiles, ghcOptOutputFile = toFlag sharedLibFilePath, ghcOptNoAutoLinkPackages = toFlag True, ghcOptPackageDBs = withPackageDB lbi, ghcOptPackages = toNubListR $ Internal.mkGhcOptPackages clbi, ghcOptLinkLibs = toNubListR $ extraLibs libBi, ghcOptLinkLibPath = toNubListR $ extraLibDirs libBi } whenVanillaLib False $ do Ar.createArLibArchive verbosity lbi vanillaLibFilePath staticObjectFiles whenProfLib $ do Ar.createArLibArchive verbosity lbi profileLibFilePath profObjectFiles whenGHCiLib $ do (ldProg, _) <- requireProgram verbosity ldProgram (withPrograms lbi) Ld.combineObjectFiles verbosity ldProg ghciLibFilePath ghciObjFiles whenSharedLib False $ runGhcjsProg ghcSharedLinkArgs -- | Start a REPL without loading any source files. 
startInterpreter :: Verbosity -> ProgramConfiguration -> Compiler -> PackageDBStack -> IO () startInterpreter verbosity conf comp packageDBs = do let replOpts = mempty { ghcOptMode = toFlag GhcModeInteractive, ghcOptPackageDBs = packageDBs } checkPackageDbStack packageDBs (ghcjsProg, _) <- requireProgram verbosity ghcjsProgram conf runGHC verbosity ghcjsProg comp replOpts buildExe, replExe :: Verbosity -> Cabal.Flag (Maybe Int) -> PackageDescription -> LocalBuildInfo -> Executable -> ComponentLocalBuildInfo -> IO () buildExe = buildOrReplExe False replExe = buildOrReplExe True buildOrReplExe :: Bool -> Verbosity -> Cabal.Flag (Maybe Int) -> PackageDescription -> LocalBuildInfo -> Executable -> ComponentLocalBuildInfo -> IO () buildOrReplExe forRepl verbosity numJobs _pkg_descr lbi exe@Executable { exeName = exeName', modulePath = modPath } clbi = do (ghcjsProg, _) <- requireProgram verbosity ghcjsProgram (withPrograms lbi) let comp = compiler lbi implInfo = getImplInfo comp runGhcjsProg = runGHC verbosity ghcjsProg comp exeBi = buildInfo exe -- exeNameReal, the name that GHC really uses (with .exe on Windows) let exeNameReal = exeName' <.> (if takeExtension exeName' /= ('.':exeExtension) then exeExtension else "") let targetDir = (buildDir lbi) </> exeName' let exeDir = targetDir </> (exeName' ++ "-tmp") createDirectoryIfMissingVerbose verbosity True targetDir createDirectoryIfMissingVerbose verbosity True exeDir -- TODO: do we need to put hs-boot files into place for mutually recursive -- modules? FIX: what about exeName.hi-boot? -- Determine if program coverage should be enabled and if so, what -- '-hpcdir' should be. let isCoverageEnabled = fromFlag $ configCoverage $ configFlags lbi distPref = fromFlag $ configDistPref $ configFlags lbi hpcdir way | isCoverageEnabled = toFlag $ Hpc.mixDir distPref way exeName' | otherwise = mempty -- build executables srcMainFile <- findFile (exeDir : hsSourceDirs exeBi) modPath let isGhcjsDynamic = isDynamic comp dynamicTooSupported = supportsDynamicToo comp buildRunner = case clbi of ExeComponentLocalBuildInfo {} -> False _ -> True isHaskellMain = elem (takeExtension srcMainFile) [".hs", ".lhs"] jsSrcs = jsSources exeBi cSrcs = cSources exeBi ++ [srcMainFile | not isHaskellMain] cObjs = map (`replaceExtension` objExtension) cSrcs nativeToo = ghcjsNativeToo comp baseOpts = (componentGhcOptions verbosity lbi exeBi clbi exeDir) `mappend` mempty { ghcOptMode = toFlag GhcModeMake, ghcOptInputFiles = toNubListR $ [ srcMainFile | isHaskellMain], ghcOptInputModules = toNubListR $ [ m | not isHaskellMain, m <- exeModules exe], ghcOptExtra = if buildRunner then toNubListR ["-build-runner"] else mempty } staticOpts = baseOpts `mappend` mempty { ghcOptDynLinkMode = toFlag GhcStaticOnly, ghcOptHPCDir = hpcdir Hpc.Vanilla } profOpts = adjustExts "p_hi" "p_o" baseOpts `mappend` mempty { ghcOptProfilingMode = toFlag True, ghcOptExtra = toNubListR $ ghcjsProfOptions exeBi, ghcOptHPCDir = hpcdir Hpc.Prof } dynOpts = adjustExts "dyn_hi" "dyn_o" baseOpts `mappend` mempty { ghcOptDynLinkMode = toFlag GhcDynamicOnly, ghcOptExtra = toNubListR $ ghcjsSharedOptions exeBi, ghcOptHPCDir = hpcdir Hpc.Dyn } dynTooOpts = adjustExts "dyn_hi" "dyn_o" staticOpts `mappend` mempty { ghcOptDynLinkMode = toFlag GhcStaticAndDynamic, ghcOptHPCDir = hpcdir Hpc.Dyn } linkerOpts = mempty { ghcOptLinkOptions = toNubListR $ PD.ldOptions exeBi, ghcOptLinkLibs = toNubListR $ extraLibs exeBi, ghcOptLinkLibPath = toNubListR $ extraLibDirs exeBi, ghcOptLinkFrameworks = toNubListR $ PD.frameworks 
exeBi, ghcOptInputFiles = toNubListR $ [exeDir </> x | x <- cObjs] ++ jsSrcs } replOpts = baseOpts { ghcOptExtra = overNubListR Internal.filterGhciFlags (ghcOptExtra baseOpts) } -- For a normal compile we do separate invocations of ghc for -- compiling as for linking. But for repl we have to do just -- the one invocation, so that one has to include all the -- linker stuff too, like -l flags and any .o files from C -- files etc. `mappend` linkerOpts `mappend` mempty { ghcOptMode = toFlag GhcModeInteractive, ghcOptOptimisation = toFlag GhcNoOptimisation } commonOpts | withProfExe lbi = profOpts | withDynExe lbi = dynOpts | otherwise = staticOpts compileOpts | useDynToo = dynTooOpts | otherwise = commonOpts withStaticExe = (not $ withProfExe lbi) && (not $ withDynExe lbi) -- For building exe's that use TH with -prof or -dynamic we actually have -- to build twice, once without -prof/-dynamic and then again with -- -prof/-dynamic. This is because the code that TH needs to run at -- compile time needs to be the vanilla ABI so it can be loaded up and run -- by the compiler. -- With dynamic-by-default GHC the TH object files loaded at compile-time -- need to be .dyn_o instead of .o. doingTH = EnableExtension TemplateHaskell `elem` allExtensions exeBi -- Should we use -dynamic-too instead of compiling twice? useDynToo = dynamicTooSupported && isGhcjsDynamic && doingTH && withStaticExe && null (ghcjsSharedOptions exeBi) compileTHOpts | isGhcjsDynamic = dynOpts | otherwise = staticOpts compileForTH | forRepl = False | useDynToo = False | isGhcjsDynamic = doingTH && (withProfExe lbi || withStaticExe) | otherwise = doingTH && (withProfExe lbi || withDynExe lbi) linkOpts = commonOpts `mappend` linkerOpts `mappend` mempty { ghcOptLinkNoHsMain = toFlag (not isHaskellMain) } -- Build static/dynamic object files for TH, if needed. when compileForTH $ runGhcjsProg compileTHOpts { ghcOptNoLink = toFlag True , ghcOptNumJobs = numJobs } unless forRepl $ runGhcjsProg compileOpts { ghcOptNoLink = toFlag True , ghcOptNumJobs = numJobs } -- build any C sources unless (null cSrcs || not nativeToo) $ do info verbosity "Building C Sources..." sequence_ [ do let opts = (Internal.componentCcGhcOptions verbosity implInfo lbi exeBi clbi exeDir filename) `mappend` mempty { ghcOptDynLinkMode = toFlag (if withDynExe lbi then GhcDynamicOnly else GhcStaticOnly), ghcOptProfilingMode = toFlag (withProfExe lbi) } odir = fromFlag (ghcOptObjDir opts) createDirectoryIfMissingVerbose verbosity True odir runGhcjsProg opts | filename <- cSrcs ] -- TODO: problem here is we need the .c files built first, so we can load them -- with ghci, but .c files can depend on .h files generated by ghc by ffi -- exports. when forRepl $ runGhcjsProg replOpts -- link: unless forRepl $ do info verbosity "Linking..." 
runGhcjsProg linkOpts { ghcOptOutputFile = toFlag (targetDir </> exeNameReal) } -- |Install for ghc, .hi, .a and, if --with-ghci given, .o installLib :: Verbosity -> LocalBuildInfo -> FilePath -- ^install location -> FilePath -- ^install location for dynamic libraries -> FilePath -- ^Build location -> PackageDescription -> Library -> ComponentLocalBuildInfo -> IO () installLib verbosity lbi targetDir dynlibTargetDir builtDir _pkg lib clbi = do whenVanilla $ copyModuleFiles "js_hi" whenProf $ copyModuleFiles "js_p_hi" whenShared $ copyModuleFiles "js_dyn_hi" whenVanilla $ installOrdinary builtDir targetDir $ toJSLibName vanillaLibName whenProf $ installOrdinary builtDir targetDir $ toJSLibName profileLibName whenShared $ installShared builtDir dynlibTargetDir $ toJSLibName sharedLibName when (ghcjsNativeToo $ compiler lbi) $ do -- copy .hi files over: whenVanilla $ copyModuleFiles "hi" whenProf $ copyModuleFiles "p_hi" whenShared $ copyModuleFiles "dyn_hi" -- copy the built library files over: whenVanilla $ installOrdinary builtDir targetDir vanillaLibName whenProf $ installOrdinary builtDir targetDir profileLibName whenGHCi $ installOrdinary builtDir targetDir ghciLibName whenShared $ installShared builtDir dynlibTargetDir sharedLibName where install isShared srcDir dstDir name = do let src = srcDir </> name dst = dstDir </> name createDirectoryIfMissingVerbose verbosity True dstDir if isShared then installExecutableFile verbosity src dst else installOrdinaryFile verbosity src dst when (stripLibs lbi) $ Strip.stripLib verbosity (hostPlatform lbi) (withPrograms lbi) dst installOrdinary = install False installShared = install True copyModuleFiles ext = findModuleFiles [builtDir] [ext] (libModules lib) >>= installOrdinaryFiles verbosity targetDir cid = compilerId (compiler lbi) libName = componentId clbi vanillaLibName = mkLibName libName profileLibName = mkProfLibName libName ghciLibName = Internal.mkGHCiLibName libName sharedLibName = (mkSharedLibName cid) libName hasLib = not $ null (libModules lib) && null (cSources (libBuildInfo lib)) whenVanilla = when (hasLib && withVanillaLib lbi) whenProf = when (hasLib && withProfLib lbi) whenGHCi = when (hasLib && withGHCiLib lbi) whenShared = when (hasLib && withSharedLib lbi) installExe :: Verbosity -> LocalBuildInfo -> InstallDirs FilePath -- ^Where to copy the files to -> FilePath -- ^Build location -> (FilePath, FilePath) -- ^Executable (prefix,suffix) -> PackageDescription -> Executable -> IO () installExe verbosity lbi installDirs buildPref (progprefix, progsuffix) _pkg exe = do let binDir = bindir installDirs createDirectoryIfMissingVerbose verbosity True binDir let exeFileName = exeName exe fixedExeBaseName = progprefix ++ exeName exe ++ progsuffix installBinary dest = do rawSystemProgramConf verbosity ghcjsProgram (withPrograms lbi) $ [ "--install-executable" , buildPref </> exeName exe </> exeFileName , "-o", dest ] ++ case (stripExes lbi, lookupProgram stripProgram $ withPrograms lbi) of (True, Just strip) -> ["-strip-program", programPath strip] _ -> [] installBinary (binDir </> fixedExeBaseName) libAbiHash :: Verbosity -> PackageDescription -> LocalBuildInfo -> Library -> ComponentLocalBuildInfo -> IO String libAbiHash verbosity _pkg_descr lbi lib clbi = do let libBi = libBuildInfo lib comp = compiler lbi vanillaArgs = (componentGhcOptions verbosity lbi libBi clbi (buildDir lbi)) `mappend` mempty { ghcOptMode = toFlag GhcModeAbiHash, ghcOptInputModules = toNubListR $ exposedModules lib } profArgs = adjustExts "js_p_hi" "js_p_o" 
vanillaArgs `mappend` mempty { ghcOptProfilingMode = toFlag True, ghcOptExtra = toNubListR (ghcjsProfOptions libBi) } ghcArgs = if withVanillaLib lbi then vanillaArgs else if withProfLib lbi then profArgs else error "libAbiHash: Can't find an enabled library way" -- (ghcjsProg, _) <- requireProgram verbosity ghcjsProgram (withPrograms lbi) getProgramInvocationOutput verbosity (ghcInvocation ghcjsProg comp ghcArgs) adjustExts :: String -> String -> GhcOptions -> GhcOptions adjustExts hiSuf objSuf opts = opts `mappend` mempty { ghcOptHiSuffix = toFlag hiSuf, ghcOptObjSuffix = toFlag objSuf } registerPackage :: Verbosity -> InstalledPackageInfo -> PackageDescription -> LocalBuildInfo -> Bool -> PackageDBStack -> IO () registerPackage verbosity installedPkgInfo _pkg lbi _inplace packageDbs = HcPkg.reregister (hcPkgInfo $ withPrograms lbi) verbosity packageDbs (Right installedPkgInfo) componentGhcOptions :: Verbosity -> LocalBuildInfo -> BuildInfo -> ComponentLocalBuildInfo -> FilePath -> GhcOptions componentGhcOptions verbosity lbi bi clbi odir = let opts = Internal.componentGhcOptions verbosity lbi bi clbi odir in opts { ghcOptExtra = ghcOptExtra opts `mappend` toNubListR (hcOptions GHCJS bi) } ghcjsProfOptions :: BuildInfo -> [String] ghcjsProfOptions bi = hcProfOptions GHC bi `mappend` hcProfOptions GHCJS bi ghcjsSharedOptions :: BuildInfo -> [String] ghcjsSharedOptions bi = hcSharedOptions GHC bi `mappend` hcSharedOptions GHCJS bi isDynamic :: Compiler -> Bool isDynamic = Internal.ghcLookupProperty "GHC Dynamic" supportsDynamicToo :: Compiler -> Bool supportsDynamicToo = Internal.ghcLookupProperty "Support dynamic-too" findGhcjsGhcVersion :: Verbosity -> FilePath -> IO (Maybe Version) findGhcjsGhcVersion verbosity pgm = findProgramVersion "--numeric-ghc-version" id verbosity pgm findGhcjsPkgGhcjsVersion :: Verbosity -> FilePath -> IO (Maybe Version) findGhcjsPkgGhcjsVersion verbosity pgm = findProgramVersion "--numeric-ghcjs-version" id verbosity pgm -- ----------------------------------------------------------------------------- -- Registering hcPkgInfo :: ProgramConfiguration -> HcPkg.HcPkgInfo hcPkgInfo conf = HcPkg.HcPkgInfo { HcPkg.hcPkgProgram = ghcjsPkgProg , HcPkg.noPkgDbStack = False , HcPkg.noVerboseFlag = False , HcPkg.flagPackageConf = False , HcPkg.useSingleFileDb = v < [7,9] } where v = versionBranch ver Just ghcjsPkgProg = lookupProgram ghcjsPkgProgram conf Just ver = programVersion ghcjsPkgProg -- | Get the JavaScript file name and command and arguments to run a -- program compiled by GHCJS -- the exe should be the base program name without exe extension runCmd :: ProgramConfiguration -> FilePath -> (FilePath, FilePath, [String]) runCmd conf exe = ( script , programPath ghcjsProg , programDefaultArgs ghcjsProg ++ programOverrideArgs ghcjsProg ++ ["--run"] ) where script = exe <.> "jsexe" </> "all" <.> "js" Just ghcjsProg = lookupProgram ghcjsProgram conf
trskop/cabal
Cabal/Distribution/Simple/GHCJS.hs
bsd-3-clause
40695
0
23
13058
8035
4195
3840
700
6
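-- Illustrative sketch only, not Cabal's real GhcOptions API: the build code
-- above derives each "way" (vanilla, profiling, shared) by mappend-ing a base
-- option record with a small mempty{..} override.  A last-write-wins record
-- with a Monoid instance captures the same layering idea; Cabal's Flag and
-- NubListR fields play roughly the roles of Last and [] here.  The field
-- names and the "-O1" flag are hypothetical; assumes GHC >= 8.4 (Semigroup
-- in the Prelude).
import Data.Monoid (Last (..))

data Opts = Opts
  { optProfiling :: Last Bool    -- unset = Last Nothing; a later Just wins
  , optHiSuffix  :: Last String
  , optExtra     :: [String]     -- extra flags simply accumulate
  } deriving Show

instance Semigroup Opts where
  Opts p1 h1 e1 <> Opts p2 h2 e2 = Opts (p1 <> p2) (h1 <> h2) (e1 <> e2)

instance Monoid Opts where
  mempty = Opts (Last Nothing) (Last Nothing) []

baseOpts :: Opts
baseOpts = mempty { optExtra = ["-O1"] }

-- Profiling way: start from the base and override just two fields.
profOpts :: Opts
profOpts = baseOpts <> mempty { optProfiling = Last (Just True)
                              , optHiSuffix  = Last (Just "p_hi") }

main :: IO ()
main = print profOpts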
{-# LANGUAGE PatternGuards #-} module Distribution.Client where import Network.HTTP import Network.Browser import Network.URI (URI(..), URIAuth(..), parseURI) import Distribution.Server.LegacyImport.UploadLog as UploadLog (read, Entry(..)) import Distribution.Server.Users.Types (UserId(..), UserName(UserName)) import Distribution.Server.Util.Index as PackageIndex (read) import Distribution.Server.Util.Merge import Distribution.Package import Distribution.Verbosity import Distribution.Simple.Utils import Distribution.Text import Data.List import Data.Maybe import Control.Applicative import Control.Exception import Data.Time import Data.Time.Clock.POSIX import Data.ByteString.Lazy.Char8 (ByteString) import qualified Data.ByteString.Lazy.Char8 as BS import qualified Codec.Compression.GZip as GZip import qualified Codec.Archive.Tar as Tar import qualified Codec.Archive.Tar.Entry as Tar import System.IO import System.IO.Error import System.FilePath import System.Directory import qualified System.FilePath.Posix as Posix import Paths_hackage_server (version) ------------------------- -- Command line handling ------------------------- validateHackageURI :: String -> Either String URI validateHackageURI str = case parseURI str of Nothing -> Left ("invalid URL " ++ str) Just uri | uriScheme uri /= "http:" -> Left ("only http URLs are supported " ++ str) | isNothing (uriAuthority uri) -> Left ("server name required in URL " ++ str) | otherwise -> Right uri validatePackageIds :: [String] -> Either String [PackageId] validatePackageIds pkgstrs = case errs of (err:_) -> Left $ "'" ++ err ++ "' is not a valid package name or id" _ -> Right pkgs where pkgstrs' = [ (pkgstr, simpleParse pkgstr) | pkgstr <- pkgstrs ] pkgs = [ pkgid | (_, Just pkgid) <- pkgstrs' ] errs = [ pkgstr | (pkgstr, Nothing) <- pkgstrs' ] ---------------------------------------------------- -- Fetching info from source and destination servers ---------------------------------------------------- data PkgIndexInfo = PkgIndexInfo PackageId (Maybe UTCTime) -- Upload time (Maybe UserName) -- Name of uploader (Maybe UserId) -- Id of uploader deriving Show downloadIndex :: URI -> FilePath -> HttpSession [PkgIndexInfo] downloadIndex uri | isOldHackageURI uri = downloadOldIndex uri | otherwise = downloadNewIndex uri where isOldHackageURI :: URI -> Bool isOldHackageURI uri | Just auth <- uriAuthority uri = uriRegName auth == "hackage.haskell.org" | otherwise = False downloadOldIndex :: URI -> FilePath -> HttpSession [PkgIndexInfo] downloadOldIndex uri cacheDir = do downloadFile indexURI indexFile downloadFile logURI logFile ioAction $ do pkgids <- withFile indexFile ReadMode $ \hnd -> do content <- BS.hGetContents hnd case PackageIndex.read (\pkgid _ -> pkgid) (GZip.decompress content) of Left err -> die $ "Error parsing index at " ++ show uri ++ ": " ++ err Right pkgs -> return pkgs log <- withFile logFile ReadMode $ \hnd -> do content <- hGetContents hnd case UploadLog.read content of Right log -> return log Left err -> die $ "Error parsing log at " ++ show uri ++ ": " ++ err return (mergeLogInfo pkgids log) where indexURI = uri <//> "packages" </> "archive" </> "00-index.tar.gz" indexFile = cacheDir </> "00-index.tar.gz" logURI = uri <//> "packages" </> "archive" </> "log" logFile = cacheDir </> "log" mergeLogInfo pkgids log = catMaybes . map selectDetails $ mergeBy (\pkgid entry -> compare pkgid (entryPkgId entry)) (sort pkgids) ( map (maximumBy (comparing entryTime)) . groupBy (equating entryPkgId) . 
sortBy (comparing entryPkgId) $ log ) selectDetails (OnlyInRight _) = Nothing selectDetails (OnlyInLeft pkgid) = Just $ PkgIndexInfo pkgid Nothing Nothing Nothing selectDetails (InBoth pkgid (UploadLog.Entry time uname _)) = Just $ PkgIndexInfo pkgid (Just time) (Just uname) Nothing entryPkgId (Entry _ _ pkgid) = pkgid entryTime (Entry time _ _) = time downloadNewIndex :: URI -> FilePath -> HttpSession [PkgIndexInfo] downloadNewIndex uri cacheDir = do downloadFile indexURI indexFile ioAction $ withFile indexFile ReadMode $ \hnd -> do content <- BS.hGetContents hnd case PackageIndex.read selectDetails (GZip.decompress content) of Left err -> error $ "Error parsing index at " ++ show uri ++ ": " ++ err Right pkgs -> return pkgs where indexURI = uri <//> "packages/00-index.tar.gz" indexFile = cacheDir </> "00-index.tar.gz" selectDetails :: PackageId -> Tar.Entry -> PkgIndexInfo selectDetails pkgid entry = PkgIndexInfo pkgid (Just time) (if null username then Nothing else Just (UserName username)) (if userid == 0 then Nothing else Just (UserId userid)) where time = epochTimeToUTC (Tar.entryTime entry) username = Tar.ownerName (Tar.entryOwnership entry) userid = Tar.ownerId (Tar.entryOwnership entry) epochTimeToUTC :: Tar.EpochTime -> UTCTime epochTimeToUTC = posixSecondsToUTCTime . realToFrac ------------------------- -- HTTP utilities ------------------------- infixr 5 <//> (<//>) :: URI -> FilePath -> URI uri <//> path = uri { uriPath = Posix.addTrailingPathSeparator (uriPath uri) Posix.</> path } extractURICredentials :: URI -> Maybe (String, String) extractURICredentials uri | Just authority <- uriAuthority uri , (username, ':':passwd0) <- break (==':') (uriUserInfo authority) , let passwd = takeWhile (/='@') passwd0 , not (null username) , not (null passwd) = Just (username, passwd) extractURICredentials _ = Nothing removeURICredentials :: URI -> URI removeURICredentials uri = uri { uriAuthority = fmap (\auth -> auth { uriUserInfo = "" }) (uriAuthority uri) } provideAuthInfo :: URI -> Maybe (String, String) -> URI -> String -> IO (Maybe (String, String)) provideAuthInfo for_uri credentials = \uri _realm -> do if uriHostName uri == uriHostName for_uri then return credentials else return Nothing uriHostName :: URI -> Maybe String uriHostName = fmap uriRegName . 
uriAuthority type HttpSession a = BrowserAction (HandleStream ByteString) a httpSession :: Verbosity -> HttpSession a -> IO a httpSession verbosity action = browse $ do setUserAgent ("hackage-mirror/" ++ display version) setErrHandler die setOutHandler (debug verbosity) setAllowBasicAuth True setCheckForProxy True action downloadFile :: URI -> FilePath -> HttpSession () downloadFile uri file = do out $ "downloading " ++ show uri ++ " to " ++ file let etagFile = file <.> "etag" metag <- ioAction $ catchJustDoesNotExistError (Just <$> readFile etagFile) (\_ -> return Nothing) case metag of Just etag -> do let headers = [mkHeader HdrIfNoneMatch (quote etag)] (_, rsp) <- request (Request uri GET headers BS.empty) case rspCode rsp of (3,0,4) -> out $ file ++ " unchanged with ETag " ++ etag (2,0,0) -> ioAction $ writeDowloadedFileAndEtag rsp _ -> err (showFailure uri rsp) Nothing -> do (_, rsp) <- request (Request uri GET [] BS.empty) case rspCode rsp of (2,0,0) -> ioAction $ writeDowloadedFileAndEtag rsp _ -> err (showFailure uri rsp) where writeDowloadedFileAndEtag rsp = do BS.writeFile file (rspBody rsp) setETag file (unquote <$> findHeader HdrETag rsp) getETag :: FilePath -> IO (Maybe String) getETag file = catchJustDoesNotExistError (Just <$> readFile (file </> ".etag")) (\_ -> return Nothing) setETag :: FilePath -> Maybe String -> IO () setETag file Nothing = catchJustDoesNotExistError (removeFile (file <.> "etag")) (\_ -> return ()) setETag file (Just etag) = writeFile (file <.> "etag") etag catchJustDoesNotExistError :: IO a -> (IOError -> IO a) -> IO a catchJustDoesNotExistError = catchJust (\e -> if isDoesNotExistError e then Just e else Nothing) quote :: String -> String quote s = '"' : s ++ ['"'] unquote :: String -> String unquote ('"':s) = go s where go [] = [] go ('"':[]) = [] go (c:cs) = c : go cs unquote s = s -- AAARG! total lack of exception handling in HTTP monad! downloadFile' :: URI -> FilePath -> HttpSession Bool downloadFile' uri file = do out $ "downloading " ++ show uri ++ " to " ++ file mcontent <- requestGET' uri case mcontent of Nothing -> do out $ "404 " ++ show uri return False Just content -> do ioAction $ BS.writeFile file content return True requestGET :: URI -> HttpSession ByteString requestGET uri = do (_, rsp) <- request (Request uri GET headers BS.empty) checkStatus uri rsp return (rspBody rsp) where headers = [] -- Really annoying! 
requestGET' :: URI -> HttpSession (Maybe ByteString) requestGET' uri = do (_, rsp) <- request (Request uri GET headers BS.empty) case rspCode rsp of (4,0,4) -> return Nothing _ -> do checkStatus uri rsp return (Just (rspBody rsp)) where headers = [] requestPUTFile :: URI -> String -> FilePath -> HttpSession () requestPUTFile uri mime_type file = do content <- ioAction $ BS.readFile file requestPUT uri mime_type content requestPOST, requestPUT :: URI -> String -> ByteString -> HttpSession () requestPOST = requestPOSTPUT POST requestPUT = requestPOSTPUT PUT requestPOSTPUT :: RequestMethod -> URI -> String -> ByteString -> HttpSession () requestPOSTPUT meth uri mimetype body = do (_, rsp) <- request (Request uri meth headers body) checkStatus uri rsp where headers = [ Header HdrContentLength (show (BS.length body)) , Header HdrContentType mimetype ] checkStatus :: URI -> Response ByteString -> HttpSession () checkStatus uri rsp = case rspCode rsp of (2,0,0) -> return () (4,0,0) -> ioAction (warn normal (showFailure uri rsp)) >> return () _code -> err (showFailure uri rsp) showFailure :: URI -> Response ByteString -> String showFailure uri rsp = show (rspCode rsp) ++ " " ++ rspReason rsp ++ show uri ++ case lookupHeader HdrContentType (rspHeaders rsp) of Just mimetype | "text/plain" `isPrefixOf` mimetype -> '\n' : BS.unpack (rspBody rsp) _ -> ""
isomorphism/hackage2
Distribution/Client.hs
bsd-3-clause
10985
0
21
2812
3440
1742
1698
239
5
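-- Minimal sketch (not the hackage-server code itself) of the ETag sidecar
-- pattern used by downloadFile above: the ETag from a previous response is
-- kept next to the downloaded file in "<file>.etag", read back before the
-- next request, and a 304 reply then means the cached file is still fresh.
-- The "index.tar.gz" name in main is a hypothetical example.
import Control.Exception (catchJust)
import System.Directory  (removeFile)
import System.FilePath   ((<.>))
import System.IO.Error   (isDoesNotExistError)

etagFile :: FilePath -> FilePath
etagFile file = file <.> "etag"

-- Read the stored ETag, if any; a missing sidecar just means "no ETag yet".
readETag :: FilePath -> IO (Maybe String)
readETag file =
  catchJust (\e -> if isDoesNotExistError e then Just () else Nothing)
            (Just <$> readFile (etagFile file))
            (\_ -> return Nothing)

-- Store (or clear) the ETag after a successful 200 response.
writeETag :: FilePath -> Maybe String -> IO ()
writeETag file Nothing =
  catchJust (\e -> if isDoesNotExistError e then Just () else Nothing)
            (removeFile (etagFile file))
            (\_ -> return ())
writeETag file (Just etag) = writeFile (etagFile file) etag

main :: IO ()
main = do
  writeETag "index.tar.gz" (Just "\"abc123\"")   -- hypothetical file name
  readETag  "index.tar.gz" >>= print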
{-# LANGUAGE ScopedTypeVariables #-} module Main ( main ) where import Control.Applicative import Control.Exception as C (SomeException, catch, evaluate) import Control.Monad (unless) import qualified Data.ByteString as B import qualified Data.ByteString.Lazy as L import qualified Data.ByteString.Lazy.Internal as L import Data.Int import Data.Ratio import System.IO.Unsafe import Test.Framework import Test.Framework.Providers.QuickCheck2 import Test.QuickCheck import qualified Action (tests) import Arbitrary () import Data.Binary import Data.Binary.Get import Data.Binary.Put ------------------------------------------------------------------------ roundTrip :: (Eq a, Binary a) => a -> (L.ByteString -> L.ByteString) -> Bool roundTrip a f = a == {-# SCC "decode.refragment.encode" #-} decode (f (encode a)) roundTripWith :: Eq a => (a -> Put) -> Get a -> a -> Property roundTripWith putter getter x = forAll positiveList $ \xs -> x == runGet getter (refragment xs (runPut (putter x))) -- make sure that a test fails mustThrowError :: B a mustThrowError a = unsafePerformIO $ C.catch (do _ <- C.evaluate a return False) (\(_e :: SomeException) -> return True) -- low level ones: prop_Word16be :: Word16 -> Property prop_Word16be = roundTripWith putWord16be getWord16be prop_Word16le :: Word16 -> Property prop_Word16le = roundTripWith putWord16le getWord16le prop_Word16host :: Word16 -> Property prop_Word16host = roundTripWith putWord16host getWord16host prop_Word32be :: Word32 -> Property prop_Word32be = roundTripWith putWord32be getWord32be prop_Word32le :: Word32 -> Property prop_Word32le = roundTripWith putWord32le getWord32le prop_Word32host :: Word32 -> Property prop_Word32host = roundTripWith putWord32host getWord32host prop_Word64be :: Word64 -> Property prop_Word64be = roundTripWith putWord64be getWord64be prop_Word64le :: Word64 -> Property prop_Word64le = roundTripWith putWord64le getWord64le prop_Word64host :: Word64 -> Property prop_Word64host = roundTripWith putWord64host getWord64host prop_Wordhost :: Word -> Property prop_Wordhost = roundTripWith putWordhost getWordhost -- done, partial and fail -- | Test partial results. -- May or may not use the whole input, check conditions for the different -- outcomes. prop_partial :: L.ByteString -> Property prop_partial lbs = forAll (choose (0, L.length lbs * 2)) $ \skipN -> let result = pushChunks (runGetIncremental decoder) lbs decoder = do s <- getByteString (fromIntegral skipN) return (L.fromChunks [s]) in case result of Partial _ -> L.length lbs < skipN Done unused _pos value -> and [ L.length value == skipN , L.append value (L.fromChunks [unused]) == lbs ] Fail _ _ _ -> False -- | Fail a decoder and make sure the result is sane. prop_fail :: L.ByteString -> String -> Property prop_fail lbs msg = forAll (choose (0, L.length lbs)) $ \pos -> let result = pushChunks (runGetIncremental decoder) lbs decoder = do -- use part of the input... _ <- getByteString (fromIntegral pos) -- ... then fail fail msg in case result of Fail unused pos' msg' -> and [ pos == pos' , msg == msg' , L.length lbs - pos == fromIntegral (B.length unused) , L.fromChunks [unused] `L.isSuffixOf` lbs ] _ -> False -- wuut? 
-- read negative length prop_getByteString_negative :: Int -> Property prop_getByteString_negative n = n < 1 ==> runGet (getByteString n) L.empty == B.empty prop_bytesRead :: L.ByteString -> Property prop_bytesRead lbs = forAll (makeChunks 0 totalLength) $ \chunkSizes -> let result = pushChunks (runGetIncremental decoder) lbs decoder = do -- Read some data and invoke bytesRead several times. -- Each time, check that the values are what we expect. flip mapM_ chunkSizes $ \(total, step) -> do _ <- getByteString (fromIntegral step) n <- bytesRead unless (n == total) $ fail "unexpected position" bytesRead in case result of Done unused pos value -> and [ value == totalLength , pos == value , B.null unused ] Partial _ -> False Fail _ _ _ -> False where totalLength = L.length lbs makeChunks total i | i == 0 = return [] | otherwise = do n <- choose (0,i) let total' = total + n rest <- makeChunks total' (i - n) return ((total',n):rest) -- | We're trying to guarantee that the Decoder will not ask for more input -- with Partial if it has been given Nothing once. -- In this test we're making the decoder return 'Partial' to get more -- input, and to get knownledge of the current position using 'BytesRead'. -- Both of these operations, when used with the <|> operator, result internally -- in that the decoder return with Partial and BytesRead multiple times, -- in which case we need to keep track of if the user has passed Nothing to a -- Partial in the past. prop_partialOnlyOnce :: Property prop_partialOnlyOnce = property $ let result = runGetIncremental (decoder <|> decoder) decoder = do 0 <- bytesRead _ <- getWord8 -- this will make the decoder return with Partial return "shouldn't get here" in case result of -- we expect Partial followed by Fail Partial k -> case k Nothing of -- push down a Nothing Fail _ _ _ -> True Partial _ -> error $ "partial twice! oh noes!" Done _ _ _ -> error $ "we're not supposed to be done." _ -> error $ "not partial, error!" -- read too much prop_readTooMuch :: (Eq a, Binary a) => a -> Bool prop_readTooMuch x = mustThrowError $ x == a && x /= b where -- encode 'a', but try to read 'b' too (a,b) = decode (encode x) _types = [a,b] -- In binary-0.5 the Get monad looked like -- -- > data S = S {-# UNPACK #-} !B.ByteString -- > L.ByteString -- > {-# UNPACK #-} !Int64 -- > -- > newtype Get a = Get { unGet :: S -> (# a, S #) } -- -- with a helper function -- -- > mkState :: L.ByteString -> Int64 -> S -- > mkState l = case l of -- > L.Empty -> S B.empty L.empty -- > L.Chunk x xs -> S x xs -- -- Note that mkState is strict in its first argument. This goes wrong in this -- function: -- -- > getBytes :: Int -> Get B.ByteString -- > getBytes n = do -- > S s ss bytes <- traceNumBytes n $ get -- > if n <= B.length s -- > then do let (consume,rest) = B.splitAt n s -- > put $! S rest ss (bytes + fromIntegral n) -- > return $! consume -- > else -- > case L.splitAt (fromIntegral n) (s `join` ss) of -- > (consuming, rest) -> -- > do let now = B.concat . L.toChunks $ consuming -- > put $ mkState rest (bytes + fromIntegral n) -- > -- forces the next chunk before this one is returned -- > if (B.length now < n) -- > then -- > fail "too few bytes" -- > else -- > return now -- -- Consider the else-branch of this function; suppose we ask for n bytes; -- the call to L.splitAt gives us a lazy bytestring 'consuming' of precisely @n@ -- bytes (unless we don't have enough data, in which case we fail); but then -- the strict evaluation of mkState on 'rest' means we look ahead too far. 
-- -- Although this is all done completely differently in binary-0.7 it is -- important that the same bug does not get introduced in some other way. The -- test is basically the same test that already exists in this test suite, -- verifying that -- -- > decode . refragment . encode == id -- -- However, we use a different 'refragment', one that introduces an exception -- as the tail of the bytestring after rechunking. If we don't look ahead too -- far then this should make no difference, but if we do then this will throw -- an exception (for instance, in binary-0.5, this will throw an exception for -- certain rechunkings, but not for others). -- -- To make sure that the property holds no matter what refragmentation we use, -- we test exhaustively for a single chunk, and all ways to break the string -- into 2, 3 and 4 chunks. prop_lookAheadIndepOfChunking :: (Eq a, Binary a) => a -> Property prop_lookAheadIndepOfChunking testInput = forAll (testCuts (L.length (encode testInput))) $ roundTrip testInput . rechunk where testCuts :: forall a. (Num a, Enum a) => a -> Gen [a] testCuts len = elements $ [ [] ] ++ [ [i] | i <- [0 .. len] ] ++ [ [i, j] | i <- [0 .. len] , j <- [0 .. len - i] ] ++ [ [i, j, k] | i <- [0 .. len] , j <- [0 .. len - i] , k <- [0 .. len - i - j] ] -- Rechunk a bytestring, leaving the tail as an exception rather than Empty rechunk :: forall a. Integral a => [a] -> L.ByteString -> L.ByteString rechunk cuts = fromChunks . cut cuts . B.concat . L.toChunks where cut :: [a] -> B.ByteString -> [B.ByteString] cut [] bs = [bs] cut (i:is) bs = let (bs0, bs1) = B.splitAt (fromIntegral i) bs in bs0 : cut is bs1 fromChunks :: [B.ByteString] -> L.ByteString fromChunks [] = error "Binary should not have to ask for this chunk!" fromChunks (bs:bss) = L.Chunk bs (fromChunks bss) -- String utilities prop_getLazyByteString :: L.ByteString -> Property prop_getLazyByteString lbs = forAll (choose (0, 2 * L.length lbs)) $ \len -> let result = pushChunks (runGetIncremental decoder) lbs decoder = getLazyByteString len in case result of Done unused _pos value -> and [ value == L.take len lbs , L.fromChunks [unused] == L.drop len lbs ] Partial _ -> len > L.length lbs _ -> False prop_getLazyByteStringNul :: Word16 -> [Int] -> Property prop_getLazyByteStringNul count0 fragments = count >= 0 ==> forAll (choose (0, count)) $ \pos -> let lbs = case L.splitAt pos (L.replicate count 65) of (start,end) -> refragment fragments $ L.concat [start, L.singleton 0, end] result = pushEndOfInput $ pushChunks (runGetIncremental getLazyByteStringNul) lbs in case result of Done unused pos' value -> and [ value == L.take pos lbs , pos + 1 == pos' -- 1 for the NUL , L.fromChunks [unused] == L.drop (pos + 1) lbs ] _ -> False where count = fromIntegral count0 -- to make the generated numbers a bit smaller -- | Same as prop_getLazyByteStringNul, but without any NULL in the string. 
prop_getLazyByteStringNul_noNul :: Word16 -> [Int] -> Property prop_getLazyByteStringNul_noNul count0 fragments = count >= 0 ==> let lbs = refragment fragments $ L.replicate count 65 result = pushEndOfInput $ pushChunks (runGetIncremental getLazyByteStringNul) lbs in case result of Fail _ _ _ -> True _ -> False where count = fromIntegral count0 -- to make the generated numbers a bit smaller prop_getRemainingLazyByteString :: L.ByteString -> Property prop_getRemainingLazyByteString lbs = property $ let result = pushEndOfInput $ pushChunks (runGetIncremental getRemainingLazyByteString) lbs in case result of Done unused pos value -> and [ value == lbs , B.null unused , fromIntegral pos == L.length lbs ] _ -> False -- sanity: invariant_lbs :: L.ByteString -> Bool invariant_lbs (L.Empty) = True invariant_lbs (L.Chunk x xs) = not (B.null x) && invariant_lbs xs prop_invariant :: (Binary a) => a -> Bool prop_invariant = invariant_lbs . encode -- refragment a lazy bytestring's chunks refragment :: [Int] -> L.ByteString -> L.ByteString refragment [] lbs = lbs refragment (x:xs) lbs = let x' = fromIntegral . (+1) . abs $ x rest = refragment xs (L.drop x' lbs) in L.append (L.fromChunks [B.concat . L.toChunks . L.take x' $ lbs]) rest -- check identity of refragmentation prop_refragment :: L.ByteString -> [Int] -> Bool prop_refragment lbs xs = lbs == refragment xs lbs -- check that refragmention still hold invariant prop_refragment_inv :: L.ByteString -> [Int] -> Bool prop_refragment_inv lbs xs = invariant_lbs $ refragment xs lbs main :: IO () main = defaultMain tests ------------------------------------------------------------------------ type T a = a -> Property type B a = a -> Bool p :: (Testable p) => p -> Property p = property test :: (Eq a, Binary a) => a -> Property test a = forAll positiveList (roundTrip a . refragment) positiveList :: Gen [Int] positiveList = fmap (filter (/=0) . map abs) $ arbitrary tests :: [Test] tests = [ testGroup "Utils" [ testProperty "refragment id" (p prop_refragment) , testProperty "refragment invariant" (p prop_refragment_inv) ] , testGroup "Boundaries" [ testProperty "read to much" (p (prop_readTooMuch :: B Word8)) , testProperty "read negative length" (p (prop_getByteString_negative :: T Int)) , -- Arbitrary test input let testInput :: [Int] ; testInput = [0 .. 
10] in testProperty "look-ahead independent of chunking" (p (prop_lookAheadIndepOfChunking testInput)) ] , testGroup "Partial" [ testProperty "partial" (p prop_partial) , testProperty "fail" (p prop_fail) , testProperty "bytesRead" (p prop_bytesRead) , testProperty "partial only once" (p prop_partialOnlyOnce) ] , testGroup "Model" Action.tests , testGroup "Primitives" [ testProperty "Word16be" (p prop_Word16be) , testProperty "Word16le" (p prop_Word16le) , testProperty "Word16host" (p prop_Word16host) , testProperty "Word32be" (p prop_Word32be) , testProperty "Word32le" (p prop_Word32le) , testProperty "Word32host" (p prop_Word32host) , testProperty "Word64be" (p prop_Word64be) , testProperty "Word64le" (p prop_Word64le) , testProperty "Word64host" (p prop_Word64host) , testProperty "Wordhost" (p prop_Wordhost) ] , testGroup "String utils" [ testProperty "getLazyByteString" prop_getLazyByteString , testProperty "getLazyByteStringNul" prop_getLazyByteStringNul , testProperty "getLazyByteStringNul No Null" prop_getLazyByteStringNul_noNul , testProperty "getRemainingLazyByteString" prop_getRemainingLazyByteString ] , testGroup "Using Binary class, refragmented ByteString" $ map (uncurry testProperty) [ ("()", p (test :: T () )) , ("Bool", p (test :: T Bool )) , ("Ordering", p (test :: T Ordering )) , ("Ratio Int", p (test :: T (Ratio Int) )) , ("Word8", p (test :: T Word8 )) , ("Word16", p (test :: T Word16 )) , ("Word32", p (test :: T Word32 )) , ("Word64", p (test :: T Word64 )) , ("Int8", p (test :: T Int8 )) , ("Int16", p (test :: T Int16 )) , ("Int32", p (test :: T Int32 )) , ("Int64", p (test :: T Int64 )) , ("Word", p (test :: T Word )) , ("Int", p (test :: T Int )) , ("Integer", p (test :: T Integer )) , ("Float", p (test :: T Float )) , ("Double", p (test :: T Double )) , ("Char", p (test :: T Char )) , ("[()]", p (test :: T [()] )) , ("[Word8]", p (test :: T [Word8] )) , ("[Word32]", p (test :: T [Word32] )) , ("[Word64]", p (test :: T [Word64] )) , ("[Word]", p (test :: T [Word] )) , ("[Int]", p (test :: T [Int] )) , ("[Integer]", p (test :: T [Integer] )) , ("String", p (test :: T String )) , ("((), ())", p (test :: T ((), ()) )) , ("(Word8, Word32)", p (test :: T (Word8, Word32) )) , ("(Int8, Int32)", p (test :: T (Int8, Int32) )) , ("(Int32, [Int])", p (test :: T (Int32, [Int]) )) , ("Maybe Int8", p (test :: T (Maybe Int8) )) , ("Either Int8 Int16", p (test :: T (Either Int8 Int16) )) , ("(Int, ByteString)", p (test :: T (Int, B.ByteString) )) , ("[(Int, ByteString)]", p (test :: T [(Int, B.ByteString)] )) , ("(Maybe Int64, Bool, [Int])", p (test :: T (Maybe Int64, Bool, [Int]))) , ("(Maybe Word8, Bool, [Int], Either Bool Word8)", p (test :: T (Maybe Word8, Bool, [Int], Either Bool Word8) )) , ("(Maybe Word16, Bool, [Int], Either Bool Word16, Int)", p (test :: T (Maybe Word16, Bool, [Int], Either Bool Word16, Int) )) , ("(Int,Int,Int,Int,Int,Int)", p (test :: T (Int,Int,Int,Int,Int,Int))) , ("(Int,Int,Int,Int,Int,Int,Int)", p (test :: T (Int,Int,Int,Int,Int,Int,Int))) , ("(Int,Int,Int,Int,Int,Int,Int,Int)", p (test :: T (Int,Int,Int,Int,Int,Int,Int,Int))) , ("(Int,Int,Int,Int,Int,Int,Int,Int,Int)", p (test :: T (Int,Int,Int,Int,Int,Int,Int,Int,Int))) , ("(Int,Int,Int,Int,Int,Int,Int,Int,Int,Int)", p (test :: T (Int,Int,Int,Int,Int,Int,Int,Int,Int,Int))) , ("B.ByteString", p (test :: T B.ByteString )) , ("L.ByteString", p (test :: T L.ByteString )) ] , testGroup "Invariants" $ map (uncurry testProperty) [ ("B.ByteString invariant", p (prop_invariant :: B B.ByteString )) , 
("[B.ByteString] invariant", p (prop_invariant :: B [B.ByteString] )) , ("L.ByteString invariant", p (prop_invariant :: B L.ByteString )) , ("[L.ByteString] invariant", p (prop_invariant :: B [L.ByteString] )) ] ]
ezyang/binary
tests/QC.hs
bsd-3-clause
19615
0
20
6519
4897
2650
2247
-1
-1
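-- Minimal sketch of the central property in the test suite above: decoding
-- must give back the encoded value no matter how the byte stream is re-split
-- into chunks.  Assumes the 'binary', 'bytestring' and 'QuickCheck' packages;
-- the list-of-Int payload is just an example type.
import qualified Data.ByteString      as B
import qualified Data.ByteString.Lazy as L
import Data.Binary     (decode, encode)
import Test.QuickCheck (quickCheck)

-- Force new chunk boundaries at the given (made strictly positive) sizes.
refragment :: [Int] -> L.ByteString -> L.ByteString
refragment []     lbs = lbs
refragment (x:xs) lbs =
  L.fromChunks [B.concat (L.toChunks (L.take n lbs))]
    `L.append` refragment xs (L.drop n lbs)
  where n = fromIntegral (abs x + 1)

-- decode . refragment cuts . encode == id, for every choice of cuts.
prop_roundTripRechunked :: [Int] -> [Int] -> Bool
prop_roundTripRechunked cuts xs =
  decode (refragment cuts (encode xs)) == xs

main :: IO ()
main = quickCheck prop_roundTripRechunked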
{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Sandbox -- Maintainer : [email protected] -- Portability : portable -- -- UI for the sandboxing functionality. ----------------------------------------------------------------------------- module Distribution.Client.Sandbox ( sandboxInit, sandboxDelete, sandboxAddSource, sandboxAddSourceSnapshot, sandboxDeleteSource, sandboxListSources, sandboxHcPkg, dumpPackageEnvironment, withSandboxBinDirOnSearchPath, getSandboxConfigFilePath, loadConfigOrSandboxConfig, findSavedDistPref, initPackageDBIfNeeded, maybeWithSandboxDirOnSearchPath, WereDepsReinstalled(..), reinstallAddSourceDeps, maybeReinstallAddSourceDeps, SandboxPackageInfo(..), maybeWithSandboxPackageInfo, tryGetIndexFilePath, sandboxBuildDir, getInstalledPackagesInSandbox, updateSandboxConfigFileFlag, updateInstallDirs, -- FIXME: move somewhere else configPackageDB', configCompilerAux' ) where import Distribution.Client.Setup ( SandboxFlags(..), ConfigFlags(..), ConfigExFlags(..), InstallFlags(..) , GlobalFlags(..), defaultConfigExFlags, defaultInstallFlags , defaultSandboxLocation, globalRepos ) import Distribution.Client.Sandbox.Timestamp ( listModifiedDeps , maybeAddCompilerTimestampRecord , withAddTimestamps , withRemoveTimestamps ) import Distribution.Client.Config ( SavedConfig(..), defaultUserInstall, loadConfig ) import Distribution.Client.Dependency ( foldProgress ) import Distribution.Client.IndexUtils ( BuildTreeRefType(..) ) import Distribution.Client.Install ( InstallArgs, makeInstallContext, makeInstallPlan, processInstallPlan ) import Distribution.Utils.NubList ( fromNubList ) import Distribution.Client.Sandbox.PackageEnvironment ( PackageEnvironment(..), PackageEnvironmentType(..) , createPackageEnvironmentFile, classifyPackageEnvironment , tryLoadSandboxPackageEnvironmentFile, loadUserConfig , commentPackageEnvironment, showPackageEnvironmentWithComments , sandboxPackageEnvironmentFile, userPackageEnvironmentFile ) import Distribution.Client.Sandbox.Types ( SandboxPackageInfo(..) , UseSandbox(..) ) import Distribution.Client.SetupWrapper ( SetupScriptOptions(..), defaultSetupScriptOptions ) import Distribution.Client.Types ( PackageLocation(..) , SourcePackage(..) ) import Distribution.Client.Utils ( inDir, tryCanonicalizePath , tryFindAddSourcePackageDesc ) import Distribution.PackageDescription.Configuration ( flattenPackageDescription ) import Distribution.PackageDescription.Parse ( readPackageDescription ) import Distribution.Simple.Compiler ( Compiler(..), PackageDB(..) , PackageDBStack ) import Distribution.Simple.Configure ( configCompilerAuxEx , interpretPackageDbFlags , getPackageDBContents , findDistPref ) import Distribution.Simple.PreProcess ( knownSuffixHandlers ) import Distribution.Simple.Program ( ProgramConfiguration ) import Distribution.Simple.Setup ( Flag(..), HaddockFlags(..) , fromFlagOrDefault ) import Distribution.Simple.SrcDist ( prepareTree ) import Distribution.Simple.Utils ( die, debug, notice, info, warn , debugNoWrap, defaultPackageDesc , intercalate, topHandlerWith , createDirectoryIfMissingVerbose ) import Distribution.Package ( Package(..) 
) import Distribution.System ( Platform ) import Distribution.Text ( display ) import Distribution.Verbosity ( Verbosity, lessVerbose ) import Distribution.Compat.Environment ( lookupEnv, setEnv ) import Distribution.Client.Compat.FilePerms ( setFileHidden ) import qualified Distribution.Client.Sandbox.Index as Index import Distribution.Simple.PackageIndex ( InstalledPackageIndex ) import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex import qualified Distribution.Simple.Register as Register import qualified Data.Map as M import qualified Data.Set as S import Control.Exception ( assert, bracket_ ) import Control.Monad ( forM, liftM2, unless, when ) import Data.Bits ( shiftL, shiftR, xor ) import Data.Char ( ord ) import Data.IORef ( newIORef, writeIORef, readIORef ) import Data.List ( delete, foldl' ) import Data.Maybe ( fromJust ) #if !MIN_VERSION_base(4,8,0) import Data.Monoid ( mempty, mappend ) #endif import Data.Word ( Word32 ) import Numeric ( showHex ) import System.Directory ( createDirectory , doesDirectoryExist , doesFileExist , getCurrentDirectory , removeDirectoryRecursive , removeFile , renameDirectory ) import System.FilePath ( (</>), equalFilePath , getSearchPath , searchPathSeparator , takeDirectory ) -- -- * Constants -- -- | The name of the sandbox subdirectory where we keep snapshots of add-source -- dependencies. snapshotDirectoryName :: FilePath snapshotDirectoryName = "snapshots" -- | Non-standard build dir that is used for building add-source deps instead of -- "dist". Fixes surprising behaviour in some cases (see issue #1281). sandboxBuildDir :: FilePath -> FilePath sandboxBuildDir sandboxDir = "dist/dist-sandbox-" ++ showHex sandboxDirHash "" where sandboxDirHash = jenkins sandboxDir -- See http://en.wikipedia.org/wiki/Jenkins_hash_function jenkins :: String -> Word32 jenkins str = loop_finish $ foldl' loop 0 str where loop :: Word32 -> Char -> Word32 loop hash key_i' = hash''' where key_i = toEnum . ord $ key_i' hash' = hash + key_i hash'' = hash' + (shiftL hash' 10) hash''' = hash'' `xor` (shiftR hash'' 6) loop_finish :: Word32 -> Word32 loop_finish hash = hash''' where hash' = hash + (shiftL hash 3) hash'' = hash' `xor` (shiftR hash' 11) hash''' = hash'' + (shiftL hash'' 15) -- -- * Basic sandbox functions. -- -- | If @--sandbox-config-file@ wasn't given on the command-line, set it to the -- value of the @CABAL_SANDBOX_CONFIG@ environment variable, or else to -- 'NoFlag'. updateSandboxConfigFileFlag :: GlobalFlags -> IO GlobalFlags updateSandboxConfigFileFlag globalFlags = case globalSandboxConfigFile globalFlags of Flag _ -> return globalFlags NoFlag -> do f' <- fmap (maybe NoFlag Flag) . lookupEnv $ "CABAL_SANDBOX_CONFIG" return globalFlags { globalSandboxConfigFile = f' } -- | Return the path to the sandbox config file - either the default or the one -- specified with @--sandbox-config-file@. getSandboxConfigFilePath :: GlobalFlags -> IO FilePath getSandboxConfigFilePath globalFlags = do let sandboxConfigFileFlag = globalSandboxConfigFile globalFlags case sandboxConfigFileFlag of NoFlag -> do pkgEnvDir <- getCurrentDirectory return (pkgEnvDir </> sandboxPackageEnvironmentFile) Flag path -> return path -- | Load the @cabal.sandbox.config@ file (and possibly the optional -- @cabal.config@). In addition to a @PackageEnvironment@, also return a -- canonical path to the sandbox. Exit with error if the sandbox directory or -- the package environment file do not exist. 
tryLoadSandboxConfig :: Verbosity -> GlobalFlags -> IO (FilePath, PackageEnvironment) tryLoadSandboxConfig verbosity globalFlags = do path <- getSandboxConfigFilePath globalFlags tryLoadSandboxPackageEnvironmentFile verbosity path (globalConfigFile globalFlags) -- | Return the name of the package index file for this package environment. tryGetIndexFilePath :: SavedConfig -> IO FilePath tryGetIndexFilePath config = tryGetIndexFilePath' (savedGlobalFlags config) -- | The same as 'tryGetIndexFilePath', but takes 'GlobalFlags' instead of -- 'SavedConfig'. tryGetIndexFilePath' :: GlobalFlags -> IO FilePath tryGetIndexFilePath' globalFlags = do let paths = fromNubList $ globalLocalRepos globalFlags case paths of [] -> die $ "Distribution.Client.Sandbox.tryGetIndexFilePath: " ++ "no local repos found. " ++ checkConfiguration _ -> return $ (last paths) </> Index.defaultIndexFileName where checkConfiguration = "Please check your configuration ('" ++ userPackageEnvironmentFile ++ "')." -- | Try to extract a 'PackageDB' from 'ConfigFlags'. Gives a better error -- message than just pattern-matching. getSandboxPackageDB :: ConfigFlags -> IO PackageDB getSandboxPackageDB configFlags = do case configPackageDBs configFlags of [Just sandboxDB@(SpecificPackageDB _)] -> return sandboxDB -- TODO: should we allow multiple package DBs (e.g. with 'inherit')? [] -> die $ "Sandbox package DB is not specified. " ++ sandboxConfigCorrupt [_] -> die $ "Unexpected contents of the 'package-db' field. " ++ sandboxConfigCorrupt _ -> die $ "Too many package DBs provided. " ++ sandboxConfigCorrupt where sandboxConfigCorrupt = "Your 'cabal.sandbox.config' is probably corrupt." -- | Which packages are installed in the sandbox package DB? getInstalledPackagesInSandbox :: Verbosity -> ConfigFlags -> Compiler -> ProgramConfiguration -> IO InstalledPackageIndex getInstalledPackagesInSandbox verbosity configFlags comp conf = do sandboxDB <- getSandboxPackageDB configFlags getPackageDBContents verbosity comp sandboxDB conf -- | Temporarily add $SANDBOX_DIR/bin to $PATH. withSandboxBinDirOnSearchPath :: FilePath -> IO a -> IO a withSandboxBinDirOnSearchPath sandboxDir = bracket_ addBinDir rmBinDir where -- TODO: Instead of modifying the global process state, it'd be better to -- set the environment individually for each subprocess invocation. This -- will have to wait until the Shell monad is implemented; without it the -- required changes are too intrusive. addBinDir :: IO () addBinDir = do mbOldPath <- lookupEnv "PATH" let newPath = maybe sandboxBin ((++) sandboxBin . (:) searchPathSeparator) mbOldPath setEnv "PATH" newPath rmBinDir :: IO () rmBinDir = do oldPath <- getSearchPath let newPath = intercalate [searchPathSeparator] (delete sandboxBin oldPath) setEnv "PATH" newPath sandboxBin = sandboxDir </> "bin" -- | Initialise a package DB for this compiler if it doesn't exist. initPackageDBIfNeeded :: Verbosity -> ConfigFlags -> Compiler -> ProgramConfiguration -> IO () initPackageDBIfNeeded verbosity configFlags comp conf = do SpecificPackageDB dbPath <- getSandboxPackageDB configFlags packageDBExists <- doesDirectoryExist dbPath unless packageDBExists $ Register.initPackageDB verbosity comp conf dbPath when packageDBExists $ debug verbosity $ "The package database already exists: " ++ dbPath -- | Entry point for the 'cabal sandbox dump-pkgenv' command. 
dumpPackageEnvironment :: Verbosity -> SandboxFlags -> GlobalFlags -> IO () dumpPackageEnvironment verbosity _sandboxFlags globalFlags = do (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags commentPkgEnv <- commentPackageEnvironment sandboxDir putStrLn . showPackageEnvironmentWithComments (Just commentPkgEnv) $ pkgEnv -- | Entry point for the 'cabal sandbox init' command. sandboxInit :: Verbosity -> SandboxFlags -> GlobalFlags -> IO () sandboxInit verbosity sandboxFlags globalFlags = do -- Warn if there's a 'cabal-dev' sandbox. isCabalDevSandbox <- liftM2 (&&) (doesDirectoryExist "cabal-dev") (doesFileExist $ "cabal-dev" </> "cabal.config") when isCabalDevSandbox $ warn verbosity $ "You are apparently using a legacy (cabal-dev) sandbox. " ++ "Legacy sandboxes may interact badly with native Cabal sandboxes. " ++ "You may want to delete the 'cabal-dev' directory to prevent issues." -- Create the sandbox directory. let sandboxDir' = fromFlagOrDefault defaultSandboxLocation (sandboxLocation sandboxFlags) createDirectoryIfMissingVerbose verbosity True sandboxDir' sandboxDir <- tryCanonicalizePath sandboxDir' setFileHidden sandboxDir -- Determine which compiler to use (using the value from ~/.cabal/config). userConfig <- loadConfig verbosity (globalConfigFile globalFlags) (comp, platform, conf) <- configCompilerAuxEx (savedConfigureFlags userConfig) -- Create the package environment file. pkgEnvFile <- getSandboxConfigFilePath globalFlags createPackageEnvironmentFile verbosity sandboxDir pkgEnvFile comp platform (_sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags let config = pkgEnvSavedConfig pkgEnv configFlags = savedConfigureFlags config -- Create the index file if it doesn't exist. indexFile <- tryGetIndexFilePath config indexFileExists <- doesFileExist indexFile if indexFileExists then notice verbosity $ "Using an existing sandbox located at " ++ sandboxDir else notice verbosity $ "Creating a new sandbox at " ++ sandboxDir Index.createEmpty verbosity indexFile -- Create the package DB for the default compiler. initPackageDBIfNeeded verbosity configFlags comp conf maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile (compilerId comp) platform -- | Entry point for the 'cabal sandbox delete' command. sandboxDelete :: Verbosity -> SandboxFlags -> GlobalFlags -> IO () sandboxDelete verbosity _sandboxFlags globalFlags = do (useSandbox, _) <- loadConfigOrSandboxConfig verbosity globalFlags { globalRequireSandbox = Flag False } case useSandbox of NoSandbox -> warn verbosity "Not in a sandbox." UseSandbox sandboxDir -> do curDir <- getCurrentDirectory pkgEnvFile <- getSandboxConfigFilePath globalFlags -- Remove the @cabal.sandbox.config@ file, unless it's in a non-standard -- location. let isNonDefaultConfigLocation = not $ equalFilePath pkgEnvFile $ curDir </> sandboxPackageEnvironmentFile if isNonDefaultConfigLocation then warn verbosity $ "Sandbox config file is in non-default location: '" ++ pkgEnvFile ++ "'.\n Please delete manually." else removeFile pkgEnvFile -- Remove the sandbox directory, unless we're using a shared sandbox. let isNonDefaultSandboxLocation = not $ equalFilePath sandboxDir $ curDir </> defaultSandboxLocation when isNonDefaultSandboxLocation $ die $ "Non-default sandbox location used: '" ++ sandboxDir ++ "'.\nAssuming a shared sandbox. Please delete '" ++ sandboxDir ++ "' manually." 
notice verbosity $ "Deleting the sandbox located at " ++ sandboxDir removeDirectoryRecursive sandboxDir -- Common implementation of 'sandboxAddSource' and 'sandboxAddSourceSnapshot'. doAddSource :: Verbosity -> [FilePath] -> FilePath -> PackageEnvironment -> BuildTreeRefType -> IO () doAddSource verbosity buildTreeRefs sandboxDir pkgEnv refType = do let savedConfig = pkgEnvSavedConfig pkgEnv indexFile <- tryGetIndexFilePath savedConfig -- If we're running 'sandbox add-source' for the first time for this compiler, -- we need to create an initial timestamp record. (comp, platform, _) <- configCompilerAuxEx . savedConfigureFlags $ savedConfig maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile (compilerId comp) platform withAddTimestamps sandboxDir $ do -- FIXME: path canonicalisation is done in addBuildTreeRefs, but we do it -- twice because of the timestamps file. buildTreeRefs' <- mapM tryCanonicalizePath buildTreeRefs Index.addBuildTreeRefs verbosity indexFile buildTreeRefs' refType return buildTreeRefs' -- | Entry point for the 'cabal sandbox add-source' command. sandboxAddSource :: Verbosity -> [FilePath] -> SandboxFlags -> GlobalFlags -> IO () sandboxAddSource verbosity buildTreeRefs sandboxFlags globalFlags = do (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags if fromFlagOrDefault False (sandboxSnapshot sandboxFlags) then sandboxAddSourceSnapshot verbosity buildTreeRefs sandboxDir pkgEnv else doAddSource verbosity buildTreeRefs sandboxDir pkgEnv LinkRef -- | Entry point for the 'cabal sandbox add-source --snapshot' command. sandboxAddSourceSnapshot :: Verbosity -> [FilePath] -> FilePath -> PackageEnvironment -> IO () sandboxAddSourceSnapshot verbosity buildTreeRefs sandboxDir pkgEnv = do let snapshotDir = sandboxDir </> snapshotDirectoryName -- Use 'D.S.SrcDist.prepareTree' to copy each package's files to our private -- location. createDirectoryIfMissingVerbose verbosity True snapshotDir -- Collect the package descriptions first, so that if some path does not refer -- to a cabal package, we fail immediately. pkgs <- forM buildTreeRefs $ \buildTreeRef -> inDir (Just buildTreeRef) $ return . flattenPackageDescription =<< readPackageDescription verbosity =<< defaultPackageDesc verbosity -- Copy the package sources to "snapshots/$PKGNAME-$VERSION-tmp". If -- 'prepareTree' throws an error at any point, the old snapshots will still be -- in consistent state. tmpDirs <- forM (zip buildTreeRefs pkgs) $ \(buildTreeRef, pkg) -> inDir (Just buildTreeRef) $ do let targetDir = snapshotDir </> (display . packageId $ pkg) targetTmpDir = targetDir ++ "-tmp" dirExists <- doesDirectoryExist targetTmpDir when dirExists $ removeDirectoryRecursive targetDir createDirectory targetTmpDir prepareTree verbosity pkg Nothing targetTmpDir knownSuffixHandlers return (targetTmpDir, targetDir) -- Now rename the "snapshots/$PKGNAME-$VERSION-tmp" dirs to -- "snapshots/$PKGNAME-$VERSION". snapshots <- forM tmpDirs $ \(targetTmpDir, targetDir) -> do dirExists <- doesDirectoryExist targetDir when dirExists $ removeDirectoryRecursive targetDir renameDirectory targetTmpDir targetDir return targetDir -- Once the packages are copied, just 'add-source' them as usual. doAddSource verbosity snapshots sandboxDir pkgEnv SnapshotRef -- | Entry point for the 'cabal sandbox delete-source' command. 
sandboxDeleteSource :: Verbosity -> [FilePath] -> SandboxFlags -> GlobalFlags -> IO () sandboxDeleteSource verbosity buildTreeRefs _sandboxFlags globalFlags = do (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags indexFile <- tryGetIndexFilePath (pkgEnvSavedConfig pkgEnv) withRemoveTimestamps sandboxDir $ do Index.removeBuildTreeRefs verbosity indexFile buildTreeRefs notice verbosity $ "Note: 'sandbox delete-source' only unregisters the " ++ "source dependency, but does not remove the package " ++ "from the sandbox package DB.\n\n" ++ "Use 'sandbox hc-pkg -- unregister' to do that." -- | Entry point for the 'cabal sandbox list-sources' command. sandboxListSources :: Verbosity -> SandboxFlags -> GlobalFlags -> IO () sandboxListSources verbosity _sandboxFlags globalFlags = do (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags indexFile <- tryGetIndexFilePath (pkgEnvSavedConfig pkgEnv) refs <- Index.listBuildTreeRefs verbosity Index.ListIgnored Index.LinksAndSnapshots indexFile when (null refs) $ notice verbosity $ "Index file '" ++ indexFile ++ "' has no references to local build trees." when (not . null $ refs) $ do notice verbosity $ "Source dependencies registered " ++ "in the current sandbox ('" ++ sandboxDir ++ "'):\n\n" mapM_ putStrLn refs notice verbosity $ "\nTo unregister source dependencies, " ++ "use the 'sandbox delete-source' command." -- | Entry point for the 'cabal sandbox hc-pkg' command. Invokes the @hc-pkg@ -- tool with provided arguments, restricted to the sandbox. sandboxHcPkg :: Verbosity -> SandboxFlags -> GlobalFlags -> [String] -> IO () sandboxHcPkg verbosity _sandboxFlags globalFlags extraArgs = do (_sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags let configFlags = savedConfigureFlags . pkgEnvSavedConfig $ pkgEnv dbStack = configPackageDB' configFlags (comp, _platform, conf) <- configCompilerAux' configFlags Register.invokeHcPkg verbosity comp conf dbStack extraArgs updateInstallDirs :: Flag Bool -> (UseSandbox, SavedConfig) -> (UseSandbox, SavedConfig) updateInstallDirs userInstallFlag (useSandbox, savedConfig) = case useSandbox of NoSandbox -> let savedConfig' = savedConfig { savedConfigureFlags = configureFlags { configInstallDirs = installDirs } } in (useSandbox, savedConfig') _ -> (useSandbox, savedConfig) where configureFlags = savedConfigureFlags savedConfig userInstallDirs = savedUserInstallDirs savedConfig globalInstallDirs = savedGlobalInstallDirs savedConfig installDirs | userInstall = userInstallDirs | otherwise = globalInstallDirs userInstall = fromFlagOrDefault defaultUserInstall (configUserInstall configureFlags `mappend` userInstallFlag) -- | Check which type of package environment we're in and return a -- correctly-initialised @SavedConfig@ and a @UseSandbox@ value that indicates -- whether we're working in a sandbox. loadConfigOrSandboxConfig :: Verbosity -> GlobalFlags -- ^ For @--config-file@ and -- @--sandbox-config-file@. -> IO (UseSandbox, SavedConfig) loadConfigOrSandboxConfig verbosity globalFlags = do let configFileFlag = globalConfigFile globalFlags sandboxConfigFileFlag = globalSandboxConfigFile globalFlags ignoreSandboxFlag = globalIgnoreSandbox globalFlags pkgEnvDir <- getPkgEnvDir sandboxConfigFileFlag pkgEnvType <- classifyPackageEnvironment pkgEnvDir sandboxConfigFileFlag ignoreSandboxFlag case pkgEnvType of -- A @cabal.sandbox.config@ file (and possibly @cabal.config@) is present. 
SandboxPackageEnvironment -> do (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags -- ^ Prints an error message and exits on error. let config = pkgEnvSavedConfig pkgEnv return (UseSandbox sandboxDir, config) -- Only @cabal.config@ is present. UserPackageEnvironment -> do config <- loadConfig verbosity configFileFlag userConfig <- loadUserConfig verbosity pkgEnvDir let config' = config `mappend` userConfig dieIfSandboxRequired config' return (NoSandbox, config') -- Neither @cabal.sandbox.config@ nor @cabal.config@ are present. AmbientPackageEnvironment -> do config <- loadConfig verbosity configFileFlag dieIfSandboxRequired config return (NoSandbox, config) where -- Return the path to the package environment directory - either the -- current directory or the one that @--sandbox-config-file@ resides in. getPkgEnvDir :: (Flag FilePath) -> IO FilePath getPkgEnvDir sandboxConfigFileFlag = do case sandboxConfigFileFlag of NoFlag -> getCurrentDirectory Flag path -> tryCanonicalizePath . takeDirectory $ path -- Die if @--require-sandbox@ was specified and we're not inside a sandbox. dieIfSandboxRequired :: SavedConfig -> IO () dieIfSandboxRequired config = checkFlag flag where flag = (globalRequireSandbox . savedGlobalFlags $ config) `mappend` (globalRequireSandbox globalFlags) checkFlag (Flag True) = die $ "'require-sandbox' is set to True, but no sandbox is present. " ++ "Use '--no-require-sandbox' if you want to override " ++ "'require-sandbox' temporarily." checkFlag (Flag False) = return () checkFlag (NoFlag) = return () -- | Return the saved \"dist/\" prefix, or the default prefix. findSavedDistPref :: SavedConfig -> Flag FilePath -> IO FilePath findSavedDistPref config flagDistPref = do let defDistPref = useDistPref defaultSetupScriptOptions flagDistPref' = configDistPref (savedConfigureFlags config) `mappend` flagDistPref findDistPref defDistPref flagDistPref' -- | If we're in a sandbox, call @withSandboxBinDirOnSearchPath@, otherwise do -- nothing. maybeWithSandboxDirOnSearchPath :: UseSandbox -> IO a -> IO a maybeWithSandboxDirOnSearchPath NoSandbox act = act maybeWithSandboxDirOnSearchPath (UseSandbox sandboxDir) act = withSandboxBinDirOnSearchPath sandboxDir $ act -- | Had reinstallAddSourceDeps actually reinstalled any dependencies? data WereDepsReinstalled = ReinstalledSomeDeps | NoDepsReinstalled -- | Reinstall those add-source dependencies that have been modified since -- we've last installed them. Assumes that we're working inside a sandbox. 
reinstallAddSourceDeps :: Verbosity -> ConfigFlags -> ConfigExFlags -> InstallFlags -> GlobalFlags -> FilePath -> IO WereDepsReinstalled reinstallAddSourceDeps verbosity configFlags' configExFlags installFlags globalFlags sandboxDir = topHandler' $ do let sandboxDistPref = sandboxBuildDir sandboxDir configFlags = configFlags' { configDistPref = Flag sandboxDistPref } haddockFlags = mempty { haddockDistPref = Flag sandboxDistPref } (comp, platform, conf) <- configCompilerAux' configFlags retVal <- newIORef NoDepsReinstalled withSandboxPackageInfo verbosity configFlags globalFlags comp platform conf sandboxDir $ \sandboxPkgInfo -> unless (null $ modifiedAddSourceDependencies sandboxPkgInfo) $ do let args :: InstallArgs args = ((configPackageDB' configFlags) ,(globalRepos globalFlags) ,comp, platform, conf ,UseSandbox sandboxDir, Just sandboxPkgInfo ,globalFlags, configFlags, configExFlags, installFlags ,haddockFlags) -- This can actually be replaced by a call to 'install', but we use a -- lower-level API because of layer separation reasons. Additionally, we -- might want to use some lower-level features this in the future. withSandboxBinDirOnSearchPath sandboxDir $ do installContext <- makeInstallContext verbosity args Nothing installPlan <- foldProgress logMsg die' return =<< makeInstallPlan verbosity args installContext processInstallPlan verbosity args installContext installPlan writeIORef retVal ReinstalledSomeDeps readIORef retVal where die' message = die (message ++ installFailedInSandbox) -- TODO: use a better error message, remove duplication. installFailedInSandbox = "Note: when using a sandbox, all packages are required to have consistent dependencies. Try reinstalling/unregistering the offending packages or recreating the sandbox." logMsg message rest = debugNoWrap verbosity message >> rest topHandler' = topHandlerWith $ \_ -> do warn verbosity "Couldn't reinstall some add-source dependencies." -- Here we can't know whether any deps have been reinstalled, so we have -- to be conservative. return ReinstalledSomeDeps -- | Produce a 'SandboxPackageInfo' and feed it to the given action. Note that -- we don't update the timestamp file here - this is done in -- 'postInstallActions'. withSandboxPackageInfo :: Verbosity -> ConfigFlags -> GlobalFlags -> Compiler -> Platform -> ProgramConfiguration -> FilePath -> (SandboxPackageInfo -> IO ()) -> IO () withSandboxPackageInfo verbosity configFlags globalFlags comp platform conf sandboxDir cont = do -- List all add-source deps. indexFile <- tryGetIndexFilePath' globalFlags buildTreeRefs <- Index.listBuildTreeRefs verbosity Index.DontListIgnored Index.OnlyLinks indexFile let allAddSourceDepsSet = S.fromList buildTreeRefs -- List all packages installed in the sandbox. installedPkgIndex <- getInstalledPackagesInSandbox verbosity configFlags comp conf let err = "Error reading sandbox package information." -- Get the package descriptions for all add-source deps. depsCabalFiles <- mapM (flip tryFindAddSourcePackageDesc err) buildTreeRefs depsPkgDescs <- mapM (readPackageDescription verbosity) depsCabalFiles let depsMap = M.fromList (zip buildTreeRefs depsPkgDescs) isInstalled pkgid = not . null . InstalledPackageIndex.lookupSourcePackageId installedPkgIndex $ pkgid installedDepsMap = M.filter (isInstalled . packageId) depsMap -- Get the package ids of modified (and installed) add-source deps. 
modifiedAddSourceDeps <- listModifiedDeps verbosity sandboxDir (compilerId comp) platform installedDepsMap -- 'fromJust' here is safe because 'modifiedAddSourceDeps' are guaranteed to -- be a subset of the keys of 'depsMap'. let modifiedDeps = [ (modDepPath, fromJust $ M.lookup modDepPath depsMap) | modDepPath <- modifiedAddSourceDeps ] modifiedDepsMap = M.fromList modifiedDeps assert (all (`S.member` allAddSourceDepsSet) modifiedAddSourceDeps) (return ()) if (null modifiedDeps) then info verbosity $ "Found no modified add-source deps." else notice verbosity $ "Some add-source dependencies have been modified. " ++ "They will be reinstalled..." -- Get the package ids of the remaining add-source deps (some are possibly not -- installed). let otherDeps = M.assocs (depsMap `M.difference` modifiedDepsMap) -- Finally, assemble a 'SandboxPackageInfo'. cont $ SandboxPackageInfo (map toSourcePackage modifiedDeps) (map toSourcePackage otherDeps) installedPkgIndex allAddSourceDepsSet where toSourcePackage (path, pkgDesc) = SourcePackage (packageId pkgDesc) pkgDesc (LocalUnpackedPackage path) Nothing -- | Same as 'withSandboxPackageInfo' if we're inside a sandbox and a no-op -- otherwise. maybeWithSandboxPackageInfo :: Verbosity -> ConfigFlags -> GlobalFlags -> Compiler -> Platform -> ProgramConfiguration -> UseSandbox -> (Maybe SandboxPackageInfo -> IO ()) -> IO () maybeWithSandboxPackageInfo verbosity configFlags globalFlags comp platform conf useSandbox cont = case useSandbox of NoSandbox -> cont Nothing UseSandbox sandboxDir -> withSandboxPackageInfo verbosity configFlags globalFlags comp platform conf sandboxDir (\spi -> cont (Just spi)) -- | Check if a sandbox is present and call @reinstallAddSourceDeps@ in that -- case. maybeReinstallAddSourceDeps :: Verbosity -> Flag (Maybe Int) -- ^ The '-j' flag -> ConfigFlags -- ^ Saved configure flags -- (from dist/setup-config) -> GlobalFlags -> (UseSandbox, SavedConfig) -> IO WereDepsReinstalled maybeReinstallAddSourceDeps verbosity numJobsFlag configFlags' globalFlags' (useSandbox, config) = do case useSandbox of NoSandbox -> return NoDepsReinstalled UseSandbox sandboxDir -> do -- Reinstall the modified add-source deps. let configFlags = savedConfigureFlags config `mappendSomeSavedFlags` configFlags' configExFlags = defaultConfigExFlags `mappend` savedConfigureExFlags config installFlags' = defaultInstallFlags `mappend` savedInstallFlags config installFlags = installFlags' { installNumJobs = installNumJobs installFlags' `mappend` numJobsFlag } globalFlags = savedGlobalFlags config -- This makes it possible to override things like 'remote-repo-cache' -- from the command line. These options are hidden, and are only -- useful for debugging, so this should be fine. `mappend` globalFlags' reinstallAddSourceDeps verbosity configFlags configExFlags installFlags globalFlags sandboxDir where -- NOTE: we can't simply do @sandboxConfigFlags `mappend` savedFlags@ -- because we don't want to auto-enable things like 'library-profiling' for -- all add-source dependencies even if the user has passed -- '--enable-library-profiling' to 'cabal configure'. These options are -- supposed to be set in 'cabal.config'. 
mappendSomeSavedFlags :: ConfigFlags -> ConfigFlags -> ConfigFlags mappendSomeSavedFlags sandboxConfigFlags savedFlags = sandboxConfigFlags { configHcFlavor = configHcFlavor sandboxConfigFlags `mappend` configHcFlavor savedFlags, configHcPath = configHcPath sandboxConfigFlags `mappend` configHcPath savedFlags, configHcPkg = configHcPkg sandboxConfigFlags `mappend` configHcPkg savedFlags, configProgramPaths = configProgramPaths sandboxConfigFlags `mappend` configProgramPaths savedFlags, configProgramArgs = configProgramArgs sandboxConfigFlags `mappend` configProgramArgs savedFlags, -- NOTE: Unconditionally choosing the value from -- 'dist/setup-config'. Sandbox package DB location may have been -- changed by 'configure -w'. configPackageDBs = configPackageDBs savedFlags -- FIXME: Is this compatible with the 'inherit' feature? } -- -- Utils (transitionary) -- -- FIXME: configPackageDB' and configCompilerAux' don't really belong in this -- module -- configPackageDB' :: ConfigFlags -> PackageDBStack configPackageDB' cfg = interpretPackageDbFlags userInstall (configPackageDBs cfg) where userInstall = fromFlagOrDefault True (configUserInstall cfg) configCompilerAux' :: ConfigFlags -> IO (Compiler, Platform, ProgramConfiguration) configCompilerAux' configFlags = configCompilerAuxEx configFlags --FIXME: make configCompilerAux use a sensible verbosity { configVerbosity = fmap lessVerbose (configVerbosity configFlags) }
x-y-z/cabal
cabal-install/Distribution/Client/Sandbox.hs
bsd-3-clause
37,285
0
19
10,121
5,801
3,039
2,762
545
6
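-- Illustrative sketch (not part of cabal-install above): 'sandboxAddSourceSnapshot'
-- copies each package into "<target>-tmp" and only renames it over the final
-- snapshot directory once copying has succeeded, so a failure part-way never
-- clobbers an existing snapshot. A minimal, self-contained version of that
-- stage-then-swap pattern using only System.Directory; 'stageThenSwap' and its
-- arguments are invented names for the example.
module StageThenSwapSketch where

import Control.Monad (when)
import System.Directory (createDirectoryIfMissing, doesDirectoryExist, removeDirectoryRecursive, renameDirectory)

-- | Run @copyInto@ against a fresh staging directory and promote it to
-- @targetDir@ only after the action finishes without throwing.
stageThenSwap :: FilePath -> (FilePath -> IO ()) -> IO ()
stageThenSwap targetDir copyInto = do
  let tmpDir = targetDir ++ "-tmp"
  -- Start from a clean staging area.
  tmpExists <- doesDirectoryExist tmpDir
  when tmpExists (removeDirectoryRecursive tmpDir)
  createDirectoryIfMissing True tmpDir
  copyInto tmpDir
  -- Only now is the previous snapshot (if any) replaced.
  oldExists <- doesDirectoryExist targetDir
  when oldExists (removeDirectoryRecursive targetDir)
  renameDirectory tmpDir targetDir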
module UnitTests.Distribution.Client.UserConfig ( tests ) where import Control.Exception (bracket) import Data.List (sort, nub) import Data.Monoid import System.Directory (getCurrentDirectory, removeDirectoryRecursive, createDirectoryIfMissing) import System.FilePath (takeDirectory) import Test.Framework as TF (Test) import Test.Framework.Providers.HUnit (testCase) import Test.HUnit (Assertion, assertBool) import Distribution.Client.Compat.Environment (lookupEnv, setEnv) import Distribution.Client.Config import Distribution.Utils.NubList (fromNubList) import Distribution.Client.Setup (GlobalFlags (..), InstallFlags (..)) import Distribution.Simple.Setup (ConfigFlags (..), fromFlag) import Distribution.Verbosity (silent) tests :: [TF.Test] tests = [ testCase "nullDiffOnCreate" nullDiffOnCreateTest , testCase "canDetectDifference" canDetectDifference , testCase "canUpdateConfig" canUpdateConfig , testCase "doubleUpdateConfig" doubleUpdateConfig ] nullDiffOnCreateTest :: Assertion nullDiffOnCreateTest = bracketTest . const $ do -- Create a new default config file in our test directory. _ <- loadConfig silent mempty mempty -- Now we read it in and compare it against the default. diff <- userConfigDiff mempty assertBool (unlines $ "Following diff should be empty:" : diff) $ null diff canDetectDifference :: Assertion canDetectDifference = bracketTest . const $ do -- Create a new default config file in our test directory. _ <- loadConfig silent mempty mempty cabalFile <- defaultConfigFile appendFile cabalFile "verbose: 0\n" diff <- userConfigDiff mempty assertBool (unlines $ "Should detect a difference:" : diff) $ diff == [ "- verbose: 1", "+ verbose: 0" ] canUpdateConfig :: Assertion canUpdateConfig = bracketTest . const $ do cabalFile <- defaultConfigFile createDirectoryIfMissing True $ takeDirectory cabalFile -- Write a trivial cabal file. writeFile cabalFile "tests: True\n" -- Update the config file. userConfigUpdate silent mempty -- Load it again. updated <- loadConfig silent mempty mempty assertBool ("Field 'tests' should be True") $ fromFlag (configTests $ savedConfigureFlags updated) doubleUpdateConfig :: Assertion doubleUpdateConfig = bracketTest . const $ do -- Create a new default config file in our test directory. _ <- loadConfig silent mempty mempty -- Update it. userConfigUpdate silent mempty userConfigUpdate silent mempty -- Load it again. updated <- loadConfig silent mempty mempty assertBool ("Field 'remote-repo' doesn't contain duplicates") $ listUnique (map show . fromNubList . globalRemoteRepos $ savedGlobalFlags updated) assertBool ("Field 'extra-prog-path' doesn't contain duplicates") $ listUnique (map show . fromNubList . configProgramPathExtra $ savedConfigureFlags updated) assertBool ("Field 'build-summary' doesn't contain duplicates") $ listUnique (map show . fromNubList . installSummaryFile $ savedInstallFlags updated) listUnique :: Ord a => [a] -> Bool listUnique xs = let sorted = sort xs in nub sorted == xs bracketTest :: ((FilePath, FilePath) -> IO ()) -> Assertion bracketTest = bracket testSetup testTearDown where testSetup :: IO (FilePath, FilePath) testSetup = do Just oldHome <- lookupEnv "HOME" testdir <- fmap (++ "/test-user-config") getCurrentDirectory setEnv "HOME" testdir return (oldHome, testdir) testTearDown :: (FilePath, FilePath) -> IO () testTearDown (oldHome, testdir) = do setEnv "HOME" oldHome removeDirectoryRecursive testdir
christiaanb/cabal
cabal-install/tests/UnitTests/Distribution/Client/UserConfig.hs
bsd-3-clause
3,720
0
14
722
866
449
417
72
1
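-- Illustrative sketch (not part of the test suite above): the tests point $HOME
-- at a scratch directory and restore it inside 'bracket', so a failing
-- assertion cannot leak the override. The same pattern, generalised to any
-- environment variable and using base's System.Environment; 'withEnvVar' is an
-- invented helper name.
module WithEnvVarSketch where

import Control.Exception (bracket)
import System.Environment (lookupEnv, setEnv, unsetEnv)

-- | Run an action with an environment variable temporarily overridden,
-- restoring (or unsetting) the original value afterwards, even on exceptions.
withEnvVar :: String -> String -> IO a -> IO a
withEnvVar var value action = bracket save restore (const action)
  where
    save = do
      old <- lookupEnv var
      setEnv var value
      return old
    restore (Just old) = setEnv var old
    restore Nothing    = unsetEnv var

-- Example use: withEnvVar "HOME" "/tmp/test-user-config" someIOAction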
module Releases2016 where import PlatformDB import Types releases2016 :: [Release] releases2016 = [hp_8_0_1, hp_8_0_2] hp_8_0_1 :: Release hp_8_0_1 = releaseWithMinimal "8.0.1" [ incGHC "8.0.1" , incGHCLib "Cabal" "1.24.0.0" , incGHCLib "array" "0.5.1.1" , incGHCLib "base" "4.9.0.0" , incGHCLib "bytestring" "0.10.8.1" , incGHCLib "containers" "0.5.7.1" , incGHCLib "deepseq" "1.4.2.0" , incGHCLib "directory" "1.2.6.2" , incGHCLib "filepath" "1.4.1.0" , incGHCLib "hpc" "0.6.0.3" , incGHCLib "pretty" "1.1.3.3" , incGHCLib "process" "1.4.2.0" , incGHCLib "template-haskell" "2.11.0.0" , incGHCLib "time" "1.6.0.1" , incGHCLib "transformers" "0.5.2.0" , incGHCLib "xhtml" "3000.2.1" {- These packages are in the GHC distribution, and hence bundeled with the Platform. However, they are not officially part of the Platform, and as such, do not carry the same stability guaruntees. , incGHCLib "binary" "0.8.3.0" , incGHCLib "ghc-prim" "0.5.0.0" , incGHCLib "haskeline" "0.7.2.3" , incGHCLib "hoopl" "3.10.2.1" , incGHCLib "integer-gmp" "1.0.0.1" , incGHCLib "terminfo" "0.4.0.2" -} , notWindows $ incGHCLib "unix" "2.7.2.0" --, onlyWindows $ incGHCLib "Win32" "2.3.1.0" --, incTool "cabal-install" "1.25.0.0" , incTool "alex" "3.1.7" , incTool "happy" "1.19.5" , incTool "hscolour" "1.24" , incGHCTool "haddock" "2.16.1" , incGHCTool "cabal-install" "1.24.0.0" , incGHCTool "stack" "1.1.2" ] [ incLib "async" "2.1.0" , incLib "attoparsec" "0.13.0.2" , incLib "case-insensitive" "1.2.0.6" -- , incLib "cgi" "3001.2.2.2" , incLib "fgl" "5.5.2.3" , incLib "GLUT" "2.7.0.7" , incLib "GLURaw" "2.0.0.1" , incLib "haskell-src" "1.0.2.0" , incLib "hashable" "1.2.4.0" , incLib "html" "1.0.1.2" , incLib "HTTP" "4000.3.3" , incLib "HUnit" "1.3.1.1" , incLib "network" "2.6.2.1" , incLib "OpenGL" "3.0.0.2" , incLib "OpenGLRaw" "3.1.0.0" , incLib "parallel" "3.2.1.0" , incLib "parsec" "3.1.9" , incLib "regex-base" "0.93.2" , incLib "regex-compat" "0.95.1" , incLib "regex-posix" "0.95.2" , incLib "split" "0.2.3" , incLib "stm" "2.4.4.1" , incLib "syb" "0.6" , incLib "text" "1.2.2.1" , incLib "unordered-containers" "0.2.7.0" , incLib "vector" "0.11.0.0" , incLib "zlib" "0.6.1.1" --needed for happy or alex but otherwise would be in full only , incLib "mtl" "2.2.1" , incLib "primitive" "0.6.1.0" , incLib "random" "1.1" , incLib "QuickCheck" "2.8.2" -- Libs required by newer version of stuff - but not cleared for HP -- needed by alex & QuickCheck , incLib "tf-random" "0.5" -- was split out of network, so was in HP, just under different pacakge , incLib "network-uri" "2.6.1.0" -- needed by attoparsec , incLib "scientific" "0.3.4.6" -- needed by OpenGL , incLib "ObjectName" "1.1.0.1" , incLib "StateVar" "1.1.0.4" , incLib "half" "0.2.2.3" , incLib "fixed" "0.2.1.1" ] hp_8_0_2 :: Release hp_8_0_2 = releaseWithMinimal "8.0.2" [ incGHC "8.0.2" , incGHCLib "Cabal" "1.24.2.0" , incGHCLib "array" "0.5.1.1" , incGHCLib "base" "4.9.1.0" , incGHCLib "bytestring" "0.10.8.1" , incGHCLib "containers" "0.5.7.1" , incGHCLib "deepseq" "1.4.2.0" , incGHCLib "directory" "1.3.0.0" , incGHCLib "filepath" "1.4.1.1" , incGHCLib "hpc" "0.6.0.3" , incGHCLib "pretty" "1.1.3.3" , incGHCLib "process" "1.4.3.0" , incGHCLib "template-haskell" "2.11.1.0" , incGHCLib "time" "1.6.0.1" , incGHCLib "transformers" "0.5.2.0" , incGHCLib "xhtml" "3000.2.1" {- These packages are in the GHC distribution, and hence bundled with the Platform. However, they are not officially part of the Platform, and as such, do not carry the same stability guaruntees. 
, incGHCLib "binary" "0.8.3.0" , incGHCLib "ghc-prim" "0.5.0.0" , incGHCLib "haskeline" "0.7.3.0" , incGHCLib "hoopl" "3.10.2.1" , incGHCLib "integer-gmp" "1.0.0.1" , incGHCLib "terminfo" "0.4.0.2" -} , notWindows $ incGHCLib "unix" "2.7.2.1" --, onlyWindows $ incGHCLib "Win32" "2.3.1.0" --, incTool "cabal-install" "1.25.0.0" , incTool "alex" "3.2.1" , incTool "happy" "1.19.5" , incTool "hscolour" "1.24.1" , incGHCTool "haddock" "2.17.2" , incGHCTool "cabal-install" "1.24.0.2" , incGHCTool "stack" "1.3.2" ] [ incLib "async" "2.1.1" , incLib "attoparsec" "0.13.1.0" , incLib "case-insensitive" "1.2.0.7" , incLib "fgl" "5.5.3.0" , incLib "GLUT" "2.7.0.10" , incLib "GLURaw" "2.0.0.3" , incLib "haskell-src" "1.0.2.0" , incLib "hashable" "1.2.5.0" , incLib "html" "1.0.1.2" , incLib "HTTP" "4000.3.4" , incLib "HUnit" "1.5.0.0" , incLib "network" "2.6.3.1" , incLib "OpenGL" "3.0.1.0" , incLib "OpenGLRaw" "3.2.4.0" , incLib "parallel" "3.2.1.0" , incLib "parsec" "3.1.11" , incLib "regex-base" "0.93.2" , incLib "regex-compat" "0.95.1" , incLib "regex-posix" "0.95.2" , incLib "split" "0.2.3.1" , incLib "stm" "2.4.4.1" , incLib "syb" "0.6" , incLib "text" "1.2.2.1" , incLib "unordered-containers" "0.2.7.2" , incLib "vector" "0.11.0.0" , incLib "zlib" "0.6.1.2" --needed for happy or alex , incLib "mtl" "2.2.1" , incLib "primitive" "0.6.1.0" , incLib "random" "1.1" , incLib "QuickCheck" "2.9.2" -- Libs required by newer version of stuff - but not cleared for HP -- needed by alex & QuickCheck , incLib "tf-random" "0.5" -- was split out of network, so was in HP, just under different pacakge , incLib "network-uri" "2.6.1.0" -- needed by attoparsec , incLib "scientific" "0.3.4.10" , incLib "integer-logarithms" "1" -- needed by OpenGL , incLib "ObjectName" "1.1.0.1" , incLib "StateVar" "1.1.0.4" , incLib "half" "0.2.2.3" , incLib "fixed" "0.2.1.1" -- needed by HUnit , incLib "call-stack" "0.1.0" ]
erantapaa/haskell-platform
hptool/src/Releases2016.hs
bsd-3-clause
8,908
0
8
4,233
1,063
547
516
135
1
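-- Illustrative sketch (not from the hptool sources above): each release is in
-- effect a table of (package, version) entries, so release-to-release changes
-- can be summarised with Data.Map. Plain String pairs stand in for the
-- PlatformDB values built by 'incGHCLib'/'incLib', whose definitions are not
-- shown in this file.
module ReleaseDiffSketch where

import qualified Data.Map.Strict as Map

-- | Packages present in both releases whose versions differ.
changedVersions :: [(String, String)] -> [(String, String)] -> [(String, (String, String))]
changedVersions old new =
  Map.toList . Map.filter (uncurry (/=)) $
    Map.intersectionWith (,) (Map.fromList old) (Map.fromList new)

-- For example, with entries taken from the two releases above:
--   changedVersions [("parsec","3.1.9")] [("parsec","3.1.11")]
--     == [("parsec",("3.1.9","3.1.11"))]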
{-# OPTIONS_GHC -fno-warn-missing-signatures #-} {-# LANGUAGE MultiParamTypeClasses, Rank2Types #-} ----------------------------------------------------------------------------- -- | -- Module : XMonad.Layout.Groups.Wmii -- Copyright : Quentin Moser <[email protected]> -- License : BSD-style (see LICENSE) -- -- Maintainer : orphaned -- Stability : stable -- Portability : unportable -- -- A wmii-like layout algorithm. -- ----------------------------------------------------------------------------- module XMonad.Layout.Groups.Wmii ( -- * Usage -- $usage wmii , zoomGroupIn , zoomGroupOut , zoomGroupReset , toggleGroupFull , groupToNextLayout , groupToFullLayout , groupToTabbedLayout , groupToVerticalLayout -- * Useful re-exports , shrinkText , defaultTheme , module XMonad.Layout.Groups.Helpers ) where import XMonad hiding ((|||)) import qualified XMonad.Layout.Groups as G import XMonad.Layout.Groups.Examples import XMonad.Layout.Groups.Helpers import XMonad.Layout.Tabbed import XMonad.Layout.Named import XMonad.Layout.Renamed import XMonad.Layout.LayoutCombinators import XMonad.Layout.MessageControl import XMonad.Layout.Simplest -- $usage -- This module provides a layout inspired by the one used by the wmii -- (<http://wmii.suckless.org>) window manager. -- Windows are arranged into groups in a horizontal row, and each group can lay out -- its windows -- -- * by maximizing the focused one -- -- * by tabbing them (wmii uses a stacked layout, but I'm too lazy to write it) -- -- * by arranging them in a column. -- -- As the groups are arranged in a 'ZoomRow', the relative width of each group can be -- increased or decreased at will. Groups can also be set to use the whole screen -- whenever they have focus. -- -- You can use the contents of this module by adding -- -- > import XMonad.Layout.Groups.Wmii -- -- to the top of your @.\/.xmonad\/xmonad.hs@, and adding 'wmii' -- (with a 'Shrinker' and decoration 'Theme' as -- parameters) to your layout hook, for example: -- -- > myLayout = wmii shrinkText defaultTheme -- -- To be able to zoom in and out of groups, change their inner layout, etc., -- create key bindings for the relevant actions: -- -- > ((modMask, xK_f), toggleGroupFull) -- -- and so on. -- -- For more information on how to extend your layout hook and key bindings, see -- "XMonad.Doc.Extending". -- -- Finally, you will probably want to be able to move focus and windows -- between groups in a consistent fashion. For this, you should take a look -- at the "XMonad.Layout.Groups.Helpers" module, whose contents are re-exported -- by this module. -- | A layout inspired by wmii wmii s t = G.group innerLayout zoomRowG where column = named "Column" $ Tall 0 (3/100) (1/2) tabs = named "Tabs" $ Simplest innerLayout = renamed [CutWordsLeft 3] $ addTabs s t $ ignore NextLayout $ ignore (JumpToLayout "") $ unEscape $ column ||| tabs ||| Full -- | Increase the width of the focused group zoomGroupIn :: X () zoomGroupIn = zoomColumnIn -- | Decrease the size of the focused group zoomGroupOut :: X () zoomGroupOut = zoomColumnOut -- | Reset the size of the focused group to the default zoomGroupReset :: X () zoomGroupReset = zoomColumnReset -- | Toggle whether the currently focused group should be maximized -- whenever it has focus. toggleGroupFull :: X () toggleGroupFull = toggleGroupFull -- | Rotate the layouts in the focused group. groupToNextLayout :: X () groupToNextLayout = sendMessage $ escape NextLayout -- | Switch the focused group to the \"maximized\" layout. 
groupToFullLayout :: X () groupToFullLayout = sendMessage $ escape $ JumpToLayout "Full" -- | Switch the focused group to the \"tabbed\" layout. groupToTabbedLayout :: X () groupToTabbedLayout = sendMessage $ escape $ JumpToLayout "Tabs" -- | Switch the focused group to the \"column\" layout. groupToVerticalLayout :: X () groupToVerticalLayout = sendMessage $ escape $ JumpToLayout "Column"
MasseR/xmonadcontrib
XMonad/Layout/Groups/Wmii.hs
bsd-3-clause
4,589
0
16
1,267
472
294
178
49
1
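-- Illustrative sketch (not from xmonad-contrib itself): the $usage notes above
-- ask for a 'Shrinker' and a decoration 'Theme' plus key bindings for the group
-- actions. A minimal configuration along those lines; the modifier and key
-- choices are arbitrary. Note that in this snapshot 'toggleGroupFull' is
-- defined in terms of itself, which would loop if invoked (later versions of
-- the module appear to delegate to 'toggleColumnFull' from
-- XMonad.Layout.Groups.Examples), so the bindings below stick to the explicit
-- layout-jumping actions.
module WmiiConfigSketch where

import XMonad
import XMonad.Layout.Groups.Wmii
import XMonad.Util.EZConfig (additionalKeys)

main :: IO ()
main = xmonad $ defaultConfig
  { layoutHook = wmii shrinkText defaultTheme
  , modMask    = mod4Mask
  }
  `additionalKeys`
  [ ((mod4Mask, xK_f), groupToFullLayout)     -- maximize the focused group
  , ((mod4Mask, xK_t), groupToTabbedLayout)   -- tab the focused group
  , ((mod4Mask, xK_v), groupToVerticalLayout) -- column layout for the group
  , ((mod4Mask, xK_space), groupToNextLayout) -- cycle the group's inner layouts
  ]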
{-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE TupleSections #-} module Language.Haskell.Liquid.Bare.OfType ( ofBareType , ofMeaSort , ofBSort , ofBPVar , mkSpecType , mkSpecType' ) where import BasicTypes import Name import TyCon hiding (synTyConRhs_maybe) import Type (expandTypeSynonyms) import TysWiredIn import Control.Applicative import Control.Monad.Reader hiding (forM) import Control.Monad.State hiding (forM) import Data.Maybe (fromMaybe) import Data.Monoid import Data.Traversable (forM) import Text.Parsec.Pos import Text.Printf import qualified Control.Exception as Ex import qualified Data.HashMap.Strict as M import Language.Fixpoint.Misc (errorstar) import Language.Fixpoint.Types (Expr(..), Reftable, Symbol, meet, mkSubst, subst, symbol) import Language.Haskell.Liquid.GhcMisc import Language.Haskell.Liquid.Misc (secondM) import Language.Haskell.Liquid.RefType import Language.Haskell.Liquid.Types import Language.Haskell.Liquid.Bounds import Language.Haskell.Liquid.Bare.Env import Language.Haskell.Liquid.Bare.Expand import Language.Haskell.Liquid.Bare.Lookup import Language.Haskell.Liquid.Bare.Resolve -- import Language.Haskell.Liquid.Bare.RefToLogic -------------------------------------------------------------------------------- ofBareType :: SourcePos -> BareType -> BareM SpecType ofBareType l = ofBRType expandRTAliasApp (resolve l <=< expandReft) ofMeaSort :: BareType -> BareM SpecType ofMeaSort = ofBRType failRTAliasApp return ofBSort :: BSort -> BareM RSort ofBSort = ofBRType failRTAliasApp return -------------------------------------------------------------------------------- ofBPVar :: BPVar -> BareM RPVar ofBPVar = mapM_pvar ofBSort mapM_pvar :: (Monad m) => (a -> m b) -> PVar a -> m (PVar b) mapM_pvar f (PV x t v txys) = do t' <- forM t f txys' <- mapM (\(t, x, y) -> liftM (, x, y) (f t)) txys return $ PV x t' v txys' -------------------------------------------------------------------------------- mkSpecType :: SourcePos -> BareType -> BareM SpecType mkSpecType l t = mkSpecType' l (ty_preds $ toRTypeRep t) t mkSpecType' :: SourcePos -> [PVar BSort] -> BareType -> BareM SpecType mkSpecType' l πs t = ofBRType expandRTAliasApp resolveReft t where resolveReft = (resolve l <=< expandReft) . txParam subvUReft (uPVar <$> πs) t txParam f πs t = f (txPvar (predMap πs t)) txPvar :: M.HashMap Symbol UsedPVar -> UsedPVar -> UsedPVar txPvar m π = π { pargs = args' } where args' | not (null (pargs π)) = zipWith (\(_,x ,_) (t,_,y) -> (t, x, y)) (pargs π') (pargs π) | otherwise = pargs π' π' = fromMaybe (errorstar err) $ M.lookup (pname π) m err = "Bare.replaceParams Unbound Predicate Variable: " ++ show π predMap πs t = M.fromList [(pname π, π) | π <- πs ++ rtypePredBinds t] rtypePredBinds = map uPVar . ty_preds . toRTypeRep -------------------------------------------------------------------------------- ofBRType :: (PPrint r, UReftable r) => (SourcePos -> RTAlias RTyVar SpecType -> [BRType r] -> r -> BareM (RRType r)) -> (r -> BareM r) -> BRType r -> BareM (RRType r) ofBRType appRTAlias resolveReft = go where go t@(RApp _ _ _ _) = do aliases <- (typeAliases . 
rtEnv) <$> get goRApp aliases t go (RAppTy t1 t2 r) = RAppTy <$> go t1 <*> go t2 <*> resolveReft r go (RFun x t1 t2 r) = do env <- get goRFun (bounds env) x t1 t2 r go (RVar a r) = RVar (symbolRTyVar a) <$> resolveReft r go (RAllT a t) = RAllT (symbolRTyVar a) <$> go t go (RAllP a t) = RAllP <$> ofBPVar a <*> go t go (RAllS x t) = RAllS x <$> go t go (RAllE x t1 t2) = RAllE x <$> go t1 <*> go t2 go (REx x t1 t2) = REx x <$> go t1 <*> go t2 go (RRTy e r o t) = RRTy <$> mapM (secondM go) e <*> resolveReft r <*> pure o <*> go t go (RHole r) = RHole <$> resolveReft r go (RExprArg (Loc l l' e)) = RExprArg . Loc l l' <$> resolve l e go_ref (RPropP ss r) = RPropP <$> mapM go_syms ss <*> resolveReft r go_ref (RProp ss t) = RProp <$> mapM go_syms ss <*> go t go_ref (RHProp _ _) = errorstar "TODO:EFFECTS:ofBRType" go_syms = secondM ofBSort goRFun bounds _ (RApp c ps' _ _) t _ | Just bnd <- M.lookup c bounds = do let (ts', ps) = splitAt (length $ tyvars bnd) ps' ts <- mapM go ts' makeBound bnd ts [x | RVar x _ <- ps] <$> go t goRFun _ x t1 t2 r = RFun x <$> go t1 <*> go t2 <*> resolveReft r goRApp aliases (RApp (Loc l _ c) ts _ r) | Just rta <- M.lookup c aliases = appRTAlias l rta ts =<< resolveReft r goRApp _ (RApp lc ts rs r) = do let l = loc lc r' <- resolveReft r lc' <- Loc l l <$> matchTyCon lc (length ts) rs' <- mapM go_ref rs ts' <- mapM go ts bareTCApp r' lc' rs' ts' goRApp _ _ = errorstar "This cannot happen" matchTyCon :: LocSymbol -> Int -> BareM TyCon matchTyCon lc@(Loc _ _ c) arity | isList c && arity == 1 = return listTyCon | isTuple c = return $ tupleTyCon BoxedTuple arity | otherwise = lookupGhcTyCon lc -------------------------------------------------------------------------------- failRTAliasApp :: SourcePos -> RTAlias RTyVar SpecType -> [BRType r] -> r -> BareM (RRType r) failRTAliasApp l rta _ _ = Ex.throw err where err :: Error err = ErrIllegalAliasApp (sourcePosSrcSpan l) (pprint $ rtName rta) (sourcePosSrcSpan $ rtPos rta) expandRTAliasApp :: SourcePos -> RTAlias RTyVar SpecType -> [BareType] -> RReft -> BareM SpecType expandRTAliasApp l rta args r | length args == length αs + length εs = do args' <- mapM (ofBareType l) args let ts = take (length αs) args' αts = zipWith (\α t -> (α, toRSort t, t)) αs ts return $ subst su . (`strengthen` r) . subsTyVars_meet αts $ rtBody rta | otherwise = Ex.throw err where su = mkSubst $ zip (symbol <$> εs) es αs = rtTArgs rta εs = rtVArgs rta es_ = drop (length αs) args es = map (exprArg $ show err) es_ err :: Error err = ErrAliasApp (sourcePosSrcSpan l) (length args) (pprint $ rtName rta) (sourcePosSrcSpan $ rtPos rta) (length αs + length εs) -- | exprArg converts a tyVar to an exprVar because parser cannot tell -- HORRIBLE HACK To allow treating upperCase X as value variables X -- e.g. 
type Matrix a Row Col = List (List a Row) Col exprArg _ (RExprArg e) = val e exprArg _ (RVar x _) = EVar (symbol x) exprArg _ (RApp x [] [] _) = EVar (symbol x) exprArg msg (RApp f ts [] _) = EApp (symbol <$> f) (exprArg msg <$> ts) exprArg msg (RAppTy (RVar f _) t _) = EApp (dummyLoc $ symbol f) [exprArg msg t] exprArg msg z = errorstar $ printf "Unexpected expression parameter: %s in %s" (show z) msg -------------------------------------------------------------------------------- bareTCApp r (Loc l _ c) rs ts | Just rhs <- synTyConRhs_maybe c = do when (realTcArity c < length ts) (Ex.throw err) return $ tyApp (subsTyVars_meet su $ ofType rhs) (drop nts ts) rs r where tvs = tyConTyVarsDef c su = zipWith (\a t -> (rTyVar a, toRSort t, t)) tvs ts nts = length tvs err :: Error err = ErrAliasApp (sourcePosSrcSpan l) (length ts) (pprint c) (getSrcSpan c) (realTcArity c) -- TODO expandTypeSynonyms here to bareTCApp r (Loc _ _ c) rs ts | isFamilyTyCon c && isTrivial t = return $ expandRTypeSynonyms $ t `strengthen` r where t = rApp c ts rs mempty bareTCApp r (Loc _ _ c) rs ts = return $ rApp c ts rs r tyApp (RApp c ts rs r) ts' rs' r' = RApp c (ts ++ ts') (rs ++ rs') (r `meet` r') tyApp t [] [] r = t `strengthen` r tyApp _ _ _ _ = errorstar $ "Bare.Type.tyApp on invalid inputs" expandRTypeSynonyms :: (PPrint r, Reftable r) => RRType r -> RRType r expandRTypeSynonyms = ofType . expandTypeSynonyms . toType
mightymoose/liquidhaskell
src/Language/Haskell/Liquid/Bare/OfType.hs
bsd-3-clause
8,130
0
16
2,028
3,081
1,550
1,531
185
17
----------------------------------------------------------------------------- -- | -- Module : Data.HashTable -- Copyright : (c) The University of Glasgow 2003 -- License : BSD-style (see the file libraries/base/LICENSE) -- -- Maintainer : [email protected] -- Stability : provisional -- Portability : portable -- -- An implementation of extensible hash tables, as described in -- Per-Ake Larson, /Dynamic Hash Tables/, CACM 31(4), April 1988, -- pp. 446--457. The implementation is also derived from the one -- in GHC's runtime system (@ghc\/rts\/Hash.{c,h}@). -- ----------------------------------------------------------------------------- module Data.HashTab ( -- * Basic hash table operations HashTable, new, insert, delete, lookup, update, -- * Converting to and from lists fromList, toList, -- * Hash functions -- $hash_functions hashInt, hashString, prime, -- * Diagnostics longestChain ) where -- This module is imported by Data.Typeable, which is pretty low down in the -- module hierarchy, so don't import "high-level" modules -- Right now we import high-level modules with gay abandon. import Prelude hiding ( lookup ) import Data.Tuple ( fst ) import Data.Bits import Data.Maybe import Data.List ( maximumBy, partition, concat, foldl ) import Data.Int ( Int32 ) import Data.Array.Base import Data.Array hiding (bounds) import Data.Array.IO import Data.Char ( ord ) import Data.IORef ( IORef, newIORef, readIORef, writeIORef ) import Control.Monad ( mapM, sequence_ ) ----------------------------------------------------------------------- readHTArray :: HTArray a -> Int32 -> IO a readMutArray :: MutArray a -> Int32 -> IO a writeMutArray :: MutArray a -> Int32 -> a -> IO () freezeArray :: MutArray a -> IO (HTArray a) thawArray :: HTArray a -> IO (MutArray a) newMutArray :: (Int32, Int32) -> a -> IO (MutArray a) #if defined(DEBUG) || defined(__NHC__) type MutArray a = IOArray Int32 a type HTArray a = MutArray a newMutArray = newArray readHTArray = readArray readMutArray = readArray writeMutArray = writeArray freezeArray = return thawArray = return #else type MutArray a = IOArray Int32 a type HTArray a = Array Int32 a newMutArray = newArray readHTArray arr i = return $! (unsafeAt arr (fromIntegral i)) readMutArray arr i = unsafeRead arr (fromIntegral i) writeMutArray arr i x = unsafeWrite arr (fromIntegral i) x freezeArray = unsafeFreeze thawArray = unsafeThaw #endif newtype HashTable key val = HashTable (IORef (HT key val)) -- TODO: the IORef should really be an MVar. data HT key val = HT { kcount :: !Int32, -- Total number of keys. buckets :: !(HTArray [(key,val)]), bmask :: !Int32, hash_fn :: key -> Int32, cmp :: key -> key -> Bool } -- ----------------------------------------------------------------------------- -- Sample hash functions -- $hash_functions -- -- This implementation of hash tables uses the low-order /n/ bits of the hash -- value for a key, where /n/ varies as the hash table grows. A good hash -- function therefore will give an even distribution regardless of /n/. -- -- If your keyspace is integrals such that the low-order bits between -- keys are highly variable, then you could get away with using 'id' -- as the hash function. -- -- We provide some sample hash functions for 'Int' and 'String' below. -- | A sample hash function for 'Int', implemented as simply @(x `mod` P)@ -- where P is a suitable prime (currently 1500007). Should give -- reasonable results for most distributions of 'Int' values, except -- when the keys are all multiples of the prime! 
-- hashInt :: Int -> Int32 hashInt = (`rem` prime) . fromIntegral -- | A sample hash function for 'String's. The implementation is: -- -- > hashString = fromIntegral . foldr f 0 -- > where f c m = ord c + (m * 128) `rem` 1500007 -- -- which seems to give reasonable results. -- hashString :: String -> Int32 hashString = fromIntegral . foldl f 0 where f m c = ord c + (m * 128) `rem` fromIntegral prime -- | A prime larger than the maximum hash table size prime :: Int32 prime = 1500007 -- ----------------------------------------------------------------------------- -- Parameters tABLE_MAX = 1024 * 1024 :: Int32 -- Maximum size of hash table #if tABLE_MIN #else tABLE_MIN = 16 :: Int32 hLOAD = 4 :: Int32 -- Maximum average load of a single hash bucket hYSTERESIS = 0 :: Int32 -- entries to ignore in load computation #endif {- Hysteresis favors long association-list-like behavior for small tables. -} -- ----------------------------------------------------------------------------- -- Creating a new hash table -- | Creates a new hash table. The following property should hold for the @eq@ -- and @hash@ functions passed to 'new': -- -- > eq A B => hash A == hash B -- new :: (key -> key -> Bool) -- ^ @eq@: An equality comparison on keys -> (key -> Int32) -- ^ @hash@: A hash function on keys -> IO (HashTable key val) -- ^ Returns: an empty hash table new cmpr hash = do -- make a new hash table with a single, empty, segment let mask = tABLE_MIN-1 bkts' <- newMutArray (0,mask) [] bkts <- freezeArray bkts' let kcnt = 0 ht = HT { buckets=bkts, kcount=kcnt, bmask=mask, hash_fn=hash, cmp=cmpr } table <- newIORef ht return (HashTable table) -- ----------------------------------------------------------------------------- -- Inserting a key\/value pair into the hash table -- | Inserts an key\/value mapping into the hash table. -- -- Note that 'insert' doesn't remove the old entry from the table - -- the behaviour is like an association list, where 'lookup' returns -- the most-recently-inserted mapping for a key in the table. The -- reason for this is to keep 'insert' as efficient as possible. If -- you need to update a mapping, then we provide 'update'. -- insert :: HashTable key val -> key -> val -> IO () insert (HashTable ref) key val = do table@HT{ kcount=k, buckets=bkts, bmask=b } <- readIORef ref let table1 = table{ kcount = k+1 } indx = bucketIndex table key bucket <- readHTArray bkts indx bkts' <- thawArray bkts writeMutArray bkts' indx ((key,val):bucket) freezeArray bkts' table2 <- if tooBig k b then expandHashTable table1 else return table1 writeIORef ref table2 tooBig :: Int32 -> Int32 -> Bool tooBig k b = k-hYSTERESIS > hLOAD * b bucketIndex :: HT key val -> key -> Int32 bucketIndex HT{ hash_fn=hash, bmask=mask } key = let h = hash key in (h .&. mask) expandHashTable :: HT key val -> IO (HT key val) expandHashTable table@HT{ buckets=bkts, bmask=mask } = do let oldsize = mask + 1 newmask = mask + mask + 1 newsize = newmask + 1 -- if newsize > tABLE_MAX then return table else do -- newbkts' <- newMutArray (0,newmask) [] let table'=table{ bmask=newmask } splitBucket oldindex = do bucket <- readHTArray bkts oldindex let (oldb,newb) = partition ((oldindex==).bucketIndex table' . 
fst) bucket writeMutArray newbkts' oldindex oldb writeMutArray newbkts' (oldindex + oldsize) newb mapM_ splitBucket [0..mask] newbkts <- freezeArray newbkts' return ( table'{ buckets=newbkts } ) -- ----------------------------------------------------------------------------- -- Deleting a mapping from the hash table -- Remove a key from a bucket deleteBucket :: (key -> Bool) -> [(key,val)] -> (Int32, [(key, val)]) deleteBucket _ [] = (0,[]) deleteBucket del (pair@(k,_):bucket) = case deleteBucket del bucket of (dels, bucket') | del k -> dels' `seq` (dels', bucket') | otherwise -> (dels, pair:bucket') where dels' = dels + 1 -- | Remove an entry from the hash table. delete :: HashTable key val -> key -> IO () delete (HashTable ref) key = do table@HT{ buckets=bkts, kcount=kcnt, cmp=cmpr } <- readIORef ref let indx = bucketIndex table key bkts' <- thawArray bkts bucket <- readMutArray bkts' indx let (removed,bucket') = deleteBucket (cmpr key) bucket writeMutArray bkts' indx bucket' freezeArray bkts' writeIORef ref ( table{kcount = kcnt - removed} ) -- ----------------------------------------------------------------------------- -- Updating a mapping in the hash table -- | Updates an entry in the hash table, returning 'True' if there was -- already an entry for this key, or 'False' otherwise. After 'update' -- there will always be exactly one entry for the given key in the table. -- -- 'insert' is more efficient than 'update' if you don't care about -- multiple entries, or you know for sure that multiple entries can't -- occur. However, 'update' is more efficient than 'delete' followed -- by 'insert'. update :: HashTable key val -> key -> val -> IO Bool update (HashTable ref) key val = do table@HT{ kcount=k, buckets=bkts, cmp=cmpr, bmask=b } <- readIORef ref let indx = bucketIndex table key bkts' <- thawArray bkts bucket <- readMutArray bkts' indx let (deleted,bucket') = deleteBucket (cmpr key) bucket k' = k + 1 - deleted table1 = table{ kcount=k' } writeMutArray bkts' indx ((key,val):bucket') freezeArray bkts' table2 <- if tooBig k' b -- off by one from insert's resize heuristic. then expandHashTable table1 else return table1 writeIORef ref table2 return (deleted>0) -- ----------------------------------------------------------------------------- -- Looking up an entry in the hash table -- | Looks up the value of a key in the hash table. lookup :: HashTable key val -> key -> IO (Maybe val) lookup (HashTable ref) key = do table@HT{ buckets=bkts, cmp=cmpr } <- readIORef ref let indx = bucketIndex table key bucket <- readHTArray bkts indx case [ val | (key',val) <- bucket, cmpr key key' ] of [] -> return Nothing (v:_) -> return (Just v) -- ----------------------------------------------------------------------------- -- Converting to/from lists -- | Convert a list of key\/value pairs into a hash table. Equality on keys -- is taken from the Eq instance for the key type. -- fromList :: (Eq key) => (key -> Int32) -> [(key,val)] -> IO (HashTable key val) fromList hash list = do table <- new (==) hash sequence_ [ insert table k v | (k,v) <- list ] return table -- | Converts a hash table to a list of key\/value pairs. 
-- toList :: (Ord key, Ord val) => HashTable key val -> IO [(key,val)] toList (HashTable ref) = do HT{ buckets=bkts, bmask=b } <- readIORef ref fmap concat (mapM (readHTArray bkts) [0..b]) -- ----------------------------------------------------------------------------- -- Diagnostics -- | This function is useful for determining whether your hash function -- is working well for your data set. It returns the longest chain -- of key\/value pairs in the hash table for which all the keys hash to -- the same bucket. If this chain is particularly long (say, longer -- than 10 elements), then it might be a good idea to try a different -- hash function. -- longestChain :: HashTable key val -> IO [(key,val)] longestChain (HashTable ref) = do HT{ buckets=bkts, bmask=b } <- readIORef ref let lengthCmp (_:x)(_:y) = lengthCmp x y lengthCmp [] [] = EQ lengthCmp [] _ = LT lengthCmp _ [] = GT fmap (maximumBy lengthCmp) (mapM (readHTArray bkts) [0..b])
urbanslug/ghc
testsuite/tests/programs/maessen-hashtab/Data/HashTab.hs
bsd-3-clause
11,368
46
23
2,272
2,572
1,394
1,178
-1
-1
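-- Illustrative usage sketch of the API defined above ('new', 'insert',
-- 'lookup', 'update') with the sample 'hashString' hash; it relies only on
-- names exported by the Data.HashTab module in this file.
module HashTabUsageSketch where

import Prelude hiding (lookup)
import Data.HashTab (HashTable, hashString, insert, lookup, new, update)

main :: IO ()
main = do
  table <- new (==) hashString :: IO (HashTable String Int)
  insert table "alpha" 1
  insert table "beta"  2
  -- 'insert' behaves like an association list: the newest binding wins.
  insert table "alpha" 10
  print =<< lookup table "alpha"     -- Just 10
  print =<< lookup table "gamma"     -- Nothing
  existed <- update table "beta" 20  -- True: an entry for "beta" already existed
  print existed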
{-# LANGUAGE FlexibleContexts #-} module Futhark.Actions ( printAction , interpretAction , impCodeGenAction , kernelImpCodeGenAction , rangeAction ) where import Control.Monad import Control.Monad.IO.Class import Data.List import Data.Maybe import Data.Word import qualified Data.Text as T import qualified Data.Text.IO as T import System.Exit (exitWith, ExitCode(..)) import System.IO import Prelude import Futhark.Pipeline import Futhark.Analysis.Alias import Futhark.Analysis.Range import Futhark.Representation.AST import Futhark.Representation.AST.Attributes.Aliases import Futhark.Representation.SOACS (SOACS) import Futhark.Representation.ExplicitMemory (ExplicitMemory) import Futhark.Interpreter import qualified Futhark.CodeGen.ImpGen.Sequential as ImpGenSequential import qualified Futhark.CodeGen.ImpGen.Kernels as ImpGenKernels import Futhark.Representation.AST.Attributes.Ranges (CanBeRanged) import Futhark.Util.Pretty (text, ppr, prettyDoc, prettyText, brackets, (<>)) printAction :: (Attributes lore, CanBeAliased (Op lore)) => Action lore printAction = Action { actionName = "Prettyprint" , actionDescription = "Prettyprint the resulting internal representation on standard output." , actionProcedure = liftIO . putStrLn . pretty . aliasAnalysis } interpretAction :: Show error => (FilePath -> T.Text -> Either error [Value]) -> Name -> Action SOACS interpretAction parser entry = Action { actionName = "Interpret" , actionDescription = "Run the program via an interpreter." , actionProcedure = liftIO . interpret parser entry } rangeAction :: (Attributes lore, CanBeRanged (Op lore)) => Action lore rangeAction = Action { actionName = "Range analysis" , actionDescription = "Print the program with range annotations added." , actionProcedure = liftIO . putStrLn . pretty . rangeAnalysis } impCodeGenAction :: Action ExplicitMemory impCodeGenAction = Action { actionName = "Compile imperative" , actionDescription = "Translate program into imperative IL and write it on standard output." , actionProcedure = \prog -> either (`internalError` prettyText prog) (liftIO . putStrLn . pretty) =<< ImpGenSequential.compileProg prog } kernelImpCodeGenAction :: Action ExplicitMemory kernelImpCodeGenAction = Action { actionName = "Compile imperative kernels" , actionDescription = "Translate program into imperative IL with kernels and write it on standard output." , actionProcedure = \prog -> either (`internalError` prettyText prog) (liftIO . putStrLn . pretty) =<< ImpGenKernels.compileProg prog } interpret :: Show error => (FilePath -> T.Text -> Either error [Value]) -> Name -> Prog SOACS -> IO () interpret parseValues entry prog = case funDefByName entry prog of Nothing -> do hPutStrLn stderr "Interpreter error: no main function." 
exitWith $ ExitFailure 2 Just fundef -> do parseres <- fmap (parseValues "<stdin>") T.getContents args <- case parseres of Left e -> do hPutStrLn stderr $ "Read error: " ++ show e exitWith $ ExitFailure 2 Right vs -> return vs case runFunWithShapes entry args prog of Left err -> do hPutStrLn stderr $ "Interpreter error:\n" ++ show err exitWith $ ExitFailure 2 Right val -> putStrLn $ ppOutput val $ fromMaybe (repeat TypeDirect) $ snd <$> funDefEntryPoint fundef where ppOutput vs epts = intercalate "\n" $ zipWith prettyRetVal epts vs prettyRetVal ept v = prettyDoc 80 $ ppArray (prettyType ept) (prettyPrim ept) v prettyPrim TypeUnsigned (IntValue (Int8Value v)) = text $ show (fromIntegral v :: Word8) ++ "u8" prettyPrim TypeUnsigned (IntValue (Int16Value v)) = text $ show (fromIntegral v :: Word16) ++ "u16" prettyPrim TypeUnsigned (IntValue (Int32Value v)) = text $ show (fromIntegral v :: Word32) ++ "u32" prettyPrim TypeUnsigned (IntValue (Int64Value v)) = text $ show (fromIntegral v :: Word64) ++ "u64" prettyPrim _ v = ppr v prettyType TypeUnsigned (Prim (IntType Int8)) = text "u8" prettyType TypeUnsigned (Prim (IntType Int16)) = text "u16" prettyType TypeUnsigned (Prim (IntType Int32)) = text "u32" prettyType TypeUnsigned (Prim (IntType Int64)) = text "u64" prettyType ept (Array et (Rank n) u) = ppr u <> mconcat (replicate n $ brackets mempty) <> prettyType ept (Prim et) prettyType _ t = ppr t
ihc/futhark
src/Futhark/Actions.hs
isc
4,933
0
18
1,334
1,267
670
597
102
13
-- | Compile and execute programs in Orchid language. module Orchid.Executor ( executeProgram , Optimization (..) , Optimizations ) where import Data.Bifunctor (first) import Data.String.Conversions (convertString) import Data.Text (Text) import System.FilePath ((</>)) import System.IO.Temp (withSystemTempDirectory) import Turtle (ExitCode (..), procStrict) import Orchid.Compiler (Optimization (..), Optimizations, compileStr) executeProgram :: Optimizations -> Text -> Text -> IO (Bool, Text) executeProgram optimizations programSource programInput = withSystemTempDirectory "patak" cb where cb dir = do compileStr optimizations programSource $ dir </> "a.ll" () <$ procStrict "llvm-as" [ "-o" , convertString $ dir </> "a.bc" , convertString $ dir </> "a.ll"] mempty first convertExitCode <$> (procStrict "lli" [convertString $ dir </> "a.ll"] $ pure programInput) convertExitCode ExitSuccess = True convertExitCode _ = False
gromakovsky/Orchid
src/Orchid/Executor.hs
mit
1,299
0
15
485
273
152
121
29
2
module Pyrec.CPS where import Data.Word import Pyrec.Misc data Name = Name String Unique | Gen Word deriving (Show, Eq, Ord) data Val = Var Name | Num Double | Str String | Cont Name Expr deriving (Show, Eq) data Expr = App Val [Val] (Val, Val) | Continue Val Val | Fix [Fun] Expr deriving (Show, Eq) data Fun = Fun Name [Name] (Name, Name) Expr deriving (Show, Eq)
kmicklas/pyrec
src/Pyrec/CPS.hs
mit
399
0
7
102
169
97
72
21
0
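-- Illustrative sketch (not from the Pyrec sources): a tiny CPS term built from
-- the constructors above. The name pair carried by 'Fun' (and the value pair in
-- 'App') is read here as a (return, exception) continuation pair, which is an
-- assumption, since the consumers of this AST are not shown. 'Gen' is used so
-- no 'Unique' from Pyrec.Misc is needed.
module PyrecCPSSketch where

import Pyrec.CPS

-- | A function that immediately hands its argument to its return continuation.
identityFun :: Fun
identityFun = Fun (Gen 0) [Gen 1] (Gen 2, Gen 3)
                  (Continue (Var (Gen 2)) (Var (Gen 1)))

-- | Define 'identityFun' and apply it to the literal 42, given hypothetical
-- top-level halt and panic continuations (Gen 100 and Gen 101).
example :: Expr
example = Fix [identityFun]
              (App (Var (Gen 0)) [Num 42] (Var (Gen 100), Var (Gen 101)))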
module Geometry ( sphereVolume , sphereArea , cubeVolume , cubeArea , cuboidArea , cuboidVolume ) where sphereVolume :: Floating a => a -> a sphereVolume r = 4/3 * pi * r^3 sphereArea :: Floating a => a -> a sphereArea r = 4 * pi * r^2 cubeVolume :: Num a => a -> a cubeVolume a = cuboidVolume a a a cubeArea :: Num a => a -> a cubeArea a = cuboidArea a a a cuboidVolume :: Num a => a -> a -> a -> a cuboidVolume a b c = rectangleArea a b *c cuboidArea :: Num a => a -> a -> a -> a cuboidArea a b c = 2 * rectangleArea a b + 2 * rectangleArea b c + 2 * rectangleArea a c rectangleArea :: Num a => a -> a -> a rectangleArea a b = a * b
RAFIRAF/HASKELL
Geometry.hs
mit
655
0
10
170
312
157
155
21
1
module Euler.E20 where import Data.Char euler20 :: Integer -> Int euler20 n = sum $ map digitToInt $ show $ product [1..n] main :: IO () main = print $ euler20 100
D4r1/project-euler
Euler/E20.hs
mit
166
0
8
34
73
38
35
6
1
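-- A hand-checkable case for 'euler20' above: 10! = 3628800 and
-- 3 + 6 + 2 + 8 + 8 + 0 + 0 = 27, so
--
-- >>> euler20 10
-- 27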
module Text.Serialization.Grammar where import Control.Exception.Base import Data.List as List import Data.Map as Map import Text.Parser as Parser import Text.Scanner as Scanner import qualified Text.Serialization.Tokens as Tokens begin_tree = (terminalParser Tokens.begin_tree) tree = (sequenceParser [begin_tree, identifier, expressions, end_tree]) end_tree = (terminalParser Tokens.end_tree) string = (terminalParser Tokens.string) identifier = (terminalParser Tokens.identifier) expression = (productionParser [ tree, string, identifier]) tree_case = 0 :: Int string_case = 1 :: Int identifier_case = 2 :: Int expressions = (listParser expression) parse :: String -> (ParseTree, [ParseError]) parse = \input -> let (tokens, scan_errors) = (Tokens.scan input) in (assert (List.null scan_errors) (Parser.parse expressions tokens))
stevedonnelly/haskell
code/Text/Serialization/Grammar.hs
mit
864
0
12
129
260
154
106
24
1
{- Copyright (c) 2015 Nils 'bash0r' Jonsson Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -} {- | Module : $Header$ Description : A type class defining basic functionality for fully qualified names. Author : Nils 'bash0r' Jonsson Copyright : (c) 2015 Nils 'bash0r' Jonsson License : MIT Maintainer : [email protected] Stability : unstable Portability : non-portable (Portability is untested.) A type class defining basic functionality for fully qualified names. -} module Language.Transformation.Semantics.FullyQualifiedName ( FullyQualifiedName (..) ) where -- | Functionality for handling fully qualified names. class (Eq a) => FullyQualifiedName a where -- | The fully qualified name. fullyQualified :: a -> a -- | The namespace of the identifier. namespace :: a -> a -- | The name of the value. name :: a -> a
project-horizon/framework
src/lib/Language/Transformation/Semantics/FullyQualifiedName.hs
mit
1,854
0
7
343
68
42
26
6
0
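-- Illustrative sketch (not part of the framework above): one possible instance
-- of the class for a plain dotted-string name. 'QName' and the splitting rule
-- (namespace = everything before the last '.', name = everything after it) are
-- assumptions made for the example only.
module FullyQualifiedNameSketch where

import Language.Transformation.Semantics.FullyQualifiedName

newtype QName = QName String
  deriving (Eq, Show)

instance FullyQualifiedName QName where
  fullyQualified = id
  namespace (QName s) = QName (reverse . drop 1 . dropWhile (/= '.') . reverse $ s)
  name      (QName s) = QName (reverse . takeWhile (/= '.') . reverse $ s)

-- >>> name (QName "Language.Transformation.Semantics")
-- QName "Semantics"
-- >>> namespace (QName "Language.Transformation.Semantics")
-- QName "Language.Transformation"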
module Settings.StaticFiles where import qualified Yesod.Static as Static import Settings (appStaticDir, compileTimeAppSettings) -- | use this to create your static file serving site -- staticSite :: IO Static.Static -- staticSite = if development then Static.staticDevel staticDir -- else Static.static staticDir -- | This generates easy references to files in the static directory at compile time, -- giving you compile-time verification that referenced files exist. -- Warning: any files added to your static directory during run-time can't be -- accessed this way. You'll have to use their FilePath or URL to access them. Static.staticFiles (appStaticDir compileTimeAppSettings)
collaborare/antikythera
src/Settings/StaticFiles.hs
mit
725
0
7
137
47
31
16
-1
-1
{-# LANGUAGE OverloadedStrings #-} {-| Module : Test.Printer Description : The Printer tests Copyright : (c) Andrew Burnett 2014-2015 Maintainer : [email protected] Stability : experimental Portability : Unknown Contains the tests for the Printer type. -} module Test.Printer ( tests -- :: TestTree ) where import HSat.Printer import TestUtils name :: String name = "Printer" tests :: TestTree tests = testGroup name [ testGroup "errorDoc" [ errorDocTest1 ] ] instance Arbitrary PrinterType where arbitrary = oneof $ map return [Compact,NoUnicode,Unicode] instance Arbitrary Doc where arbitrary = fmap text arbitrary errorDocTest1 :: TestTree errorDocTest1 = testProperty "errorDoc prepends correct string" $ property (\(printerType,doc) -> let exptd = case printerType of Compact -> "ERR" <> colon <+> doc NoUnicode -> "Error" <> colon <+> doc Unicode -> red $ "Error" <> colon <+> doc gotten = errorDoc printerType doc in show exptd === show gotten --need to show as no Eq constraint )
aburnett88/HSat
tests-src/Test/Printer.hs
mit
1,114
0
18
271
223
120
103
28
3
module Cycles.IO where import Cycles.Aux import Cycles.Findcy import Cycles.Maxcy import Control.Exception ( SomeException, throwIO, catch ) import Control.Monad ( liftM, Monad((>>=), return), when, mapM_, mapM, filterM ) import Data.Char (ord) import System.Directory ( removeFile, removeDirectoryRecursive, getDirectoryContents ) import System.IO ( IO, FilePath, IOMode(WriteMode), IOMode(ReadMode), readFile, putStrLn, hPutStr, openFile, hClose ) import System.IO.Error ( isDoesNotExistError ) import System.Process ( readProcess ) -- | This function allows one to remove a file with included exception handling removeIfExists :: FilePath -> IO () removeIfExists fileName = removeFile fileName `catch` handleExists where handleExists e | isDoesNotExistError e = return () | otherwise = throwIO e -- | Like 'removeIfExists', this function allows one to remove a file with included exception handling removeDirIfExists :: FilePath -> IO () removeDirIfExists foldername = removeDirectoryRecursive foldername `catch` handleExists where handleExists e | isDoesNotExistError e = return () | otherwise = throwIO e -- | This function only prints a line when its length is > 2. -- It's useful, because without it, many of these function print blank lines when there are no compile/run errors. putStrLongLn :: String -> IO () putStrLongLn string = when (length string > 2) (putStrLn string) ------------------------------------------------------------------------------------------------------------------------------------------------------------ ------------------------------------------------------------------------------------------------------------------------------------------------------------ -- The following is wrapping together the two code-generators so that: -- compilation may be automatic (at least for findcy) -- can automatically pipe graph -> findcy -> cycles -> maxcy -> (compile/run) -> collect results ------------------------------------------------------------------------------------------------------------------------------------------------------------ ------------------------------------------------------------------------------------------------------------------------------------------------------------ -- | Note: the string is presumed to be in little endian order readInt :: String -> Int readInt str = sum $ zipWith (\s p ->(ord s - 48) * 10^p) str [0..] -- | Note: numStr is in little endian order readIntList' :: String -> String -> [Int] readIntList' str numStr | str == "[]" = [] | null str = [] | str0 == ']' = [readInt numStr] | str0 == '[' = readIntList' (tail str) numStr | str0 == ',' = readInt numStr : readIntList' (tail str) "" | otherwise = readIntList' (tail str) (str0 : numStr) where str0 = head str readIntList :: String -> [Int] readIntList str = readIntList' str "" -- | This allows one to easily use monadic values for if-then ifElseError :: Bool -> t -> t ifElseError bool thn = if bool then thn else error "Output file unfinished or unmatched to graph." 
-- | This function takes a graph and whether it is a digraph and returns the number of directed cycles in it graphToNumCycles :: [[Int]] -> Bool -> IO Int graphToNumCycles graphlist directed = do removeIfExists "countcy_temp.c" removeIfExists "countcy_temp" let tempfilename = "countcy_temp.c" let code = generateFindCyCode graphlist tempfilename True directed cfile <- openFile "countcy_temp.c" WriteMode write <- hPutStr cfile code hClose cfile comp_results <- readProcess "gcc" ["countcy_temp.c", "-O3", "-o", "countcy_temp"] [] -- when ((length comp_results) > 1) (putStrLongLn comp_results) putStrLongLn comp_results run_results <- readProcess "./countcy_temp" [] [] let numcy = read run_results :: Int -- when ((length run_results) > 12) (putStrLn ("results:" ++ run_results)) -- magic number 12 is one less than the shortest c error I found after little checking -- putStrLongLn ("results: " ++ run_results) return numcy -- | This function takes an output from the FindCy C code, performs a basic check ('is there a "done" mark?'), -- makes sure the output agrees with the expected graph, and reads the cycles into an [Int] processCycles :: String -> [[Int]] -> [[Int]] processCycles cycles_string graph = ifElseError good ((map read_cy . trim . lines) cycles_string) where good = (&&) finished good_graph finished = (eq_done . last . lines) cycles_string eq_done s = "DONE." == s good_graph = (eq_graph graph . head . lines) cycles_string eq_graph graph s = show graph == s read_cy cy = trimList $ readIntList cy --read cy -- | This function takes a graph and whether it is a digraph and returns a list of all directed cycles (somewhat slow, because it uses Haskell's read function) graphToCycles :: [[Int]] -> Bool -> IO [[Int]] graphToCycles graphlist directed = do removeIfExists "findcy_temp.c" removeIfExists "findcy_temp" removeIfExists "findcy_temp.txt" let tempfilename = "findcy_temp.c" let code = generateFindCyCode graphlist tempfilename False directed cfile <- openFile "findcy_temp.c" WriteMode write <- hPutStr cfile code hClose cfile comp_results <- readProcess "gcc" ["findcy_temp.c", "-O3", "-o", "findcy_temp"] [] putStrLongLn comp_results run_results <- readProcess "./findcy_temp" [] [] putStrLongLn run_results cycles_str <- catch (readFile "findcy_temp.txt") handler let cycles = processCycles cycles_str graphlist return cycles where handler :: SomeException -> IO String handler _ = error "The results have disappeared under my nose." -- | This function takes a graph and automatically finds all cycles to output a string containing the C code to compute its maximally cyclic orientations graphToMaxcyCode :: [[Int]] -> Int -> String -> String -> IO String graphToMaxcyCode graphlist splitbits foldername filename = do -- putStrLn "1" cycles <- graphToCycles graphlist False -- putStrLn "2" let endhere = splitbits -- putStrLn "3" let startmap start = (show start, (fst $ generateMaxCyCodeAtStart graphlist cycles endhere splitbits filename) start) --let startmap = \start ->liftM (\cy -> (show start, fst $ generateMaxCyCode graphlist cy start endhere splitbits)) cycles let codelist = map startmap [0..(2^splitbits)-1] :: [(String, String)] removeDirIfExists foldername mkdir_results <- readProcess "mkdir" [foldername] [] -- putStrLn "4" --writeC (head codelist) mapM_ (writeC . return) codelist return "Done." where -- writeC :: IO (String, String) ->IO (IO ()) writeC :: IO (String, String) -> IO String writeC input = do start <- liftM fst input --input >>= return . 
fst -- :: [Char] code <- liftM snd input --input >>= return . snd -- :: [Char] -- startline <- return $ (\st ->"start:" ++ st) start -- putStrLn startline let maxfilenum = 2^splitbits -- putStrLn "5" let outfilenamefun s = foldername ++ "/" ++ filename ++ "_" ++ s ++ "_" ++ show (maxfilenum - 1) ++ ".c" -- putStrLn "6" let outfilename = outfilenamefun start -- outfilenameline <- return $ (\ofn ->"7\n" ++ ofn) outfilename -- putStrLn outfilenameline outfile <- (`openFile` WriteMode) outfilename -- putStrLn "8" hPutStr outfile code -- putStrLn "9" hClose outfile return start -- | This function does exactly what it says, for '.c' files compileAllInDir :: String -> IO () compileAllInDir dir = do files <- getDirectoryContents dir let cFile file = (last file == 'c') && (last (init file) == '.') let code_files = filter cFile files let compile file = readProcess "gcc" [dir ++ "/" ++ file, "-O3", "-o", file ++ "_temp"] [] >>= putStrLn mapM_ compile code_files -- | This function runs all the files in a given directory, ascertained by those filenames that do not include a '.' runAllInDir :: String -> IO () runAllInDir dir = do files <- getDirectoryContents dir let exec_files = filter execFile files let run file = readProcess ("./" ++ file) [] [] >>= putStrLn mapM_ run exec_files where execFile name | null name = True | last name == '.' = False | otherwise = execFile $ init name
michaeljklein/HCy2C
Cycles/IO.hs
mit
8,478
0
18
1,718
1,871
937
934
126
2
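-- A small, self-contained sketch (not from the repository above) illustrating the
-- little-endian digit convention used by readInt/readIntList in Cycles.IO; readInt
-- is reproduced here so the snippet runs on its own.
import Data.Char (ord)

readIntLE :: String -> Int
readIntLE str = sum $ zipWith (\s p -> (ord s - 48) * 10 ^ p) str [0 ..]

-- The head of the string is the least significant digit:
--   readIntLE "321" == 123
exampleLittleEndian :: IO ()
exampleLittleEndian = print (readIntLE "321")  -- prints 123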
module Main where

import Data.Foldable (foldl', traverse_)
import Data.List (sort)
import Data.Maybe (mapMaybe)

data OpenClose = Open | Close deriving Eq

data Item = Paren | Bracket | Brace | Angle deriving (Eq, Enum)

cToPair :: Char -> (OpenClose, Item)
cToPair '(' = (Open, Paren)
cToPair ')' = (Close, Paren)
cToPair '[' = (Open, Bracket)
cToPair ']' = (Close, Bracket)
cToPair '{' = (Open, Brace)
cToPair '}' = (Close, Brace)
cToPair '<' = (Open, Angle)
cToPair '>' = (Close, Angle)
cToPair c = error $ "Unexpected Character: " ++ show c

solve :: ([Int] -> Int)        -- score
      -> Maybe Int             -- complete
      -> ([Item] -> Maybe Int) -- incomplete
      -> (Item -> Maybe Int)   -- corrupted
      -> String                -- input
      -> Int                   -- output
solve score complete incomplete corrupted =
  score . mapMaybe (go [] . fmap cToPair) . lines
  where
    go [] [] = complete
    go opens [] = incomplete opens
    go [] ((Close, x) : _) = corrupted x
    go (x : opens) ((Close, y) : pairs)
      | x == y = go opens pairs
      | otherwise = corrupted y
    go opens ((Open, x) : pairs) = go (x : opens) pairs

part1 :: String -> Int
part1 = solve sum Nothing (const Nothing) (Just . toInt)
  where
    toInt = ([3, 57, 1197, 25137] !!) . fromEnum

part2 :: String -> Int
part2 = solve (median . sort) Nothing (Just . toInt) (const Nothing)
  where
    median [] = error "Empty list in `median`"
    median [n] = n
    median [_, _] = error "They promised us only lists of odd length!"
    median ns = median . tail $ init ns
    toInt = foldl' (\n item -> 5 * n + 1 + fromEnum item) 0

main :: IO ()
main = do
  input <- readFile "input.txt"
  traverse_ (print . ($ input)) [part1, part2]
genos/online_problems
advent_of_code_2021/day10/Main.hs
mit
1,732
0
12
455
734
397
337
45
5
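-- A hedged example of the scoring behaviour of the module above, assuming it is
-- loaded in GHCi (values derived from the definitions of part1/part2, not from
-- the puzzle input):
--
--   part1 ")"   -- a lone ')' is corrupted; Paren scores 3
--   3
--   part2 "("   -- a lone '(' is incomplete; its completion scores 5*0 + 1
--   1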
{-# LANGUAGE PackageImports #-}
{-# OPTIONS_GHC -fno-warn-dodgy-exports -fno-warn-unused-imports #-}
-- | Reexports "Data.Functor.Identity.Compat"
-- from a globally unique namespace.
module Data.Functor.Identity.Compat.Repl.Batteries (
    module Data.Functor.Identity.Compat
  ) where
import "this" Data.Functor.Identity.Compat
haskell-compat/base-compat
base-compat-batteries/src/Data/Functor/Identity/Compat/Repl/Batteries.hs
mit
326
0
5
31
32
25
7
5
0
{-# LANGUAGE DataKinds #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE ViewPatterns #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE Strict #-} module Model.Camera ( Camera (..) , viewMatrix , CState (..) , initCamera, lookAtState , scroll, dragHorizontal, dragVertical, rotateCentered, twoFingerControl , dragObject, rotateObject, twoFingerObject , makeProjM ) where import Data.Fixed (mod') import JavaScript.JSON.Types.Instances import JavaScript.JSON.Types.Generic () import GHC.Generics import Commons.NoReflex.EasyTensorJSFFI () import Numeric.DataFrame import Numeric.Dimensions import qualified Numeric.Matrix as Matrix import qualified Numeric.Quaternion as Q ---------------------------------------------------------------------------------------------- -- Definitions ------------------------------------------------------------------------------- ---------------------------------------------------------------------------------------------- -- | Object-Centered Camera data Camera = Camera { viewportSize :: !(Float, Float) , clippingDist :: !Float , zoomLimits :: !(Float, Float) , projMatrix :: !Mat44f , oldState :: !CState -- ^ This state changes at the end of user action, e.g. PointerUp or MouseWheel. -- At such moments newState is copied to oldState , newState :: !CState -- ^ This state changes all the time and represents current state of the camera. -- Even if a mouse in drag state, newState keeps being updated. } deriving Show viewMatrix :: Camera -> Mat44f viewMatrix = stateToView . newState -- | State of the Camera data CState = CState { viewPoint :: !Vec3f, viewAngles :: !(Float, Float), viewDist :: !Float } deriving (Eq, Show, Generic) instance FromJSON CState instance ToJSON CState -- | Calculate the state base on the camera position and lookAt position lookAtState :: (Vec3f, Vec3f) -> CState lookAtState (camP, lookAtP) | l < 0.01 = CState { viewPoint = vec3 (-2) 3 0 , viewAngles = (2.745, 0.995) , viewDist = 668 } | otherwise = CState { viewPoint = lookAtP , viewAngles = (atan2 y x, asin $ z / l) , viewDist = l } where l = unScalar $ normL2 v v = camP - lookAtP (x, y, z) = unpackV3 v -- | Create camera initCamera :: Float -- ^ width of the viewport -> Float -- ^ height of the viewport -> (Float, Float) -- ^ minimum and maximum zoom distances -> Float -- ^ clipping distance -> CState -- ^ look position and direction -> Camera initCamera width height zl clippingD state = Camera { viewportSize = (width,height) , clippingDist = clippingD , zoomLimits = zl , projMatrix = makeProjM clippingD (width,height) , oldState = state , newState = state } makeProjM :: Float -> (Float, Float) -> Mat44f makeProjM f (width, height) = Matrix.perspective n f fovy ratio where ratio = width / height fovy = (1*) . atan2 height . 
sqrt $ height*height + width*width n = nearPlane f relativeToOldPoint :: Camera -> Camera relativeToOldPoint cam@Camera{oldState = os, newState = ns} = cam { oldState = os {viewPoint = 0} , newState = ns {viewPoint = viewPoint ns - viewPoint os} } nearPlane :: Float -> Float nearPlane = (/500) ---------------------------------------------------------------------------------------------- -- Camera convertions ------------------------------------------------------------------------ ---------------------------------------------------------------------------------------------- -- | Create view matrix out of camera state stateToView :: CState -> Mat44f stateToView CState { viewPoint = v, viewAngles = (φ, theta), viewDist = ρ } = Matrix.lookAt (vec3 0 0 1) (v + dv) v where dv = vec3 (t * cos φ) (t * sin φ) (ρ * sin theta) t = ρ * cos theta --eyePos :: CState -> Vec3f --eyePos CState { -- viewPoint = v, -- viewAngles = (φ, theta), -- viewDist = ρ -- } = v + dv -- where -- dv = vec3 (t * cos φ) (t * sin φ) (ρ * sin theta) -- t = ρ * cos theta -- | Camera position in NDC coordinates as per -- https://en.wikibooks.org/wiki/GLSL_Programming/Vertex_Transformations#Viewport_Transformation eyeNDC :: Camera -> Vec4f eyeNDC Camera { clippingDist = f} = vec4 0 0 (- (f+n)/(f-n)) 1 where n = nearPlane f -- | Assume the pointer is on the far clipping plane; -- thus, z coordinate is 1. -- The position of a pointer is given in screen coorinates (pixels) ptrNDC :: Camera -> Vec2f -> Vec4f ptrNDC Camera { viewportSize = (width, height) } p | (px, py) <- unpackV2 p = vec4 (2 * px / width - 1) (1 - 2 * py / height) 1 1 ---------------------------------------------------------------------------------------------- -- Camera movement functions ----------------------------------------------------------------- ---------------------------------------------------------------------------------------------- -- | Dragging - pan world on xy plane (e.g. using left mouse button) dragHorizontal :: Vec2f -- ^ Old screen coordinates -> Vec2f -- ^ New screen coordinates -> Camera -- ^ Modify the camera state -> CState dragHorizontal oldPoint newPoint camera@Camera { oldState = ostate@CState { viewPoint = vp } } = ostate { viewPoint = vp - proj newPoint + proj oldPoint } where proj = screenToWorld cam' (vp ! 3) cam' = relativeToOldPoint camera -- | Dragging - pan world on a plane parallel to screen dragVertical :: Vec2f -- ^ Old screen coordinates -> Vec2f -- ^ New screen coordinates -> Camera -- ^ Modify the camera state -> CState dragVertical op np cam@Camera { projMatrix = projmat, oldState = ostate@CState { viewPoint = v, viewDist = ρ } } = ostate { viewPoint = v + dv } where imat = inverse (projmat %* stateToView ostate) ndcOp = ptrNDC cam op ndcNp = ptrNDC cam np worldOfar = fromHom $ imat %* ndcOp worldNfar = fromHom $ imat %* ndcNp c = fromScalar $ scalar ρ / (3:!Z !. worldOfar) dv = (worldNfar - worldOfar) * c -- | Rotating around viewPoint rotateCentered :: Vec2f -- ^ Old screen coordinates -> Vec2f -- ^ New screen coordinates -> Camera -- ^ Modify the camera state -> CState rotateCentered (unpackV2 -> (ox,oy) ) (unpackV2 -> (x,y)) Camera { viewportSize = (width, height), oldState = ostate@CState { viewAngles = (φ, theta) } } = ostate { viewAngles = (φ', theta') } where dφ = 2*pi*(ox-x) / width dtheta = pi*(y-oy) / height φ' = mod' (φ+dφ+pi) (2*pi) - pi theta' = max (-0.35*pi) . min (0.45*pi) $ theta + dtheta -- | Scroll camera in and out scroll :: Float -- ^ Scrolling amout in fractions (i.e. 
`dist := dist*(1+x)`) -> Camera -- ^ Modify the camera state -> CState scroll s Camera { oldState = ostate@CState { viewDist = ρ } , zoomLimits = (zn, zf) } = ostate { viewDist = min zf . max zn $ ρ * (1 + min x (max (- min 0.8 x) s)) } where x = 8 / (1 + ρ/20) -- | Rotate, scale, and pan camera with two fingers twoFingerControl :: (Vec2f, Vec2f) -- ^ Old screen coordinates -> (Vec2f, Vec2f) -- ^ New screen coordinates -> Camera -- ^ Modify the camera state -> CState twoFingerControl (op1,op2) (np1,np2) cam@Camera { oldState = ostate@CState { viewPoint = ovp@((! 3) -> h), viewAngles = (φ, theta), viewDist = ρ }, zoomLimits = (zn, zf) } = ostate { viewPoint = nvp, viewAngles = (φ', theta), viewDist = min zf $ max zn (ρ * dlen) } where proj = screenToWorld cam h up = vec3 0 0 1 worldO1 = proj op1 worldO2 = proj op2 worldN1 = proj np1 worldN2 = proj np2 dOld = worldO2 - worldO1 dNew = worldN2 - worldN1 worldN = 0.5 * (worldN1 + worldN2) worldO = 0.5 * (worldO1 + worldO2) qs = Q.getRotScale dNew dOld -- scaling dlen = sqrt $ Q.square qs -- rotating dφ = let da = signum (unScalar . dot up $ Q.imVec qs) * Q.qArg qs in if abs da < 0.05 then 0 else da φ' = mod' (φ+dφ+pi) (2*pi) - pi -- panning -- combine actions nvp = Q.rotScale qs (ovp - worldN) + worldO ---------------------------------------------------------------------------------------------- -- Object movement functions ----------------------------------------------------------------- ---------------------------------------------------------------------------------------------- -- | Dragging - pan object on xy plane (e.g. using left mouse button) dragObject :: Vec2f -- ^ Old screen coordinates -> Vec2f -- ^ New screen coordinates -> Camera -- ^ Get matrices -> Mat44f -- ^ transformation matrix dragObject oldScreenPos newScreenPos camera = Matrix.translate3 $ proj newScreenPos - proj oldScreenPos where proj = screenToWorld cam' 0 cam' = relativeToOldPoint camera -- | Rotating - rotate object on w.r.t. z axis (e.g. using right mouse button) rotateObject :: Vec2f -- ^ Old screen coordinates -> Vec2f -- ^ New screen coordinates -> Camera -- ^ Get matrices -> Vec3f -- ^ World position of rotation center -> Mat44f -- ^ transformation matrix rotateObject oldScreenPos newScreenPos camera p = trans %* Matrix.rotateZ a %* trans' where trans = Matrix.translate3 p trans' = Matrix.translate3 ( - p ) proj = screenToWorld camera 0 op = proj oldScreenPos np = proj newScreenPos -- rotation angle dv1 = unit . update (3:!Z) (0 :: Scf) $ np - p dv0 = unit . update (3:!Z) (0 :: Scf) $ op - p a = unScalar $ atan2 (3 !. 
cross dv0 dv1) (dot dv0 dv1) -- | Rotate and pan with two fingers twoFingerObject :: (Vec2f, Vec2f) -- ^ Old screen coordinates -> (Vec2f, Vec2f) -- ^ New screen coordinates -> Camera -- ^ Get matrices -> Mat44f -- ^ transformation matrix twoFingerObject (oScreenPos1, oScreenPos2) (nScreenPos1, nScreenPos2) camera = Matrix.translate3 newPoint %* Q.toMatrix44 rotq %* Matrix.translate3 (-oldPoint) where proj = screenToWorld camera 0 oldPoint = (op1 + op2) / 2 newPoint = (np1 + np2) / 2 op1 = proj oScreenPos1 op2 = proj oScreenPos2 np1 = proj nScreenPos1 np2 = proj nScreenPos2 rotq = signum $ Q.getRotScale (op2 - op1) (np2 - np1) ---------------------------------------------------------------------------------------------- -- Helpers ---------------------------------------------------------------------------------- ---------------------------------------------------------------------------------------------- -- | Transform a point in screen coordinates into a point on a ground in world coordinates. -- z coordinate of the result is given as an argument to the function. -- -- Use this function partially applied if you need a projection several times. -- -- Note: -- -- The formulae are taken from -- https://en.wikibooks.org/wiki/GLSL_Programming/Vertex_Transformations#Viewport_Transformation -- -- Visible objects in NDC coordinates are always in MinMax (vec4 (-1) (-1) (-1) 1) (vec4 1 1 1 1). -- It is important that z coordinates varies from -1 (near clipping plane) to 1 (far clipping plane). -- -- Camera (eye) position in NDC is slightly behind the near clipping plane @vec4 0 0 (- (f+n)/(f-n)) 1@. screenToWorld :: Camera -> Scf -> Vec2f -> Vec3f screenToWorld camera z = imat `seq` campos `seq` f where imat = inverse $ projMatrix camera %* stateToView (oldState camera) campos = fromHom $ imat %* eyeNDC camera f p = findPos campos (fromHom (imat %* ptrNDC camera p) - campos) z -- | find position of the intersection of a ray traced from camera point to ground findPos :: Vec3f -- ^ camera position -> Vec3f -- ^ camera sight vector -> Scf -- ^ height level of the point -> Vec3f -- ^ position of the point in 3D findPos (unpackV3 -> (c1, c2, c3)) (unpackV3 -> (v1, v2, v3)) h = vec3 x y (unScalar h) where l = (unScalar h - c3)/v3' x = c1 + v1*l y = c2 + v2*l v3' = if abs v3 < 0.0000000001 then signum v3 * 0.0000000001 else v3 unit :: Vec3f -> Vec3f unit v | n == 0 = 0 | otherwise = ewmap (/n) v where n = normL2 v
achirkin/qua-view
src/Model/Camera.hs
mit
13,197
0
17
3,648
2,989
1,682
1,307
252
2
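-- A plain-Prelude sketch (no easytensor dependency; names are illustrative only)
-- of the ray/height-plane intersection that `findPos` in Model.Camera performs:
-- walk from the camera position `c` along the sight vector `v` until the z
-- coordinate reaches the requested height `h`.
findPosSketch :: (Float, Float, Float)   -- camera position
              -> (Float, Float, Float)   -- sight vector
              -> Float                   -- target height
              -> (Float, Float, Float)
findPosSketch (c1, c2, c3) (v1, v2, v3) h = (c1 + v1 * l, c2 + v2 * l, h)
  where
    -- avoid dividing by a (near-)zero z component, as the original does
    v3' = if abs v3 < 1e-10 then signum v3 * 1e-10 else v3
    l   = (h - c3) / v3'

-- e.g. findPosSketch (0,0,10) (0,1,-1) 0 == (0.0,10.0,0.0):
-- a ray from height 10, pointing forward and down, hits the ground 10 units ahead.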
{-# INCLUDE "klee/klee.h" #-}
{-# LANGUAGE ForeignFunctionInterface, MagicHash, BangPatterns #-}
module Test.Scher.Klee.Pure
  ( range
  , int
  , integer
  , M
  , run
  ) where

import Foreign.C
import Foreign.C.String
import Foreign.C.Types
import System.IO.Unsafe

foreign import capi "klee/klee.h klee_range"
  c_klee_range :: Int -> Int -> CString -> Int
foreign import capi "klee/klee.h klee_int"
  c_klee_int :: CString -> Int
foreign import capi "extras.h klee_intmax_t"
  c_klee_integer :: CString -> Integer

range :: Int -> Int -> String -> M Int
range !lo !hi name = M $
  let c_name = unsafePerformIO $ newCString name
  in c_name `seq` c_klee_range lo hi c_name

int :: String -> M Int
int !name = M $
  let c_name = unsafePerformIO $ newCString name
  in c_name `seq` c_klee_int c_name

integer :: String -> M Integer
integer !name = M $
  let c_name = unsafePerformIO $ newCString name
  in c_name `seq` c_klee_integer c_name

newtype M a = M { runIdentity :: a }

instance Functor M where
  f `fmap` a = M $ f $ runIdentity a

instance Monad M where
  return = M
  a >>= f = f $ runIdentity a

run :: M a -> IO a
run = return . runIdentity
m-alvarez/scher
Test/Scher/Klee/Pure.hs
mit
1,156
0
11
237
380
201
179
-1
-1
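-- A hedged usage sketch for the bindings above (assumes Test.Scher.Klee.Pure is in
-- scope): declare two symbolic values and combine them inside M. Written with >>=
-- because the module defines Functor and Monad instances but no Applicative, and
-- the C symbols only do something interesting when the program is built for KLEE.
symbolicPair :: M (Int, Int)
symbolicPair =
  int "x" >>= \x ->
  range 0 10 "y" >>= \y ->
  return (x, y)

runSymbolic :: IO (Int, Int)
runSymbolic = run symbolicPair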
import Prelude hiding (gcd, lcm)
import Data.List (foldl')

gcd a b = if b == 0 then a else gcd b (a `mod` b)

lcm a b = (a * b) `div` (gcd a b)

solution = foldl' lcm 1 [1..20]

main = do
  print solution
drcabana/euler-fp
source/hs/P05.hs
epl-1.0
216
0
8
62
118
65
53
9
2
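-- Worked example of the fold above: lcm is accumulated pairwise, e.g.
--   lcm 4 6 = (4 * 6) `div` gcd 4 6 = 24 `div` 2 = 12
-- and folding lcm over [1..20] yields 232792560, the value `main` prints.
-- A standalone check against the Prelude's own lcm:
import Data.List (foldl')

checkSolution :: Bool
checkSolution = foldl' lcm 1 [1 .. 20] == 232792560  -- True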
module Main where

import Control.Applicative
import Data.List (sort)
import System.IO (print)

main = print $ answer 1

answer n = if sameDigits n then n else answer (n+1)

sameDigits :: Int -> Bool
sameDigits n = and $ map (((==) . sort . show) n) $ map (sort . show) ((*) <$> [n] <*> [2..6])
t00n/ProjectEuler
Euler/Euler52/main.hs
epl-1.0
369
0
12
133
150
83
67
10
2
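-- The search above stops at 142857, the classic cyclic number:
--   142857 * 2 = 285714, * 3 = 428571, * 4 = 571428, * 5 = 714285, * 6 = 857142,
-- all digit permutations of 142857. A direct check of the predicate:
import Data.List (sort)

checkSameDigits :: Bool
checkSameDigits = all (\k -> sort (show (142857 * k)) == sort (show 142857)) [2 .. 6]  -- True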
{-# LANGUAGE DeriveFunctor #-} module Threal.ThrealM where import Threal.Base import Data.HashMap.Strict (HashMap) import qualified Data.HashMap.Strict as M import Control.Monad.Free import Control.Monad.Reader (Reader, ask) import Control.Monad.State (State, get, gets, put, runState, evalState, StateT, liftIO, evalStateT) import Control.Monad (when, mapM, mapM_) data Comps = Comps { redComp :: Threal -> Threal -> Bool , greenComp :: Threal -> Threal -> Bool , blueComp :: Threal -> Threal -> Bool } data ThrealConfig = ThrealConfig { threalComps :: Comps } data ThrealCache = ThrealCache { dominatedCache :: HashMap Threal Threal , reversedCache :: HashMap Threal Threal , uniqueCache :: HashMap Threal Threal } type ThrealM a = StateT ThrealCache (ReaderT IO ThrealConfig) a writeLine :: String -> ThrealM a writeLine = liftIO . putStrLn getComps :: ThrealM Comps getComps = threalComps <$> lift ask getDominated :: Threal -> ThrealM a -> ThrealM Threal getDominated x = do cache <- gets dominatedCache return $ M.lookup x cache getReversed :: Threal -> ThrealM a -> ThrealM Threal getReversed x = do cache <- gets reversedCache return $ M.lookup x cache getUnique :: Threal -> ThrealM a -> ThrealM Threal getUnique x = do cache <- gets uniqueCache return $ M.lookup x cache data ThrealCalc = AddT ThrealCalc ThrealCalc | NubT ThrealCalc | DominateT ThrealCalc | ReverseT ThrealCalc | PureT Threal data ThrealF next = PerformCalc ThrealCalc next | PrintCalc next | Stop deriving (Functor) nubComps :: Comps -> [Threal] -> [Threal] nubComps c ts = nubBy eqC ts where eqC t s = (redComp c) t s && (greenComp c) t s && (blueComp c) t s uniqueFields :: Comps -> Threal -> Threal uniqueFields c (Threal r g b) = Threal (nubC r) (nubC g) (nubC b) where nubC = nubComps c completelyUnique :: Comps -> Threal -> Threal completelyUnique c (Threal r g b) = Threal (nubC (map cuc r)) (nubC (map cuc g)) (nubC (map cuc b)) where cuc = completelyUnique c nubC = nubComps c dominate :: Comps -> Threal -> Threal dominate c x = evalState (dominateState c x) M.empty dominateState :: Monad m => Comps -> Threal -> StateT (HashMap Threal Threal) m Threal dominateState c x = do cache <- get case M.lookup x cache of Just y -> return y Nothing -> do let (Threal uniqueR uniqueG uniqueB) = uniqueFields c x (dR, redState) = runState (traverse dsc uniqueR) cache (dG, greenState) = runState (traverse dsc uniqueG) redState (dB, blueState) = runState (traverse dsc uniqueB) greenState let redOpts = dR `seq` filter (noneLess (redComp c) dR) dR greenOpts = dG `seq` filter (noneLess (greenComp c) dG) dG blueOpts = dB `seq` filter (noneLess (blueComp c) dB) dB let res = Threal redOpts greenOpts blueOpts updatedCache = M.insert x res blueState put updatedCache return res where noneLess comp l a = none (a `comp`) (l \\ [a]) dsc = dominateState c reversible :: Comps -> Threal -> Threal reversible c x = evalState (reversibleState c x) M.empty reversibleState :: Monad m => Comps -> Threal -> StateT (HashMap Threal Threal) m Threal reversibleState c x = do cache <- get case M.lookup x cache of Just y -> return y Nothing -> do let (Threal uniqueR uniqueG uniqueB) = uniqueFields c x (rR, redState) = runState (traverse rsc uniqueR) cache (rG, greenState) = runState (traverse rsc uniqueG) redState (rB, blueState) = runState (traverse rsc uniqueB) greenState rComp r@(Threal rr rg rb) | not $ null gbBetter = concatMap redPart gbBetter | otherwise = [r] where --gbBetter = filter ((redComp c) x) (rg++rb) gbBetter = filter (\z -> greenComp c z x && blueComp c z x && 
redComp c x z) (rg++rb) gComp g@(Threal gr gg gb) | not $ null brBetter = concatMap greenPart brBetter | otherwise = [g] where -- brBetter = filter ((greenComp c) x) (gb++gr) brBetter = filter (\z -> blueComp c z x && redComp c z x && greenComp c x z) (gb++gr) bComp b@(Threal br bg bb) | not $ null rgBetter = concatMap bluePart rgBetter | otherwise = [b] where --rgBetter = filter ((blueComp c) x) (br++bg) rgBetter = filter (\z -> redComp c z x && greenComp c z x && blueComp c x z) (br++bg) redOpts = rR `seq` concatMap rComp rR greenOpts = rG `seq` concatMap gComp rG blueOpts = rB `seq` concatMap bComp rB res = Threal redOpts greenOpts blueOpts updatedCache = M.insert x res blueState put updatedCache return res where rsc = reversibleState c simplify :: Comps -> Threal -> Threal simplify c x = completelyUnique c x add :: Comps -> Threal -> Threal -> Threal add c x y = simplifyC c $ threalAdd x y
nomicflux/threals
src/Threal/ThrealM.hs
gpl-2.0
5,193
5
28
1,505
1,737
890
847
110
2
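-- dominateState and reversibleState above memoise a recursive rewrite through a
-- StateT (HashMap k v) cache. A minimal, self-contained sketch of the same caching
-- pattern (Fibonacci stands in for the Threal rewrite; all names are illustrative):
import           Control.Monad.State (State, evalState, get, modify)
import           Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as M

fibMemo :: Integer -> State (HashMap Integer Integer) Integer
fibMemo n
  | n < 2 = pure n
  | otherwise = do
      cache <- get
      case M.lookup n cache of
        Just v  -> pure v                    -- cache hit: reuse the stored result
        Nothing -> do
          a <- fibMemo (n - 1)
          b <- fibMemo (n - 2)
          let v = a + b
          modify (M.insert n v)              -- cache miss: store before returning
          pure v

-- evalState (fibMemo 50) M.empty == 12586269025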
{-# LANGUAGE DoAndIfThenElse
           , OverloadedStrings
           , RecordWildCards #-}

module CreateSendAPI.V3.PagedResult where

import Control.Applicative ((<$>), (<*>))
import Data.Aeson (FromJSON, (.:))
import qualified Data.Aeson as JSON
import Data.Default (Default (..))
import Data.Time.Calendar (Day, showGregorian)
import qualified Data.Vector as V

import qualified CreateSendAPI.V3.Subscriber as Subscriber

--
-- Data Types:
--

data OrderDirection = Asc | Desc deriving (Show, Eq)

data SubscriberResultsPage = SubscriberResultsPage
  { subscriberResults              :: V.Vector Subscriber.Details
  , subscriberResultsOrderedBy     :: Subscriber.OrderField
  , subscriberOrderDirection       :: OrderDirection
  , subscriberPageNumber           :: Integer
  , subscriberPageSize             :: Integer
  , subscriberRecordsOnThisPage    :: Integer
  , subscriberTotalNumberOfRecords :: Integer
  , subscriberNumberOfPages        :: Integer
  } deriving (Show, Eq)

data SubscriberQueryParams = SubscriberQueryParams
  { subscriberGetFromDate       :: Maybe Day
  , subscriberGetPageNumber     :: Integer
  , subscriberGetPageSize       :: Integer
  , subscriberGetOrderField     :: Subscriber.OrderField
  , subscriberGetOrderDirection :: OrderDirection
  } deriving (Show, Eq)

--
-- Data Type Instances, and Utility Functions:
--

orderDirectionToStr Asc = "asc"
orderDirectionToStr Desc = "desc"

instance FromJSON OrderDirection where
  parseJSON (JSON.String "asc") = return Asc
  parseJSON (JSON.String "desc") = return Desc
  parseJSON v = fail $ "Unrecognized OrderDirection JSON value: " ++ (show v)

instance Default SubscriberQueryParams where
  def = SubscriberQueryParams Nothing 1 1000 Subscriber.ByDate Asc

subscriberQueryParamsToStr (SubscriberQueryParams {..}) =
    date ++
    "&page=" ++ (show subscriberGetPageNumber) ++
    "&pagesize=" ++ (show subscriberGetPageSize) ++
    "&orderfield=" ++ (Subscriber.orderFieldToStr subscriberGetOrderField) ++
    "&orderdirection=" ++ (orderDirectionToStr subscriberGetOrderDirection)
  where
    date = case subscriberGetFromDate of
      Nothing  -> ""
      Just day -> "date=" ++ (showGregorian day)

instance FromJSON SubscriberResultsPage where
  parseJSON (JSON.Object v) = SubscriberResultsPage <$>
    v .: "Results" <*>
    v .: "ResultsOrderedBy" <*>
    v .: "OrderDirection" <*>
    v .: "PageNumber" <*>
    v .: "PageSize" <*>
    v .: "RecordsOnThisPage" <*>
    v .: "TotalNumberOfRecords" <*>
    v .: "NumberOfPages"
  parseJSON _ = fail "Expected a JSON object when parsing a SubscriberResultsPage"
pavpen/createsend-haskell
src/CreateSendAPI/V3/PagedResult.hs
gpl-2.0
2,541
68
15
436
566
350
216
57
2
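-- A hedged check of the OrderDirection parser above. It assumes a reasonably
-- modern aeson whose `decode` accepts a bare JSON string at the top level, plus
-- OverloadedStrings for the ByteString literals:
{-# LANGUAGE OverloadedStrings #-}
import Data.Aeson (decode)

ordDirRoundTrip :: Bool
ordDirRoundTrip =
  decode "\"asc\""  == Just Asc &&
  decode "\"desc\"" == Just Desc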
import Data.Char
import qualified Data.Map as M
import qualified Data.Vector as V

clean :: String -> String
clean = filter (\x -> x `elem` "><+-.,[]")

mapBracket :: String -> [(Int, Int)]
mapBracket prog = aux prog 0 [] []
  where
    aux [] _ _ ret = ret
    aux ('[':ps) cur stack ret = aux ps (cur + 1) (('[', cur):stack) ret
    aux (']':ps) cur stack ret =
      let (_, idx) = head stack
      in aux ps (cur + 1) (tail stack) ((cur, idx):(idx, cur):ret)
    aux (_:ps) cur stack ret = aux ps (cur + 1) stack ret

runProgram :: String -> String -> (Int, [Int])
runProgram prog input = aux progv (map ord input) memo 0 0 0 []
  where
    memo = V.fromList $ replicate 200 0
    bmap = M.fromList $ mapBracket prog
    progv = V.fromList prog
    inc x = (x + 1) `mod` 256
    dec x = (x - 1 + 256) `mod` 256
    aux pv input memo pp dp step out
      | pp >= V.length pv = (step, reverse out)
      | step >= 100000 = (step + 1, reverse out)
      | otherwise =
          case (pv V.! pp) of
            '>' -> aux pv input memo (pp + 1) (dp + 1) (step + 1) out
            '<' -> aux pv input memo (pp + 1) (dp - 1) (step + 1) out
            '+' -> (aux pv input (memo V.// [(dp, inc $ memo V.! dp)]) (pp + 1) dp (step + 1) out)
            '-' -> aux pv input (memo V.// [(dp, dec $ memo V.! dp)]) (pp + 1) dp (step + 1) out
            ',' -> aux pv (tail input) (memo V.// [(dp, head input)]) (pp + 1) dp (step + 1) out
            '.' -> aux pv input memo (pp + 1) dp (step + 1) ((memo V.! dp):out)
            '[' -> if (memo V.! dp) == 0
                     then aux pv input memo (bmap M.! pp) dp (step + 1) out
                     else aux pv input memo (pp + 1) dp (step + 1) out
            ']' -> if (memo V.! dp) /= 0
                     then aux pv input memo (bmap M.! pp) dp (step + 1) out
                     else aux pv input memo (pp + 1) dp (step + 1) out

getResult :: (Int, [Int]) -> [[Char]]
getResult (step, output) =
  if step > 100000
    then [map chr output, "PROCESS TIME OUT. KILLED!!!"]
    else [map chr output]

main :: IO ()
main = do
  _ <- getLine
  input <- getLine
  progStr <- getContents
  let prog = clean progStr
      bmap = mapBracket prog
      res = runProgram prog input
      result = getResult $ res
  putStrLn $ show res
  mapM_ putStrLn result
m00nlight/hackerrank
functional/Interpreter-and-Compiler/BrainF__k-Interpreter/main.hs
gpl-2.0
2,743
13
17
1,180
1,183
626
557
59
10
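-- Two small sanity checks for the interpreter above (illustrative, derived from
-- the definitions rather than taken from the repository):
--
--   mapBracket "[]"                   == [(1,0),(0,1)]   -- jumps recorded in both directions
--   runProgram ",+." "A"              == (3, [66])       -- read 'A', increment, output 'B'
--   getResult (runProgram ",+." "A")  == ["B"]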
--project euler problem 20
{--
n! means n × (n − 1) × ... × 3 × 2 × 1
Find the sum of the digits in the number 100!
--}
import Char

fac 0 = 1
fac n = n * (fac (n-1))

main = print $ sum $ map digitToInt $ show (fac 100)
goalieca/haskelling
020.hs
gpl-3.0
229
0
9
59
69
35
34
4
1
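-- A smaller instance of the same computation, written against the modern
-- Data.Char module instead of the legacy `Char` import used above:
import Data.Char (digitToInt)

digitSumOfFactorial :: Integer -> Int
digitSumOfFactorial n = sum . map digitToInt . show $ product [1 .. n]

-- digitSumOfFactorial 10  == 27   (10! = 3628800, and 3+6+2+8+8+0+0 = 27)
-- digitSumOfFactorial 100 == 648  (the value the program above prints)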
data Procompose q p a b where
  Procompose :: q a c -> p c b -> Procompose q p a b
hmemcpy/milewski-ctfp-pdf
src/content/3.10/code/haskell/snippet18.hs
gpl-3.0
84
1
5
25
40
22
18
-1
-1
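-- The GADT above requires the GADTs extension to compile. A minimal way to
-- exercise it (a sketch, not part of the book snippet) is to compose two ordinary
-- functions viewed as profunctors, instantiating both q and p at (->):
{-# LANGUAGE GADTs #-}

composed :: Procompose (->) (->) Int String
composed = Procompose (+ 1) show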
module PropT30 where import Prelude(Bool(..)) import Zeno -- Definitions True && x = x _ && _ = False False || x = x _ || _ = True not True = False not False = True -- Nats data Nat = S Nat | Z (+) :: Nat -> Nat -> Nat Z + y = y (S x) + y = S (x + y) (*) :: Nat -> Nat -> Nat Z * _ = Z (S x) * y = y + (x * y) (==),(/=) :: Nat -> Nat -> Bool Z == Z = True Z == _ = False S _ == Z = False S x == S y = x == y x /= y = not (x == y) (<=) :: Nat -> Nat -> Bool Z <= _ = True _ <= Z = False S x <= S y = x <= y one, zero :: Nat zero = Z one = S Z double :: Nat -> Nat double Z = Z double (S x) = S (S (double x)) even :: Nat -> Bool even Z = True even (S Z) = False even (S (S x)) = even x half :: Nat -> Nat half Z = Z half (S Z) = Z half (S (S x)) = S (half x) mult :: Nat -> Nat -> Nat -> Nat mult Z _ acc = acc mult (S x) y acc = mult x y (y + acc) fac :: Nat -> Nat fac Z = S Z fac (S x) = S x * fac x qfac :: Nat -> Nat -> Nat qfac Z acc = acc qfac (S x) acc = qfac x (S x * acc) exp :: Nat -> Nat -> Nat exp _ Z = S Z exp x (S n) = x * exp x n qexp :: Nat -> Nat -> Nat -> Nat qexp x Z acc = acc qexp x (S n) acc = qexp x n (x * acc) -- Lists length :: [a] -> Nat length [] = Z length (_:xs) = S (length xs) (++) :: [a] -> [a] -> [a] [] ++ ys = ys (x:xs) ++ ys = x : (xs ++ ys) drop :: Nat -> [a] -> [a] drop Z xs = xs drop _ [] = [] drop (S x) (_:xs) = drop x xs rev :: [a] -> [a] rev [] = [] rev (x:xs) = rev xs ++ [x] qrev :: [a] -> [a] -> [a] qrev [] acc = acc qrev (x:xs) acc = qrev xs (x:acc) revflat :: [[a]] -> [a] revflat [] = [] revflat ([]:xss) = revflat xss revflat ((x:xs):xss) = revflat (xs:xss) ++ [x] qrevflat :: [[a]] -> [a] -> [a] qrevflat [] acc = acc qrevflat ([]:xss) acc = qrevflat xss acc qrevflat ((x:xs):xss) acc = qrevflat (xs:xss) (x:acc) rotate :: Nat -> [a] -> [a] rotate Z xs = xs rotate _ [] = [] rotate (S n) (x:xs) = rotate n (xs ++ [x]) elem :: Nat -> [Nat] -> Bool elem _ [] = False elem n (x:xs) = n == x || elem n xs subset :: [Nat] -> [Nat] -> Bool subset [] ys = True subset (x:xs) ys = x `elem` xs && subset xs ys intersect,union :: [Nat] -> [Nat] -> [Nat] (x:xs) `intersect` ys | x `elem` ys = x:(xs `intersect` ys) | otherwise = xs `intersect` ys [] `intersect` ys = [] union (x:xs) ys | x `elem` ys = union xs ys | otherwise = x:(union xs ys) union [] ys = ys isort :: [Nat] -> [Nat] isort [] = [] isort (x:xs) = insert x (isort xs) insert :: Nat -> [Nat] -> [Nat] insert n [] = [n] insert n (x:xs) = case n <= x of True -> n : x : xs False -> x : (insert n xs) count :: Nat -> [Nat] -> Nat count n (x:xs) | n == x = S (count n xs) | otherwise = count n xs count n [] = Z sorted :: [Nat] -> Bool sorted (x:y:xs) = x <= y && sorted (y:xs) sorted _ = True -- Theorem prop_T30 :: [a] -> Prop prop_T30 x = prove (rev (rev x ++ []) :=: x)
danr/hipspec
testsuite/prod/zeno_version/PropT30.hs
gpl-3.0
2,963
0
11
915
1,995
1,038
957
114
2
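-- The conjecture prop_T30 above states `rev (rev x ++ []) = x`. A QuickCheck-style
-- spot check of the same statement over ordinary Haskell lists, with the file's
-- definition of rev restated so the snippet stands alone:
revL :: [a] -> [a]
revL []     = []
revL (x:xs) = revL xs ++ [x]

propT30List :: [Int] -> Bool
propT30List xs = revL (revL xs ++ []) == xs
-- e.g. propT30List [1,2,3] == True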
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.AppEngine.Apps.Services.Versions.Create -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Deploys code and resource files to a new version. -- -- /See:/ <https://cloud.google.com/appengine/docs/admin-api/ Google App Engine Admin API Reference> for @appengine.apps.services.versions.create@. module Network.Google.Resource.AppEngine.Apps.Services.Versions.Create ( -- * REST Resource AppsServicesVersionsCreateResource -- * Creating a Request , appsServicesVersionsCreate , AppsServicesVersionsCreate -- * Request Lenses , asvcXgafv , asvcUploadProtocol , asvcPp , asvcAccessToken , asvcUploadType , asvcPayload , asvcBearerToken , asvcAppsId , asvcServicesId , asvcCallback ) where import Network.Google.AppEngine.Types import Network.Google.Prelude -- | A resource alias for @appengine.apps.services.versions.create@ method which the -- 'AppsServicesVersionsCreate' request conforms to. type AppsServicesVersionsCreateResource = "v1" :> "apps" :> Capture "appsId" Text :> "services" :> Capture "servicesId" Text :> "versions" :> QueryParam "$.xgafv" Text :> QueryParam "upload_protocol" Text :> QueryParam "pp" Bool :> QueryParam "access_token" Text :> QueryParam "uploadType" Text :> QueryParam "bearer_token" Text :> QueryParam "callback" Text :> QueryParam "alt" AltJSON :> ReqBody '[JSON] Version :> Post '[JSON] Operation -- | Deploys code and resource files to a new version. -- -- /See:/ 'appsServicesVersionsCreate' smart constructor. data AppsServicesVersionsCreate = AppsServicesVersionsCreate' { _asvcXgafv :: !(Maybe Text) , _asvcUploadProtocol :: !(Maybe Text) , _asvcPp :: !Bool , _asvcAccessToken :: !(Maybe Text) , _asvcUploadType :: !(Maybe Text) , _asvcPayload :: !Version , _asvcBearerToken :: !(Maybe Text) , _asvcAppsId :: !Text , _asvcServicesId :: !Text , _asvcCallback :: !(Maybe Text) } deriving (Eq,Show,Data,Typeable,Generic) -- | Creates a value of 'AppsServicesVersionsCreate' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'asvcXgafv' -- -- * 'asvcUploadProtocol' -- -- * 'asvcPp' -- -- * 'asvcAccessToken' -- -- * 'asvcUploadType' -- -- * 'asvcPayload' -- -- * 'asvcBearerToken' -- -- * 'asvcAppsId' -- -- * 'asvcServicesId' -- -- * 'asvcCallback' appsServicesVersionsCreate :: Version -- ^ 'asvcPayload' -> Text -- ^ 'asvcAppsId' -> Text -- ^ 'asvcServicesId' -> AppsServicesVersionsCreate appsServicesVersionsCreate pAsvcPayload_ pAsvcAppsId_ pAsvcServicesId_ = AppsServicesVersionsCreate' { _asvcXgafv = Nothing , _asvcUploadProtocol = Nothing , _asvcPp = True , _asvcAccessToken = Nothing , _asvcUploadType = Nothing , _asvcPayload = pAsvcPayload_ , _asvcBearerToken = Nothing , _asvcAppsId = pAsvcAppsId_ , _asvcServicesId = pAsvcServicesId_ , _asvcCallback = Nothing } -- | V1 error format. 
asvcXgafv :: Lens' AppsServicesVersionsCreate (Maybe Text) asvcXgafv = lens _asvcXgafv (\ s a -> s{_asvcXgafv = a}) -- | Upload protocol for media (e.g. \"raw\", \"multipart\"). asvcUploadProtocol :: Lens' AppsServicesVersionsCreate (Maybe Text) asvcUploadProtocol = lens _asvcUploadProtocol (\ s a -> s{_asvcUploadProtocol = a}) -- | Pretty-print response. asvcPp :: Lens' AppsServicesVersionsCreate Bool asvcPp = lens _asvcPp (\ s a -> s{_asvcPp = a}) -- | OAuth access token. asvcAccessToken :: Lens' AppsServicesVersionsCreate (Maybe Text) asvcAccessToken = lens _asvcAccessToken (\ s a -> s{_asvcAccessToken = a}) -- | Legacy upload protocol for media (e.g. \"media\", \"multipart\"). asvcUploadType :: Lens' AppsServicesVersionsCreate (Maybe Text) asvcUploadType = lens _asvcUploadType (\ s a -> s{_asvcUploadType = a}) -- | Multipart request metadata. asvcPayload :: Lens' AppsServicesVersionsCreate Version asvcPayload = lens _asvcPayload (\ s a -> s{_asvcPayload = a}) -- | OAuth bearer token. asvcBearerToken :: Lens' AppsServicesVersionsCreate (Maybe Text) asvcBearerToken = lens _asvcBearerToken (\ s a -> s{_asvcBearerToken = a}) -- | Part of \`parent\`. Name of the parent resource to create this version -- under. Example: apps\/myapp\/services\/default. asvcAppsId :: Lens' AppsServicesVersionsCreate Text asvcAppsId = lens _asvcAppsId (\ s a -> s{_asvcAppsId = a}) -- | Part of \`parent\`. See documentation of \`appsId\`. asvcServicesId :: Lens' AppsServicesVersionsCreate Text asvcServicesId = lens _asvcServicesId (\ s a -> s{_asvcServicesId = a}) -- | JSONP asvcCallback :: Lens' AppsServicesVersionsCreate (Maybe Text) asvcCallback = lens _asvcCallback (\ s a -> s{_asvcCallback = a}) instance GoogleRequest AppsServicesVersionsCreate where type Rs AppsServicesVersionsCreate = Operation type Scopes AppsServicesVersionsCreate = '["https://www.googleapis.com/auth/cloud-platform"] requestClient AppsServicesVersionsCreate'{..} = go _asvcAppsId _asvcServicesId _asvcXgafv _asvcUploadProtocol (Just _asvcPp) _asvcAccessToken _asvcUploadType _asvcBearerToken _asvcCallback (Just AltJSON) _asvcPayload appEngineService where go = buildClient (Proxy :: Proxy AppsServicesVersionsCreateResource) mempty
rueshyna/gogol
gogol-appengine/gen/Network/Google/Resource/AppEngine/Apps/Services/Versions/Create.hs
mpl-2.0
6,629
5
23
1,665
1,016
590
426
148
1
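-- A hedged construction sketch for the request type above, using only the smart
-- constructor and lenses shown in the module together with Control.Lens operators.
-- The Version payload is assumed to be built elsewhere and passed in, and the
-- "my-app"/"default" identifiers are placeholders, not real resources.
{-# LANGUAGE OverloadedStrings #-}
import Control.Lens ((&), (?~))

mkCreateReq :: Version -> AppsServicesVersionsCreate
mkCreateReq payload =
  appsServicesVersionsCreate payload "my-app" "default"
    & asvcUploadType ?~ "multipart"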
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.DFAReporting.CreativeFieldValues.Update -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Updates an existing creative field value. -- -- /See:/ <https://developers.google.com/doubleclick-advertisers/ Campaign Manager 360 API Reference> for @dfareporting.creativeFieldValues.update@. module Network.Google.Resource.DFAReporting.CreativeFieldValues.Update ( -- * REST Resource CreativeFieldValuesUpdateResource -- * Creating a Request , creativeFieldValuesUpdate , CreativeFieldValuesUpdate -- * Request Lenses , cfvuCreativeFieldId , cfvuXgafv , cfvuUploadProtocol , cfvuAccessToken , cfvuUploadType , cfvuProFileId , cfvuPayload , cfvuCallback ) where import Network.Google.DFAReporting.Types import Network.Google.Prelude -- | A resource alias for @dfareporting.creativeFieldValues.update@ method which the -- 'CreativeFieldValuesUpdate' request conforms to. type CreativeFieldValuesUpdateResource = "dfareporting" :> "v3.5" :> "userprofiles" :> Capture "profileId" (Textual Int64) :> "creativeFields" :> Capture "creativeFieldId" (Textual Int64) :> "creativeFieldValues" :> QueryParam "$.xgafv" Xgafv :> QueryParam "upload_protocol" Text :> QueryParam "access_token" Text :> QueryParam "uploadType" Text :> QueryParam "callback" Text :> QueryParam "alt" AltJSON :> ReqBody '[JSON] CreativeFieldValue :> Put '[JSON] CreativeFieldValue -- | Updates an existing creative field value. -- -- /See:/ 'creativeFieldValuesUpdate' smart constructor. data CreativeFieldValuesUpdate = CreativeFieldValuesUpdate' { _cfvuCreativeFieldId :: !(Textual Int64) , _cfvuXgafv :: !(Maybe Xgafv) , _cfvuUploadProtocol :: !(Maybe Text) , _cfvuAccessToken :: !(Maybe Text) , _cfvuUploadType :: !(Maybe Text) , _cfvuProFileId :: !(Textual Int64) , _cfvuPayload :: !CreativeFieldValue , _cfvuCallback :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'CreativeFieldValuesUpdate' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'cfvuCreativeFieldId' -- -- * 'cfvuXgafv' -- -- * 'cfvuUploadProtocol' -- -- * 'cfvuAccessToken' -- -- * 'cfvuUploadType' -- -- * 'cfvuProFileId' -- -- * 'cfvuPayload' -- -- * 'cfvuCallback' creativeFieldValuesUpdate :: Int64 -- ^ 'cfvuCreativeFieldId' -> Int64 -- ^ 'cfvuProFileId' -> CreativeFieldValue -- ^ 'cfvuPayload' -> CreativeFieldValuesUpdate creativeFieldValuesUpdate pCfvuCreativeFieldId_ pCfvuProFileId_ pCfvuPayload_ = CreativeFieldValuesUpdate' { _cfvuCreativeFieldId = _Coerce # pCfvuCreativeFieldId_ , _cfvuXgafv = Nothing , _cfvuUploadProtocol = Nothing , _cfvuAccessToken = Nothing , _cfvuUploadType = Nothing , _cfvuProFileId = _Coerce # pCfvuProFileId_ , _cfvuPayload = pCfvuPayload_ , _cfvuCallback = Nothing } -- | Creative field ID for this creative field value. 
cfvuCreativeFieldId :: Lens' CreativeFieldValuesUpdate Int64 cfvuCreativeFieldId = lens _cfvuCreativeFieldId (\ s a -> s{_cfvuCreativeFieldId = a}) . _Coerce -- | V1 error format. cfvuXgafv :: Lens' CreativeFieldValuesUpdate (Maybe Xgafv) cfvuXgafv = lens _cfvuXgafv (\ s a -> s{_cfvuXgafv = a}) -- | Upload protocol for media (e.g. \"raw\", \"multipart\"). cfvuUploadProtocol :: Lens' CreativeFieldValuesUpdate (Maybe Text) cfvuUploadProtocol = lens _cfvuUploadProtocol (\ s a -> s{_cfvuUploadProtocol = a}) -- | OAuth access token. cfvuAccessToken :: Lens' CreativeFieldValuesUpdate (Maybe Text) cfvuAccessToken = lens _cfvuAccessToken (\ s a -> s{_cfvuAccessToken = a}) -- | Legacy upload protocol for media (e.g. \"media\", \"multipart\"). cfvuUploadType :: Lens' CreativeFieldValuesUpdate (Maybe Text) cfvuUploadType = lens _cfvuUploadType (\ s a -> s{_cfvuUploadType = a}) -- | User profile ID associated with this request. cfvuProFileId :: Lens' CreativeFieldValuesUpdate Int64 cfvuProFileId = lens _cfvuProFileId (\ s a -> s{_cfvuProFileId = a}) . _Coerce -- | Multipart request metadata. cfvuPayload :: Lens' CreativeFieldValuesUpdate CreativeFieldValue cfvuPayload = lens _cfvuPayload (\ s a -> s{_cfvuPayload = a}) -- | JSONP cfvuCallback :: Lens' CreativeFieldValuesUpdate (Maybe Text) cfvuCallback = lens _cfvuCallback (\ s a -> s{_cfvuCallback = a}) instance GoogleRequest CreativeFieldValuesUpdate where type Rs CreativeFieldValuesUpdate = CreativeFieldValue type Scopes CreativeFieldValuesUpdate = '["https://www.googleapis.com/auth/dfatrafficking"] requestClient CreativeFieldValuesUpdate'{..} = go _cfvuProFileId _cfvuCreativeFieldId _cfvuXgafv _cfvuUploadProtocol _cfvuAccessToken _cfvuUploadType _cfvuCallback (Just AltJSON) _cfvuPayload dFAReportingService where go = buildClient (Proxy :: Proxy CreativeFieldValuesUpdateResource) mempty
brendanhay/gogol
gogol-dfareporting/gen/Network/Google/Resource/DFAReporting/CreativeFieldValues/Update.hs
mpl-2.0
6,131
0
21
1,448
905
522
383
134
1
{-# LANGUAGE DataKinds, OverloadedStrings #-} module Model.Ingest ( IngestKey , lookupIngestContainer , addIngestContainer , lookupIngestRecord , addIngestRecord , lookupIngestAsset , addIngestAsset , replaceSlotAsset , checkDetermineMapping , attemptParseRows , extractColumnsDistinctSampleJson , extractColumnsInitialJson , HeaderMappingEntry(..) , participantFieldMappingToJSON , parseParticipantFieldMapping -- for testing: , determineMapping ) where import Control.Monad (when) import qualified Data.ByteString as BS import qualified Data.Csv as Csv import Data.Csv hiding (Record) import qualified Data.List as L import qualified Data.Map as Map import qualified Data.Text as T import qualified Data.Text.Encoding as TE import Data.Text (Text) import Database.PostgreSQL.Typed.Query import Database.PostgreSQL.Typed.Types import qualified Data.ByteString import Data.ByteString (ByteString) import qualified Data.String import Data.Vector (Vector) import Data.Csv.Contrib (extractColumnsDistinctSample, decodeCsvByNameWith, extractColumnsInitialRows) import Service.DB import qualified JSON import JSON (FromJSON(..), ToJSON(..)) import Model.Volume.Types import Model.Container.Types import Model.Metric.Types import Model.Metric import qualified Model.Record.SQL import Model.Record.Types import Model.Record (columnSampleJson) import Model.Asset.Types import Model.Asset.SQL type IngestKey = T.Text mapQuery :: ByteString -> ([PGValue] -> a) -> PGSimpleQuery a mapQuery qry mkResult = fmap mkResult (rawPGSimpleQuery qry) lookupIngestContainer :: MonadDB c m => Volume -> IngestKey -> m (Maybe Container) lookupIngestContainer vol k = do let _tenv_a6Dpp = unknownPGTypeEnv dbQuery1 $ fmap ($ vol) -- .(selectQuery selectVolumeContainer "JOIN ingest.container AS ingest USING (id, volume) WHERE ingest.key = ${k} AND container.volume = ${volumeId $ volumeRow vol}") (fmap (\ (vid_a6Dph, vtop_a6Dpi, vname_a6Dpj, vdate_a6Dpk, vrelease_a6Dpl) -> Container (ContainerRow vid_a6Dph vtop_a6Dpi vname_a6Dpj vdate_a6Dpk) vrelease_a6Dpl) (mapQuery ((\ _p_a6Dpq _p_a6Dpr -> (Data.ByteString.concat [Data.String.fromString "SELECT container.id,container.top,container.name,container.date,slot_release.release FROM container LEFT JOIN slot_release ON container.id = slot_release.container AND slot_release.segment = '(,)' JOIN ingest.container AS ingest USING (id, volume) WHERE ingest.key = ", Database.PostgreSQL.Typed.Types.pgEscapeParameter _tenv_a6Dpp (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "text") _p_a6Dpq, Data.String.fromString " AND container.volume = ", Database.PostgreSQL.Typed.Types.pgEscapeParameter _tenv_a6Dpp (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "integer") _p_a6Dpr])) k (volumeId $ volumeRow vol)) (\ [_cid_a6Dps, _ctop_a6Dpt, _cname_a6Dpu, _cdate_a6Dpv, _crelease_a6Dpw] -> (Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull _tenv_a6Dpp (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "integer") _cid_a6Dps, Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull _tenv_a6Dpp (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "boolean") _ctop_a6Dpt, Database.PostgreSQL.Typed.Types.pgDecodeColumn _tenv_a6Dpp (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "text") _cname_a6Dpu, Database.PostgreSQL.Typed.Types.pgDecodeColumn _tenv_a6Dpp (Database.PostgreSQL.Typed.Types.PGTypeProxy :: 
Database.PostgreSQL.Typed.Types.PGTypeName "date") _cdate_a6Dpv, Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull _tenv_a6Dpp (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "release") _crelease_a6Dpw)))) addIngestContainer :: MonadDB c m => Container -> IngestKey -> m () addIngestContainer c k = do let _tenv_a6Dvh = unknownPGTypeEnv dbExecute1' -- [pgSQL|INSERT INTO ingest.container (id, volume, key) VALUES (${containerId $ containerRow c}, ${volumeId $ volumeRow $ containerVolume c}, ${k})|] (mapQuery ((\ _p_a6Dvi _p_a6Dvj _p_a6Dvk -> (Data.ByteString.concat [Data.String.fromString "INSERT INTO ingest.container (id, volume, key) VALUES (", Database.PostgreSQL.Typed.Types.pgEscapeParameter _tenv_a6Dvh (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "integer") _p_a6Dvi, Data.String.fromString ", ", Database.PostgreSQL.Typed.Types.pgEscapeParameter _tenv_a6Dvh (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "integer") _p_a6Dvj, Data.String.fromString ", ", Database.PostgreSQL.Typed.Types.pgEscapeParameter _tenv_a6Dvh (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "text") _p_a6Dvk, Data.String.fromString ")"])) (containerId $ containerRow c) (volumeId $ volumeRow $ containerVolume c) k) (\ [] -> ())) lookupIngestRecord :: MonadDB c m => Volume -> IngestKey -> m (Maybe Record) lookupIngestRecord vol k = do let _tenv_a6GtF = unknownPGTypeEnv dbQuery1 $ fmap ($ vol) -- .(selectQuery selectVolumeRecord "JOIN ingest.record AS ingest USING (id, volume) WHERE ingest.key = ${k} AND record.volume = ${volumeId $ volumeRow vol}") (fmap (\ (vid_a6GtB, vcategory_a6GtC, vmeasures_a6GtD, vc_a6GtE) -> ($) (Model.Record.SQL.makeRecord vid_a6GtB vcategory_a6GtC vmeasures_a6GtD) vc_a6GtE) (mapQuery ((\ _p_a6GtG _p_a6GtH -> (Data.ByteString.concat [Data.String.fromString "SELECT record.id,record.category,record.measures,record_release(record.id) FROM record_measures AS record JOIN ingest.record AS ingest USING (id, volume) WHERE ingest.key = ", Database.PostgreSQL.Typed.Types.pgEscapeParameter _tenv_a6GtF (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "text") _p_a6GtG, Data.String.fromString " AND record.volume = ", Database.PostgreSQL.Typed.Types.pgEscapeParameter _tenv_a6GtF (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "integer") _p_a6GtH])) k (volumeId $ volumeRow vol)) (\ [_cid_a6GtI, _ccategory_a6GtJ, _cmeasures_a6GtK, _crecord_release_a6GtL] -> (Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull _tenv_a6GtF (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "integer") _cid_a6GtI, Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull _tenv_a6GtF (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "smallint") _ccategory_a6GtJ, Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull _tenv_a6GtF (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "text[]") _cmeasures_a6GtK, Database.PostgreSQL.Typed.Types.pgDecodeColumn _tenv_a6GtF (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "release") _crecord_release_a6GtL)))) addIngestRecord :: MonadDB c m => Record -> IngestKey -> m () addIngestRecord r k = do let _tenv_a6PCz = unknownPGTypeEnv dbExecute1' -- [pgSQL|INSERT INTO ingest.record (id, 
volume, key) VALUES (${recordId $ recordRow r}, ${volumeId $ volumeRow $ recordVolume r}, ${k})|] (mapQuery ((\ _p_a6PCA _p_a6PCB _p_a6PCC -> (Data.ByteString.concat [Data.String.fromString "INSERT INTO ingest.record (id, volume, key) VALUES (", Database.PostgreSQL.Typed.Types.pgEscapeParameter _tenv_a6PCz (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "integer") _p_a6PCA, Data.String.fromString ", ", Database.PostgreSQL.Typed.Types.pgEscapeParameter _tenv_a6PCz (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "integer") _p_a6PCB, Data.String.fromString ", ", Database.PostgreSQL.Typed.Types.pgEscapeParameter _tenv_a6PCz (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "text") _p_a6PCC, Data.String.fromString ")"])) (recordId $ recordRow r) (volumeId $ volumeRow $ recordVolume r) k) (\ [] -> ())) lookupIngestAsset :: MonadDB c m => Volume -> FilePath -> m (Maybe Asset) lookupIngestAsset vol k = do let _tenv_a6PDv = unknownPGTypeEnv dbQuery1 $ fmap (`Asset` vol) -- .(selectQuery selectAssetRow "JOIN ingest.asset AS ingest USING (id) WHERE ingest.file = ${k} AND asset.volume = ${volumeId $ volumeRow vol}") (fmap (\ (vid_a6PDo, vformat_a6PDp, vrelease_a6PDq, vduration_a6PDr, vname_a6PDs, vc_a6PDt, vsize_a6PDu) -> Model.Asset.SQL.makeAssetRow vid_a6PDo vformat_a6PDp vrelease_a6PDq vduration_a6PDr vname_a6PDs vc_a6PDt vsize_a6PDu) (mapQuery ((\ _p_a6PDw _p_a6PDx -> (Data.ByteString.concat [Data.String.fromString "SELECT asset.id,asset.format,asset.release,asset.duration,asset.name,asset.sha1,asset.size FROM asset JOIN ingest.asset AS ingest USING (id) WHERE ingest.file = ", Database.PostgreSQL.Typed.Types.pgEscapeParameter _tenv_a6PDv (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "text") _p_a6PDw, Data.String.fromString " AND asset.volume = ", Database.PostgreSQL.Typed.Types.pgEscapeParameter _tenv_a6PDv (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "integer") _p_a6PDx])) k (volumeId $ volumeRow vol)) (\ [_cid_a6PDy, _cformat_a6PDz, _crelease_a6PDA, _cduration_a6PDB, _cname_a6PDC, _csha1_a6PDD, _csize_a6PDE] -> (Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull _tenv_a6PDv (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "integer") _cid_a6PDy, Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull _tenv_a6PDv (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "smallint") _cformat_a6PDz, Database.PostgreSQL.Typed.Types.pgDecodeColumn _tenv_a6PDv (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "release") _crelease_a6PDA, Database.PostgreSQL.Typed.Types.pgDecodeColumn _tenv_a6PDv (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "interval") _cduration_a6PDB, Database.PostgreSQL.Typed.Types.pgDecodeColumn _tenv_a6PDv (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "text") _cname_a6PDC, Database.PostgreSQL.Typed.Types.pgDecodeColumn _tenv_a6PDv (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "bytea") _csha1_a6PDD, Database.PostgreSQL.Typed.Types.pgDecodeColumn _tenv_a6PDv (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "bigint") _csize_a6PDE)))) addIngestAsset :: MonadDB c m => Asset -> 
FilePath -> m () addIngestAsset r k = do let _tenv_a6PFc = unknownPGTypeEnv dbExecute1' -- [pgSQL|INSERT INTO ingest.asset (id, file) VALUES (${assetId $ assetRow r}, ${k})|] (mapQuery ((\ _p_a6PFd _p_a6PFe -> (Data.ByteString.concat [Data.String.fromString "INSERT INTO ingest.asset (id, file) VALUES (", Database.PostgreSQL.Typed.Types.pgEscapeParameter _tenv_a6PFc (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "integer") _p_a6PFd, Data.String.fromString ", ", Database.PostgreSQL.Typed.Types.pgEscapeParameter _tenv_a6PFc (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "text") _p_a6PFe, Data.String.fromString ")"])) (assetId $ assetRow r) k) (\ [] -> ())) replaceSlotAsset :: MonadDB c m => Asset -> Asset -> m Bool replaceSlotAsset o n = do let _tenv_a6PFB = unknownPGTypeEnv dbExecute1 -- [pgSQL|UPDATE slot_asset SET asset = ${assetId $ assetRow n} WHERE asset = ${assetId $ assetRow o}|] (mapQuery ((\ _p_a6PFC _p_a6PFD -> (Data.ByteString.concat [Data.String.fromString "UPDATE slot_asset SET asset = ", Database.PostgreSQL.Typed.Types.pgEscapeParameter _tenv_a6PFB (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "integer") _p_a6PFC, Data.String.fromString " WHERE asset = ", Database.PostgreSQL.Typed.Types.pgEscapeParameter _tenv_a6PFB (Database.PostgreSQL.Typed.Types.PGTypeProxy :: Database.PostgreSQL.Typed.Types.PGTypeName "integer") _p_a6PFD])) (assetId $ assetRow n) (assetId $ assetRow o)) (\ [] -> ())) checkDetermineMapping :: [Metric] -> [Text] -> BS.ByteString -> Either String ParticipantFieldMapping2 checkDetermineMapping participantActiveMetrics csvHeaders csvContents = do -- return skipped columns or not? mpng <- determineMapping participantActiveMetrics csvHeaders _ <- attemptParseRows mpng csvContents pure mpng attemptParseRows :: ParticipantFieldMapping2 -> BS.ByteString -> Either String (Csv.Header, Vector ParticipantRecord) attemptParseRows participantFieldMapping contents = decodeCsvByNameWith (participantRecordParseNamedRecord participantFieldMapping) contents participantRecordParseNamedRecord :: ParticipantFieldMapping2 -> Csv.NamedRecord -> Parser ParticipantRecord participantRecordParseNamedRecord fieldMap m = do mId <- extractIfUsed2 (lookupField participantMetricId) validateParticipantId mInfo <- extractIfUsed2 (lookupField participantMetricInfo) validateParticipantInfo mDescription <- extractIfUsed2 (lookupField participantMetricDescription) validateParticipantDescription mBirthdate <- extractIfUsed2 (lookupField participantMetricBirthdate) validateParticipantBirthdate mGender <- extractIfUsed2 (lookupField participantMetricGender) validateParticipantGender mRace <- extractIfUsed2 (lookupField participantMetricRace) validateParticipantRace mEthnicity <- extractIfUsed2 (lookupField participantMetricEthnicity) validateParticipantEthnicity mGestationalAge <- extractIfUsed2 (lookupField participantMetricGestationalAge) validateParticipantGestationalAge mPregnancyTerm <- extractIfUsed2 (lookupField participantMetricPregnancyTerm) validateParticipantPregnancyTerm mBirthWeight <- extractIfUsed2 (lookupField participantMetricBirthWeight) validateParticipantBirthWeight mDisability <- extractIfUsed2 (lookupField participantMetricDisability) validateParticipantDisability mLanguage <- extractIfUsed2 (lookupField participantMetricLanguage) validateParticipantLanguage mCountry <- extractIfUsed2 (lookupField participantMetricCountry) 
validateParticipantCountry mState <- extractIfUsed2 (lookupField participantMetricState) validateParticipantState mSetting <- extractIfUsed2 (lookupField participantMetricSetting) validateParticipantSetting pure ParticipantRecord { prdId = mId , prdInfo = mInfo , prdDescription = mDescription , prdBirthdate = mBirthdate , prdGender = mGender , prdRace = mRace , prdEthnicity = mEthnicity , prdGestationalAge = mGestationalAge , prdPregnancyTerm = mPregnancyTerm , prdBirthWeight = mBirthWeight , prdDisability = mDisability , prdLanguage = mLanguage , prdCountry = mCountry , prdState = mState , prdSetting = mSetting } where extractIfUsed2 :: (ParticipantFieldMapping2 -> Maybe Text) -> (BS.ByteString -> Maybe (Maybe a)) -> Parser (FieldUse a) extractIfUsed2 maybeGetField validateValue = case maybeGetField fieldMap of Just colName -> do contents <- m .: TE.encodeUtf8 colName maybe (fail ("invalid value for " ++ show colName ++ ", found " ++ show contents)) (pure . maybe FieldEmpty (Field contents)) (validateValue contents) Nothing -> pure FieldUnused -- verify that all expected columns are present, with some leniency in matching -- left if no match possible determineMapping :: [Metric] -> [Text] -> Either String ParticipantFieldMapping2 determineMapping participantActiveMetrics csvHeaders = do (columnMatches :: [Text]) <- traverse (detectMetricMatch csvHeaders) participantActiveMetrics mkParticipantFieldMapping2 (zip participantActiveMetrics columnMatches) where detectMetricMatch :: [Text] -> Metric -> Either String Text detectMetricMatch hdrs metric = case L.find (`columnMetricCompatible` metric) hdrs of Just hdr -> Right hdr Nothing -> Left ("no compatible header found for metric: " ++ (show . metricName) metric) columnMetricCompatible :: Text -> Metric -> Bool columnMetricCompatible hdr metric = (T.filter (/= ' ') . T.toLower . metricName) metric == T.toLower hdr extractColumnsDistinctSampleJson :: Int -> Csv.Header -> Vector Csv.NamedRecord -> [JSON.Value] extractColumnsDistinctSampleJson maxSamples hdrs records = ( fmap (\(colHdr, vals) -> columnSampleJson colHdr vals) . extractColumnsDistinctSample maxSamples hdrs) records extractColumnsInitialJson :: Int -> Csv.Header -> Vector Csv.NamedRecord -> [JSON.Value] extractColumnsInitialJson maxRows hdrs records = ( fmap (\(colHdr, vals) -> columnSampleJson colHdr vals) . extractColumnsInitialRows maxRows hdrs ) records data HeaderMappingEntry = HeaderMappingEntry { hmeCsvField :: Text , hmeMetric :: Metric -- only participant metrics } deriving ({- Show, -} Eq) -- , Ord) instance FromJSON HeaderMappingEntry where parseJSON = JSON.withObject "HeaderMappingEntry" (\o -> do metricCanonicalName <- o JSON..: "metric" case lookupParticipantMetricBySymbolicName metricCanonicalName of Just metric -> HeaderMappingEntry <$> o JSON..: "csv_field" <*> pure metric Nothing -> fail ("metric name does not match any participant metric: " ++ show metricCanonicalName)) participantFieldMappingToJSON :: ParticipantFieldMapping2 -> JSON.Value participantFieldMappingToJSON fldMap = -- didn't use tojson to avoid orphan warning. didn't move tojson to metric.types because of circular ref to metric instances (toJSON . fmap fieldToEntry . Map.toList . pfmGetMapping) fldMap where fieldToEntry :: (Metric, Text) -> JSON.Value fieldToEntry (metric, colName) = JSON.object [ "metric" JSON..= (T.filter (/= ' ') . T.toLower . 
metricName) metric -- TODO: use shared function , "compatible_csv_fields" JSON..= [colName] -- change to single value soon ] parseParticipantFieldMapping :: [Metric] -> [(Metric, Text)] -> Either String ParticipantFieldMapping2 parseParticipantFieldMapping volParticipantActiveMetrics requestedMapping = do when ( length volParticipantActiveMetrics /= length requestedMapping || L.sort volParticipantActiveMetrics /= (L.sort . fmap fst) requestedMapping) (Left "The requested metric mapping does not completely match the required volume metrics") mkParticipantFieldMapping2 requestedMapping
databrary/databrary
src/Model/Ingest.hs
agpl-3.0
24,695
0
23
8,173
4,073
2,294
1,779
-1
-1
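A minimal standalone sketch of the header-matching rule used by determineMapping / columnMetricCompatible above: a CSV header is considered compatible with a metric when the metric name, lowercased and stripped of spaces, equals the lowercased header. The helper name headerMatches and the sample strings are illustrative only, not part of the databrary code.

{-# LANGUAGE OverloadedStrings #-}
import qualified Data.Text as T

-- Hypothetical helper mirroring columnMetricCompatible, with the metric name
-- passed as plain Text instead of a Metric value.
headerMatches :: T.Text -> T.Text -> Bool
headerMatches hdr metricName =
  (T.filter (/= ' ') . T.toLower) metricName == T.toLower hdr

main :: IO ()
main = do
  print (headerMatches "birthweight" "Birth Weight")  -- True
  print (headerMatches "weight" "Birth Weight")       -- False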
{-# OPTIONS_GHC -fno-warn-orphans #-}
module ProjectM36.Client.Json where
import Data.Aeson
import ProjectM36.Server.RemoteCallTypes.Json ()
import ProjectM36.Client
import ProjectM36.TransactionGraph
import Control.Exception (IOException)

instance ToJSON EvaluatedNotification
instance FromJSON EvaluatedNotification

instance ToJSON ConnectionError

instance ToJSON IOException where
  toJSON err = object ["IOException" .= show err]

instance ToJSON MerkleValidationError
agentm/project-m36
src/bin/ProjectM36/Client/Json.hs
unlicense
478
0
9
51
100
54
46
13
0
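A small usage sketch of the orphan ToJSON IOException instance defined above. The error message is illustrative, and the exact JSON rendering depends on the Show instance of the exception; userError is used here only because it produces an IOError (a synonym for IOException).

import Data.Aeson (encode)
import qualified Data.ByteString.Lazy.Char8 as BL
import ProjectM36.Client.Json ()

main :: IO ()
main =
  -- The instance serialises the exception as an object with a single
  -- "IOException" field holding its Show output.
  BL.putStrLn (encode (userError "connection refused"))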
{-# LANGUAGE GeneralizedNewtypeDeriving #-} ------------------------------------------------------------------------------ -- Copyright 2012 Microsoft Corporation. -- -- This is free software; you can redistribute it and/or modify it under the -- terms of the Apache License, Version 2.0. A copy of the License can be -- found in the file "license.txt" at the root of this distribution. ----------------------------------------------------------------------------- {- | Interactive command prompt. -} module Interpreter.Interpret ( interpret ) where import System.Directory ( getCurrentDirectory, setCurrentDirectory ) import System.Process ( system ) import System.Exit ( ExitCode(..) ) import Data.List ( isPrefixOf ) import Data.Char ( isSpace ) import Control.Monad import Control.Monad.Reader ( ReaderT, runReaderT ) import Control.Monad.IO.Class ( MonadIO, liftIO ) import Control.Monad.Trans.Class ( lift ) import Control.Applicative ( Applicative ) import Platform.ReadLine ( InputT, InputM(..), MonadException, runInput) import Lib.PPrint import Lib.Printer ( Printer(..), PrinterM(..) ) import Common.Failure ( catchIO ) import Common.ColorScheme import Common.File ( joinPath ) import Common.Name ( unqualify, qualify, newName ) import Common.NamePrim ( nameExpr, nameType, nameInteractive, nameInteractiveModule, nameSystemCore ) import Common.Range import Common.Error import Syntax.Syntax import Syntax.Highlight ( highlightPrint ) import Kind.Synonym ( synonymsIsEmpty,synonymsDiff, ppSynonyms ) import Kind.Assumption ( kgammaFind, kgammaIsEmpty, ppKGamma ) import Kind.Pretty ( prettyKind ) import Type.Assumption ( gammaIsEmpty, ppGamma, infoType, gammaFilter ) import Compiler.Options import Compiler.Compile import Interpreter.Command import Interpreter.State ( State(..), reset ) import Interpreter.Load ( loadFilesErr ) import Interpreter.Message ( message , messageLn , messageLnLn , messageInfoLnLn , messagePrettyLn , messageError , messageErrorMsgLn , messageErrorMsgLnLn , messagePrettyLnLn , messageMarker , messageRemark , messageEvaluation , messageInfoLn , messageHeader , messageSchemeEffect , messageScheme , prettyScheme , prettyEnv ) import Interpreter.Quote ( messageQuote ) import Interpreter.Editor ( runEditor ) newtype Interpreter p a = Interpreter (ReaderT p (InputT IO) a) deriving (Functor, Applicative, Monad, PrinterM, MonadIO, MonadException) instance InputM (Interpreter p) where readLine s = Interpreter ( lift $ readLine s ) runInterpreter :: (Printer p) => p -> Interpreter p a -> IO a runInterpreter p (Interpreter m) = runInput (runReaderT m p) io :: MonadIO m => IO a -> m a io = liftIO {--------------------------------------------------------------- Main ---------------------------------------------------------------} -- | Loads the requested modules and goes into an evaluation loop prompting -- the user for input to be evaluated. interpret :: Printer p => p -- ^ supplies 'IO' actions for (coloured) output to stdout -> Flags -- ^ flags for example from the command line -> [FilePath] -- ^ files to load initially -> IO () -- ^ interpret printer' flags0 files' = runInterpreter printer' $ do io $ messageHeader st err <- io $ loadFilesErr (terminal st) st { flags = flags0 { showCore = False, showAsmCSharp = False } } [ show nameSystemCore ] -- FIXME: What does this catch? Why not returning it? 
--`catchIO` \msg -> do messageError st msg; -- return $ errorMsg $ ErrorGeneral rangeNull (text msg) case checkError err of Left msg -> do io $ messageInfoLn st ("unable to load the " ++ show nameSystemCore ++ " module; standard functions are not available") io $ messageEvaluation st interpreterEx st { flags = (flags st){ evaluate = False } , errorRange = Just (getRange msg) } Right (preludeSt,_warnings) -> if (null files') then interpreterEx preludeSt{ lastLoad = [] } else command preludeSt (Load files') where st = State { printer = printer' , flags = flags0 , evalDisable = False , loaded0 = initialLoaded , loaded = initialLoaded , defines = [] , program = (programNull nameInteractiveModule) , errorRange = Nothing , lastLoad = [] , loadedPrelude = initialLoaded } {--------------------------------------------------------------- Interpreter loop ---------------------------------------------------------------} -- | A thin wrapper around the 'readLine' call that specifies the prompt -- and a fallback command (:quit) in case 'readLine' returns 'Nothing'. getCommand :: (Printer p) => State p -> Interpreter p Command getCommand _ = parseCommand `fmap` maybe ":quit" id `fmap` readLine "> " -- | Tail-recursively calls 'interpreterEx' and clears 'errorRange' interpreter :: (Printer p) => State p -> Interpreter p () interpreter st = do interpreterEx st' where st' = st{ errorRange = Nothing } -- | Fetches a command and tail-recursively calls 'command' for evaluation interpreterEx :: (Printer p) => State p -> Interpreter p () interpreterEx st = do cmd <- getCommand st command st cmd -- | Interpret a command and (if not quit) recurses to 'interpreter' command :: (Printer p) => State p -> Command -> Interpreter p () command st cmd = case cmd of Eval ln -> do{ err <- io $ compileExpression (terminal st) (flags st) (loaded st) (Executable nameExpr ()) (program st) bigLine ln ; checkInfer st True err $ \ld -> do if (not (evaluate (flags st))) then let tp = infoType $ gammaFind (qualify nameInteractive nameExpr) (loadedGamma ld) in io $ messageSchemeEffect st tp else io $ messageLn st "" interpreter st{ loaded = ld } -- (loaded st){ loadedModules = loadedModules ld }} } Define ln -> do err <- io $ compileValueDef (terminal st) (flags st) (loaded st) (program st) (lineNo st) ln checkInfer2 st True err $ \(defName',ld) -> do{ let tp = infoType $ gammaFind defName' (loadedGamma ld) tpdoc = prettyScheme st tp ; io $ messagePrettyLnLn st (text (show (unqualify defName')) <+> text ":" <+> tpdoc) ; interpreter st{ program = maybe (program st) id (modProgram (loadedModule ld)) , loaded = ld , defines = filter (\(name,_) -> defName' /= name) (defines st) ++ [(defName',[dropLet ln,""])] } } TypeOf ln -> do err <- io $ compileExpression (terminal st) (flags st) (loaded st) Object (program st) bigLine ln checkInfer st True err $ \ld -> do{ let tp = infoType $ gammaFind (qualify nameInteractive nameExpr) (loadedGamma ld) ; io $ messageSchemeEffect st tp ; interpreter st{ loaded = ld } -- (loaded st){ loadedModules = loadedModules ld }} } KindOf ln -> do err <- io $ compileType (terminal st) (flags st) (loaded st) (program st) bigLine ln checkInfer st True err $ \ld -> do{ let kind = kgammaFind (getName (program st)) nameType (loadedKGamma ld) ; io $ messagePrettyLnLn st (prettyKind (colorSchemeFromFlags (flags st)) (snd kind)) ; interpreter st{ loaded = ld } } TypeDef ln -> -- trace ("modules: " ++ show (map (show . modName . 
loadedModule) (loadedModules st))) $ do err <- io $ compileTypeDef (terminal st) (flags st) (loaded st) (program st) (lineNo st) ln checkInfer2 st True err $ \(defName', ld) -> do{ let (_qname,kind) = kgammaFind (getName (program st)) defName'(loadedKGamma ld) ; io $ messagePrettyLnLn st (text (show defName') <+> text "::" <+> pretty kind) ; interpreter st{ program = maybe (program st) id $ modProgram (loadedModule ld) , loaded = ld , defines = filter (\(name,_) -> defName' /= name) (defines st) ++ [(defName',[ln,""])] } } Load fnames -> do{ let st' = st{ lastLoad = fnames } ; loadFiles (terminal st) st' (reset st') fnames } Reload -> do{ loadFiles (terminal st) st (reset st) (lastLoad st) {- (map (modPath . loadedModule) (tail (loadedModules st))) -} } Edit [] -> do io $ messageRemark st "file argument missing" {- TODO: reactivate this after refactoring Edit [] -> do{ let fpath = lastFilePath st ; if (null fpath) then do io $ messageRemark st "nothing to edit" interpreterEx st else do io $ runEditor st fpath command st Reload } -} Edit fname -> do{ mbpath <- io $ searchSource (flags st) "" (newName fname) -- searchPath (includePath (flags st)) sourceExtension fname ; case mbpath of Nothing -> do io $ messageErrorMsgLnLn st (errorFileNotFound (flags st) fname) interpreter st Just (root,fname') -> do io $ runEditor st (joinPath root fname') command st Reload } Shell cmd' -> do{ ec <- io $ system cmd' ; case ec of ExitSuccess -> io $ messageLn st "" ExitFailure i -> io $ messageInfoLn st $ show i ; interpreterEx st } ChangeDir d -> do{ if (null d) then do{ fpath <- io $ getCurrentDirectory ; io $ messageInfoLnLn st fpath } else io $ setCurrentDirectory d ; interpreterEx st } Options opts-> do{ (newFlags,mode) <- io $ processOptions (flags st) (words opts) ; let setFlags files' = do if (null files') then io $ messageLn st "" else io $messageError st "(ignoring file arguments)" interpreter (st{ flags = newFlags }) ; case mode of ModeHelp -> do doc <- io $ commandLineHelp (flags st) io $ messagePrettyLn st doc interpreterEx st ModeVersion -> do io $ showVersion (printer st) io $ messageLn st "" interpreter st ModeCompiler files' -> setFlags files' ModeInteractive files' -> setFlags files' -- ModeDoc files -> setFlags files } Error err -> do{ io $ messageInfoLn st err ; io $ messageInfoLn st "invalid command." ; io $ messageInfoLnLn st "(type \":?\" for help on commands)" ; interpreterEx st } Show showcmd-> do{ interpretShowCommand st showcmd ; interpreterEx st } Quit -> do{ io $ messageQuote st } None -> do{ interpreterEx st } where lineNo :: State p -> Int lineNo st' = bigLine + (length (defines st') + 1) loadFiles :: (Printer p) => Terminal -> State p -> State p -> [FilePath] -> Interpreter p () loadFiles term originalSt startSt files' = do err <- io $ loadFilesErr term startSt files' case checkError err of Left msg -> interpreterEx originalSt{ errorRange = Just (getRange msg) } Right (st',_warnings) -> interpreterEx st' errorFileNotFound :: Flags -> FilePath -> ErrorMessage errorFileNotFound flgs name = ErrorIO (docNotFound (colorSchemeFromFlags flgs) (includePath flgs) name) docNotFound :: ColorScheme -> [FilePath] -> String -> Doc docNotFound cscheme path name = text "could not find:" <+> ppPath name <$> if (null path) then text ("search path empty. 
(use the \"-i\" flag at command line?)") else text "search path :" <+> align (cat (punctuate comma (map ppPath path))) where ppPath p = color (colorSource cscheme) (text p) -- | Only needed by the (Define ln) branch dropLet :: String -> String dropLet s = if isPrefixOf "let" s then dropEndWhile (\c -> isSpace c || c == '}') (dropWhile (\c -> isSpace c || c == '{') (drop 3 s)) else s where dropEndWhile p = reverse . dropWhile p . reverse {-- | Only needed by the (Edit []) branch lastFilePath :: State -> FilePath lastFilePath st' = let source = lastSource st' in if (isSourceNull source) then "" else sourceName source -} -- | Interpret a show command. interpretShowCommand :: (Printer p) => State p -> ShowCommand -> Interpreter p () interpretShowCommand st cmd = io $ case cmd of ShowHelp -> do messagePrettyLn st $ commandHelp $ colorSchemeFromFlags $ flags st showEnv (flags st) (printer st) ShowVersion -> do showVersion (printer st) messageLn st "" ShowKindSigs -> let kgamma = loadedKGamma (loaded st) in if kgammaIsEmpty kgamma then messageRemark st "no kinds to show" else messagePrettyLnLn st ( ppKGamma colors ( loadedName (loaded st) ) ( loadedImportMap (loaded st) ) kgamma ) ShowTypeSigs -> let gamma = gammaFilter (modName (loadedModule (loaded st))) $ loadedGamma (loaded st) in if gammaIsEmpty gamma then messageRemark st "no types to show" else messagePrettyLnLn st (ppGamma (prettyEnv st) gamma) ShowSynonyms -> let syns = loadedDiff synonymsDiff loadedSynonyms in if synonymsIsEmpty syns then messageRemark st "no synonyms to show" else messagePrettyLnLn st (ppSynonyms (prettyEnv st) syns) ShowSource -> do source <- lastSourceFull st if isSourceNull source then messageRemark st "no source code to show" else do syntaxColor source messageLnLn st "" -- messageLnLn st (sourceText (programSource (program st))) ShowDefines -> if null (defines st) then messageRemark st "no definitions to show" else do syntaxColor $ interactiveSource $ stringToBString $ unlines $ concatMap snd $ defines st -- messagePrettyLn st (color (colorSource colors) -- (vcat (concatMap (map string . snd) (defines st)))) where colors :: ColorScheme colors = colorSchemeFromFlags (flags st) syntaxColor :: Source -> IO () syntaxColor source = highlightPrint colors (sourceName source) 1 (sourceBString source) (printer st) interactiveSource :: BString -> Source interactiveSource str = Source (show nameInteractiveModule) str loadedDiff :: t -> (Loaded -> t1) -> t1 loadedDiff _diff get = get (loaded st) lastSourceFull :: (Printer p) => State p -> IO Source lastSourceFull st' = if (isSourceNull lastSource || not (null (sourceText lastSource))) then return lastSource else do txt <- io $ readInput (sourceName lastSource) `catchIO` (\msg -> do{ messageError st' msg; return bstringEmpty }) return (lastSource{ sourceBString = txt }) where lastSource :: Source lastSource = -- trace ("lastSource: " ++ show (map modSourcePath (loadedModules (loaded0 st))) ++ "," ++ modSourcePath (loadedModule (loaded0 st)) ++ ", " ++ show (errorRange st)) $ let fsource = Source (head $ filter (not . 
null) $ map modSourcePath $ [loadedModule (loaded0 st')] ++ reverse (loadedModules $ loaded0 st')) bstringEmpty -- fsource = Source (modSourcePath (last (loadedModules (loaded0 st)))) bstringEmpty source = case errorRange st' of Just rng -> let src = rangeSource rng in if isSourceNull src then fsource else src Nothing -> fsource in source {-------------------------------------------------------------------------- Misc --------------------------------------------------------------------------} -- | A source is considered null if it is the interactive module or the -- source name is the empty string. isSourceNull :: Source -> Bool isSourceNull source = (sourceName source == show nameInteractiveModule || null (sourceName source)) -- | A terminal is a collection of pretty printing 'IO' actions. terminal :: Printer p => State p -> Terminal terminal st = Terminal ( messageErrorMsgLn st ) ( if verbose (flags st) > 0 then (\s -> withColor (printer st) DarkGray (message st (s ++ "\n"))) else (\_ -> return ())) ( messagePrettyLn st ) ( messageScheme st ) ( messagePrettyLn st ) -- | TODO: document checkInfer :: (Printer p) => State p -> Bool -> Error Loaded -> (Loaded -> Interpreter p ()) -> Interpreter p () checkInfer st = checkInferWith st id -- | TODO: document checkInfer2 :: (Printer p) => State p -> Bool -> Error (t, Loaded) -> ((t, Loaded) -> Interpreter p ()) -> Interpreter p () checkInfer2 st = checkInferWith st (\(_,c) -> c) -- | TODO: document checkInferWith :: (Printer p) => State p -> (a -> Loaded) -> Bool -> Error a -> (a -> Interpreter p ()) -> Interpreter p () checkInferWith st _getLoaded showMarker err f = case checkError err of Left msg -> do when showMarker (maybeMessageMarker st (getRange msg)) io $ messageErrorMsgLnLn st msg interpreterEx st{ errorRange = Just (getRange msg) } Right (x,ws) -> do let warnings = ws -- modWarnings (loadedModule ld) when (not (null warnings)) (do let msg = ErrorWarning warnings ErrorZero when showMarker (maybeMessageMarker st (getRange msg)) io $ messageErrorMsgLnLn st msg) f x -- | TODO: document maybeMessageMarker :: (Printer p) => State p -> Range -> Interpreter p () maybeMessageMarker st rng = if (lineNo == posLine (rangeStart rng) || posLine (rangeStart rng) == bigLine) then io $ messageMarker st rng else return () where lineNo :: Int lineNo = bigLine + (length (defines st) + 1)
lpeterse/koka
src/Interpreter/Interpret.hs
apache-2.0
21,575
144
29
8,445
5,001
2,603
2,398
330
27
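The dropLet helper in the interpreter above is pure and easy to check in isolation; the sketch below copies its definition so it can be run on its own (the sample input string is illustrative).

import Data.Char (isSpace)
import Data.List (isPrefixOf)

-- Copied from the interpreter above: strip a leading "let" keyword together
-- with the surrounding braces and whitespace from an interactive definition.
dropLet :: String -> String
dropLet s =
  if "let" `isPrefixOf` s
    then dropEndWhile (\c -> isSpace c || c == '}')
                      (dropWhile (\c -> isSpace c || c == '{') (drop 3 s))
    else s
  where dropEndWhile p = reverse . dropWhile p . reverse

main :: IO ()
main = putStrLn (dropLet "let { x = 1 }")  -- prints: x = 1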
{-
   Copyright 2015 Rafał Nowak

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
-}
module RaytracaH.Screen where

import Test.QuickCheck (Arbitrary(..), choose)

data Screen = Screen { width :: Int, height :: Int } deriving (Show)

instance Arbitrary Screen where
    arbitrary = do
        w <- choose (1, 100)
        h <- choose (1, 100)
        return $ Screen w h
rafalnowak/RaytracaH
src/RaytracaH/Screen.hs
apache-2.0
855
0
10
175
109
61
48
11
0
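A quick way to exercise the Arbitrary instance above from GHCi or a test driver; QuickCheck's sample prints a handful of randomly generated Screen values. The import path follows the module header shown above.

import Test.QuickCheck (Gen, arbitrary, sample)
import RaytracaH.Screen (Screen)

main :: IO ()
main = sample (arbitrary :: Gen Screen)  -- widths and heights fall in [1, 100]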
module Miscellaneous.A329110Spec (main, spec) where

import Test.Hspec
import Miscellaneous.A329110 (a329110)

main :: IO ()
main = hspec spec

spec :: Spec
spec = describe "A329110" $
  it "correctly computes the first 20 elements" $
    map a329110 [1..20] `shouldBe` expectedValue
  where
    expectedValue = [1,3,5,9,11,17,19,27,31,37,39,55,57,63,69,85,87,103,105,121]
peterokagey/haskellOEIS
test/Miscellaneous/A329110Spec.hs
apache-2.0
372
0
8
57
154
92
62
10
1
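For a quick manual check outside hspec, the first few values asserted by the spec above can be reproduced directly; the expected output comes from expectedValue in the spec.

import Miscellaneous.A329110 (a329110)

main :: IO ()
main = print (map a329110 [1..5])  -- [1,3,5,9,11] per the spec above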
module External.A181534 (a181534, a181534_list) where

import HelperSequences.A000217 (a000217)
import Data.List (find)
import Data.Maybe (fromJust)

a181534 :: Int -> Integer
a181534 n = a181534_list !! (n - 1)

a181534_list :: [Integer]
a181534_list = 1 : seqTail 2 1 where
  seqTail n l = a_n : seqTail (n + 1) a_n where
    a_n = fromJust $ find (\i -> 2 * a000217 i > a000217 n) [l..]
peterokagey/haskellOEIS
src/External/A181534.hs
apache-2.0
389
0
15
73
163
89
74
10
1
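A minimal driver for the sequence module above; note that a181534 is 1-indexed, so a181534 n equals a181534_list !! (n - 1). No particular values are asserted here, since the terms depend on HelperSequences.A000217.

import External.A181534 (a181534, a181534_list)

main :: IO ()
main = do
  print (take 10 a181534_list)  -- first ten terms of the sequence
  print (a181534 1)             -- 1-indexed: same as head a181534_list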
{-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE FunctionalDependencies #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE FlexibleContexts #-} module Agent.Intelligent.Perception ( perception, ) where import Prelude hiding (log) import Control.Lens import Data.Maybe import Types import World.Utils import Debug.Trace.Wumpus -- Module-specific logging function. logF :: (String -> a) -> a logF f = f "Agent.Intelligent.Perception" -- |Processes and breaks up messages from the outside world into smaller -- ones that the other sub-systems of the agent can process. perception :: EntityName -- ^The agent's name, for local perceptions. -> CellInd -- ^The agent's current position, for creating relative coordinates. -> Message -> [AgentMessage] perception _ pos (MsgVisualPerception iAbs d) = logF trace ("[perception] MsgvisualPerception with index " ++ show iAbs) $ (if (d ^. entity . is isWumpus) then logF trace ("Cell " ++ show iAbs ++ " has a Wumpus.") else id) $ [AMVisualGold i (d ^. gold), AMVisualMeat i (d ^. meat), AMVisualFruit i (d ^. fruit)] ++ cond (d ^. pit) [AMVisualPit i] ++ cond (d ^. plant . to isJust) [AMVisualPlant i $ d ^. plant . to (fromMaybe $ error "[Agent.perception.plant]: Nothing")] ++ cond (d ^. entity . is isAgent) [AMVisualAgent i $ d ^. ju entity . name, AMVisualEntityDirection i $ fromMaybe (error "perception: no entity direction in MsgVisualPerception!") $ d ^? entity . _Just . _Ag . direction] ++ cond (d ^. entity . is isWumpus) [AMVisualWumpus i $ d ^. ju entity . name] ++ cond (d ^. entity . to isJust) [AMVisualEntityHealth i $ d ^. ju entity . health] ++ cond (d ^. entity . to isJust) [AMVisualEntityStamina i $ d ^. ju entity . stamina] ++ cond (d ^. entity . to isNothing) [AMVisualFree i] where is f = to (maybe False f) -- |The position relative to the agent. i = makeRel pos iAbs perception _ pos (MsgEdgePerception (iAbs, dir) d) = {- logF trace "[perception] MsgEdgePerception" $ -} [AMVisualEdgeFatigue (i, dir) (d ^. fatigue), AMVisualEdgeDanger (i, dir) (d ^. danger)] where i = makeRel pos iAbs perception n _ (MsgLocalPerception d) = logF trace ("[perception] MsgLocalPerception: " ++ show d) $ [AMVisualGold (RI (0,0)) (d ^. gold), AMVisualMeat (RI (0,0)) (d ^. meat), AMVisualFruit (RI (0,0)) (d ^. fruit), AMLocalBreeze (d ^. breeze), AMLocalStench (d ^. stench), AMLocalAgent n] ++ cond (d ^. plant . to isJust) [AMVisualPlant (RI (0,0)) $ d ^. plant . to (fromMaybe $ error "[Agent.perception.plant]: Nothing")] perception _ _ (MsgGlobalPerception d) = [AMTemperature $ d ^. temperature, AMTime $ d ^. 
time] perception _ _ (MsgPositionPerception i) = logF trace "[perception] MsgPositionPerception" $ [AMPosition i] perception _ _ (MsgDirectionPerception i) = logF trace "[perception] MsgDirectionPerception" $ [AMDirection i] perception _ _ (MsgGesture n g) = logF trace "[perception] MsgGesture" $ [AMGesture n g] perception _ _ (MsgHealthChanged p) = logF trace ("[perception] MsgHealthChanged:" ++ show p) $ [(if p < 0 then AMHealthDecreased else AMHealthIncreased) p] perception _ _ (MsgStaminaChanged p) = logF trace ("[perception] MsgStaminaChanged:" ++ show p) $ [(if p < 0 then AMStaminaDecreased else AMStaminaIncreased) p] perception _ _ (MsgAttackedBy n d) = logF trace "[perception] MsgAttackedBy" $ [AMAttackedBy n, AMAttackedFrom d] perception _ _ (MsgReceivedItem n i) = logF trace "[perception] MsgReceivedItem" $ [case n of Nothing -> case i of Meat -> AMGainedMeat Fruit -> AMGainedFruit Gold -> AMGainedGold Just n' -> case i of Meat -> AMReceivedMeat n' Fruit -> AMReceivedFruit n' Gold -> AMReceivedGold n'] perception _ _ (MsgLostItem i) = logF trace "[perception] MsgLostItem" $ [case i of Meat -> AMGainedMeat Fruit -> AMGainedFruit Gold -> AMGainedGold] perception _ _ (MsgDied n t) = logF trace "[perception] MsgDied" $ [(case t of TyAgent -> AMAgentDied TyWumpus -> AMWumpusDied) n] perception _ _ (MsgAttacked n) = logF trace "[perception] MsgAttacked" $ [AMAttacked n] perception _ _ x@(MsgBody h s inv) = logF trace ("[perception] MsgBody: " ++ show x) $ [AMHaveHealth h, AMHaveStamina s, AMHaveMeat (inv ^. at' Meat), AMHaveFruit (inv ^. at' Fruit), AMHaveGold (inv ^. at' Gold)] perception _ _ MsgPlantHarvested = logF trace "[perception] MsgPlantHarvested" $ [AMPlantHarvested] perception _ _ (MsgAlreadyMoved as) = logF trace ("[perception] MsgAlreadyMoved " ++ show as) $ [AMAlreadyMoved as]
jtapolczai/wumpus
Agent/Intelligent/Perception.hs
apache-2.0
4,837
0
20
1,101
1,598
816
782
-1
-1
{-# LANGUAGE BangPatterns #-} {-| Utility functions. -} {- Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -} module Ganeti.Utils ( debug , debugFn , debugXy , sepSplit , Statistics , getSumStatistics , getStdDevStatistics , getStatisticValue , updateStatistics , stdDev , if' , select , applyIf , commaJoin , ensureQuoted , tryRead , formatTable , printTable , parseUnit , plural , niceSort , niceSortKey , exitIfBad , exitErr , exitWhen , exitUnless , logWarningIfBad , rStripSpace , newUUID , getCurrentTime , getCurrentTimeUSec , clockTimeToString , chompPrefix , warn , wrap , trim , defaultHead , exitIfEmpty , splitEithers , recombineEithers , resolveAddr , setOwnerAndGroupFromNames , b64StringToBitString , bitStringToB64String ) where import Data.Char (toUpper, isAlphaNum, isDigit, isSpace, intToDigit, digitToInt) import Data.Function (on) import Data.List import qualified Data.Map as M import Control.Monad (foldM) import Debug.Trace import Network.Socket import Ganeti.BasicTypes import qualified Ganeti.ConstantUtils as ConstantUtils import Ganeti.Logging import Ganeti.Runtime import System.IO import System.Exit import System.Posix.Files import System.Time import qualified Data.ByteString as BS import Data.ByteString.Base64 (decodeLenient, encode) import qualified Data.ByteString.Char8 as BSC import Data.Word (Word8) import Numeric (showIntAtBase, readInt) -- * Debug functions -- | To be used only for debugging, breaks referential integrity. debug :: Show a => a -> a debug x = trace (show x) x -- | Displays a modified form of the second parameter before returning -- it. debugFn :: Show b => (a -> b) -> a -> a debugFn fn x = debug (fn x) `seq` x -- | Show the first parameter before returning the second one. debugXy :: Show a => a -> b -> b debugXy = seq . debug -- * Miscellaneous -- | Apply the function if condition holds, otherwise use default value. applyIf :: Bool -> (a -> a) -> a -> a applyIf b f x = if b then f x else x -- | Comma-join a string list. commaJoin :: [String] -> String commaJoin = intercalate "," -- | Split a list on a separator and return an array. 
sepSplit :: Eq a => a -> [a] -> [[a]] sepSplit sep s | null s = [] | null xs = [x] | null ys = [x,[]] | otherwise = x:sepSplit sep ys where (x, xs) = break (== sep) s ys = drop 1 xs -- | Simple pluralize helper plural :: Int -> String -> String -> String plural 1 s _ = s plural _ _ p = p -- | Ensure a value is quoted if needed. ensureQuoted :: String -> String ensureQuoted v = if not (all (\c -> isAlphaNum c || c == '.') v) then '\'':v ++ "'" else v -- * Mathematical functions -- Simple and slow statistical functions, please replace with better -- versions -- | Standard deviation function. stdDev :: [Double] -> Double stdDev lst = -- first, calculate the list length and sum lst in a single step, -- for performance reasons let (ll', sx) = foldl' (\(rl, rs) e -> let rl' = rl + 1 rs' = rs + e in rl' `seq` rs' `seq` (rl', rs')) (0::Int, 0) lst ll = fromIntegral ll'::Double mv = sx / ll av = foldl' (\accu em -> let d = em - mv in accu + d * d) 0.0 lst in sqrt (av / ll) -- stddev -- | Abstract type of statistical accumulations. They behave as if the given -- statistics were computed on the list of values, but they allow a potentially -- more efficient update of a given value. data Statistics = SumStatistics Double | StdDevStatistics Double Double Double deriving Show -- count, sum, and not the sum of squares---instead the -- computed variance for better precission. -- | Get a statistics that sums up the values. getSumStatistics :: [Double] -> Statistics getSumStatistics = SumStatistics . sum -- | Get a statistics for the standard deviation. getStdDevStatistics :: [Double] -> Statistics getStdDevStatistics xs = let (nt, st) = foldl' (\(n, s) x -> let !n' = n + 1 !s' = s + x in (n', s')) (0, 0) xs mean = st / nt nvar = foldl' (\v x -> let d = x - mean in v + d * d) 0 xs in StdDevStatistics nt st (nvar / nt) -- | Obtain the value of a statistics. getStatisticValue :: Statistics -> Double getStatisticValue (SumStatistics s) = s getStatisticValue (StdDevStatistics _ _ var) = sqrt var -- | In a given statistics replace on value by another. This -- will only give meaningful results, if the original value -- was actually part of the statistics. updateStatistics :: Statistics -> (Double, Double) -> Statistics updateStatistics (SumStatistics s) (x, y) = SumStatistics $ s + (y - x) updateStatistics (StdDevStatistics n s var) (x, y) = let !ds = y - x !dss = y * y - x * x !dnnvar = n * dss - (2 * s + ds) * ds !s' = s + ds !var' = max 0 $ var + dnnvar / (n * n) in StdDevStatistics n s' var' -- * Logical functions -- Avoid syntactic sugar and enhance readability. These functions are proposed -- by some for inclusion in the Prelude, and at the moment they are present -- (with various definitions) in the utility-ht package. Some rationale and -- discussion is available at <http://www.haskell.org/haskellwiki/If-then-else> -- | \"if\" as a function, rather than as syntactic sugar. if' :: Bool -- ^ condition -> a -- ^ \"then\" result -> a -- ^ \"else\" result -> a -- ^ \"then\" or "else" result depending on the condition if' True x _ = x if' _ _ y = y -- * Parsing utility functions -- | Parse results from readsPrec. parseChoices :: (Monad m, Read a) => String -> String -> [(a, String)] -> m a parseChoices _ _ [(v, "")] = return v parseChoices name s [(_, e)] = fail $ name ++ ": leftover characters when parsing '" ++ s ++ "': '" ++ e ++ "'" parseChoices name s _ = fail $ name ++ ": cannot parse string '" ++ s ++ "'" -- | Safe 'read' function returning data encapsulated in a Result. 
tryRead :: (Monad m, Read a) => String -> String -> m a tryRead name s = parseChoices name s $ reads s -- | Format a table of strings to maintain consistent length. formatTable :: [[String]] -> [Bool] -> [[String]] formatTable vals numpos = let vtrans = transpose vals -- transpose, so that we work on rows -- rather than columns mlens = map (maximum . map length) vtrans expnd = map (\(flds, isnum, ml) -> map (\val -> let delta = ml - length val filler = replicate delta ' ' in if delta > 0 then if isnum then filler ++ val else val ++ filler else val ) flds ) (zip3 vtrans numpos mlens) in transpose expnd -- | Constructs a printable table from given header and rows printTable :: String -> [String] -> [[String]] -> [Bool] -> String printTable lp header rows isnum = unlines . map ((++) lp . (:) ' ' . unwords) $ formatTable (header:rows) isnum -- | Converts a unit (e.g. m or GB) into a scaling factor. parseUnitValue :: (Monad m) => String -> m Rational parseUnitValue unit -- binary conversions first | null unit = return 1 | unit == "m" || upper == "MIB" = return 1 | unit == "g" || upper == "GIB" = return kbBinary | unit == "t" || upper == "TIB" = return $ kbBinary * kbBinary -- SI conversions | unit == "M" || upper == "MB" = return mbFactor | unit == "G" || upper == "GB" = return $ mbFactor * kbDecimal | unit == "T" || upper == "TB" = return $ mbFactor * kbDecimal * kbDecimal | otherwise = fail $ "Unknown unit '" ++ unit ++ "'" where upper = map toUpper unit kbBinary = 1024 :: Rational kbDecimal = 1000 :: Rational decToBin = kbDecimal / kbBinary -- factor for 1K conversion mbFactor = decToBin * decToBin -- twice the factor for just 1K -- | Tries to extract number and scale from the given string. -- -- Input must be in the format NUMBER+ SPACE* [UNIT]. If no unit is -- specified, it defaults to MiB. Return value is always an integral -- value in MiB. parseUnit :: (Monad m, Integral a, Read a) => String -> m a parseUnit str = -- TODO: enhance this by splitting the unit parsing code out and -- accepting floating-point numbers case (reads str::[(Int, String)]) of [(v, suffix)] -> let unit = dropWhile (== ' ') suffix in do scaling <- parseUnitValue unit return $ truncate (fromIntegral v * scaling) _ -> fail $ "Can't parse string '" ++ str ++ "'" -- | Unwraps a 'Result', exiting the program if it is a 'Bad' value, -- otherwise returning the actual contained value. exitIfBad :: String -> Result a -> IO a exitIfBad msg (Bad s) = exitErr (msg ++ ": " ++ s) exitIfBad _ (Ok v) = return v -- | Exits immediately with an error message. exitErr :: String -> IO a exitErr errmsg = do hPutStrLn stderr $ "Error: " ++ errmsg exitWith (ExitFailure 1) -- | Exits with an error message if the given boolean condition if true. exitWhen :: Bool -> String -> IO () exitWhen True msg = exitErr msg exitWhen False _ = return () -- | Exits with an error message /unless/ the given boolean condition -- if true, the opposite of 'exitWhen'. exitUnless :: Bool -> String -> IO () exitUnless cond = exitWhen (not cond) -- | Unwraps a 'Result', logging a warning message and then returning a default -- value if it is a 'Bad' value, otherwise returning the actual contained value. logWarningIfBad :: String -> a -> Result a -> IO a logWarningIfBad msg defVal (Bad s) = do logWarning $ msg ++ ": " ++ s return defVal logWarningIfBad _ _ (Ok v) = return v -- | Print a warning, but do not exit. warn :: String -> IO () warn = hPutStrLn stderr . (++) "Warning: " -- | Helper for 'niceSort'. Computes the key element for a given string. 
extractKey :: [Either Integer String] -- ^ Current (partial) key, reversed -> String -- ^ Remaining string -> ([Either Integer String], String) extractKey ek [] = (reverse ek, []) extractKey ek xs@(x:_) = let (span_fn, conv_fn) = if isDigit x then (isDigit, Left . read) else (not . isDigit, Right) (k, rest) = span span_fn xs in extractKey (conv_fn k:ek) rest {-| Sort a list of strings based on digit and non-digit groupings. Given a list of names @['a1', 'a10', 'a11', 'a2']@ this function will sort the list in the logical order @['a1', 'a2', 'a10', 'a11']@. The sort algorithm breaks each name in groups of either only-digits or no-digits, and sorts based on each group. Internally, this is not implemented via regexes (like the Python version), but via actual splitting of the string in sequences of either digits or everything else, and converting the digit sequences in /Left Integer/ and the non-digit ones in /Right String/, at which point sorting becomes trivial due to the built-in 'Either' ordering; we only need one extra step of dropping the key at the end. -} niceSort :: [String] -> [String] niceSort = niceSortKey id -- | Key-version of 'niceSort'. We use 'sortBy' and @compare `on` fst@ -- since we don't want to add an ordering constraint on the /a/ type, -- hence the need to only compare the first element of the /(key, a)/ -- tuple. niceSortKey :: (a -> String) -> [a] -> [a] niceSortKey keyfn = map snd . sortBy (compare `on` fst) . map (\s -> (fst . extractKey [] $ keyfn s, s)) -- | Strip space characthers (including newline). As this is -- expensive, should only be run on small strings. rStripSpace :: String -> String rStripSpace = reverse . dropWhile isSpace . reverse -- | Returns a random UUID. -- This is a Linux-specific method as it uses the /proc filesystem. newUUID :: IO String newUUID = do contents <- readFile ConstantUtils.randomUuidFile return $! rStripSpace $ take 128 contents -- | Returns the current time as an 'Integer' representing the number -- of seconds from the Unix epoch. getCurrentTime :: IO Integer getCurrentTime = do TOD ctime _ <- getClockTime return ctime -- | Returns the current time as an 'Integer' representing the number -- of microseconds from the Unix epoch (hence the need for 'Integer'). getCurrentTimeUSec :: IO Integer getCurrentTimeUSec = do TOD ctime pico <- getClockTime -- pico: 10^-12, micro: 10^-6, so we have to shift seconds left and -- picoseconds right return $ ctime * 1000000 + pico `div` 1000000 -- | Convert a ClockTime into a (seconds-only) timestamp. clockTimeToString :: ClockTime -> String clockTimeToString (TOD t _) = show t {-| Strip a prefix from a string, allowing the last character of the prefix (which is assumed to be a separator) to be absent from the string if the string terminates there. \>>> chompPrefix \"foo:bar:\" \"a:b:c\" Nothing \>>> chompPrefix \"foo:bar:\" \"foo:bar:baz\" Just \"baz\" \>>> chompPrefix \"foo:bar:\" \"foo:bar:\" Just \"\" \>>> chompPrefix \"foo:bar:\" \"foo:bar\" Just \"\" \>>> chompPrefix \"foo:bar:\" \"foo:barbaz\" Nothing -} chompPrefix :: String -> String -> Maybe String chompPrefix pfx str = if pfx `isPrefixOf` str || str == init pfx then Just $ drop (length pfx) str else Nothing -- | Breaks a string in lines with length \<= maxWidth. -- -- NOTE: The split is OK if: -- -- * It doesn't break a word, i.e. the next line begins with space -- (@isSpace . head $ rest@) or the current line ends with space -- (@null revExtra@); -- -- * It breaks a very big word that doesn't fit anyway (@null revLine@). 
wrap :: Int -- ^ maxWidth -> String -- ^ string that needs wrapping -> [String] -- ^ string \"broken\" in lines wrap maxWidth = filter (not . null) . map trim . wrap0 where wrap0 :: String -> [String] wrap0 text | length text <= maxWidth = [text] | isSplitOK = line : wrap0 rest | otherwise = line' : wrap0 rest' where (line, rest) = splitAt maxWidth text (revExtra, revLine) = break isSpace . reverse $ line (line', rest') = (reverse revLine, reverse revExtra ++ rest) isSplitOK = null revLine || null revExtra || startsWithSpace rest startsWithSpace (x:_) = isSpace x startsWithSpace _ = False -- | Removes surrounding whitespace. Should only be used in small -- strings. trim :: String -> String trim = reverse . dropWhile isSpace . reverse . dropWhile isSpace -- | A safer head version, with a default value. defaultHead :: a -> [a] -> a defaultHead def [] = def defaultHead _ (x:_) = x -- | A 'head' version in the I/O monad, for validating parameters -- without which we cannot continue. exitIfEmpty :: String -> [a] -> IO a exitIfEmpty _ (x:_) = return x exitIfEmpty s [] = exitErr s -- | Split an 'Either' list into two separate lists (containing the -- 'Left' and 'Right' elements, plus a \"trail\" list that allows -- recombination later. -- -- This is splitter; for recombination, look at 'recombineEithers'. -- The sum of \"left\" and \"right\" lists should be equal to the -- original list length, and the trail list should be the same length -- as well. The entries in the resulting lists are reversed in -- comparison with the original list. splitEithers :: [Either a b] -> ([a], [b], [Bool]) splitEithers = foldl' splitter ([], [], []) where splitter (l, r, t) e = case e of Left v -> (v:l, r, False:t) Right v -> (l, v:r, True:t) -- | Recombines two \"left\" and \"right\" lists using a \"trail\" -- list into a single 'Either' list. -- -- This is the counterpart to 'splitEithers'. It does the opposite -- transformation, and the output list will be the reverse of the -- input lists. Since 'splitEithers' also reverses the lists, calling -- these together will result in the original list. -- -- Mismatches in the structure of the lists (e.g. inconsistent -- lengths) are represented via 'Bad'; normally this function should -- not fail, if lists are passed as generated by 'splitEithers'. recombineEithers :: (Show a, Show b) => [a] -> [b] -> [Bool] -> Result [Either a b] recombineEithers lefts rights trail = foldM recombiner ([], lefts, rights) trail >>= checker where checker (eithers, [], []) = Ok eithers checker (_, lefts', rights') = Bad $ "Inconsistent results after recombination, l'=" ++ show lefts' ++ ", r'=" ++ show rights' recombiner (es, l:ls, rs) False = Ok (Left l:es, ls, rs) recombiner (es, ls, r:rs) True = Ok (Right r:es, ls, rs) recombiner (_, ls, rs) t = Bad $ "Inconsistent trail log: l=" ++ show ls ++ ", r=" ++ show rs ++ ",t=" ++ show t -- | Default hints for the resolver resolveAddrHints :: Maybe AddrInfo resolveAddrHints = Just defaultHints { addrFlags = [AI_NUMERICHOST, AI_NUMERICSERV] } -- | Resolves a numeric address. resolveAddr :: Int -> String -> IO (Result (Family, SockAddr)) resolveAddr port str = do resolved <- getAddrInfo resolveAddrHints (Just str) (Just (show port)) return $ case resolved of [] -> Bad "Invalid results from lookup?" best:_ -> Ok (addrFamily best, addrAddress best) -- | Set the owner and the group of a file (given as names, not numeric id). 
setOwnerAndGroupFromNames :: FilePath -> GanetiDaemon -> GanetiGroup -> IO () setOwnerAndGroupFromNames filename daemon dGroup = do -- TODO: it would be nice to rework this (or getEnts) so that runtimeEnts -- is read only once per daemon startup, and then cached for further usage. runtimeEnts <- getEnts ents <- exitIfBad "Can't find required user/groups" runtimeEnts -- note: we use directly ! as lookup failures shouldn't happen, due -- to the map construction let uid = fst ents M.! daemon let gid = snd ents M.! dGroup setOwnerAndGroup filename uid gid type BitString = String -- | Base 64 encoded String to BitString wordsToBitString :: [Word8] -> BitString wordsToBitString = concatMap (padBits . wordToBits) where wordToBits = flip (showIntAtBase 2 intToDigit) "" padBits bs = replicate (8 - length bs) '0' ++ bs decodeB64String :: String -> [Word8] decodeB64String = BS.unpack . decodeLenient . BSC.pack b64StringToBitString :: String -> BitString b64StringToBitString = wordsToBitString . decodeB64String -- | A BitString to Base 64 encoded String bitStringToWords :: BitString -> [Word8] bitStringToWords [] = [] bitStringToWords bs = bitStringToWord c : bitStringToWords rest where bitStringToWord = fst . head . readInt 2 (const True) digitToInt (c, rest) = splitAt 8 bs encodeB64String :: [Word8] -> String encodeB64String = BSC.unpack . encode . BS.pack bitStringToB64String :: BitString -> String bitStringToB64String = encodeB64String . bitStringToWords
apyrgio/snf-ganeti
src/Ganeti/Utils.hs
bsd-2-clause
20,617
0
20
5,092
4,537
2,440
2,097
321
4
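A small sketch exercising a few of the pure helpers exported by Ganeti.Utils above. The expected niceSort result is the one given in its own documentation; the other outputs are not asserted.

import Ganeti.Utils (commaJoin, niceSort, wrap)

main :: IO ()
main = do
  -- Digit groups are compared numerically, per the niceSort documentation.
  print (niceSort ["a1", "a10", "a11", "a2"])  -- ["a1","a2","a10","a11"]
  putStrLn (commaJoin ["eth0", "eth1"])
  -- wrap breaks a string into lines no wider than the given maximum width.
  mapM_ putStrLn (wrap 16 "a somewhat longer sentence to wrap")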
-- | Paragon Abstract Syntax Tree. Expressions. module Language.Java.Paragon.Syntax.Expressions ( module Language.Java.Paragon.Syntax.Expressions , module Language.Java.Paragon.Syntax.Names , module Language.Java.Paragon.Syntax.Types ) where import Language.Java.Paragon.Syntax.Names import Language.Java.Paragon.Syntax.Types import Language.Java.Paragon.Annotation import Language.Java.Paragon.Annotated -- | Expressions. Unsafe records. data Exp = -- | Literal. Lit Literal -- | Referencing some name, e.g. variable. | NameExp Name -- | Assignment. | Assign { assignAnn :: Annotation -- ^ Annotation. , assignLhs :: Lhs -- ^ Left-hand side of the assignment. , assignOp :: AssignOp -- ^ Assignment operator (=, +=, *=, ...). , assignExp :: Exp -- ^ Expression on the right-hand side. } -- | Policy expression. | PolicyExp PolicyExp deriving (Show, Eq) -- | Types of literals. Unsafe records. data Literal = Int { litAnn :: Annotation -- ^ Annotation. , intLitVal :: Integer -- ^ Value of integer literal. } | Long { litAnn :: Annotation -- ^ Annotation. , longLitVal :: Integer -- ^ Value of long literal. } | Double { litAnn :: Annotation -- ^ Annotation. , doubleLitVal :: Double -- ^ Value of double literal. } | Float { litAnn :: Annotation -- ^ Annotation. , floatLitVal :: Double -- ^ Value of float literal. } | Char { litAnn :: Annotation -- ^ Annotation. , charLitVal :: Char -- ^ Value of char literal. } | String { litAnn :: Annotation -- ^ Annotation. , stringLitVal :: String -- ^ Value of string literal. } | Boolean { litAnn :: Annotation -- ^ Annotation. , boolLitVal :: Bool -- ^ Value of boolean literal. } | Null { litAnn :: Annotation } deriving (Show, Eq) -- | Left-hand side of an assignment expression. data Lhs = -- | Variable. NameLhs { lhsName :: Name } deriving (Show, Eq) -- | Different assignment operators. data AssignOp = EqualA Annotation -- ^ = deriving (Show, Eq) -- | A policy is a conjunction (set) of clauses, represented as a list. data PolicyExp = -- | Policy literal. PolicyLit { policyAnn :: Annotation -- ^ Annotation. , policyClauses :: [Clause] -- ^ Set of clauses. } deriving (Show, Eq) -- | A clause of the form Sigma => a, where a is an actor and Sigma a set of -- locks/atomic predicates that must be open/true. data Clause = Clause { clauseAnn :: Annotation -- ^ Annotation. , clauseVarDecls :: [ClauseVarDecl] -- ^ Clause variable declarations. , clauseHead :: ClauseHead -- ^ Head of the clause. , clauseAtoms :: [Atom] -- ^ Clause atoms. } deriving (Show, Eq) -- | Clause variable declaration. data ClauseVarDecl = ClauseVarDecl { clauseVarDeclAnn :: Annotation -- ^ Annotation. , clauseVarDeclType :: RefType -- ^ Type of clause variable. , clauseVarDeclId :: Id -- ^ Variable identifier. } deriving (Show, Eq) -- | Head of the clause. data ClauseHead = ClauseDeclHead ClauseVarDecl | ClauseVarHead Actor deriving (Show, Eq) -- | Actor variable. data Actor = -- | Free actor variable (and thus concrete w.r.t. the policy under scrutiny). Actor ActorName -- | Forall quantified actor variable within the current clause. | Var Id deriving (Show, Eq) -- | Representation of actor names. Unsafe records. data ActorName = -- | Free actor variable. ActorName Name -- | Free actor type parameter. | ActorTypeVar { actorTypeVarAnn :: Annotation -- ^ Annotation. , actorTypeVarType :: RefType -- ^ Type of actor type variable. , actorTypeVarId :: Id -- ^ Actor type variable identifier. } deriving (Show, Eq) data Atom = Atom { atomAnn :: Annotation -- ^ Annotation. , atomName :: Name -- ^ Atom name. 
, atomActors :: [Actor] -- ^ Atom actors. } deriving (Show, Eq) -- | Policy representation. type Policy = Exp instance Annotated Exp where ann (Lit x) = ann x ann (NameExp x) = ann x ann a@(Assign {}) = assignAnn a ann (PolicyExp x) = ann x instance Annotated Literal where ann = litAnn instance Annotated Lhs where ann = ann . lhsName instance Annotated AssignOp where ann (EqualA x) = x instance Annotated PolicyExp where ann p@(PolicyLit {}) = policyAnn p instance Annotated Clause where ann = clauseAnn instance Annotated ClauseVarDecl where ann = clauseVarDeclAnn instance Annotated ClauseHead where ann (ClauseDeclHead x) = ann x ann (ClauseVarHead x) = ann x instance Annotated Actor where ann (Actor x) = ann x ann (Var x) = ann x instance Annotated ActorName where ann (ActorName x) = ann x ann a@(ActorTypeVar {}) = actorTypeVarAnn a instance Annotated Atom where ann = atomAnn
bvdelft/paragon
src/Language/Java/Paragon/Syntax/Expressions.hs
bsd-3-clause
5,113
0
9
1,416
968
585
383
104
0
{-# LANGUAGE TypeOperators, KindSignatures, OverloadedStrings, GADTs, ScopedTypeVariables, MultiParamTypeClasses, FlexibleInstances #-}
module Square where

import Data.Aeson

import Control.Natural
import Control.Wakarusa.Pointed1
import Control.Wakarusa.Session
import Control.Wakarusa.JsonRpc

data Square :: * -> * where
  Square :: Int -> Square Int   -- (remotely) square a number

class Squarer f where
  square :: Int -> f Int

instance Squarer Square where
  square = Square

instance Pointed1 h => Squarer (h Square) where
  square n = point1 $$ square n

instance JsonRpc Square where
  encodeRpcCall (Square n) = call "square" [toJSON n]
  decodeRpcCall (JsonRpcCallee "square" [v]) = do
    v' <- get v
    r <- square v'
    return (result r)

evalSquare :: Square :~> IO
evalSquare = Nat $ \ f -> case f of
  Square n -> return (n * n)
ku-fpg/wakarusa
wakarusa-examples/json-rpc/Square.hs
bsd-3-clause
925
0
12
234
257
133
124
24
1
import Music.Prelude import qualified Music.Score {- W.A. Mozart: Ave Verum (excerpt) Transcribed from autograph, see http://imslp.org/wiki/Ave_verum_corpus,_K.618_(Mozart,_Wolfgang_Amadeus) Divided as follows (including preceding accompaniement): stanza1: Ave verum corpus natum de Maria virgine stanza2: Vere passum immolatum in cruce pro homoni stanza3: Cujus latus perforatum unda fluxit et sanguine stanza4: Esto nobis praegustatum in mortis examine -} -- Vocal parts [vl1, vl2] = divide 2 (tutti violin) vla = tutti viola vc = tutti cello -- Instruments [sop, alt] = divide 2 (tutti violin) ten = tutti viola -- bs = tutti cello bc = tutti doubleBass info = id . title "Ave Verum Corpus (excerpt)" . composer "W.A. Mozart" . timeSignature (4/4) . keySignature (key g False) score' = info $ compress 4 $ tempo (metronome (1/4) 30) $ {-delay (4*2) $ -} stanza1_instr </> stanza1_voc -- Rhythm helper functions lss l s1 s2 = l|*2 |> s1 |> s2 ssl s1 s2 l = s1 |> s2 |> l|*2 s3 s1 s2 s3 = s1 |> s2 |> s3 s4 s1 s2 s3 s4 = s1 |> s2 |> s3 |> s4 sl s l = s |> l|*3 ls l s = l|*3 |> s fit2 x y = (x |> y)|/2 l4 l = l|*4 ll l1 l2 = (l1 |> l2)|*2 a2 = a |* 2 as2 = as |* 2 ab2 = ab |* 2 b2 = b |* 2 bs2 = bs |* 2 bb2 = bb |* 2 c2 = c |* 2 cs2 = cs |* 2 cb2 = cb |* 2 d2 = d |* 2 ds2 = ds |* 2 db2 = db |* 2 e2 = e |* 2 es2 = es |* 2 eb2 = eb |* 2 f2 = f |* 2 fs2 = fs |* 2 fb2 = fb |* 2 g2 = g |* 2 gs2 = gs |* 2 gb2 = gb |* 2 (//) = (|>) {- Can we "overload application" as in c :: PitchL -> PitchL -> Score a (c d) :: PitchL -> Score a (c d) e :: Score a Alternatively, make score' instance of IsString and use Lilypond syntax -} -- Stanza 1 stanza1_voc = stanza1_sop </> stanza1_alto </> stanza1_ten </> stanza1_bass stanza1_sop = asScore $ delay 8 $ empty |> s3 a2 d' fs |> s3 a gs g2 |> s4 g b a g |> ssl g fs fs |> ls e e |> s4 fs fs g g |> lss g (fit2 fs e) fs |> l4 e stanza1_alto = asScore $ delay 8 $ empty |> ll fs fs |> ll e e |> s4 e g fs e |> ssl e d d |> ls cs cs |> s4 d d e e |> lss e (fit2 d cs) d |> l4 cs stanza1_ten = asScore $ delay 8 $ octavesDown 1 $ empty |> ll a a |> ll b b |> ls a a |> ll a a |> ls e e |> s4 a a b b |> ls a a |> l4 e stanza1_bass = asScore $ delay 8 $ octavesDown 1 $ empty |> ll d d |> ll d d |> ls cs cs |> ll d d |> ls a a |> s4 d d cs cs |> ls d d |> l4 a_ stanza1_instr = stanza1_vl1 </> stanza1_vl2 </> stanza1_vla </> stanza1_bc stanza1_vl1 = asScore $ empty |> s4 d a_ d e |> s4 fs d fs g |> lss a d' fs |> ssl a gs g |> s4 g b a g |> ssl g fs fs |> ls e e |> s4 fs fs g g |> lss g (fit2 fs e) fs |> l4 e stanza1_vl2 = asScore $ empty |> s4 d a_ d e |> s4 fs d fs g |> ll fs fs |> ll e e |> s4 e g fs e |> ssl e d d |> ls cs cs |> s4 d d e e |> lss e (fit2 d cs) d |> l4 cs stanza1_vla = asScore $ octavesDown 1 $ empty |> s4 d a_ d e |> s4 fs d fs g |> ll a a |> ll b b |> ls a a |> ll a a |> ls e e |> s4 a a b b |> ls a a |> l4 e stanza1_bc = asScore $ octavesDown 1 $ empty |> s4 d a_ d e |> s4 fs d fs g |> ll d d |> ll d d |> ls cs cs |> ll d d |> ls a a |> s4 d d cs cs |> ls d d |> l4 a_ -- Stanza 2 mapEvensOdds :: (a -> b) -> (a -> b) -> [a] -> [b] mapEvensOdds f g xs = let evens = fmap (xs !!) [0,2..] odds = fmap (xs !!) [1,3..] 
merge xs ys = concatMap (\(x,y) -> [x,y]) $ xs `zip` ys in take (length xs) $ map f evens `merge` map g odds {- openAudacity :: Score Note -> IO () openAudacity x = do void $ writeMidi "test.mid" $ x void $ system "timidity -Ow test.mid" void $ system "open -a Audacity test.wav" openAudio :: Score Note -> IO () openAudio x = do void $ writeMidi "test.mid" $ x void $ system "timidity -Ow test.mid" void $ system "open -a Audacity test.wav" -} -- fixClefs :: Score Note -> Score Note -- fixClefs = pcat . fmap (uncurry g) . extractParts' -- where -- g p x = clef (case defaultClef p of { 0 -> GClef; 1 -> CClef; 2 -> FClef } ) x fixClefs = id palindrome x = rev x |> x main :: IO () main = open score'
music-suite/music-preludes
examples/mozart.hs
bsd-3-clause
4,626
0
17
1,698
1,711
851
860
-1
-1
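mapEvensOdds is the only pure, score-independent helper in the file above; the sketch below copies it (with renamed local bindings) so its even/odd indexing behaviour can be checked in isolation. The sample list is illustrative.

-- Copied from the score above: apply f at even indices and g at odd indices,
-- preserving the length of the input list.
mapEvensOdds :: (a -> b) -> (a -> b) -> [a] -> [b]
mapEvensOdds f g xs =
  let evens = fmap (xs !!) [0,2..]
      odds  = fmap (xs !!) [1,3..]
      merge as bs = concatMap (\(x,y) -> [x,y]) (as `zip` bs)
  in take (length xs) (map f evens `merge` map g odds)

main :: IO ()
main = print (mapEvensOdds negate (* 10) [1, 2, 3, 4, 5, 6])
-- expected: [-1,20,-3,40,-5,60]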
{-# LANGUAGE MagicHash, OverloadedStrings #-} -- Blender's file format parsing library. -- -- This library provides data structures to represent the main -- parts of a blend-file, associated functions and Data.Binary parsers -- to actually read a file into those data structures. Additionally -- functions to generate Haskell code capable of a full parse of -- particular blend-file versions are given. To make things simple, -- such generated code for some versions of Blender are exposed. -- -- Blender file format -- -- A blend-file is the native binary format used by Blender to save -- its data. At the most basic level a blend-file is just a header -- followed by a list of blocks. Each file-block is itself made of a -- header and some data. The block-header specifies what kind of data -- is to be found after it. -- -- To save its state, Blender dumps its internal data structures in -- the data part of the file-blocks. Each block can contain multiple -- structures, but all of the same type. Multiple blocks can contain -- the same type of structure. The internal data structures are just C -- structs. -- -- Because each version of Blender can potentially use different data -- structures, blend-files written by different versions of Blender can -- contain different types of structures. -- -- There is one special file-block, DNA1, whose data are a description -- of all the data structures contained in the other file-blocks. That -- description is called SDNA, for "Structure DNA". Each version of -- Blender stores such SDNA in every written blend-files. -- -- Usage of the library -- -- This library can read a blend-file into a BBlend data structure. -- A BBlend represents the main parts of blend-file: the header (in a -- BHeader), the blocks (leaving their data unparsed, in BBlocks) and -- the SDNA (this is the sole fully parsed block in a BBlend). -- -- Using the SDNA, it is possible to generate code to represent and -- read all the data part of the file-blocks. This library does that. -- The approach is to refine the notion of BBlock into Block. The -- generated code defines a variant data type called Block where each -- variant correspond to a particular structure of the SDNA. This looks -- like -- -- data Block = -- BlockLink Integer [Link] -- | BlockListBase Integer [ListBase] -- | ... -- -- The result of parsing the blend-file with the generated code is a list -- of those Block. The Link, etc. data types are also generated. They look -- like -- -- data Link = Link -- { link_next :: Integer {- struct Link * -} -- , link_prev :: Integer {- struct Link * -} -- } -- -- Those data types are low-level: they correspond directly to the data -- structures found in the blend-file. Further processing is left to you. -- -- See http://www.blender.org/development/architecture/blender-file-format/ -- for some doc. module Data.Blend where -- (BHeader, BBlock, BBlend(..), SDNA(..), lookupStruct, showField, showStruct, readBlend, showStructAsHs, showStructParser, getPointer, getShort, getUShort, getInt, getLong, getULong, getFloat, getDouble) -- where import qualified Data.ByteString.Lazy as LB import qualified Data.ByteString.Char8 as BC import Data.Binary.Get (runGet) import Data.List (find, intersperse) import Data.Char (toLower, toUpper) import Data.Blend.Types import Data.Blend.Parser showSDNAAsHs :: SDNA -> String showSDNAAsHs sdna = "data Block = \n " ++ concat (intersperse " | " $ map (f . 
fst) sdna) ++ " | BlockRaw Integer Struct Int ByteString -- for unparsed blocks.\n" ++ " deriving Show\n" where f n = "Block" ++ g n ++ " Integer [" ++ g n ++ "]\n" g n = toUpper (BC.head n) : tail (BC.unpack n) showSDNAAsSDNA :: SDNA -> String showSDNAAsSDNA sdna = "sdna :: SDNA\n" ++ "sdna =\n [ s" ++ concat (intersperse "\n , s" $ map (BC.unpack . fst) sdna) ++ "\n ]" showBlockParser :: SDNA -> String showBlockParser sdna = "readBlend :: FilePath -> IO [(Integer,Block)]\n" ++ "readBlend f = do\n" ++ " s <- LB.readFile f\n" ++ " return $ runGet (do h <- getBHeader\n" ++ " bs <- parseBlocks h\n" ++ " return bs) s\n\n" ++ "parseBlocks :: BHeader -> Get [(Integer,Block)]\n" ++ "parseBlocks h = do\n" ++ " code <- getByteString 4\n" ++ " size <- fmap fromIntegral getWord32le\n" ++ " addr <- getAddress h\n" ++ " idx <- fmap fromIntegral getWord32le\n" ++ " count <- fmap fromIntegral getWord32le\n" ++ " case BC.unpack code of\n" ++ " \"DNA1\" -> return [] -- check SDNA is the same\n" ++ " \"ENDB\" -> return []\n" ++ " _ -> do b <- parseBlock h size addr idx count\n" ++ " bs <- parseBlocks h\n" ++ " return (b : bs)\n\n" ++ "parseBlock :: BHeader -> Int -> Integer -> Int -> Int -> Get (Integer,Block)\n" ++ "parseBlock h size addr idx count =\n" ++ " if structSize h (sdna !! idx) * count /= size\n" ++ " then do\n" ++ " dat <- getByteString size\n" ++ " return $ (addr, BlockRaw addr (sdna !! idx) count dat)\n" ++ " else case idx of\n" ++ concat (zipWith f [(0::Integer)..] sdna) ++ " i -> error $ \"Unkown SDNA index \" ++ show i\n" where f i s = " " ++ show i ++ " -> fmap (\\b -> (addr, Block" ++ g (fst s) ++ " addr b)) (replicateM count $ get" ++ g (fst s) ++ " h)\n" g n = toUpper (BC.head n) : tail (BC.unpack n) -- Flattening of the SDNA (from the DNA1 block) structFields :: Struct -> [Field] structFields = snd structSize :: BHeader -> Struct -> Int structSize h = typeSize h . Compound showStruct :: Struct -> String showStruct (n, fs) = BC.unpack n ++ "\n" ++ concatMap showField fs ++ "\n" showStructAsHs :: Struct -> String showStructAsHs (n, fs) = "data " ++ n' ++ " = " ++ n' ++ "\n { " ++ map toLower (BC.unpack n) ++ "_" ++ concat (intersperse (" , " ++ map toLower (BC.unpack n) ++ "_") $ map showFieldAsHs fs) ++ " }\n deriving Show\n" where n' = toUpper (BC.head n) : tail (BC.unpack n) showStructAsStruct :: Struct -> String showStructAsStruct (n, fs) = "s" ++ BC.unpack n ++ ":: Struct\n" ++ "s" ++ BC.unpack n ++ " = (" ++ "\"" ++ BC.unpack n ++ "\",\n" ++ " [ " ++ concat (intersperse " , " $ map showFieldAsField fs) ++ " ])\n" showStructParser :: Struct -> String showStructParser (n, fs) = "get" ++ n' ++ " :: BHeader -> Get " ++ n' ++ "\n" ++ "get" ++ n' ++ " h = do\n " ++ concat (intersperse "\n " f') ++ "\n return $ " ++ n' ++ concatMap (\i -> " _" ++ show i) [1..length fs] ++ "\n" where n' = toUpper (BC.head n) : tail (BC.unpack n) f = map (showTypeParser . snd) fs f' = zipWith (\a b -> "_" ++ show a ++ " <- " ++ b) [(1::Integer)..] f showFieldAsHs :: Field -> String showFieldAsHs (n, t) = BC.unpack n ++ " :: " ++ showTypeAsHs t ++ "\n" showFieldAsField :: Field -> String showFieldAsField (n, t) = "(\"" ++ BC.unpack n ++ "\", " ++ showTypeAsType t ++ ")\n" -- Used the nested comments syntax... so they can be nested. 
showTypeAsHs :: Type -> String showTypeAsHs t = case t of Char -> "Int8 {- char -}" UChar -> "Word8 {- uchar -}" Short -> "Int16 {- short -}" UShort -> "Word16 {- ushort -}" Int -> "Int32 {- int -}" Long -> "Int64 {- long -}" ULong -> "Word64 {- ulong -}" Float -> "Float {- float -}" Double -> "Double {- double -}" Ref t' -> "Integer {- " ++ show t' ++ " * -}" RefVoid -> "Integer {- void * -}" Arr l Char -> "ByteString {- char[" ++ show l ++ "] -}" Arr l t' -> "[" ++ showTypeAsHs t' ++ "] {- " ++ show t' ++ "[" ++ show l ++ "] -}" FunPtr t' -> "Integer {- " ++ show t' ++ " (*xxx)() -}" Compound s -> let n = toUpper (BC.head $ structName s) : tail (BC.unpack $ structName s) in n ++ " {- struct " ++ BC.unpack (structName s) ++ " -}" UnknownCompound n -> "UnknownCompound (\"" ++ BC.unpack n ++ "\")" showTypeAsType :: Type -> String showTypeAsType t = case t of Char -> "Char" UChar -> "UChar" Short -> "Short" UShort -> "UShort" Int -> "Int" Long -> "Long" ULong -> "ULong" Float -> "Float" Double -> "Double" Ref (UnknownCompound n) -> "RefVoid {- Ref (UnknownCompound " ++ BC.unpack n ++ ") -}" Ref t' -> "Ref (" ++ showTypeAsType t' ++ ")" RefVoid -> "RefVoid" Arr l t' -> "Arr " ++ show l ++ " (" ++ showTypeAsType t' ++ ")" FunPtr t' -> "FunPtr (" ++ showTypeAsType t' ++ ")" Compound s -> "Compound s" ++ BC.unpack (structName s) UnknownCompound n -> "UnknownCompound (\"" ++ BC.unpack n ++ "\")" showTypeParser :: Type -> String showTypeParser t = case t of Char -> "get {- char -}" UChar -> "get {- uchar -}" Short -> "getShort h {- short -}" UShort -> "getUshort h {- ushort -}" Int -> "getInt h {- int -}" Long -> "getLong h {- long -}" ULong -> "getULong h {- ulong -}" Float -> "getFloat h {- float -}" Double -> "getDouble h {- double -}" Ref (UnknownCompound n) -> "getPointer h {- UnknownCompound " ++ BC.unpack n ++ " -}" Ref t' -> "getPointer h {- " ++ show t' ++ " * -}" RefVoid -> "getPointer h {- void * -}" Arr l Char -> "getByteString " ++ show l ++ " {- char[" ++ show l ++ "] -}" Arr l t' -> "replicateM " ++ show l ++" (" ++ showTypeParser t' ++ ") {- " ++ show t' ++ "[" ++ show l ++ "] -}" FunPtr t' -> "getPointer h {- " ++ show t' ++ " (*xxx)() -}" Compound s -> let n = toUpper (BC.head $ structName s) : tail (BC.unpack $ structName s) in "get" ++ n ++ " h {- struct " ++ BC.unpack (structName s) ++ " -}" UnknownCompound n -> error $ "Can't generate parsing code for UnknownCompound " ++ BC.unpack n -- Reads a blend-file into the basic BBlend data structures. readBBlend :: FilePath -> IO BBlend readBBlend f = do s <- LB.readFile f return $ runGet getBBlend s -- Reads just a blend-file header. readBHeader :: FilePath -> IO BHeader readBHeader f = do s <- LB.readFile f return $ runGet getBHeader s -- Convenience functions lookupStruct :: BBlend -> String -> Maybe Struct lookupStruct (BBlend _ _ sdna) n = fmap (\s -> (BC.pack n,s)) $ lookup (BC.pack n) sdna lookupStructByIndex :: BBlend -> Int -> Struct lookupStructByIndex (BBlend _ _ sdna) = (sdna !!) lookupBlockByOldAddr :: BBlend -> Integer -> Maybe BBlock lookupBlockByOldAddr bf addr = find f (blendBlocks bf) where f b = oldAddr b == addr -- Checks if a given block has a specific code and SDNA structure. hasCodeAndSDNA :: BBlock -> String -> String -> Bool hasCodeAndSDNA b c s = code && struct where code = blockCode b == BC.pack c struct = fst (sdnaStruct b) == BC.pack s
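
-- A minimal usage sketch (added for illustration; the function below is not
-- part of the original API, and the file path passed to it is whatever the
-- caller supplies): read a blend-file, list its SDNA structures in
-- human-readable form, then print the Haskell declarations that would be
-- generated for that SDNA.
exampleDumpSDNA :: FilePath -> IO ()
exampleDumpSDNA path = do
  BBlend _header _blocks sdna <- readBBlend path
  -- One entry per structure described by the file's SDNA.
  mapM_ (putStrLn . showStruct) sdna
  -- The generated variant type covering every structure of this SDNA...
  putStrLn (showSDNAAsHs sdna)
  -- ...and one generated record type per structure.
  mapM_ (putStrLn . showStructAsHs) sdna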
noteed/hblend
Data/Blend.hs
bsd-3-clause
10,766
0
31
2,564
2,371
1,209
1,162
170
17
{-# LANGUAGE OverloadedStrings, RankNTypes #-} module Qualys.Internal.ParseWebApp ( parseWebApps , parseWebApp ) where import Control.Applicative hiding (many) import Control.Monad (join) import Control.Monad.Catch (MonadThrow) import Control.Monad.IO.Class (MonadIO) import qualified Data.ByteString.Base64.URL as B64 import Data.Conduit (ConduitM) import Data.Text (Text) import qualified Data.Text.Encoding as TE import Data.XML.Types import Text.XML.Stream.Parse import Qualys.Internal import Qualys.Internal.ParseWasCommon import Qualys.Internal.ParseWasAuthRec import Qualys.Types.Was parseWebApps :: (MonadIO m, MonadThrow m) => ConduitM Event o m [WebApp] parseWebApps = many parseWebApp parseWebApp :: (MonadIO m, MonadThrow m) => ConduitM Event o m (Maybe WebApp) parseWebApp = tagName "WebApp" ignoreAttrs $ \_ -> WebApp <$> requireWith parseUInt (tagNoAttr "id" content) <*> tagNoAttr "name" content <*> tagNoAttr "url" content <*> tagNoAttr "os" content <*> tagNoAttr "owner" parseUser <*> tagNoAttr "scope" content <*> tagNoAttr "subDomain" content <*> parseV3List "domains" parseDomain <*> parseV3List "uris" parseUri <*> parseV3List "attributes" parseAttr <*> tagNoAttr "defaultProfile" parseProfile <*> tagNoAttr "defaultScanner" parseScanAppl <*> optionalWith parseBool (tagNoAttr "scannerLocked" content) <*> optionalWith parseBool (tagNoAttr "progressiveScanning" content) <*> parseV3List "urlBlacklist" parseUrlEntry <*> parseV3List "urlWhitelist" parseUrlEntry <*> parseV3List "postDataBlacklist" parseUrlEntry <*> parseV3List "authRecords" parseAuthRec <*> tagNoAttr "useRobots" content <*> optionalWith parseBool (tagNoAttr "useSitemap" content) <*> parseV3List "headers" (tagNoAttr "WebAppHeader" content) <*> optionalWith parseBool (tagNoAttr "malwareMonitoring" content) <*> optionalWith parseBool (tagNoAttr "malwareNotification" content) <*> optionalWith parseDate (tagNoAttr "malwareScheduleTime" content) <*> tagNoAttr "malwareScheduleTimeZone" content <*> parseV3List "tags" parseTag <*> parseV3List "comments" (tagNoAttr "Comment" parseComment) <*> optionalWith parseBool (tagNoAttr "isScheduled" content) <*> parseWasScanInfo <*> tagNoAttr "createdBy" parseUser <*> optionalWith parseDate (tagNoAttr "createdDate" content) <*> tagNoAttr "updatedBy" parseUser <*> optionalWith parseDate (tagNoAttr "updatedDate" content) <*> optionalWith decodeScrSh (tagNoAttr "screenshot" content) <*> tagNoAttr "proxy" parseProxy <*> (join <$> tagNoAttr "config" parseConfig) where -- Decode screenshot leniently, since Qualys doesn't seem to pad base64. decodeScrSh = Just . B64.decodeLenient . TE.encodeUtf8 parseDomain = tagNoAttr "Domain" content parseUri = tagNoAttr "Url" content parseAttr :: (MonadThrow m) => ConduitM Event o m (Maybe (Text,Text)) parseAttr = tagNoAttr "Attribute" $ (,) <$> requireTagNoAttr "category" content <*> requireTagNoAttr "value" content parseUrlEntry = tagName "UrlEntry" (attr "regex") $ \x -> do y <- content return $ case x of (Just "true") -> UrlRegex y _ -> UrlText y parseWasScanInfo = tagNoAttr "lastScan" $ WasScanInfo <$> requireWith parseUInt (tagNoAttr "id" content) <*> tagNoAttr "name" content parseConfig :: (MonadThrow m, MonadIO m) => ConduitM Event o m (Maybe WaConfig) parseConfig = do a <- afterN b <- cancelAt return $ a <|> b where afterN = fmap CancelAfterN <$> optionalWith parseUInt (tagNoAttr "cancelScansAfterNHours" content) cancelAt = fmap CancelScanAt <$> tagNoAttr "cancelScansAt" content
ahodgen/qualys
Qualys/Internal/ParseWebApp.hs
bsd-3-clause
3,908
0
45
839
981
496
485
81
2
{-# LANGUAGE FlexibleContexts, RankNTypes #-}
module GL.Exception where

import Control.Exception
import Control.Monad
import Data.Functor.Union
import Data.Typeable
-- Marshalling helpers used by 'checkStatus' below.
import Foreign.C.String (peekCString)
import Foreign.Marshal.Alloc (alloca, allocaBytes)
import Foreign.Ptr
import Foreign.Storable (peek)
import GHC.Stack
import Graphics.GL.Core41
import Graphics.GL.Types

-- | Read an object's status flag (e.g. its compile or link status) and, on
-- failure, fetch its info log and throw a 'GLException' built with the given
-- error constructor. On success the object itself is returned.
checkStatus :: InUnion fs IO => (GLenum -> GLuint -> Ptr GLint -> Eff fs ()) -> (GLuint -> GLsizei -> Ptr GLsizei -> Ptr GLchar -> Eff fs ()) -> (String -> GLError) -> GLenum -> GLuint -> Eff fs GLuint
checkStatus get getLog error status object = do
  success <- alloca $ \ p -> do
    get object status p
    peek p
  when (success == GL_FALSE) $ do
    l <- alloca $ \ p -> do
      get object GL_INFO_LOG_LENGTH p
      peek p
    log <- allocaBytes (fromIntegral l) $ \ bytes -> do
      getLog object l nullPtr bytes
      peekCString bytes
    throw $ GLException (error log) callStack
  pure object

-- | Poll 'glGetError' and rethrow any pending error as a 'GLException'.
checkGLError :: InUnion fs IO => Eff fs ()
checkGLError = glGetError >>= \ e -> case e of
  GL_NO_ERROR -> pure ()
  GL_INVALID_ENUM -> throw $ GLException InvalidEnum callStack
  GL_INVALID_VALUE -> throw $ GLException InvalidValue callStack
  GL_INVALID_OPERATION -> throw $ GLException InvalidOperation callStack
  GL_INVALID_FRAMEBUFFER_OPERATION -> throw $ GLException InvalidFramebufferOperation callStack
  GL_OUT_OF_MEMORY -> throw $ GLException OutOfMemory callStack
  _ -> throw $ GLException (Other "Unknown") callStack

-- | Run an action and check for GL errors immediately afterwards.
checkingGLError :: InUnion fs IO => Eff fs a -> Eff fs a
checkingGLError action = do
  result <- action
  checkGLError
  pure result

instance Show GLException where
  showsPrec p (GLException e s) = showString "GLException " . showsPrec p e . showChar '\n' . showString (prettyCallStack s)

instance Exception GLException

data GLError
  = InvalidEnum
  | InvalidValue
  | InvalidOperation
  | InvalidFramebufferOperation
  | OutOfMemory
  | Source String String
  | Other String
  deriving Show

data GLException = GLException GLError CallStack
  deriving (Typeable)
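
-- A rough usage sketch (added for illustration, not part of this module).
-- It assumes an effect stack satisfying 'InUnion fs IO' in which the
-- Graphics.GL entry points can be run directly; the real code may prefer
-- the 'Source' constructor to carry the shader source alongside the log:
--
--   compileShader :: InUnion fs IO => GLuint -> Eff fs GLuint
--   compileShader shader = do
--     glCompileShader shader
--     checkStatus glGetShaderiv glGetShaderInfoLog Other GL_COMPILE_STATUS shader
--
--   drawChecked :: InUnion fs IO => Eff fs ()
--   drawChecked = checkingGLError (glDrawArrays GL_TRIANGLES 0 3)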
robrix/ui-effects
src/GL/Exception.hs
bsd-3-clause
1,977
0
15
391
632
309
323
59
7
{-# LANGUAGE CPP #-} module Data.Iteratee.IO.Base ( #if defined(USE_WINDOWS) module Data.Iteratee.IO.Windows, #endif #if defined(USE_POSIX) module System.Posix, #else FileOffset #endif ) where #if defined(USE_WINDOWS) import Data.Iteratee.IO.Windows #endif -- Provide the FileOffset type, which is available in Posix modules -- and maybe Windows #if defined(USE_POSIX) import System.Posix #else type FileOffset = Integer #endif
JohnLato/iteratee
src/Data/Iteratee/IO/Base.hs
bsd-3-clause
437
0
5
60
50
39
11
4
0