code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity
---|---|---|---|---|---|---|---|---|---|---|---|---
module Language.TheExperiment.AST
( module Language.TheExperiment.AST.Type
, module Language.TheExperiment.AST.Statement
, module Language.TheExperiment.AST.Expression
, module Language.TheExperiment.AST.Module
) where
import Language.TheExperiment.AST.Type
import Language.TheExperiment.AST.Statement
import Language.TheExperiment.AST.Expression
import Language.TheExperiment.AST.Module
| jvranish/TheExperiment | src/Language/TheExperiment/AST.hs | bsd-3-clause | 400 | 0 | 5 | 36 | 69 | 50 | 19 | 9 | 0 |
-----------------------------------------------------------------------------
--
-- Module : MSnail.Game
-- Copyright : (c) 2010, Dmitry Zamkov
-- License : BSD3 (See LICENSE)
--
-----------------------------------------------------------------------------
module MSnail.Game (
GameInputEvent(..),
GameInput(..),
GameOutput(..),
runGame
) where
import Graphics.Rendering.OpenGL
import Graphics.UI.GLUT
import Data.IORef
import Data.Time.Clock
import MSnail.FRP
import Control.Arrow
-- A one-time occurrence of an input event.
data GameInputEvent =
GameStart |
KeyEvent Key KeyState Modifiers Position deriving(Show)
-- State of user and system input at any one time.
data GameInput = GameInput {
	updateTime :: Double, -- derivative of current time
windowWidth :: Int,
windowHeight :: Int,
inputEvents :: Event (GameInputEvent)
}
-- State of game output at any one time.
data GameOutput = GameOutput {
renderFunc :: IO ()
}
-- Controls the game...
type GameController = SF GameInput GameOutput
-- Runs the game with the given controller.
runGame :: GameController -> IO ()
runGame gc = do
(progname, _) <- getArgsAndInitialize
initialDisplayMode $= [DoubleBuffered]
createWindow "MSnail"
windowSize $= Size 640 480
curTime <- get elapsedTime
lastUpdate <- newIORef curTime
curRender <- newIORef (return ())
gameEvents <- newIORef ([GameStart])
curGC <- newIORef gc
displayCallback $= (do
rf <- get curRender
rf
flush
swapBuffers)
idleCallback $= Just (do
curTime <- get elapsedTime
lastTime <- get lastUpdate
let delta = (fromIntegral (curTime - lastTime) / (10 ^ 4))
events <- get gameEvents
cgc <- get curGC
let (ngc, v) = injectSF (GameInput delta 640 480 events) cgc
curGC $= ngc
curRender $= renderFunc v
gameEvents $= []
postRedisplay Nothing)
keyboardMouseCallback $= Just (\key ks mod pos -> do
events <- get gameEvents
gameEvents $= events ++ [KeyEvent key ks mod pos])
mainLoop
| dzamkov/MSnail | src/MSnail/Game.hs | bsd-3-clause | 2,049 | 154 | 19 | 408 | 685 | 357 | 328 | 53 | 1 |
module Sexy.Instances.Functor.Function () where
import Sexy.Classes (Functor(..))
instance Functor ((->) a) where
-- (<$>) :: (b -> c) -> (a -> b) -> a -> c
(f <$> g) x = f (g x)
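-- A small worked example (illustrative only), following directly from the
-- definition above: with functions as the Functor, (<$>) is composition, so
--
--   ((+ 1) <$> (* 2)) 3  ==  (+ 1) ((* 2) 3)  ==  7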
| DanBurton/sexy | src/Sexy/Instances/Functor/Function.hs | bsd-3-clause | 185 | 1 | 9 | 40 | 68 | 39 | 29 | 4 | 0 |
{-# LANGUAGE ForeignFunctionInterface #-}
module SimpleFFI( main ) where
{- |
Module : SimpleFFI
Description : Introduction to FFI
Copyright : (c) Thomas Lang, "Real World Haskell"
License : BSD3
Portability : portable
Stability : stable
This module gives a short introduction to using the
Foreign Function Interface in Haskell, which makes it
possible to use things defined in other programming
languages such as C.
-}
{-
- to use all that stuff, we have to import
- the Foreign module and additional ones:
-
- Foreign.C.Types
- Foreign.C.Ptr
- Foreign.C.String
- Foreign.Marshal.Array
-}
import Foreign
import Foreign.C.Types
{-
 - now calling the sine function
- defined in "math.h"
-}
foreign import ccall "math.h sin"
c_sin :: CDouble -> CDouble
-- using the above bound c_sin and
-- wrapping it to Haskell's native types
fastsin :: Double -> Double
fastsin x = realToFrac (c_sin (realToFrac x))
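-- For illustration (assuming the binding above works as expected):
-- fastsin 0.0 == 0.0, and fastsin (pi / 2) is approximately 1.0.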
-- main function: prints the sine of 0.0, 0.1, ..., 1.0
main = mapM_ (print . fastsin) [0/10, 1/10 .. 10/10 ]
| langthom/Hack-A-Thon-Haskell | SimpleFFI.hs | bsd-3-clause | 1,051 | 0 | 9 | 216 | 111 | 65 | 46 | 9 | 1 |
module VkModulePrefix
( vulkanModule
, vulkanModulePrefix
) where
import qualified Data.Text as T
import Relude
import Render.Element ( ModName(..) )
-- | Module prefix for generated source
vulkanModulePrefix :: Text
vulkanModulePrefix = "Vulkan"
vulkanModule :: [Text] -> ModName
vulkanModule = ModName . T.intercalate "." . (vulkanModulePrefix :)
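-- For illustration, given the definition above (the module path here is made up):
--
-- > vulkanModule ["Core10", "Handles"] == ModName "Vulkan.Core10.Handles"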
| expipiplus1/vulkan | generate-new/src/VkModulePrefix.hs | bsd-3-clause | 415 | 0 | 8 | 113 | 82 | 51 | 31 | 10 | 1 |
{-# LANGUAGE OverloadedStrings, Safe #-}
module Evalso.Cruncher.Language.Bash (bash) where
import Evalso.Cruncher.Language (Language (..))
bash :: Language
bash = Language {
_codeFilename = "program.sh"
, _compileCommand = Nothing
, _compileTimeout = Nothing
, _runCommand = ["bash", "program.sh"]
, _runTimeout = 5
, _codemirror = "shell"
, _rpm = "bash"
, _displayName = "Bash"
}
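-- A hypothetical sketch of how another interpreter-only language could be
-- described with the same record fields ('python' is not defined in this
-- package; the field values are guesses for illustration):
--
-- python :: Language
-- python = Language
--   { _codeFilename   = "program.py"
--   , _compileCommand = Nothing
--   , _compileTimeout = Nothing
--   , _runCommand     = ["python", "program.py"]
--   , _runTimeout     = 5
--   , _codemirror     = "python"
--   , _rpm            = "python"
--   , _displayName    = "Python"
--   }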
| eval-so/cruncher | src/Evalso/Cruncher/Language/Bash.hs | bsd-3-clause | 403 | 0 | 7 | 74 | 95 | 63 | 32 | 13 | 1 |
module D13Spec (main, spec) where
import Test.Hspec
import D13Lib
import qualified PathSearch as PS
main :: IO ()
main = hspec spec
spec :: Spec
spec = parallel $ do
describe "num2bin" $
it "formats numbers" $ do
num2bin 1 `shouldBe` "1"
num2bin 2 `shouldBe` "10"
num2bin 10 `shouldBe` "1010"
describe "pointIsOpen" $
it "is correct" $ do
open MazeState { point = (1, 1), key = 10, goal = (0, 0) } `shouldBe` True
open MazeState { point = (1, 0), key = 10, goal = (0, 0) } `shouldBe` False
open MazeState { point = (5, 1), key = 10, goal = (0, 0) } `shouldBe` False
open MazeState { point = (2, 6), key = 10, goal = (0, 0) } `shouldBe` True
describe "pathfinding" $
it "finds the shortest path through the maze" $ do
let start = MazeState { point = (1, 1), key = 10, goal = (7, 4) }
let path = PS.minPath start
PS.length path `shouldBe` 12 -- 11 steps
| wfleming/advent-of-code-2016 | 2016/test/D13Spec.hs | bsd-3-clause | 1,000 | 0 | 19 | 316 | 392 | 221 | 171 | 24 | 1 |
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__
{-# LANGUAGE MagicHash, DeriveDataTypeable, StandaloneDeriving #-}
#endif
#if !defined(TESTING) && __GLASGOW_HASKELL__ >= 703
{-# LANGUAGE Trustworthy #-}
#endif
{-# LANGUAGE ScopedTypeVariables #-}
#if __GLASGOW_HASKELL__ >= 708
{-# LANGUAGE TypeFamilies #-}
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Data.IntMap.Base
-- Copyright : (c) Daan Leijen 2002
-- (c) Andriy Palamarchuk 2008
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- This defines the data structures and core (hidden) manipulations
-- on representations.
-----------------------------------------------------------------------------
-- [Note: INLINE bit fiddling]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- It is essential that the bit fiddling functions like mask, zero, branchMask
-- etc are inlined. If they are not, memory allocation skyrockets. GHC
-- usually gets it right, but it is disastrous if it does not. Therefore we
-- explicitly mark these functions INLINE.
-- [Note: Local 'go' functions and capturing]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Care must be taken when using a 'go' function which captures an argument.
-- Sometimes (for example when the argument is passed to a data constructor,
-- as in insert), GHC heap-allocates more than necessary. Therefore C-- code
-- must be checked for increased allocation when creating and modifying such
-- functions.
-- [Note: Order of constructors]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- The order of constructors of IntMap matters when considering performance.
-- Currently in GHC 7.0, when a type has 3 constructors, they are matched from
-- the first to the last -- the best performance is achieved when the
-- constructors are ordered by frequency.
-- On GHC 7.0, reordering constructors from Nil | Tip | Bin to Bin | Tip | Nil
-- improves the benchmark by circa 10%.
module Data.IntMap.Base (
-- * Map type
IntMap(..), Key -- instance Eq,Show
-- * Operators
, (!), (\\)
-- * Query
, null
, size
, member
, notMember
, lookup
, findWithDefault
, lookupLT
, lookupGT
, lookupLE
, lookupGE
-- * Construction
, empty
, singleton
-- ** Insertion
, insert
, insertWith
, insertWithKey
, insertLookupWithKey
-- ** Delete\/Update
, delete
, adjust
, adjustWithKey
, update
, updateWithKey
, updateLookupWithKey
, alter
-- * Combine
-- ** Union
, union
, unionWith
, unionWithKey
, unions
, unionsWith
-- ** Difference
, difference
, differenceWith
, differenceWithKey
-- ** Intersection
, intersection
, intersectionWith
, intersectionWithKey
-- ** Universal combining function
, mergeWithKey
, mergeWithKey'
-- * Traversal
-- ** Map
, map
, mapWithKey
, traverseWithKey
, mapAccum
, mapAccumWithKey
, mapAccumRWithKey
, mapKeys
, mapKeysWith
, mapKeysMonotonic
-- * Folds
, foldr
, foldl
, foldrWithKey
, foldlWithKey
, foldMapWithKey
-- ** Strict folds
, foldr'
, foldl'
, foldrWithKey'
, foldlWithKey'
-- * Conversion
, elems
, keys
, assocs
, keysSet
, fromSet
-- ** Lists
, toList
, fromList
, fromListWith
, fromListWithKey
-- ** Ordered lists
, toAscList
, toDescList
, fromAscList
, fromAscListWith
, fromAscListWithKey
, fromDistinctAscList
-- * Filter
, filter
, filterWithKey
, partition
, partitionWithKey
, mapMaybe
, mapMaybeWithKey
, mapEither
, mapEitherWithKey
, split
, splitLookup
, splitRoot
-- * Submap
, isSubmapOf, isSubmapOfBy
, isProperSubmapOf, isProperSubmapOfBy
-- * Min\/Max
, findMin
, findMax
, deleteMin
, deleteMax
, deleteFindMin
, deleteFindMax
, updateMin
, updateMax
, updateMinWithKey
, updateMaxWithKey
, minView
, maxView
, minViewWithKey
, maxViewWithKey
-- * Debugging
, showTree
, showTreeWith
-- * Internal types
, Mask, Prefix, Nat
-- * Utility
, natFromInt
, intFromNat
, link
, bin
, zero
, nomatch
, match
, mask
, maskW
, shorter
, branchMask
, highestBitMask
, foldlStrict
) where
import Control.Applicative (Applicative(pure, (<*>)), (<$>))
import Control.DeepSeq (NFData(rnf))
import Control.Monad (liftM)
import Data.Bits
import qualified Data.Foldable as Foldable
import Data.Maybe (fromMaybe)
import Data.Monoid (Monoid(..))
import Data.Traversable (Traversable(traverse))
import Data.Typeable
import Data.Word (Word)
import Prelude hiding (lookup, map, filter, foldr, foldl, null)
import Data.BitUtil
import Data.IntSet.Base (Key)
import qualified Data.IntSet.Base as IntSet
import Data.StrictPair
#if __GLASGOW_HASKELL__
import Data.Data (Data(..), Constr, mkConstr, constrIndex, Fixity(Prefix),
DataType, mkDataType)
import GHC.Exts (build)
#if __GLASGOW_HASKELL__ >= 708
import qualified GHC.Exts as GHCExts
#endif
import Text.Read
#endif
-- Use macros to define strictness of functions.
-- STRICT_x_OF_y denotes an y-ary function strict in the x-th parameter.
-- We do not use BangPatterns, because they are not in any standard and we
-- want the code to be compilable by as many compilers as possible.
#define STRICT_1_OF_2(fn) fn arg _ | arg `seq` False = undefined
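-- For example, applying the macro to a hypothetical function name, say
-- lookupStrict, yields the extra clause
--
--   lookupStrict arg _ | arg `seq` False = undefined
--
-- which forces the first argument before the remaining equations are tried.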
-- A "Nat" is a natural machine word (an unsigned Int)
type Nat = Word
natFromInt :: Key -> Nat
natFromInt = fromIntegral
{-# INLINE natFromInt #-}
intFromNat :: Nat -> Key
intFromNat = fromIntegral
{-# INLINE intFromNat #-}
{--------------------------------------------------------------------
Types
--------------------------------------------------------------------}
-- | A map of integers to values @a@.
-- See Note: Order of constructors
data IntMap a = Bin {-# UNPACK #-} !Prefix
{-# UNPACK #-} !Mask
!(IntMap a)
!(IntMap a)
| Tip {-# UNPACK #-} !Key a
| Nil
type Prefix = Int
type Mask = Int
{--------------------------------------------------------------------
Operators
--------------------------------------------------------------------}
-- | /O(min(n,W))/. Find the value at a key.
-- Calls 'error' when the element can not be found.
--
-- > fromList [(5,'a'), (3,'b')] ! 1 Error: element not in the map
-- > fromList [(5,'a'), (3,'b')] ! 5 == 'a'
(!) :: IntMap a -> Key -> a
m ! k = find k m
-- | Same as 'difference'.
(\\) :: IntMap a -> IntMap b -> IntMap a
m1 \\ m2 = difference m1 m2
infixl 9 \\{-This comment teaches CPP correct behaviour -}
{--------------------------------------------------------------------
Types
--------------------------------------------------------------------}
instance Monoid (IntMap a) where
mempty = empty
mappend = union
mconcat = unions
instance Foldable.Foldable IntMap where
fold t = go t
where go Nil = mempty
go (Tip _ v) = v
go (Bin _ _ l r) = go l `mappend` go r
{-# INLINABLE fold #-}
foldr = foldr
{-# INLINE foldr #-}
foldl = foldl
{-# INLINE foldl #-}
foldMap f t = go t
where go Nil = mempty
go (Tip _ v) = f v
go (Bin _ _ l r) = go l `mappend` go r
{-# INLINE foldMap #-}
instance Traversable IntMap where
traverse f = traverseWithKey (\_ -> f)
{-# INLINE traverse #-}
instance NFData a => NFData (IntMap a) where
rnf Nil = ()
rnf (Tip _ v) = rnf v
rnf (Bin _ _ l r) = rnf l `seq` rnf r
#if __GLASGOW_HASKELL__
{--------------------------------------------------------------------
A Data instance
--------------------------------------------------------------------}
-- This instance preserves data abstraction at the cost of inefficiency.
-- We provide limited reflection services for the sake of data abstraction.
instance Data a => Data (IntMap a) where
gfoldl f z im = z fromList `f` (toList im)
toConstr _ = fromListConstr
gunfold k z c = case constrIndex c of
1 -> k (z fromList)
_ -> error "gunfold"
dataTypeOf _ = intMapDataType
dataCast1 f = gcast1 f
fromListConstr :: Constr
fromListConstr = mkConstr intMapDataType "fromList" [] Prefix
intMapDataType :: DataType
intMapDataType = mkDataType "Data.IntMap.Base.IntMap" [fromListConstr]
#endif
{--------------------------------------------------------------------
Query
--------------------------------------------------------------------}
-- | /O(1)/. Is the map empty?
--
-- > Data.IntMap.null (empty) == True
-- > Data.IntMap.null (singleton 1 'a') == False
null :: IntMap a -> Bool
null Nil = True
null _ = False
{-# INLINE null #-}
-- | /O(n)/. Number of elements in the map.
--
-- > size empty == 0
-- > size (singleton 1 'a') == 1
-- > size (fromList([(1,'a'), (2,'c'), (3,'b')])) == 3
size :: IntMap a -> Int
size t
= case t of
Bin _ _ l r -> size l + size r
Tip _ _ -> 1
Nil -> 0
-- | /O(min(n,W))/. Is the key a member of the map?
--
-- > member 5 (fromList [(5,'a'), (3,'b')]) == True
-- > member 1 (fromList [(5,'a'), (3,'b')]) == False
-- See Note: Local 'go' functions and capturing.
member :: Key -> IntMap a -> Bool
member k = k `seq` go
where
go (Bin p m l r) | nomatch k p m = False
| zero k m = go l
| otherwise = go r
go (Tip kx _) = k == kx
go Nil = False
-- | /O(min(n,W))/. Is the key not a member of the map?
--
-- > notMember 5 (fromList [(5,'a'), (3,'b')]) == False
-- > notMember 1 (fromList [(5,'a'), (3,'b')]) == True
notMember :: Key -> IntMap a -> Bool
notMember k m = not $ member k m
-- | /O(min(n,W))/. Lookup the value at a key in the map. See also 'Data.Map.lookup'.
-- See Note: Local 'go' functions and capturing.
lookup :: Key -> IntMap a -> Maybe a
lookup k = k `seq` go
where
go (Bin p m l r) | nomatch k p m = Nothing
| zero k m = go l
| otherwise = go r
go (Tip kx x) | k == kx = Just x
| otherwise = Nothing
go Nil = Nothing
-- See Note: Local 'go' functions and capturing.
find :: Key -> IntMap a -> a
find k = k `seq` go
where
go (Bin p m l r) | nomatch k p m = not_found
| zero k m = go l
| otherwise = go r
go (Tip kx x) | k == kx = x
| otherwise = not_found
go Nil = not_found
not_found = error ("IntMap.!: key " ++ show k ++ " is not an element of the map")
-- | /O(min(n,W))/. The expression @('findWithDefault' def k map)@
-- returns the value at key @k@ or returns @def@ when the key is not an
-- element of the map.
--
-- > findWithDefault 'x' 1 (fromList [(5,'a'), (3,'b')]) == 'x'
-- > findWithDefault 'x' 5 (fromList [(5,'a'), (3,'b')]) == 'a'
-- See Note: Local 'go' functions and capturing.
findWithDefault :: a -> Key -> IntMap a -> a
findWithDefault def k = k `seq` go
where
go (Bin p m l r) | nomatch k p m = def
| zero k m = go l
| otherwise = go r
go (Tip kx x) | k == kx = x
| otherwise = def
go Nil = def
-- | /O(log n)/. Find largest key smaller than the given one and return the
-- corresponding (key, value) pair.
--
-- > lookupLT 3 (fromList [(3,'a'), (5,'b')]) == Nothing
-- > lookupLT 4 (fromList [(3,'a'), (5,'b')]) == Just (3, 'a')
-- See Note: Local 'go' functions and capturing.
lookupLT :: Key -> IntMap a -> Maybe (Key, a)
lookupLT k t = k `seq` case t of
Bin _ m l r | m < 0 -> if k >= 0 then go r l else go Nil r
_ -> go Nil t
where
go def (Bin p m l r) | nomatch k p m = if k < p then unsafeFindMax def else unsafeFindMax r
| zero k m = go def l
| otherwise = go l r
go def (Tip ky y) | k <= ky = unsafeFindMax def
| otherwise = Just (ky, y)
go def Nil = unsafeFindMax def
-- | /O(log n)/. Find smallest key greater than the given one and return the
-- corresponding (key, value) pair.
--
-- > lookupGT 4 (fromList [(3,'a'), (5,'b')]) == Just (5, 'b')
-- > lookupGT 5 (fromList [(3,'a'), (5,'b')]) == Nothing
-- See Note: Local 'go' functions and capturing.
lookupGT :: Key -> IntMap a -> Maybe (Key, a)
lookupGT k t = k `seq` case t of
Bin _ m l r | m < 0 -> if k >= 0 then go Nil l else go l r
_ -> go Nil t
where
go def (Bin p m l r) | nomatch k p m = if k < p then unsafeFindMin l else unsafeFindMin def
| zero k m = go r l
| otherwise = go def r
go def (Tip ky y) | k >= ky = unsafeFindMin def
| otherwise = Just (ky, y)
go def Nil = unsafeFindMin def
-- | /O(log n)/. Find largest key smaller or equal to the given one and return
-- the corresponding (key, value) pair.
--
-- > lookupLE 2 (fromList [(3,'a'), (5,'b')]) == Nothing
-- > lookupLE 4 (fromList [(3,'a'), (5,'b')]) == Just (3, 'a')
-- > lookupLE 5 (fromList [(3,'a'), (5,'b')]) == Just (5, 'b')
-- See Note: Local 'go' functions and capturing.
lookupLE :: Key -> IntMap a -> Maybe (Key, a)
lookupLE k t = k `seq` case t of
Bin _ m l r | m < 0 -> if k >= 0 then go r l else go Nil r
_ -> go Nil t
where
go def (Bin p m l r) | nomatch k p m = if k < p then unsafeFindMax def else unsafeFindMax r
| zero k m = go def l
| otherwise = go l r
go def (Tip ky y) | k < ky = unsafeFindMax def
| otherwise = Just (ky, y)
go def Nil = unsafeFindMax def
-- | /O(log n)/. Find smallest key greater or equal to the given one and return
-- the corresponding (key, value) pair.
--
-- > lookupGE 3 (fromList [(3,'a'), (5,'b')]) == Just (3, 'a')
-- > lookupGE 4 (fromList [(3,'a'), (5,'b')]) == Just (5, 'b')
-- > lookupGE 6 (fromList [(3,'a'), (5,'b')]) == Nothing
-- See Note: Local 'go' functions and capturing.
lookupGE :: Key -> IntMap a -> Maybe (Key, a)
lookupGE k t = k `seq` case t of
Bin _ m l r | m < 0 -> if k >= 0 then go Nil l else go l r
_ -> go Nil t
where
go def (Bin p m l r) | nomatch k p m = if k < p then unsafeFindMin l else unsafeFindMin def
| zero k m = go r l
| otherwise = go def r
go def (Tip ky y) | k > ky = unsafeFindMin def
| otherwise = Just (ky, y)
go def Nil = unsafeFindMin def
-- Helper function for lookupGE and lookupGT. It assumes that if a Bin node is
-- given, it has m > 0.
unsafeFindMin :: IntMap a -> Maybe (Key, a)
unsafeFindMin Nil = Nothing
unsafeFindMin (Tip ky y) = Just (ky, y)
unsafeFindMin (Bin _ _ l _) = unsafeFindMin l
-- Helper function for lookupLE and lookupLT. It assumes that if a Bin node is
-- given, it has m > 0.
unsafeFindMax :: IntMap a -> Maybe (Key, a)
unsafeFindMax Nil = Nothing
unsafeFindMax (Tip ky y) = Just (ky, y)
unsafeFindMax (Bin _ _ _ r) = unsafeFindMax r
{--------------------------------------------------------------------
Construction
--------------------------------------------------------------------}
-- | /O(1)/. The empty map.
--
-- > empty == fromList []
-- > size empty == 0
empty :: IntMap a
empty
= Nil
{-# INLINE empty #-}
-- | /O(1)/. A map of one element.
--
-- > singleton 1 'a' == fromList [(1, 'a')]
-- > size (singleton 1 'a') == 1
singleton :: Key -> a -> IntMap a
singleton k x
= Tip k x
{-# INLINE singleton #-}
{--------------------------------------------------------------------
Insert
--------------------------------------------------------------------}
-- | /O(min(n,W))/. Insert a new key\/value pair in the map.
-- If the key is already present in the map, the associated value is
-- replaced with the supplied value, i.e. 'insert' is equivalent to
-- @'insertWith' 'const'@.
--
-- > insert 5 'x' (fromList [(5,'a'), (3,'b')]) == fromList [(3, 'b'), (5, 'x')]
-- > insert 7 'x' (fromList [(5,'a'), (3,'b')]) == fromList [(3, 'b'), (5, 'a'), (7, 'x')]
-- > insert 5 'x' empty == singleton 5 'x'
insert :: Key -> a -> IntMap a -> IntMap a
insert k x t = k `seq`
case t of
Bin p m l r
| nomatch k p m -> link k (Tip k x) p t
| zero k m -> Bin p m (insert k x l) r
| otherwise -> Bin p m l (insert k x r)
Tip ky _
| k==ky -> Tip k x
| otherwise -> link k (Tip k x) ky t
Nil -> Tip k x
-- right-biased insertion, used by 'union'
-- | /O(min(n,W))/. Insert with a combining function.
-- @'insertWith' f key value mp@
-- will insert the pair (key, value) into @mp@ if key does
-- not exist in the map. If the key does exist, the function will
-- insert @f new_value old_value@.
--
-- > insertWith (++) 5 "xxx" (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "xxxa")]
-- > insertWith (++) 7 "xxx" (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a"), (7, "xxx")]
-- > insertWith (++) 5 "xxx" empty == singleton 5 "xxx"
insertWith :: (a -> a -> a) -> Key -> a -> IntMap a -> IntMap a
insertWith f k x t
= insertWithKey (\_ x' y' -> f x' y') k x t
-- | /O(min(n,W))/. Insert with a combining function.
-- @'insertWithKey' f key value mp@
-- will insert the pair (key, value) into @mp@ if key does
-- not exist in the map. If the key does exist, the function will
-- insert @f key new_value old_value@.
--
-- > let f key new_value old_value = (show key) ++ ":" ++ new_value ++ "|" ++ old_value
-- > insertWithKey f 5 "xxx" (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "5:xxx|a")]
-- > insertWithKey f 7 "xxx" (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a"), (7, "xxx")]
-- > insertWithKey f 5 "xxx" empty == singleton 5 "xxx"
insertWithKey :: (Key -> a -> a -> a) -> Key -> a -> IntMap a -> IntMap a
insertWithKey f k x t = k `seq`
case t of
Bin p m l r
| nomatch k p m -> link k (Tip k x) p t
| zero k m -> Bin p m (insertWithKey f k x l) r
| otherwise -> Bin p m l (insertWithKey f k x r)
Tip ky y
| k==ky -> Tip k (f k x y)
| otherwise -> link k (Tip k x) ky t
Nil -> Tip k x
-- | /O(min(n,W))/. The expression (@'insertLookupWithKey' f k x map@)
-- is a pair where the first element is equal to (@'lookup' k map@)
-- and the second element equal to (@'insertWithKey' f k x map@).
--
-- > let f key new_value old_value = (show key) ++ ":" ++ new_value ++ "|" ++ old_value
-- > insertLookupWithKey f 5 "xxx" (fromList [(5,"a"), (3,"b")]) == (Just "a", fromList [(3, "b"), (5, "5:xxx|a")])
-- > insertLookupWithKey f 7 "xxx" (fromList [(5,"a"), (3,"b")]) == (Nothing, fromList [(3, "b"), (5, "a"), (7, "xxx")])
-- > insertLookupWithKey f 5 "xxx" empty == (Nothing, singleton 5 "xxx")
--
-- This is how to define @insertLookup@ using @insertLookupWithKey@:
--
-- > let insertLookup kx x t = insertLookupWithKey (\_ a _ -> a) kx x t
-- > insertLookup 5 "x" (fromList [(5,"a"), (3,"b")]) == (Just "a", fromList [(3, "b"), (5, "x")])
-- > insertLookup 7 "x" (fromList [(5,"a"), (3,"b")]) == (Nothing, fromList [(3, "b"), (5, "a"), (7, "x")])
insertLookupWithKey :: (Key -> a -> a -> a) -> Key -> a -> IntMap a -> (Maybe a, IntMap a)
insertLookupWithKey f k x t = k `seq`
case t of
Bin p m l r
| nomatch k p m -> (Nothing,link k (Tip k x) p t)
| zero k m -> let (found,l') = insertLookupWithKey f k x l in (found,Bin p m l' r)
| otherwise -> let (found,r') = insertLookupWithKey f k x r in (found,Bin p m l r')
Tip ky y
| k==ky -> (Just y,Tip k (f k x y))
| otherwise -> (Nothing,link k (Tip k x) ky t)
Nil -> (Nothing,Tip k x)
{--------------------------------------------------------------------
Deletion
--------------------------------------------------------------------}
-- | /O(min(n,W))/. Delete a key and its value from the map. When the key is not
-- a member of the map, the original map is returned.
--
-- > delete 5 (fromList [(5,"a"), (3,"b")]) == singleton 3 "b"
-- > delete 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a")]
-- > delete 5 empty == empty
delete :: Key -> IntMap a -> IntMap a
delete k t = k `seq`
case t of
Bin p m l r
| nomatch k p m -> t
| zero k m -> bin p m (delete k l) r
| otherwise -> bin p m l (delete k r)
Tip ky _
| k==ky -> Nil
| otherwise -> t
Nil -> Nil
-- | /O(min(n,W))/. Adjust a value at a specific key. When the key is not
-- a member of the map, the original map is returned.
--
-- > adjust ("new " ++) 5 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "new a")]
-- > adjust ("new " ++) 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a")]
-- > adjust ("new " ++) 7 empty == empty
adjust :: (a -> a) -> Key -> IntMap a -> IntMap a
adjust f k m
= adjustWithKey (\_ x -> f x) k m
-- | /O(min(n,W))/. Adjust a value at a specific key. When the key is not
-- a member of the map, the original map is returned.
--
-- > let f key x = (show key) ++ ":new " ++ x
-- > adjustWithKey f 5 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "5:new a")]
-- > adjustWithKey f 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a")]
-- > adjustWithKey f 7 empty == empty
adjustWithKey :: (Key -> a -> a) -> Key -> IntMap a -> IntMap a
adjustWithKey f
= updateWithKey (\k' x -> Just (f k' x))
-- | /O(min(n,W))/. The expression (@'update' f k map@) updates the value @x@
-- at @k@ (if it is in the map). If (@f x@) is 'Nothing', the element is
-- deleted. If it is (@'Just' y@), the key @k@ is bound to the new value @y@.
--
-- > let f x = if x == "a" then Just "new a" else Nothing
-- > update f 5 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "new a")]
-- > update f 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a")]
-- > update f 3 (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
update :: (a -> Maybe a) -> Key -> IntMap a -> IntMap a
update f
= updateWithKey (\_ x -> f x)
-- | /O(min(n,W))/. The expression (@'update' f k map@) updates the value @x@
-- at @k@ (if it is in the map). If (@f k x@) is 'Nothing', the element is
-- deleted. If it is (@'Just' y@), the key @k@ is bound to the new value @y@.
--
-- > let f k x = if x == "a" then Just ((show k) ++ ":new a") else Nothing
-- > updateWithKey f 5 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "5:new a")]
-- > updateWithKey f 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a")]
-- > updateWithKey f 3 (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
updateWithKey :: (Key -> a -> Maybe a) -> Key -> IntMap a -> IntMap a
updateWithKey f k t = k `seq`
case t of
Bin p m l r
| nomatch k p m -> t
| zero k m -> bin p m (updateWithKey f k l) r
| otherwise -> bin p m l (updateWithKey f k r)
Tip ky y
| k==ky -> case (f k y) of
Just y' -> Tip ky y'
Nothing -> Nil
| otherwise -> t
Nil -> Nil
-- | /O(min(n,W))/. Lookup and update.
-- The function returns the original value if it is updated.
-- This is different behaviour from 'Data.Map.updateLookupWithKey'.
-- Returns the original value if the map entry is deleted.
--
-- > let f k x = if x == "a" then Just ((show k) ++ ":new a") else Nothing
-- > updateLookupWithKey f 5 (fromList [(5,"a"), (3,"b")]) == (Just "a", fromList [(3, "b"), (5, "5:new a")])
-- > updateLookupWithKey f 7 (fromList [(5,"a"), (3,"b")]) == (Nothing, fromList [(3, "b"), (5, "a")])
-- > updateLookupWithKey f 3 (fromList [(5,"a"), (3,"b")]) == (Just "b", singleton 5 "a")
updateLookupWithKey :: (Key -> a -> Maybe a) -> Key -> IntMap a -> (Maybe a,IntMap a)
updateLookupWithKey f k t = k `seq`
case t of
Bin p m l r
| nomatch k p m -> (Nothing,t)
| zero k m -> let (found,l') = updateLookupWithKey f k l in (found,bin p m l' r)
| otherwise -> let (found,r') = updateLookupWithKey f k r in (found,bin p m l r')
Tip ky y
| k==ky -> case (f k y) of
Just y' -> (Just y,Tip ky y')
Nothing -> (Just y,Nil)
| otherwise -> (Nothing,t)
Nil -> (Nothing,Nil)
-- | /O(min(n,W))/. The expression (@'alter' f k map@) alters the value @x@ at @k@, or absence thereof.
-- 'alter' can be used to insert, delete, or update a value in an 'IntMap'.
-- In short : @'lookup' k ('alter' f k m) = f ('lookup' k m)@.
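--
-- Illustrative uses, assuming the semantics described above:
--
-- > alter (const (Just 'x')) 7 (fromList [(5,'a'), (3,'b')]) == fromList [(3,'b'), (5,'a'), (7,'x')]
-- > alter (const Nothing)    5 (fromList [(5,'a'), (3,'b')]) == singleton 3 'b'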
alter :: (Maybe a -> Maybe a) -> Key -> IntMap a -> IntMap a
alter f k t = k `seq`
case t of
Bin p m l r
| nomatch k p m -> case f Nothing of
Nothing -> t
Just x -> link k (Tip k x) p t
| zero k m -> bin p m (alter f k l) r
| otherwise -> bin p m l (alter f k r)
Tip ky y
| k==ky -> case f (Just y) of
Just x -> Tip ky x
Nothing -> Nil
| otherwise -> case f Nothing of
Just x -> link k (Tip k x) ky t
Nothing -> Tip ky y
Nil -> case f Nothing of
Just x -> Tip k x
Nothing -> Nil
{--------------------------------------------------------------------
Union
--------------------------------------------------------------------}
-- | The union of a list of maps.
--
-- > unions [(fromList [(5, "a"), (3, "b")]), (fromList [(5, "A"), (7, "C")]), (fromList [(5, "A3"), (3, "B3")])]
-- > == fromList [(3, "b"), (5, "a"), (7, "C")]
-- > unions [(fromList [(5, "A3"), (3, "B3")]), (fromList [(5, "A"), (7, "C")]), (fromList [(5, "a"), (3, "b")])]
-- > == fromList [(3, "B3"), (5, "A3"), (7, "C")]
unions :: [IntMap a] -> IntMap a
unions xs
= foldlStrict union empty xs
-- | The union of a list of maps, with a combining operation.
--
-- > unionsWith (++) [(fromList [(5, "a"), (3, "b")]), (fromList [(5, "A"), (7, "C")]), (fromList [(5, "A3"), (3, "B3")])]
-- > == fromList [(3, "bB3"), (5, "aAA3"), (7, "C")]
unionsWith :: (a->a->a) -> [IntMap a] -> IntMap a
unionsWith f ts
= foldlStrict (unionWith f) empty ts
-- | /O(n+m)/. The (left-biased) union of two maps.
-- It prefers the first map when duplicate keys are encountered,
-- i.e. (@'union' == 'unionWith' 'const'@).
--
-- > union (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == fromList [(3, "b"), (5, "a"), (7, "C")]
union :: IntMap a -> IntMap a -> IntMap a
union m1 m2
= mergeWithKey' Bin const id id m1 m2
-- | /O(n+m)/. The union with a combining function.
--
-- > unionWith (++) (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == fromList [(3, "b"), (5, "aA"), (7, "C")]
unionWith :: (a -> a -> a) -> IntMap a -> IntMap a -> IntMap a
unionWith f m1 m2
= unionWithKey (\_ x y -> f x y) m1 m2
-- | /O(n+m)/. The union with a combining function.
--
-- > let f key left_value right_value = (show key) ++ ":" ++ left_value ++ "|" ++ right_value
-- > unionWithKey f (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == fromList [(3, "b"), (5, "5:a|A"), (7, "C")]
unionWithKey :: (Key -> a -> a -> a) -> IntMap a -> IntMap a -> IntMap a
unionWithKey f m1 m2
= mergeWithKey' Bin (\(Tip k1 x1) (Tip _k2 x2) -> Tip k1 (f k1 x1 x2)) id id m1 m2
{--------------------------------------------------------------------
Difference
--------------------------------------------------------------------}
-- | /O(n+m)/. Difference between two maps (based on keys).
--
-- > difference (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == singleton 3 "b"
difference :: IntMap a -> IntMap b -> IntMap a
difference m1 m2
= mergeWithKey (\_ _ _ -> Nothing) id (const Nil) m1 m2
-- | /O(n+m)/. Difference with a combining function.
--
-- > let f al ar = if al == "b" then Just (al ++ ":" ++ ar) else Nothing
-- > differenceWith f (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (3, "B"), (7, "C")])
-- > == singleton 3 "b:B"
differenceWith :: (a -> b -> Maybe a) -> IntMap a -> IntMap b -> IntMap a
differenceWith f m1 m2
= differenceWithKey (\_ x y -> f x y) m1 m2
-- | /O(n+m)/. Difference with a combining function. When two equal keys are
-- encountered, the combining function is applied to the key and both values.
-- If it returns 'Nothing', the element is discarded (proper set difference).
-- If it returns (@'Just' y@), the element is updated with a new value @y@.
--
-- > let f k al ar = if al == "b" then Just ((show k) ++ ":" ++ al ++ "|" ++ ar) else Nothing
-- > differenceWithKey f (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (3, "B"), (10, "C")])
-- > == singleton 3 "3:b|B"
differenceWithKey :: (Key -> a -> b -> Maybe a) -> IntMap a -> IntMap b -> IntMap a
differenceWithKey f m1 m2
= mergeWithKey f id (const Nil) m1 m2
{--------------------------------------------------------------------
Intersection
--------------------------------------------------------------------}
-- | /O(n+m)/. The (left-biased) intersection of two maps (based on keys).
--
-- > intersection (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == singleton 5 "a"
intersection :: IntMap a -> IntMap b -> IntMap a
intersection m1 m2
= mergeWithKey' bin const (const Nil) (const Nil) m1 m2
-- | /O(n+m)/. The intersection with a combining function.
--
-- > intersectionWith (++) (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == singleton 5 "aA"
intersectionWith :: (a -> b -> c) -> IntMap a -> IntMap b -> IntMap c
intersectionWith f m1 m2
= intersectionWithKey (\_ x y -> f x y) m1 m2
-- | /O(n+m)/. The intersection with a combining function.
--
-- > let f k al ar = (show k) ++ ":" ++ al ++ "|" ++ ar
-- > intersectionWithKey f (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == singleton 5 "5:a|A"
intersectionWithKey :: (Key -> a -> b -> c) -> IntMap a -> IntMap b -> IntMap c
intersectionWithKey f m1 m2
= mergeWithKey' bin (\(Tip k1 x1) (Tip _k2 x2) -> Tip k1 (f k1 x1 x2)) (const Nil) (const Nil) m1 m2
{--------------------------------------------------------------------
MergeWithKey
--------------------------------------------------------------------}
-- | /O(n+m)/. A high-performance universal combining function. Using
-- 'mergeWithKey', all combining functions can be defined without any loss of
-- efficiency (with exception of 'union', 'difference' and 'intersection',
-- where sharing of some nodes is lost with 'mergeWithKey').
--
-- Please make sure you know what is going on when using 'mergeWithKey',
-- otherwise you can be surprised by unexpected code growth or even
-- corruption of the data structure.
--
-- When 'mergeWithKey' is given three arguments, it is inlined to the call
-- site. You should therefore use 'mergeWithKey' only to define your custom
-- combining functions. For example, you could define 'unionWithKey',
-- 'differenceWithKey' and 'intersectionWithKey' as
--
-- > myUnionWithKey f m1 m2 = mergeWithKey (\k x1 x2 -> Just (f k x1 x2)) id id m1 m2
-- > myDifferenceWithKey f m1 m2 = mergeWithKey f id (const empty) m1 m2
-- > myIntersectionWithKey f m1 m2 = mergeWithKey (\k x1 x2 -> Just (f k x1 x2)) (const empty) (const empty) m1 m2
--
-- When calling @'mergeWithKey' combine only1 only2@, a function combining two
-- 'IntMap's is created, such that
--
-- * if a key is present in both maps, it is passed with both corresponding
-- values to the @combine@ function. Depending on the result, the key is either
-- present in the result with specified value, or is left out;
--
-- * a nonempty subtree present only in the first map is passed to @only1@ and
-- the output is added to the result;
--
-- * a nonempty subtree present only in the second map is passed to @only2@ and
-- the output is added to the result.
--
-- The @only1@ and @only2@ methods /must return a map with a subset (possibly empty) of the keys of the given map/.
-- The values can be modified arbitrarily. Most common variants of @only1@ and
-- @only2@ are 'id' and @'const' 'empty'@, but for example @'map' f@ or
-- @'filterWithKey' f@ could be used for any @f@.
mergeWithKey :: (Key -> a -> b -> Maybe c) -> (IntMap a -> IntMap c) -> (IntMap b -> IntMap c)
-> IntMap a -> IntMap b -> IntMap c
mergeWithKey f g1 g2 = mergeWithKey' bin combine g1 g2
where -- We use the lambda form to avoid non-exhaustive pattern matches warning.
combine = \(Tip k1 x1) (Tip _k2 x2) -> case f k1 x1 x2 of Nothing -> Nil
Just x -> Tip k1 x
{-# INLINE combine #-}
{-# INLINE mergeWithKey #-}
-- Slightly more general version of mergeWithKey. It differs in the following:
--
-- * the combining function operates on maps instead of keys and values. The
-- reason is to enable sharing in union, difference and intersection.
--
-- * mergeWithKey' is given an equivalent of bin. The reason is that in union*,
-- Bin constructor can be used, because we know both subtrees are nonempty.
mergeWithKey' :: (Prefix -> Mask -> IntMap c -> IntMap c -> IntMap c)
-> (IntMap a -> IntMap b -> IntMap c) -> (IntMap a -> IntMap c) -> (IntMap b -> IntMap c)
-> IntMap a -> IntMap b -> IntMap c
mergeWithKey' bin' f g1 g2 = go
where
go t1@(Bin p1 m1 l1 r1) t2@(Bin p2 m2 l2 r2)
| shorter m1 m2 = merge1
| shorter m2 m1 = merge2
| p1 == p2 = bin' p1 m1 (go l1 l2) (go r1 r2)
| otherwise = maybe_link p1 (g1 t1) p2 (g2 t2)
where
merge1 | nomatch p2 p1 m1 = maybe_link p1 (g1 t1) p2 (g2 t2)
| zero p2 m1 = bin' p1 m1 (go l1 t2) (g1 r1)
| otherwise = bin' p1 m1 (g1 l1) (go r1 t2)
merge2 | nomatch p1 p2 m2 = maybe_link p1 (g1 t1) p2 (g2 t2)
| zero p1 m2 = bin' p2 m2 (go t1 l2) (g2 r2)
| otherwise = bin' p2 m2 (g2 l2) (go t1 r2)
go t1'@(Bin _ _ _ _) t2'@(Tip k2' _) = merge t2' k2' t1'
where merge t2 k2 t1@(Bin p1 m1 l1 r1) | nomatch k2 p1 m1 = maybe_link p1 (g1 t1) k2 (g2 t2)
| zero k2 m1 = bin' p1 m1 (merge t2 k2 l1) (g1 r1)
| otherwise = bin' p1 m1 (g1 l1) (merge t2 k2 r1)
merge t2 k2 t1@(Tip k1 _) | k1 == k2 = f t1 t2
| otherwise = maybe_link k1 (g1 t1) k2 (g2 t2)
merge t2 _ Nil = g2 t2
go t1@(Bin _ _ _ _) Nil = g1 t1
go t1'@(Tip k1' _) t2' = merge t1' k1' t2'
where merge t1 k1 t2@(Bin p2 m2 l2 r2) | nomatch k1 p2 m2 = maybe_link k1 (g1 t1) p2 (g2 t2)
| zero k1 m2 = bin' p2 m2 (merge t1 k1 l2) (g2 r2)
| otherwise = bin' p2 m2 (g2 l2) (merge t1 k1 r2)
merge t1 k1 t2@(Tip k2 _) | k1 == k2 = f t1 t2
| otherwise = maybe_link k1 (g1 t1) k2 (g2 t2)
merge t1 _ Nil = g1 t1
go Nil t2 = g2 t2
maybe_link _ Nil _ t2 = t2
maybe_link _ t1 _ Nil = t1
maybe_link p1 t1 p2 t2 = link p1 t1 p2 t2
{-# INLINE maybe_link #-}
{-# INLINE mergeWithKey' #-}
{--------------------------------------------------------------------
Min\/Max
--------------------------------------------------------------------}
-- | /O(min(n,W))/. Update the value at the minimal key.
--
-- > updateMinWithKey (\ k a -> Just ((show k) ++ ":" ++ a)) (fromList [(5,"a"), (3,"b")]) == fromList [(3,"3:b"), (5,"a")]
-- > updateMinWithKey (\ _ _ -> Nothing) (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
updateMinWithKey :: (Key -> a -> Maybe a) -> IntMap a -> IntMap a
updateMinWithKey f t =
case t of Bin p m l r | m < 0 -> bin p m l (go f r)
_ -> go f t
where
go f' (Bin p m l r) = bin p m (go f' l) r
go f' (Tip k y) = case f' k y of
Just y' -> Tip k y'
Nothing -> Nil
go _ Nil = error "updateMinWithKey Nil"
-- | /O(min(n,W))/. Update the value at the maximal key.
--
-- > updateMaxWithKey (\ k a -> Just ((show k) ++ ":" ++ a)) (fromList [(5,"a"), (3,"b")]) == fromList [(3,"b"), (5,"5:a")]
-- > updateMaxWithKey (\ _ _ -> Nothing) (fromList [(5,"a"), (3,"b")]) == singleton 3 "b"
updateMaxWithKey :: (Key -> a -> Maybe a) -> IntMap a -> IntMap a
updateMaxWithKey f t =
case t of Bin p m l r | m < 0 -> bin p m (go f l) r
_ -> go f t
where
go f' (Bin p m l r) = bin p m l (go f' r)
go f' (Tip k y) = case f' k y of
Just y' -> Tip k y'
Nothing -> Nil
go _ Nil = error "updateMaxWithKey Nil"
-- | /O(min(n,W))/. Retrieves the maximal (key,value) pair of the map, and
-- the map stripped of that element, or 'Nothing' if passed an empty map.
--
-- > maxViewWithKey (fromList [(5,"a"), (3,"b")]) == Just ((5,"a"), singleton 3 "b")
-- > maxViewWithKey empty == Nothing
maxViewWithKey :: IntMap a -> Maybe ((Key, a), IntMap a)
maxViewWithKey t =
case t of Nil -> Nothing
Bin p m l r | m < 0 -> case go l of (result, l') -> Just (result, bin p m l' r)
_ -> Just (go t)
where
go (Bin p m l r) = case go r of (result, r') -> (result, bin p m l r')
go (Tip k y) = ((k, y), Nil)
go Nil = error "maxViewWithKey Nil"
-- | /O(min(n,W))/. Retrieves the minimal (key,value) pair of the map, and
-- the map stripped of that element, or 'Nothing' if passed an empty map.
--
-- > minViewWithKey (fromList [(5,"a"), (3,"b")]) == Just ((3,"b"), singleton 5 "a")
-- > minViewWithKey empty == Nothing
minViewWithKey :: IntMap a -> Maybe ((Key, a), IntMap a)
minViewWithKey t =
case t of Nil -> Nothing
Bin p m l r | m < 0 -> case go r of (result, r') -> Just (result, bin p m l r')
_ -> Just (go t)
where
go (Bin p m l r) = case go l of (result, l') -> (result, bin p m l' r)
go (Tip k y) = ((k, y), Nil)
go Nil = error "minViewWithKey Nil"
-- | /O(min(n,W))/. Update the value at the maximal key.
--
-- > updateMax (\ a -> Just ("X" ++ a)) (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "Xa")]
-- > updateMax (\ _ -> Nothing) (fromList [(5,"a"), (3,"b")]) == singleton 3 "b"
updateMax :: (a -> Maybe a) -> IntMap a -> IntMap a
updateMax f = updateMaxWithKey (const f)
-- | /O(min(n,W))/. Update the value at the minimal key.
--
-- > updateMin (\ a -> Just ("X" ++ a)) (fromList [(5,"a"), (3,"b")]) == fromList [(3, "Xb"), (5, "a")]
-- > updateMin (\ _ -> Nothing) (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
updateMin :: (a -> Maybe a) -> IntMap a -> IntMap a
updateMin f = updateMinWithKey (const f)
-- Similar to the Arrow instance.
first :: (a -> c) -> (a, b) -> (c, b)
first f (x,y) = (f x,y)
-- | /O(min(n,W))/. Retrieves the maximal key of the map, and the map
-- stripped of that element, or 'Nothing' if passed an empty map.
maxView :: IntMap a -> Maybe (a, IntMap a)
maxView t = liftM (first snd) (maxViewWithKey t)
-- | /O(min(n,W))/. Retrieves the minimal key of the map, and the map
-- stripped of that element, or 'Nothing' if passed an empty map.
minView :: IntMap a -> Maybe (a, IntMap a)
minView t = liftM (first snd) (minViewWithKey t)
-- | /O(min(n,W))/. Delete and find the maximal element.
deleteFindMax :: IntMap a -> ((Key, a), IntMap a)
deleteFindMax = fromMaybe (error "deleteFindMax: empty map has no maximal element") . maxViewWithKey
-- | /O(min(n,W))/. Delete and find the minimal element.
deleteFindMin :: IntMap a -> ((Key, a), IntMap a)
deleteFindMin = fromMaybe (error "deleteFindMin: empty map has no minimal element") . minViewWithKey
-- | /O(min(n,W))/. The minimal key of the map.
findMin :: IntMap a -> (Key, a)
findMin Nil = error $ "findMin: empty map has no minimal element"
findMin (Tip k v) = (k,v)
findMin (Bin _ m l r)
| m < 0 = go r
| otherwise = go l
where go (Tip k v) = (k,v)
go (Bin _ _ l' _) = go l'
          go Nil            = error "findMin Nil"
-- | /O(min(n,W))/. The maximal key of the map.
findMax :: IntMap a -> (Key, a)
findMax Nil = error $ "findMax: empty map has no maximal element"
findMax (Tip k v) = (k,v)
findMax (Bin _ m l r)
| m < 0 = go l
| otherwise = go r
where go (Tip k v) = (k,v)
go (Bin _ _ _ r') = go r'
go Nil = error "findMax Nil"
-- | /O(min(n,W))/. Delete the minimal key. Returns an empty map if the map is empty.
--
-- Note that this is a change of behaviour for consistency with 'Data.Map.Map' –
-- versions prior to 0.5 threw an error if the 'IntMap' was already empty.
deleteMin :: IntMap a -> IntMap a
deleteMin = maybe Nil snd . minView
-- | /O(min(n,W))/. Delete the maximal key. Returns an empty map if the map is empty.
--
-- Note that this is a change of behaviour for consistency with 'Data.Map.Map' –
-- versions prior to 0.5 threw an error if the 'IntMap' was already empty.
deleteMax :: IntMap a -> IntMap a
deleteMax = maybe Nil snd . maxView
{--------------------------------------------------------------------
Submap
--------------------------------------------------------------------}
-- | /O(n+m)/. Is this a proper submap? (ie. a submap but not equal).
-- Defined as (@'isProperSubmapOf' = 'isProperSubmapOfBy' (==)@).
isProperSubmapOf :: Eq a => IntMap a -> IntMap a -> Bool
isProperSubmapOf m1 m2
= isProperSubmapOfBy (==) m1 m2
{- | /O(n+m)/. Is this a proper submap? (ie. a submap but not equal).
The expression (@'isProperSubmapOfBy' f m1 m2@) returns 'True' when
@m1@ and @m2@ are not equal,
all keys in @m1@ are in @m2@, and when @f@ returns 'True' when
applied to their respective values. For example, the following
expressions are all 'True':
> isProperSubmapOfBy (==) (fromList [(1,1)]) (fromList [(1,1),(2,2)])
> isProperSubmapOfBy (<=) (fromList [(1,1)]) (fromList [(1,1),(2,2)])
But the following are all 'False':
> isProperSubmapOfBy (==) (fromList [(1,1),(2,2)]) (fromList [(1,1),(2,2)])
> isProperSubmapOfBy (==) (fromList [(1,1),(2,2)]) (fromList [(1,1)])
> isProperSubmapOfBy (<) (fromList [(1,1)]) (fromList [(1,1),(2,2)])
-}
isProperSubmapOfBy :: (a -> b -> Bool) -> IntMap a -> IntMap b -> Bool
isProperSubmapOfBy predicate t1 t2
= case submapCmp predicate t1 t2 of
LT -> True
_ -> False
submapCmp :: (a -> b -> Bool) -> IntMap a -> IntMap b -> Ordering
submapCmp predicate t1@(Bin p1 m1 l1 r1) (Bin p2 m2 l2 r2)
| shorter m1 m2 = GT
| shorter m2 m1 = submapCmpLt
| p1 == p2 = submapCmpEq
| otherwise = GT -- disjoint
where
submapCmpLt | nomatch p1 p2 m2 = GT
| zero p1 m2 = submapCmp predicate t1 l2
| otherwise = submapCmp predicate t1 r2
submapCmpEq = case (submapCmp predicate l1 l2, submapCmp predicate r1 r2) of
(GT,_ ) -> GT
(_ ,GT) -> GT
(EQ,EQ) -> EQ
_ -> LT
submapCmp _ (Bin _ _ _ _) _ = GT
submapCmp predicate (Tip kx x) (Tip ky y)
| (kx == ky) && predicate x y = EQ
| otherwise = GT -- disjoint
submapCmp predicate (Tip k x) t
= case lookup k t of
Just y | predicate x y -> LT
_ -> GT -- disjoint
submapCmp _ Nil Nil = EQ
submapCmp _ Nil _ = LT
-- | /O(n+m)/. Is this a submap?
-- Defined as (@'isSubmapOf' = 'isSubmapOfBy' (==)@).
isSubmapOf :: Eq a => IntMap a -> IntMap a -> Bool
isSubmapOf m1 m2
= isSubmapOfBy (==) m1 m2
{- | /O(n+m)/.
The expression (@'isSubmapOfBy' f m1 m2@) returns 'True' if
all keys in @m1@ are in @m2@, and when @f@ returns 'True' when
applied to their respective values. For example, the following
expressions are all 'True':
> isSubmapOfBy (==) (fromList [(1,1)]) (fromList [(1,1),(2,2)])
> isSubmapOfBy (<=) (fromList [(1,1)]) (fromList [(1,1),(2,2)])
> isSubmapOfBy (==) (fromList [(1,1),(2,2)]) (fromList [(1,1),(2,2)])
But the following are all 'False':
> isSubmapOfBy (==) (fromList [(1,2)]) (fromList [(1,1),(2,2)])
> isSubmapOfBy (<) (fromList [(1,1)]) (fromList [(1,1),(2,2)])
> isSubmapOfBy (==) (fromList [(1,1),(2,2)]) (fromList [(1,1)])
-}
isSubmapOfBy :: (a -> b -> Bool) -> IntMap a -> IntMap b -> Bool
isSubmapOfBy predicate t1@(Bin p1 m1 l1 r1) (Bin p2 m2 l2 r2)
| shorter m1 m2 = False
| shorter m2 m1 = match p1 p2 m2 && (if zero p1 m2 then isSubmapOfBy predicate t1 l2
else isSubmapOfBy predicate t1 r2)
| otherwise = (p1==p2) && isSubmapOfBy predicate l1 l2 && isSubmapOfBy predicate r1 r2
isSubmapOfBy _ (Bin _ _ _ _) _ = False
isSubmapOfBy predicate (Tip k x) t = case lookup k t of
Just y -> predicate x y
Nothing -> False
isSubmapOfBy _ Nil _ = True
{--------------------------------------------------------------------
Mapping
--------------------------------------------------------------------}
-- | /O(n)/. Map a function over all values in the map.
--
-- > map (++ "x") (fromList [(5,"a"), (3,"b")]) == fromList [(3, "bx"), (5, "ax")]
map :: (a -> b) -> IntMap a -> IntMap b
map f t
= case t of
Bin p m l r -> Bin p m (map f l) (map f r)
Tip k x -> Tip k (f x)
Nil -> Nil
-- | /O(n)/. Map a function over all values in the map.
--
-- > let f key x = (show key) ++ ":" ++ x
-- > mapWithKey f (fromList [(5,"a"), (3,"b")]) == fromList [(3, "3:b"), (5, "5:a")]
mapWithKey :: (Key -> a -> b) -> IntMap a -> IntMap b
mapWithKey f t
= case t of
Bin p m l r -> Bin p m (mapWithKey f l) (mapWithKey f r)
Tip k x -> Tip k (f k x)
Nil -> Nil
-- | /O(n)/.
-- @'traverseWithKey' f s == 'fromList' <$> 'traverse' (\(k, v) -> (,) k <$> f k v) ('toList' m)@
-- That is, behaves exactly like a regular 'traverse' except that the traversing
-- function also has access to the key associated with a value.
--
-- > traverseWithKey (\k v -> if odd k then Just (succ v) else Nothing) (fromList [(1, 'a'), (5, 'e')]) == Just (fromList [(1, 'b'), (5, 'f')])
-- > traverseWithKey (\k v -> if odd k then Just (succ v) else Nothing) (fromList [(2, 'c')]) == Nothing
traverseWithKey :: Applicative t => (Key -> a -> t b) -> IntMap a -> t (IntMap b)
traverseWithKey f = go
where
go Nil = pure Nil
go (Tip k v) = Tip k <$> f k v
go (Bin p m l r) = Bin p m <$> go l <*> go r
{-# INLINE traverseWithKey #-}
-- | /O(n)/. The function @'mapAccum'@ threads an accumulating
-- argument through the map in ascending order of keys.
--
-- > let f a b = (a ++ b, b ++ "X")
-- > mapAccum f "Everything: " (fromList [(5,"a"), (3,"b")]) == ("Everything: ba", fromList [(3, "bX"), (5, "aX")])
mapAccum :: (a -> b -> (a,c)) -> a -> IntMap b -> (a,IntMap c)
mapAccum f = mapAccumWithKey (\a' _ x -> f a' x)
-- | /O(n)/. The function @'mapAccumWithKey'@ threads an accumulating
-- argument through the map in ascending order of keys.
--
-- > let f a k b = (a ++ " " ++ (show k) ++ "-" ++ b, b ++ "X")
-- > mapAccumWithKey f "Everything:" (fromList [(5,"a"), (3,"b")]) == ("Everything: 3-b 5-a", fromList [(3, "bX"), (5, "aX")])
mapAccumWithKey :: (a -> Key -> b -> (a,c)) -> a -> IntMap b -> (a,IntMap c)
mapAccumWithKey f a t
= mapAccumL f a t
-- | /O(n)/. The function @'mapAccumL'@ threads an accumulating
-- argument through the map in ascending order of keys.
mapAccumL :: (a -> Key -> b -> (a,c)) -> a -> IntMap b -> (a,IntMap c)
mapAccumL f a t
= case t of
Bin p m l r -> let (a1,l') = mapAccumL f a l
(a2,r') = mapAccumL f a1 r
in (a2,Bin p m l' r')
Tip k x -> let (a',x') = f a k x in (a',Tip k x')
Nil -> (a,Nil)
-- | /O(n)/. The function @'mapAccumR'@ threads an accumulating
-- argument through the map in descending order of keys.
mapAccumRWithKey :: (a -> Key -> b -> (a,c)) -> a -> IntMap b -> (a,IntMap c)
mapAccumRWithKey f a t
= case t of
Bin p m l r -> let (a1,r') = mapAccumRWithKey f a r
(a2,l') = mapAccumRWithKey f a1 l
in (a2,Bin p m l' r')
Tip k x -> let (a',x') = f a k x in (a',Tip k x')
Nil -> (a,Nil)
-- | /O(n*min(n,W))/.
-- @'mapKeys' f s@ is the map obtained by applying @f@ to each key of @s@.
--
-- The size of the result may be smaller if @f@ maps two or more distinct
-- keys to the same new key. In this case the value at the greatest of the
-- original keys is retained.
--
-- > mapKeys (+ 1) (fromList [(5,"a"), (3,"b")]) == fromList [(4, "b"), (6, "a")]
-- > mapKeys (\ _ -> 1) (fromList [(1,"b"), (2,"a"), (3,"d"), (4,"c")]) == singleton 1 "c"
-- > mapKeys (\ _ -> 3) (fromList [(1,"b"), (2,"a"), (3,"d"), (4,"c")]) == singleton 3 "c"
mapKeys :: (Key->Key) -> IntMap a -> IntMap a
mapKeys f = fromList . foldrWithKey (\k x xs -> (f k, x) : xs) []
-- | /O(n*min(n,W))/.
-- @'mapKeysWith' c f s@ is the map obtained by applying @f@ to each key of @s@.
--
-- The size of the result may be smaller if @f@ maps two or more distinct
-- keys to the same new key. In this case the associated values will be
-- combined using @c@.
--
-- > mapKeysWith (++) (\ _ -> 1) (fromList [(1,"b"), (2,"a"), (3,"d"), (4,"c")]) == singleton 1 "cdab"
-- > mapKeysWith (++) (\ _ -> 3) (fromList [(1,"b"), (2,"a"), (3,"d"), (4,"c")]) == singleton 3 "cdab"
mapKeysWith :: (a -> a -> a) -> (Key->Key) -> IntMap a -> IntMap a
mapKeysWith c f = fromListWith c . foldrWithKey (\k x xs -> (f k, x) : xs) []
-- | /O(n*min(n,W))/.
-- @'mapKeysMonotonic' f s == 'mapKeys' f s@, but works only when @f@
-- is strictly monotonic.
-- That is, for any values @x@ and @y@, if @x@ < @y@ then @f x@ < @f y@.
-- /The precondition is not checked./
-- Semi-formally, we have:
--
-- > and [x < y ==> f x < f y | x <- ls, y <- ls]
-- > ==> mapKeysMonotonic f s == mapKeys f s
-- > where ls = keys s
--
-- This means that @f@ maps distinct original keys to distinct resulting keys.
-- This function has slightly better performance than 'mapKeys'.
--
-- > mapKeysMonotonic (\ k -> k * 2) (fromList [(5,"a"), (3,"b")]) == fromList [(6, "b"), (10, "a")]
mapKeysMonotonic :: (Key->Key) -> IntMap a -> IntMap a
mapKeysMonotonic f = fromDistinctAscList . foldrWithKey (\k x xs -> (f k, x) : xs) []
{--------------------------------------------------------------------
Filter
--------------------------------------------------------------------}
-- | /O(n)/. Filter all values that satisfy some predicate.
--
-- > filter (> "a") (fromList [(5,"a"), (3,"b")]) == singleton 3 "b"
-- > filter (> "x") (fromList [(5,"a"), (3,"b")]) == empty
-- > filter (< "a") (fromList [(5,"a"), (3,"b")]) == empty
filter :: (a -> Bool) -> IntMap a -> IntMap a
filter p m
= filterWithKey (\_ x -> p x) m
-- | /O(n)/. Filter all keys\/values that satisfy some predicate.
--
-- > filterWithKey (\k _ -> k > 4) (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
filterWithKey :: (Key -> a -> Bool) -> IntMap a -> IntMap a
filterWithKey predicate t
= case t of
Bin p m l r
-> bin p m (filterWithKey predicate l) (filterWithKey predicate r)
Tip k x
| predicate k x -> t
| otherwise -> Nil
Nil -> Nil
-- | /O(n)/. Partition the map according to some predicate. The first
-- map contains all elements that satisfy the predicate, the second all
-- elements that fail the predicate. See also 'split'.
--
-- > partition (> "a") (fromList [(5,"a"), (3,"b")]) == (singleton 3 "b", singleton 5 "a")
-- > partition (< "x") (fromList [(5,"a"), (3,"b")]) == (fromList [(3, "b"), (5, "a")], empty)
-- > partition (> "x") (fromList [(5,"a"), (3,"b")]) == (empty, fromList [(3, "b"), (5, "a")])
partition :: (a -> Bool) -> IntMap a -> (IntMap a,IntMap a)
partition p m
= partitionWithKey (\_ x -> p x) m
-- | /O(n)/. Partition the map according to some predicate. The first
-- map contains all elements that satisfy the predicate, the second all
-- elements that fail the predicate. See also 'split'.
--
-- > partitionWithKey (\ k _ -> k > 3) (fromList [(5,"a"), (3,"b")]) == (singleton 5 "a", singleton 3 "b")
-- > partitionWithKey (\ k _ -> k < 7) (fromList [(5,"a"), (3,"b")]) == (fromList [(3, "b"), (5, "a")], empty)
-- > partitionWithKey (\ k _ -> k > 7) (fromList [(5,"a"), (3,"b")]) == (empty, fromList [(3, "b"), (5, "a")])
partitionWithKey :: (Key -> a -> Bool) -> IntMap a -> (IntMap a,IntMap a)
partitionWithKey predicate0 t0 = toPair $ go predicate0 t0
where
go predicate t
= case t of
Bin p m l r
-> let (l1 :*: l2) = go predicate l
(r1 :*: r2) = go predicate r
in bin p m l1 r1 :*: bin p m l2 r2
Tip k x
| predicate k x -> (t :*: Nil)
| otherwise -> (Nil :*: t)
Nil -> (Nil :*: Nil)
-- | /O(n)/. Map values and collect the 'Just' results.
--
-- > let f x = if x == "a" then Just "new a" else Nothing
-- > mapMaybe f (fromList [(5,"a"), (3,"b")]) == singleton 5 "new a"
mapMaybe :: (a -> Maybe b) -> IntMap a -> IntMap b
mapMaybe f = mapMaybeWithKey (\_ x -> f x)
-- | /O(n)/. Map keys\/values and collect the 'Just' results.
--
-- > let f k _ = if k < 5 then Just ("key : " ++ (show k)) else Nothing
-- > mapMaybeWithKey f (fromList [(5,"a"), (3,"b")]) == singleton 3 "key : 3"
mapMaybeWithKey :: (Key -> a -> Maybe b) -> IntMap a -> IntMap b
mapMaybeWithKey f (Bin p m l r)
= bin p m (mapMaybeWithKey f l) (mapMaybeWithKey f r)
mapMaybeWithKey f (Tip k x) = case f k x of
Just y -> Tip k y
Nothing -> Nil
mapMaybeWithKey _ Nil = Nil
-- | /O(n)/. Map values and separate the 'Left' and 'Right' results.
--
-- > let f a = if a < "c" then Left a else Right a
-- > mapEither f (fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
-- > == (fromList [(3,"b"), (5,"a")], fromList [(1,"x"), (7,"z")])
-- >
-- > mapEither (\ a -> Right a) (fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
-- > == (empty, fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
mapEither :: (a -> Either b c) -> IntMap a -> (IntMap b, IntMap c)
mapEither f m
= mapEitherWithKey (\_ x -> f x) m
-- | /O(n)/. Map keys\/values and separate the 'Left' and 'Right' results.
--
-- > let f k a = if k < 5 then Left (k * 2) else Right (a ++ a)
-- > mapEitherWithKey f (fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
-- > == (fromList [(1,2), (3,6)], fromList [(5,"aa"), (7,"zz")])
-- >
-- > mapEitherWithKey (\_ a -> Right a) (fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
-- > == (empty, fromList [(1,"x"), (3,"b"), (5,"a"), (7,"z")])
mapEitherWithKey :: (Key -> a -> Either b c) -> IntMap a -> (IntMap b, IntMap c)
mapEitherWithKey f0 t0 = toPair $ go f0 t0
where
go f (Bin p m l r)
= bin p m l1 r1 :*: bin p m l2 r2
where
(l1 :*: l2) = go f l
(r1 :*: r2) = go f r
go f (Tip k x) = case f k x of
Left y -> (Tip k y :*: Nil)
Right z -> (Nil :*: Tip k z)
go _ Nil = (Nil :*: Nil)
-- | /O(min(n,W))/. The expression (@'split' k map@) is a pair @(map1,map2)@
-- where all keys in @map1@ are lower than @k@ and all keys in
-- @map2@ larger than @k@. Any key equal to @k@ is found in neither @map1@ nor @map2@.
--
-- > split 2 (fromList [(5,"a"), (3,"b")]) == (empty, fromList [(3,"b"), (5,"a")])
-- > split 3 (fromList [(5,"a"), (3,"b")]) == (empty, singleton 5 "a")
-- > split 4 (fromList [(5,"a"), (3,"b")]) == (singleton 3 "b", singleton 5 "a")
-- > split 5 (fromList [(5,"a"), (3,"b")]) == (singleton 3 "b", empty)
-- > split 6 (fromList [(5,"a"), (3,"b")]) == (fromList [(3,"b"), (5,"a")], empty)
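--
-- Negative keys are handled as well; as an extra illustration:
--
-- > split 0 (fromList [(-3,"a"), (5,"b")]) == (singleton (-3) "a", singleton 5 "b")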
split :: Key -> IntMap a -> (IntMap a, IntMap a)
split k t =
case t of
Bin _ m l r
| m < 0 -> if k >= 0 -- handle negative numbers.
then case go k l of (lt :*: gt) -> let lt' = union r lt
in lt' `seq` (lt', gt)
else case go k r of (lt :*: gt) -> let gt' = union gt l
in gt' `seq` (lt, gt')
_ -> case go k t of
(lt :*: gt) -> (lt, gt)
where
go k' t'@(Bin p m l r) | nomatch k' p m = if k' > p then t' :*: Nil else Nil :*: t'
| zero k' m = case go k' l of (lt :*: gt) -> lt :*: union gt r
| otherwise = case go k' r of (lt :*: gt) -> union l lt :*: gt
go k' t'@(Tip ky _) | k' > ky = (t' :*: Nil)
| k' < ky = (Nil :*: t')
| otherwise = (Nil :*: Nil)
go _ Nil = (Nil :*: Nil)
-- | /O(min(n,W))/. Performs a 'split' but also returns whether the pivot
-- key was found in the original map.
--
-- > splitLookup 2 (fromList [(5,"a"), (3,"b")]) == (empty, Nothing, fromList [(3,"b"), (5,"a")])
-- > splitLookup 3 (fromList [(5,"a"), (3,"b")]) == (empty, Just "b", singleton 5 "a")
-- > splitLookup 4 (fromList [(5,"a"), (3,"b")]) == (singleton 3 "b", Nothing, singleton 5 "a")
-- > splitLookup 5 (fromList [(5,"a"), (3,"b")]) == (singleton 3 "b", Just "a", empty)
-- > splitLookup 6 (fromList [(5,"a"), (3,"b")]) == (fromList [(3,"b"), (5,"a")], Nothing, empty)
splitLookup :: Key -> IntMap a -> (IntMap a, Maybe a, IntMap a)
splitLookup k t =
case t of
Bin _ m l r
| m < 0 -> if k >= 0 -- handle negative numbers.
then case go k l of
(lt, fnd, gt) -> let lt' = union r lt
in lt' `seq` (lt', fnd, gt)
else case go k r of
(lt, fnd, gt) -> let gt' = union gt l
in gt' `seq` (lt, fnd, gt')
_ -> go k t
where
go k' t'@(Bin p m l r)
| nomatch k' p m = if k' > p then (t', Nothing, Nil) else (Nil, Nothing, t')
| zero k' m = case go k' l of
(lt, fnd, gt) -> let gt' = union gt r in gt' `seq` (lt, fnd, gt')
| otherwise = case go k' r of
(lt, fnd, gt) -> let lt' = union l lt in lt' `seq` (lt', fnd, gt)
go k' t'@(Tip ky y) | k' > ky = (t', Nothing, Nil)
| k' < ky = (Nil, Nothing, t')
| otherwise = (Nil, Just y, Nil)
go _ Nil = (Nil, Nothing, Nil)
{--------------------------------------------------------------------
Fold
--------------------------------------------------------------------}
-- | /O(n)/. Fold the values in the map using the given right-associative
-- binary operator, such that @'foldr' f z == 'Prelude.foldr' f z . 'elems'@.
--
-- For example,
--
-- > elems map = foldr (:) [] map
--
-- > let f a len = len + (length a)
-- > foldr f 0 (fromList [(5,"a"), (3,"bbb")]) == 4
foldr :: (a -> b -> b) -> b -> IntMap a -> b
foldr f z = \t -> -- Use lambda t to be inlinable with two arguments only.
case t of Bin _ m l r | m < 0 -> go (go z l) r -- put negative numbers before
| otherwise -> go (go z r) l
_ -> go z t
where
go z' Nil = z'
go z' (Tip _ x) = f x z'
go z' (Bin _ _ l r) = go (go z' r) l
{-# INLINE foldr #-}
-- | /O(n)/. A strict version of 'foldr'. Each application of the operator is
-- evaluated before using the result in the next application. This
-- function is strict in the starting value.
foldr' :: (a -> b -> b) -> b -> IntMap a -> b
foldr' f z = \t -> -- Use lambda t to be inlinable with two arguments only.
case t of Bin _ m l r | m < 0 -> go (go z l) r -- put negative numbers before
| otherwise -> go (go z r) l
_ -> go z t
where
STRICT_1_OF_2(go)
go z' Nil = z'
go z' (Tip _ x) = f x z'
go z' (Bin _ _ l r) = go (go z' r) l
{-# INLINE foldr' #-}
-- | /O(n)/. Fold the values in the map using the given left-associative
-- binary operator, such that @'foldl' f z == 'Prelude.foldl' f z . 'elems'@.
--
-- For example,
--
-- > elems = reverse . foldl (flip (:)) []
--
-- > let f len a = len + (length a)
-- > foldl f 0 (fromList [(5,"a"), (3,"bbb")]) == 4
foldl :: (a -> b -> a) -> a -> IntMap b -> a
foldl f z = \t -> -- Use lambda t to be inlinable with two arguments only.
case t of Bin _ m l r | m < 0 -> go (go z r) l -- put negative numbers before
| otherwise -> go (go z l) r
_ -> go z t
where
go z' Nil = z'
go z' (Tip _ x) = f z' x
go z' (Bin _ _ l r) = go (go z' l) r
{-# INLINE foldl #-}
-- | /O(n)/. A strict version of 'foldl'. Each application of the operator is
-- evaluated before using the result in the next application. This
-- function is strict in the starting value.
foldl' :: (a -> b -> a) -> a -> IntMap b -> a
foldl' f z = \t -> -- Use lambda t to be inlinable with two arguments only.
case t of Bin _ m l r | m < 0 -> go (go z r) l -- put negative numbers before
| otherwise -> go (go z l) r
_ -> go z t
where
STRICT_1_OF_2(go)
go z' Nil = z'
go z' (Tip _ x) = f z' x
go z' (Bin _ _ l r) = go (go z' l) r
{-# INLINE foldl' #-}
-- | /O(n)/. Fold the keys and values in the map using the given right-associative
-- binary operator, such that
-- @'foldrWithKey' f z == 'Prelude.foldr' ('uncurry' f) z . 'toAscList'@.
--
-- For example,
--
-- > keys map = foldrWithKey (\k x ks -> k:ks) [] map
--
-- > let f k a result = result ++ "(" ++ (show k) ++ ":" ++ a ++ ")"
-- > foldrWithKey f "Map: " (fromList [(5,"a"), (3,"b")]) == "Map: (5:a)(3:b)"
foldrWithKey :: (Key -> a -> b -> b) -> b -> IntMap a -> b
foldrWithKey f z = \t -> -- Use lambda t to be inlinable with two arguments only.
case t of Bin _ m l r | m < 0 -> go (go z l) r -- put negative numbers before
| otherwise -> go (go z r) l
_ -> go z t
where
go z' Nil = z'
go z' (Tip kx x) = f kx x z'
go z' (Bin _ _ l r) = go (go z' r) l
{-# INLINE foldrWithKey #-}
-- | /O(n)/. A strict version of 'foldrWithKey'. Each application of the operator is
-- evaluated before using the result in the next application. This
-- function is strict in the starting value.
foldrWithKey' :: (Key -> a -> b -> b) -> b -> IntMap a -> b
foldrWithKey' f z = \t -> -- Use lambda t to be inlinable with two arguments only.
case t of Bin _ m l r | m < 0 -> go (go z l) r -- put negative numbers before
| otherwise -> go (go z r) l
_ -> go z t
where
STRICT_1_OF_2(go)
go z' Nil = z'
go z' (Tip kx x) = f kx x z'
go z' (Bin _ _ l r) = go (go z' r) l
{-# INLINE foldrWithKey' #-}
-- | /O(n)/. Fold the keys and values in the map using the given left-associative
-- binary operator, such that
-- @'foldlWithKey' f z == 'Prelude.foldl' (\\z' (kx, x) -> f z' kx x) z . 'toAscList'@.
--
-- For example,
--
-- > keys = reverse . foldlWithKey (\ks k x -> k:ks) []
--
-- > let f result k a = result ++ "(" ++ (show k) ++ ":" ++ a ++ ")"
-- > foldlWithKey f "Map: " (fromList [(5,"a"), (3,"b")]) == "Map: (3:b)(5:a)"
foldlWithKey :: (a -> Key -> b -> a) -> a -> IntMap b -> a
foldlWithKey f z = \t -> -- Use lambda t to be inlinable with two arguments only.
case t of Bin _ m l r | m < 0 -> go (go z r) l -- put negative numbers before
| otherwise -> go (go z l) r
_ -> go z t
where
go z' Nil = z'
go z' (Tip kx x) = f z' kx x
go z' (Bin _ _ l r) = go (go z' l) r
{-# INLINE foldlWithKey #-}
-- | /O(n)/. A strict version of 'foldlWithKey'. Each application of the operator is
-- evaluated before using the result in the next application. This
-- function is strict in the starting value.
foldlWithKey' :: (a -> Key -> b -> a) -> a -> IntMap b -> a
foldlWithKey' f z = \t -> -- Use lambda t to be inlinable with two arguments only.
case t of Bin _ m l r | m < 0 -> go (go z r) l -- put negative numbers before
| otherwise -> go (go z l) r
_ -> go z t
where
STRICT_1_OF_2(go)
go z' Nil = z'
go z' (Tip kx x) = f z' kx x
go z' (Bin _ _ l r) = go (go z' l) r
{-# INLINE foldlWithKey' #-}
-- | /O(n)/. Fold the keys and values in the map using the given monoid, such that
--
-- @'foldMapWithKey' f = 'Prelude.fold' . 'mapWithKey' f@
--
-- This can be asymptotically faster than 'foldrWithKey' or 'foldlWithKey' for some monoids.
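--
-- As a small illustration, using the list monoid on 'String':
--
-- > foldMapWithKey (\k x -> show k ++ x) (fromList [(5,"a"), (3,"b")]) == "3b5a"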
foldMapWithKey :: Monoid m => (Key -> a -> m) -> IntMap a -> m
foldMapWithKey f = go
where
go Nil = mempty
go (Tip kx x) = f kx x
go (Bin _ _ l r) = go l `mappend` go r
{-# INLINE foldMapWithKey #-}
{--------------------------------------------------------------------
List variations
--------------------------------------------------------------------}
-- | /O(n)/.
-- Return all elements of the map in the ascending order of their keys.
-- Subject to list fusion.
--
-- > elems (fromList [(5,"a"), (3,"b")]) == ["b","a"]
-- > elems empty == []
elems :: IntMap a -> [a]
elems = foldr (:) []
-- | /O(n)/. Return all keys of the map in ascending order. Subject to list
-- fusion.
--
-- > keys (fromList [(5,"a"), (3,"b")]) == [3,5]
-- > keys empty == []
keys :: IntMap a -> [Key]
keys = foldrWithKey (\k _ ks -> k : ks) []
-- | /O(n)/. An alias for 'toAscList'. Returns all key\/value pairs in the
-- map in ascending key order. Subject to list fusion.
--
-- > assocs (fromList [(5,"a"), (3,"b")]) == [(3,"b"), (5,"a")]
-- > assocs empty == []
assocs :: IntMap a -> [(Key,a)]
assocs = toAscList
-- | /O(n*min(n,W))/. The set of all keys of the map.
--
-- > keysSet (fromList [(5,"a"), (3,"b")]) == Data.IntSet.fromList [3,5]
-- > keysSet empty == Data.IntSet.empty
keysSet :: IntMap a -> IntSet.IntSet
keysSet Nil = IntSet.Nil
keysSet (Tip kx _) = IntSet.singleton kx
keysSet (Bin p m l r)
| m .&. IntSet.suffixBitMask == 0 = IntSet.Bin p m (keysSet l) (keysSet r)
| otherwise = IntSet.Tip (p .&. IntSet.prefixBitMask) (computeBm (computeBm 0 l) r)
where STRICT_1_OF_2(computeBm)
computeBm acc (Bin _ _ l' r') = computeBm (computeBm acc l') r'
computeBm acc (Tip kx _) = acc .|. IntSet.bitmapOf kx
computeBm _ Nil = error "Data.IntSet.keysSet: Nil"
-- | /O(n)/. Build a map from a set of keys and a function which for each key
-- computes its value.
--
-- > fromSet (\k -> replicate k 'a') (Data.IntSet.fromList [3, 5]) == fromList [(5,"aaaaa"), (3,"aaa")]
-- > fromSet undefined Data.IntSet.empty == empty
fromSet :: (Key -> a) -> IntSet.IntSet -> IntMap a
fromSet _ IntSet.Nil = Nil
fromSet f (IntSet.Bin p m l r) = Bin p m (fromSet f l) (fromSet f r)
fromSet f (IntSet.Tip kx bm) = buildTree f kx bm (IntSet.suffixBitMask + 1)
  where -- This is slightly complicated, as we have to convert the dense
        -- representation of IntSet into the tree representation of IntMap.
--
-- We are given a nonzero bit mask 'bmask' of 'bits' bits with prefix 'prefix'.
-- We split bmask into halves corresponding to left and right subtree.
-- If they are both nonempty, we create a Bin node, otherwise exactly
-- one of them is nonempty and we construct the IntMap from that half.
buildTree g prefix bmask bits = prefix `seq` bmask `seq` case bits of
0 -> Tip prefix (g prefix)
_ -> case intFromNat ((natFromInt bits) `shiftRL` 1) of
bits2 | bmask .&. ((1 `shiftLL` bits2) - 1) == 0 ->
buildTree g (prefix + bits2) (bmask `shiftRL` bits2) bits2
| (bmask `shiftRL` bits2) .&. ((1 `shiftLL` bits2) - 1) == 0 ->
buildTree g prefix bmask bits2
| otherwise ->
Bin prefix bits2 (buildTree g prefix bmask bits2) (buildTree g (prefix + bits2) (bmask `shiftRL` bits2) bits2)
{--------------------------------------------------------------------
Lists
--------------------------------------------------------------------}
#if __GLASGOW_HASKELL__ >= 708
instance GHCExts.IsList (IntMap a) where
type Item (IntMap a) = (Key,a)
fromList = fromList
toList = toList
#endif
-- | /O(n)/. Convert the map to a list of key\/value pairs. Subject to list
-- fusion.
--
-- > toList (fromList [(5,"a"), (3,"b")]) == [(3,"b"), (5,"a")]
-- > toList empty == []
toList :: IntMap a -> [(Key,a)]
toList = toAscList
-- | /O(n)/. Convert the map to a list of key\/value pairs where the
-- keys are in ascending order. Subject to list fusion.
--
-- > toAscList (fromList [(5,"a"), (3,"b")]) == [(3,"b"), (5,"a")]
toAscList :: IntMap a -> [(Key,a)]
toAscList = foldrWithKey (\k x xs -> (k,x):xs) []
-- | /O(n)/. Convert the map to a list of key\/value pairs where the keys
-- are in descending order. Subject to list fusion.
--
-- > toDescList (fromList [(5,"a"), (3,"b")]) == [(5,"a"), (3,"b")]
toDescList :: IntMap a -> [(Key,a)]
toDescList = foldlWithKey (\xs k x -> (k,x):xs) []
-- List fusion for the list generating functions.
#if __GLASGOW_HASKELL__
-- The foldrFB and foldlFB are fold{r,l}WithKey equivalents, used for list fusion.
-- They are important to convert unfused methods back, see mapFB in prelude.
foldrFB :: (Key -> a -> b -> b) -> b -> IntMap a -> b
foldrFB = foldrWithKey
{-# INLINE[0] foldrFB #-}
foldlFB :: (a -> Key -> b -> a) -> a -> IntMap b -> a
foldlFB = foldlWithKey
{-# INLINE[0] foldlFB #-}
-- Inline assocs and toList, so that we need to fuse only toAscList.
{-# INLINE assocs #-}
{-# INLINE toList #-}
-- The fusion is enabled up to phase 2 included. If it does not succeed,
-- convert in phase 1 the expanded elems,keys,to{Asc,Desc}List calls back to
-- elems,keys,to{Asc,Desc}List. In phase 0, we inline fold{lr}FB (which were
-- used in a list fusion, otherwise it would go away in phase 1), and let compiler
-- do whatever it wants with elems,keys,to{Asc,Desc}List -- it was forbidden to
-- inline it before phase 0, otherwise the fusion rules would not fire at all.
{-# NOINLINE[0] elems #-}
{-# NOINLINE[0] keys #-}
{-# NOINLINE[0] toAscList #-}
{-# NOINLINE[0] toDescList #-}
{-# RULES "IntMap.elems" [~1] forall m . elems m = build (\c n -> foldrFB (\_ x xs -> c x xs) n m) #-}
{-# RULES "IntMap.elemsBack" [1] foldrFB (\_ x xs -> x : xs) [] = elems #-}
{-# RULES "IntMap.keys" [~1] forall m . keys m = build (\c n -> foldrFB (\k _ xs -> c k xs) n m) #-}
{-# RULES "IntMap.keysBack" [1] foldrFB (\k _ xs -> k : xs) [] = keys #-}
{-# RULES "IntMap.toAscList" [~1] forall m . toAscList m = build (\c n -> foldrFB (\k x xs -> c (k,x) xs) n m) #-}
{-# RULES "IntMap.toAscListBack" [1] foldrFB (\k x xs -> (k, x) : xs) [] = toAscList #-}
{-# RULES "IntMap.toDescList" [~1] forall m . toDescList m = build (\c n -> foldlFB (\xs k x -> c (k,x) xs) n m) #-}
{-# RULES "IntMap.toDescListBack" [1] foldlFB (\xs k x -> (k, x) : xs) [] = toDescList #-}
#endif
-- | /O(n*min(n,W))/. Create a map from a list of key\/value pairs.
--
-- > fromList [] == empty
-- > fromList [(5,"a"), (3,"b"), (5, "c")] == fromList [(5,"c"), (3,"b")]
-- > fromList [(5,"c"), (3,"b"), (5, "a")] == fromList [(5,"a"), (3,"b")]
fromList :: [(Key,a)] -> IntMap a
fromList xs
= foldlStrict ins empty xs
where
ins t (k,x) = insert k x t
-- | /O(n*min(n,W))/. Create a map from a list of key\/value pairs with a combining function. See also 'fromAscListWith'.
--
-- > fromListWith (++) [(5,"a"), (5,"b"), (3,"b"), (3,"a"), (5,"c")] == fromList [(3, "ab"), (5, "cba")]
-- > fromListWith (++) [] == empty
fromListWith :: (a -> a -> a) -> [(Key,a)] -> IntMap a
fromListWith f xs
= fromListWithKey (\_ x y -> f x y) xs
-- | /O(n*min(n,W))/. Build a map from a list of key\/value pairs with a combining function. See also 'fromAscListWithKey'.
--
-- > let f key new_value old_value = (show key) ++ ":" ++ new_value ++ "|" ++ old_value
-- > fromListWithKey f [(5,"a"), (5,"b"), (3,"b"), (3,"a"), (5,"c")] == fromList [(3, "3:a|b"), (5, "5:c|5:b|a")]
-- > fromListWithKey f [] == empty
fromListWithKey :: (Key -> a -> a -> a) -> [(Key,a)] -> IntMap a
fromListWithKey f xs
= foldlStrict ins empty xs
where
ins t (k,x) = insertWithKey f k x t
-- | /O(n)/. Build a map from a list of key\/value pairs where
-- the keys are in ascending order.
--
-- > fromAscList [(3,"b"), (5,"a")] == fromList [(3, "b"), (5, "a")]
-- > fromAscList [(3,"b"), (5,"a"), (5,"b")] == fromList [(3, "b"), (5, "b")]
fromAscList :: [(Key,a)] -> IntMap a
fromAscList xs
= fromAscListWithKey (\_ x _ -> x) xs
-- | /O(n)/. Build a map from a list of key\/value pairs where
-- the keys are in ascending order, with a combining function on equal keys.
-- /The precondition (input list is ascending) is not checked./
--
-- > fromAscListWith (++) [(3,"b"), (5,"a"), (5,"b")] == fromList [(3, "b"), (5, "ba")]
fromAscListWith :: (a -> a -> a) -> [(Key,a)] -> IntMap a
fromAscListWith f xs
= fromAscListWithKey (\_ x y -> f x y) xs
-- | /O(n)/. Build a map from a list of key\/value pairs where
-- the keys are in ascending order, with a combining function on equal keys.
-- /The precondition (input list is ascending) is not checked./
--
-- > let f key new_value old_value = (show key) ++ ":" ++ new_value ++ "|" ++ old_value
-- > fromAscListWithKey f [(3,"b"), (5,"a"), (5,"b")] == fromList [(3, "b"), (5, "5:b|a")]
fromAscListWithKey :: (Key -> a -> a -> a) -> [(Key,a)] -> IntMap a
fromAscListWithKey _ [] = Nil
fromAscListWithKey f (x0 : xs0) = fromDistinctAscList (combineEq x0 xs0)
where
-- [combineEq f xs] combines equal elements with function [f] in an ordered list [xs]
combineEq z [] = [z]
combineEq z@(kz,zz) (x@(kx,xx):xs)
| kx==kz = let yy = f kx xx zz in combineEq (kx,yy) xs
| otherwise = z:combineEq x xs
-- | /O(n)/. Build a map from a list of key\/value pairs where
-- the keys are in ascending order and all distinct.
-- /The precondition (input list is strictly ascending) is not checked./
--
-- > fromDistinctAscList [(3,"b"), (5,"a")] == fromList [(3, "b"), (5, "a")]
fromDistinctAscList :: forall a. [(Key,a)] -> IntMap a
fromDistinctAscList [] = Nil
fromDistinctAscList (z0 : zs0) = work z0 zs0 Nada
where
work (kx,vx) [] stk = finish kx (Tip kx vx) stk
work (kx,vx) (z@(kz,_):zs) stk = reduce z zs (branchMask kx kz) kx (Tip kx vx) stk
reduce :: (Key,a) -> [(Key,a)] -> Mask -> Prefix -> IntMap a -> Stack a -> IntMap a
reduce z zs _ px tx Nada = work z zs (Push px tx Nada)
reduce z zs m px tx stk@(Push py ty stk') =
let mxy = branchMask px py
pxy = mask px mxy
in if shorter m mxy
then reduce z zs m pxy (Bin pxy mxy ty tx) stk'
else work z zs (Push px tx stk)
finish _ t Nada = t
finish px tx (Push py ty stk) = finish p (link py ty px tx) stk
where m = branchMask px py
p = mask px m
data Stack a = Push {-# UNPACK #-} !Prefix !(IntMap a) !(Stack a) | Nada
{--------------------------------------------------------------------
Eq
--------------------------------------------------------------------}
instance Eq a => Eq (IntMap a) where
t1 == t2 = equal t1 t2
t1 /= t2 = nequal t1 t2
equal :: Eq a => IntMap a -> IntMap a -> Bool
equal (Bin p1 m1 l1 r1) (Bin p2 m2 l2 r2)
= (m1 == m2) && (p1 == p2) && (equal l1 l2) && (equal r1 r2)
equal (Tip kx x) (Tip ky y)
= (kx == ky) && (x==y)
equal Nil Nil = True
equal _ _ = False
nequal :: Eq a => IntMap a -> IntMap a -> Bool
nequal (Bin p1 m1 l1 r1) (Bin p2 m2 l2 r2)
= (m1 /= m2) || (p1 /= p2) || (nequal l1 l2) || (nequal r1 r2)
nequal (Tip kx x) (Tip ky y)
= (kx /= ky) || (x/=y)
nequal Nil Nil = False
nequal _ _ = True
{--------------------------------------------------------------------
Ord
--------------------------------------------------------------------}
instance Ord a => Ord (IntMap a) where
compare m1 m2 = compare (toList m1) (toList m2)
{--------------------------------------------------------------------
Functor
--------------------------------------------------------------------}
instance Functor IntMap where
fmap = map
{--------------------------------------------------------------------
Show
--------------------------------------------------------------------}
instance Show a => Show (IntMap a) where
showsPrec d m = showParen (d > 10) $
showString "fromList " . shows (toList m)
{--------------------------------------------------------------------
Read
--------------------------------------------------------------------}
instance (Read e) => Read (IntMap e) where
#ifdef __GLASGOW_HASKELL__
readPrec = parens $ prec 10 $ do
Ident "fromList" <- lexP
xs <- readPrec
return (fromList xs)
readListPrec = readListPrecDefault
#else
readsPrec p = readParen (p > 10) $ \ r -> do
("fromList",s) <- lex r
(xs,t) <- reads s
return (fromList xs,t)
#endif
{--------------------------------------------------------------------
Typeable
--------------------------------------------------------------------}
#include "Typeable.h"
INSTANCE_TYPEABLE1(IntMap,intMapTc,"IntMap")
{--------------------------------------------------------------------
Helpers
--------------------------------------------------------------------}
{--------------------------------------------------------------------
Link
--------------------------------------------------------------------}
link :: Prefix -> IntMap a -> Prefix -> IntMap a -> IntMap a
link p1 t1 p2 t2
| zero p1 m = Bin p m t1 t2
| otherwise = Bin p m t2 t1
where
m = branchMask p1 p2
p = mask p1 m
{-# INLINE link #-}
{--------------------------------------------------------------------
@bin@ assures that we never have empty trees within a tree.
--------------------------------------------------------------------}
bin :: Prefix -> Mask -> IntMap a -> IntMap a -> IntMap a
bin _ _ l Nil = l
bin _ _ Nil r = r
bin p m l r = Bin p m l r
{-# INLINE bin #-}
{--------------------------------------------------------------------
Endian independent bit twiddling
--------------------------------------------------------------------}
zero :: Key -> Mask -> Bool
zero i m
= (natFromInt i) .&. (natFromInt m) == 0
{-# INLINE zero #-}
nomatch,match :: Key -> Prefix -> Mask -> Bool
nomatch i p m
= (mask i m) /= p
{-# INLINE nomatch #-}
match i p m
= (mask i m) == p
{-# INLINE match #-}
mask :: Key -> Mask -> Prefix
mask i m
= maskW (natFromInt i) (natFromInt m)
{-# INLINE mask #-}
{--------------------------------------------------------------------
Big endian operations
--------------------------------------------------------------------}
maskW :: Nat -> Nat -> Prefix
maskW i m
= intFromNat (i .&. (complement (m-1) `xor` m))
{-# INLINE maskW #-}
shorter :: Mask -> Mask -> Bool
shorter m1 m2
= (natFromInt m1) > (natFromInt m2)
{-# INLINE shorter #-}
branchMask :: Prefix -> Prefix -> Mask
branchMask p1 p2
= intFromNat (highestBitMask (natFromInt p1 `xor` natFromInt p2))
{-# INLINE branchMask #-}
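-- A worked illustration of the helpers above: for the keys 3 (binary @011@)
-- and 5 (binary @101@), @branchMask 3 5 == 4@ (the highest bit in which they
-- differ) and @mask 3 4 == mask 5 4 == 0@, so both keys live under a node of
-- the form @Bin 0 4 (Tip 3 ..) (Tip 5 ..)@, with key 3 in the left subtree
-- because @zero 3 4 == True@.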
{--------------------------------------------------------------------
Utilities
--------------------------------------------------------------------}
foldlStrict :: (a -> b -> a) -> a -> [b] -> a
foldlStrict f = go
where
go z [] = z
go z (x:xs) = let z' = f z x in z' `seq` go z' xs
{-# INLINE foldlStrict #-}
-- | /O(1)/. Decompose a map into pieces based on the structure of the underlying
-- tree. This function is useful for consuming a map in parallel.
--
-- No guarantee is made as to the sizes of the pieces; an internal, but
-- deterministic process determines this. However, it is guaranteed that the
-- pieces returned will be in ascending order (all elements in the first submap
-- less than all elements in the second, and so on).
--
-- Examples:
--
-- > splitRoot (fromList (zip [1..6::Int] ['a'..])) ==
-- > [fromList [(1,'a'),(2,'b'),(3,'c')],fromList [(4,'d'),(5,'e'),(6,'f')]]
--
-- > splitRoot empty == []
--
-- Note that the current implementation does not return more than two submaps,
-- but you should not depend on this behaviour because it can change in the
-- future without notice.
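--
-- As an illustrative sketch (not a library function), the pieces could be
-- reduced in parallel with 'parMap' and 'rseq' from
-- "Control.Parallel.Strategies":
--
-- > parSum :: IntMap Int -> Int
-- > parSum m = sum (parMap rseq (foldr (+) 0) (splitRoot m))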
splitRoot :: IntMap a -> [IntMap a]
splitRoot orig =
case orig of
Nil -> []
x@(Tip _ _) -> [x]
Bin _ m l r | m < 0 -> [r, l]
| otherwise -> [l, r]
{-# INLINE splitRoot #-}
{--------------------------------------------------------------------
Debugging
--------------------------------------------------------------------}
-- | /O(n)/. Show the tree that implements the map. The tree is shown
-- in a compressed, hanging format.
showTree :: Show a => IntMap a -> String
showTree s
= showTreeWith True False s
{- | /O(n)/. The expression (@'showTreeWith' hang wide map@) shows
the tree that implements the map. If @hang@ is
 'True', a /hanging/ tree is shown, otherwise a rotated tree is shown. If
@wide@ is 'True', an extra wide version is shown.
-}
showTreeWith :: Show a => Bool -> Bool -> IntMap a -> String
showTreeWith hang wide t
| hang = (showsTreeHang wide [] t) ""
| otherwise = (showsTree wide [] [] t) ""
showsTree :: Show a => Bool -> [String] -> [String] -> IntMap a -> ShowS
showsTree wide lbars rbars t
= case t of
Bin p m l r
-> showsTree wide (withBar rbars) (withEmpty rbars) r .
showWide wide rbars .
showsBars lbars . showString (showBin p m) . showString "\n" .
showWide wide lbars .
showsTree wide (withEmpty lbars) (withBar lbars) l
Tip k x
-> showsBars lbars . showString " " . shows k . showString ":=" . shows x . showString "\n"
Nil -> showsBars lbars . showString "|\n"
showsTreeHang :: Show a => Bool -> [String] -> IntMap a -> ShowS
showsTreeHang wide bars t
= case t of
Bin p m l r
-> showsBars bars . showString (showBin p m) . showString "\n" .
showWide wide bars .
showsTreeHang wide (withBar bars) l .
showWide wide bars .
showsTreeHang wide (withEmpty bars) r
Tip k x
-> showsBars bars . showString " " . shows k . showString ":=" . shows x . showString "\n"
Nil -> showsBars bars . showString "|\n"
showBin :: Prefix -> Mask -> String
showBin _ _
= "*" -- ++ show (p,m)
showWide :: Bool -> [String] -> String -> String
showWide wide bars
| wide = showString (concat (reverse bars)) . showString "|\n"
| otherwise = id
showsBars :: [String] -> ShowS
showsBars bars
= case bars of
[] -> id
_ -> showString (concat (reverse (tail bars))) . showString node
node :: String
node = "+--"
withBar, withEmpty :: [String] -> [String]
withBar bars = "| ":bars
withEmpty bars = " ":bars
|
hvr/containers
|
Data/IntMap/Base.hs
|
bsd-3-clause
| 83,511 | 0 | 22 | 21,659 | 19,069 | 9,887 | 9,182 | -1 | -1 |
module Deflisp.CoreSpec (main, spec) where
import Test.Hspec
import Deflisp.Core
import Deflisp.Core.Show
import Deflisp.Core.Types
import Deflisp.Core.Parser
-- import Control.Monad.State
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "parsing" $ do
it "empty list is parsed as an empty list" $ do
readExpression "()" `shouldBe` LispList []
it "list with a single number" $ do
readExpression "(1)" `shouldBe` LispList [LispNumber 1]
it "list with a number and symbol" $ do
readExpression "(1 a)" `shouldBe` LispList [LispNumber 1, LispSymbol "a"]
it "list with a nested list" $ do
readExpression "(1 (1 2) (3 4))" `shouldBe` LispList [LispNumber 1,
LispList [LispNumber 1, LispNumber 2],
LispList [LispNumber 3, LispNumber 4]]
describe "syntax" $ do
it "empty list evaluates to empty list" $ do
evalString "()" `shouldBe` (LispList [])
it "if / else with truthy value evaluates only truthy expression" $ do
evalString "(if true 1 2)" `shouldBe` (LispNumber 1)
it "if / else with falsy value evaluates only truthy expression" $ do
evalString "(if false 1 2)" `shouldBe` (LispNumber 2)
describe "def" $ do
it "defines a var" $ do
evalStrings ["(def a 1)", "a"] `shouldBe` (LispNumber 1)
it "defines a function" $ do
evalStrings ["(def myinc (fn [a] (+ 1 a)))",
"(myinc 1)"] `shouldBe` (LispNumber 2)
it "defines a macro" $ do
evalStrings ["(defmacro or [cond & conds] (list 'if cond cond (if (= conds ()) 'false (cons 'or conds))))",
"(or false 2 3)"] `shouldBe` (LispNumber 2)
describe "when macro" $ do
it "should return a result of expression when trutly value is given" $ do
evalStrings ["(defmacro when [test & body] (list 'if test (cons 'do body) nil))",
"(when true 1)"] `shouldBe` (LispNumber 1)
it "should return nil when faulty value is given" $ do
evalStrings ["(defmacro when [test & body] (list 'if test (cons 'do body) nil))",
"(when false 1)"] `shouldBe` LispNil
-- it "apply" $ do
-- evalStrings ["(defmacro apply [var cond] (cons var cond))",
-- "(apply + '(1 2 3))"] `shouldBe` (LispList
-- [LispNumber 1,
-- ])
|
ifesdjeen/deflisp
|
test/DefLisp/CoreSpec.hs
|
bsd-3-clause
| 2,582 | 0 | 21 | 870 | 528 | 261 | 267 | 44 | 1 |
module NWA
( new, exec, NWA, State(..), Symbol(..), Transition(..)
) where
import qualified Data.Set as DataSet
import Stack
newtype State = State Int
deriving (Eq, Ord, Show)
data Transition a = Internal State a State
| Call State a (Symbol Int, State)
| Return (Symbol Int, State) a State
deriving (Eq, Ord, Show)
data NWA a = NWA State [Transition a] [State] [Symbol Int]
new :: (Show a, Eq a, Ord a) => State -> [Transition a] -> [State] -> Either String (NWA a)
new start trans finals = case validateTrans trans of
(Just err) -> Left err
Nothing -> if startExists start trans finals
then Right $ NWA start trans finals [Bottom]
else Left $ "start state does not exist: " ++ show start
alphabets :: (Show a, Eq a, Ord a) => [Transition a] -> (DataSet.Set a, DataSet.Set a, DataSet.Set a)
alphabets [] = (DataSet.empty, DataSet.empty, DataSet.empty)
alphabets (t:ts) = let (is, cs, rs) = alphabets ts
in case t of
(Internal _ c _) -> (c `DataSet.insert` is, cs, rs)
(Call _ c _) -> (is, c `DataSet.insert` cs, rs)
(Return _ c _) -> (is, cs, c `DataSet.insert` rs)
validateTrans :: (Show a, Eq a, Ord a) => [Transition a] -> Maybe String
validateTrans trans = let
(is, cs, rs) = alphabets trans
ics = is `DataSet.intersection` cs
irs = is `DataSet.intersection` rs
crs = cs `DataSet.intersection` rs
in if (not $ DataSet.null ics)
then Just $ "overlapping input alphabet for internals and calls: " ++ show ics
else if (not $ DataSet.null irs)
then Just $ "overlapping input alphabet for internals and returns: " ++ show irs
else if (not $ DataSet.null crs)
then Just $ "overlapping input alphabet for calls and returns: " ++ show crs
else Nothing
startExists :: State -> [Transition a] -> [State] -> Bool
startExists start [] fs = start `elem` fs
startExists start ((Internal s _ _):ts) fs = s == start || startExists start ts fs
startExists start ((Call s _ _):ts) fs = s == start || startExists start ts fs
startExists start ((Return (s', s) _ _):ts) fs = (s' == Bottom && s == start) || startExists start ts fs
exec :: (Show a, Eq a) => (NWA a) -> [a] -> Either String Bool
exec (NWA start trans finals stack) input = case run stack start trans input of
(Left err) -> Left err
(Right (_, final)) -> Right (final `elem` finals)
run :: (Show a, Eq a) => [Symbol Int] -> State -> [Transition a] -> [a] -> Either String ([Symbol Int], State)
run stack current _ [] = Right (stack, current)
run stack current trans (c:cs) = case step stack current trans c of
(Left err) -> Left err
(Right (newstack, newstate)) -> run newstack newstate trans cs
step :: (Show a, Eq a) => [Symbol Int] -> State -> [Transition a] -> a -> Either String ([Symbol Int], State)
step stack current trans c = case get trans (peek stack) current c of
(Left err) -> Left err
(Right (Internal _ _ next)) -> Right (stack, next)
(Right (Call _ _ (symbol, next))) -> Right (push symbol stack, next)
(Right (Return _ _ next)) -> Right (pop stack, next)
get :: (Show a, Eq a) => [Transition a] -> Symbol Int -> State -> a -> Either String (Transition a)
get trans symbol current input = case filter (isTrans symbol current input) trans of
[] -> Left $ "no transition found for (" ++ show symbol ++ "," ++ show current ++ ") " ++ show input
[t] -> Right t
_ -> Left $ "multiple transitions found for (" ++ show symbol ++ "," ++ show current ++ ") " ++ show input
isTrans :: (Eq a) => Symbol Int -> State -> a -> Transition a -> Bool
isTrans _ current input (Internal s c _) = s == current && c == input
isTrans _ current input (Call s c _) = s == current && c == input
isTrans symbol current input (Return (s', s) c _) = s' == symbol && s == current && c == input
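-- A minimal usage sketch ('demoAccepts' is illustrative only): a two-state
-- automaton with a single internal transition on 'a', run on the input "a".
-- The expected result is Right True.
demoAccepts :: Either String Bool
demoAccepts = do
  nwa <- new (State 0) [Internal (State 0) 'a' (State 1)] [State 1]
  exec nwa "a"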
|
katydid/nwe
|
src/NWA.hs
|
bsd-3-clause
| 3,823 | 0 | 13 | 885 | 1,781 | 931 | 850 | 66 | 4 |
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE MonadComprehensions #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE RankNTypes #-}
module Data.ExprGADT.Dumb where
-- import Data.ExprGADT.Dumb.Infer
import Control.Applicative
import Control.Exception
import Control.Monad.Except
import Data.ExprGADT.Dumb.Types
import Data.ExprGADT.Eval
import Data.ExprGADT.Traversals
import Data.ExprGADT.Types
import Data.Foldable
import Data.IsTy
import Data.List
import Data.Map.Strict (Map)
import Data.Maybe
import Data.Monoid
import Data.Proof.EQ as Ty
import Data.Proxy
import Data.Singletons
import Data.Singletons.Decide
import Data.Singletons.Prelude hiding (Map)
import Data.Singletons.TH
import Data.Typeable
import Unsafe.Coerce
import qualified Data.Map.Strict as M
$(singletons [d|
data PNat = NZ | NS PNat deriving (Eq, Show, Ord)
nPlus :: PNat -> PNat -> PNat
nPlus NZ y = y
nPlus (NS x) y = NS (nPlus x y)
nLTE :: PNat -> PNat -> Bool
nLTE NZ _ = True
nLTE (NS x) (NS y) = nLTE x y
|])
data Vec :: PNat -> * -> * where
VNil :: Vec 'NZ a
(:>) :: a -> Vec n a -> Vec ('NS n) a
infixr 5 :>
data VecW :: * -> * where
VW :: Sing n -> Vec n a -> VecW a
type family LengthP (xs :: [k]) :: PNat where
LengthP '[] = 'NZ
LengthP (x ': xs) = 'NS (LengthP xs)
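-- | Take the first @n@ elements of a vector, or 'Nothing' if it is too short.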
takeVec :: Sing n -> Vec m a -> Maybe (Vec n a)
takeVec SNZ _ = Just VNil
takeVec (SNS n) (x :> xs) = (x :>) <$> takeVec n xs
takeVec (SNS _) VNil = Nothing
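-- | Drop the first @n@ elements of a vector and apply the function to the
-- remaining @m@ elements.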
applyOnLast :: (Vec m a -> b) -> Sing n -> Vec (NPlus n m) a -> b
applyOnLast f SNZ xs = f xs
applyOnLast f (SNS i) (_ :> xs) = applyOnLast f i xs
applyOnLast _ (SNS _) _ = error "impossible...cannot be called."
data PolyExpr :: * where
PE :: Sing n -> (Vec n ETypeW -> ExprW) -> PolyExpr
-- data PolyType :: * where
-- PT :: Sing n -> (Vec n ETypeW -> ETypeW) -> PolyType
data TExpr :: PNat -> * where
TEV :: NatIxor n -> TExpr n
TEO0 :: TOp0 -> TExpr n
TEO1 :: TOp1 -> TExpr n -> TExpr n
TEO2 :: TOp2 -> TExpr n -> TExpr n -> TExpr n
TForall :: TExpr ('NS n) -> TExpr n
-- deriving (Show, Eq)
data Subst :: PNat -> PNat -> * where
Sub :: (NatIxor n -> TExpr m) -> Subst n m
data SubstW :: PNat -> * where
SW :: Sing m -> Subst n m -> SubstW n
unifyTExpr :: forall n m. (SingI n, SingI m)
=> TExpr n
-> TExpr m
-> Maybe (SubstW n, SubstW m)
unifyTExpr (TEO0 o) (TEO0 o') | o == o' = Just ( SW (sing :: Sing n) nilSubst
, SW (sing :: Sing m) nilSubst
)
| otherwise = Nothing
unifyTExpr (TEO1 o t1) (TEO1 o' t1') | o == o' = unifyTExpr t1 t1'
| otherwise = Nothing
unifyTExpr (TEO2 o t1 t2) (TEO2 o' t1' t2') | o == o' = do
(SW ns1 sub1, SW ns1' sub1') <- unifyTExpr t1 t1'
let t1s = applySubst sub1 t1
t1s' = applySubst sub1' t1'
(SW ns2 sub2, SW ns2' sub2') <- unifyTExpr t2 t2'
undefined
| otherwise = Nothing
nilSubst :: Subst n n
nilSubst = Sub TEV
subTExpr :: Vec n (TExpr m) -> TExpr n -> TExpr m
subTExpr v = subNix (indexVec v)
applySubst :: Subst n m -> TExpr n -> TExpr m
applySubst (Sub f) = subNix f
subNix :: forall n m. (NatIxor n -> TExpr m) -> TExpr n -> TExpr m
subNix f t = case t of
TEV ix -> f ix
TEO0 o -> TEO0 o
TEO1 o t1 -> TEO1 o (subNix f t1)
TEO2 o t1 t2 -> TEO2 o (subNix f t1) (subNix f t2)
TForall t1 -> TForall (subNix f' t1)
where
f' :: NatIxor ('NS n) -> TExpr ('NS m)
f' NIZ = TEV NIZ
f' (NIS ix) = subNix (TEV . NIS) (f ix)
data ETListW :: * where
ETLW :: ETList ts -> ETListW
data IndexorW :: [*] -> * where
IXW :: EType a -> Indexor vs a -> IndexorW vs
deriving instance Show (IndexorW vs)
instance Functor (Vec n) where
fmap _ VNil = VNil
fmap f (x :> xs) = f x :> fmap f xs
instance Applicative (Vec 'NZ) where
pure _ = VNil
_ <*> _ = VNil
instance Applicative (Vec n) => Applicative (Vec ('NS n)) where
pure x = x :> pure x
(f :> fs) <*> (x :> xs) = f x :> (fs <*> xs)
instance Foldable (Vec n) where
foldMap _ VNil = mempty
foldMap f (x :> xs) = f x <> foldMap f xs
data NatIxor :: PNat -> * where
NIZ :: NatIxor ('NS n)
NIS :: NatIxor n -> NatIxor ('NS n)
indexVec :: Vec n a -> NatIxor n -> a
indexVec (x :> _ ) NIZ = x
indexVec (_ :> xs) (NIS ix) = indexVec xs ix
-- data Env :: PNat -> * where
-- Env :: Map VName (NatIxor n) -> Vec n ETypeW -> Env n
unDumb :: DumbExpr -> PolyExpr
unDumb e =
case e of
DV v -> undefined
|
mstksg/expr-gadt
|
src/Data/ExprGADT/Dumb.hs
|
bsd-3-clause
| 5,461 | 0 | 11 | 1,767 | 1,871 | 963 | 908 | 137 | 6 |
module Sodium.Chloride.Inline (inline) where
import Control.Applicative
import Control.Monad.Reader
import Control.Lens hiding (Index, Fold)
import qualified Data.Map as M
import Sodium.Chloride.Program.Vector
import Sodium.Chloride.Recmap.Vector
import Sodium.ApplyOnce
inline :: Program -> Program
inline = recmapProgram' (recmapper' inlineBody)
inlineBody body
= update body $ eliminateAssign
(body ^. bodyResults, body ^. bodyStatements)
where update body (subResults, subStatements)
= bodyResults .~ subResults
$ bodyStatements .~ subStatements
$ body
eliminateAssign
:: ([Expression], [(IndicesList, Statement)])
-> ([Expression], [(IndicesList, Statement)])
eliminateAssign (bodyResults, (statement:statements))
= maybe follow id $ do
([name], Assign expr) <- Just statement
let subSingle = liftA2 (,)
(mapM substituteSingleAccess bodyResults)
(mapM (_2 substituteSingleAccess) statements)
case runReaderT subSingle (name, expr) of
Once bodyPair -> Just (eliminateAssign bodyPair)
None bodyPair -> Just (eliminateAssign bodyPair)
Ambiguous -> Nothing
where follow
= over _2 (statement:)
$ eliminateAssign (bodyResults, statements)
eliminateAssign bodyPair = bodyPair
type SubstituteAccessEnv = ((Name, Index), Expression)
class SubstituteSingleAccess a where
substituteSingleAccess :: a -> ReaderT SubstituteAccessEnv ApplyOnce a
instance SubstituteSingleAccess Expression where
substituteSingleAccess = \case
Primary prim -> return $ Primary prim
Access name' j -> do
(name, expr) <- ask
if name == (name', j)
then lift (Once expr)
else return (Access name' j)
Call op exprs -> do
Call op <$> mapM substituteSingleAccess exprs
Fold op exprs range -> do
Fold op
<$> mapM substituteSingleAccess exprs
<*> substituteSingleAccess range
instance SubstituteSingleAccess Statement where
substituteSingleAccess = \case
-- It is assumed that every variable is assigned only once,
-- since the code is vectorized. Therefore it's not the
-- variable we are substituting, and no additional checks required.
Assign expr -> Assign <$> substituteSingleAccess expr
Execute executeName args
-> Execute executeName
<$> mapM substituteSingleAccess args
ForStatement forCycle
-> ForStatement
<$> substituteSingleAccess forCycle
MultiIfStatement multiIfBranch
-> MultiIfStatement
<$> substituteSingleAccess multiIfBranch
BodyStatement body
-> BodyStatement
<$> substituteSingleAccess body
instance SubstituteSingleAccess ForCycle where
substituteSingleAccess
= forRange substituteSingleAccess
>=> (forArgExprs . traversed) substituteSingleAccess
>=> liftA2 (>>=)
(shadowedBy . toListOf (forArgIndices . traversed . _1))
(flip subBody)
where subBody shadowed
| shadowed = return
| otherwise = forBody substituteSingleAccess
instance SubstituteSingleAccess MultiIfBranch where
substituteSingleAccess
= (multiIfLeafs . traversed)
(_1 substituteSingleAccess >=> _2 substituteSingleAccess)
>=> multiIfElse substituteSingleAccess
instance SubstituteSingleAccess Body where
substituteSingleAccess
= liftA2 (>>=)
(shadowedBy . M.keys . view bodyVars)
(flip subBody)
where subBody shadowed
| shadowed = return
| otherwise
= (bodyStatements . traversed . _2) substituteSingleAccess
>=> (bodyResults . traversed) substituteSingleAccess
shadowedBy :: Monad m => [Name] -> ReaderT SubstituteAccessEnv m Bool
shadowedBy names = do
(name, _) <- ask
return $ fst name `elem` names
|
kirbyfan64/sodium
|
src/Sodium/Chloride/Inline.hs
|
bsd-3-clause
| 3,550 | 62 | 14 | 605 | 834 | 472 | 362 | -1 | -1 |
module Test.Balance where
import Data.Interval
import Data.IntervalSet as IS
balanced :: IntervalSet -> Bool
balanced Empty = True
balanced (IntervalSet i t) = balancedTree i 1 1 t
balancedTree :: Interval -> Int -> Int -> Tree -> Bool
balancedTree (Interval a b) lmin rmin Leaf = b - a >= max lmin rmin
balancedTree (Interval a b) lmin rmin (Node l (Interval c d) r) =
balancedTree (Interval a c) lmin lrmin l &&
balancedTree (Interval d b) rlmin rmin r
where
lrmin = max (c - a - (b - c)) 1
rlmin = max (b - d - (d - a)) 1
balancedIntervalUnion :: IntervalSet -> Interval -> Bool
balancedIntervalUnion s i = balanced (insert i s)
balancedIntervalComplement :: IntervalSet -> Interval -> Bool
balancedIntervalComplement s i = balanced (delete i s)
balancedIntervalIntersection :: IntervalSet -> Interval -> Bool
balancedIntervalIntersection s i = balanced (IS.intersect i s)
|
ian-mi/interval-set
|
tests/Test/Balance.hs
|
bsd-3-clause
| 930 | 0 | 11 | 204 | 356 | 182 | 174 | 19 | 1 |
{-# LANGUAGE FlexibleContexts #-}
import Data.Point2d
import Data.KdTree.Static as KDT
import Data.KdTree.Dynamic as DKDT
import Control.DeepSeq
import Control.Monad
import qualified Control.Monad.Random as CMR
import Criterion.Main
import Data.List
import Data.Maybe
import qualified Data.Heap as Q
import System.Random.Mersenne.Pure64
zeroOnePointSampler :: CMR.Rand PureMT Point2d
zeroOnePointSampler =
liftM2 Point2d
(CMR.getRandomR (0.0, 1.0))
(CMR.getRandomR (0.0, 1.0))
-- Input: List of pairs of points, where the first of each pair is the
-- point to add to the dynamic KdTree, and the second is the point to
-- query for nearest neighbor
interleaveBuildQuery :: [(Point2d, Point2d)] -> [Point2d]
interleaveBuildQuery =
let f :: (DKDT.KdTree Double Point2d, [Point2d]) ->
(Point2d, Point2d) ->
(DKDT.KdTree Double Point2d, [Point2d])
f (kdt, accList) (treePt, queryPt) =
let newKdt = DKDT.insert kdt treePt
near = DKDT.nearest newKdt queryPt
in (newKdt, near : accList)
start = (DKDT.emptyWithDist pointAsList2d distSqr2d, [])
in snd . foldl' f start
-- nn implemented with optimized linear scan
nearestLinear :: [Point2d] -> Point2d -> Point2d
nearestLinear [] _ = error "nearestLinear called on an empty list!"
nearestLinear (ph : pt) query = fst $ foldl' f (ph, distSqr2d query ph) pt
where {-# INLINE f #-}
f b@(_, dBest) x
| d < dBest = (x, d)
| otherwise = b
where d = distSqr2d query x
pointsInRadiusLinear :: [Point2d] -> Double -> Point2d -> [Point2d]
pointsInRadiusLinear ps radius query =
filter ((<= radius * radius) . distSqr2d query) ps
-- knn implemented with priority queue
kNearestNeighborsLinear :: [Point2d] -> Int -> Point2d -> [Point2d]
kNearestNeighborsLinear ps k query =
reverse $ map snd $ Q.toAscList $ foldl' f (Q.empty :: Q.MaxPrioHeap Double Point2d) ps
where f q p = let insertBounded queue dist x
| Q.size queue < k = Q.insert (dist, x) queue
| otherwise =
let ((farthestDist, _), rest) = fromJust $ Q.view queue
in if dist < farthestDist
then Q.insert (dist, x) rest
else queue
in insertBounded q (distSqr2d query p) p
rangeLinear :: Point2d -> Point2d -> [Point2d] -> [Point2d]
rangeLinear lowers uppers xs =
let lowersAsList = pointAsList2d lowers
uppersAsList = pointAsList2d uppers
valInRange l x u = l <= x && x <= u
pointInRange p =
and $ zipWith3 valInRange
lowersAsList (pointAsList2d p) uppersAsList
in filter pointInRange xs
pointToBounds :: Point2d -> Double -> (Point2d, Point2d)
pointToBounds (Point2d x y) w =
(Point2d (x - w) (y - w), Point2d (x + w) (y + w))
rangeOfPointLinear :: [Point2d] -> Double -> Point2d -> [Point2d]
rangeOfPointLinear xs w q =
let (lowers, uppers) = pointToBounds q w
in rangeLinear lowers uppers xs
rangeOfPointKdt :: KDT.KdTree Double Point2d -> Double -> Point2d -> [Point2d]
rangeOfPointKdt kdt w q =
let (lowers, uppers) = pointToBounds q w
in KDT.inRange kdt lowers uppers
linearInterleaveBuildQuery :: [(Point2d, Point2d)] -> [Point2d]
linearInterleaveBuildQuery =
let f :: ([Point2d], [Point2d]) -> (Point2d, Point2d) -> ([Point2d], [Point2d])
f (ps, accList) (structPt, queryPt) =
let ps' = structPt : ps
near = nearestLinear ps' queryPt
in (ps', near : accList)
in snd . foldl' f ([], [])
main :: IO ()
main =
let seed = 1
treePoints = CMR.evalRand (sequence $ repeat zeroOnePointSampler) $ pureMT seed
kdtN n = KDT.buildWithDist pointAsList2d distSqr2d $ take n treePoints
queryPoints = CMR.evalRand (sequence $ repeat zeroOnePointSampler) $ pureMT (seed + 1)
buildKdtBench n = bench (show n) $ nf kdtN n
nnKdtBench nq np =
bench ("np-" ++ show np ++ "-nq-" ++ show nq) $
nf (map (KDT.nearest (kdtN np))) (take nq queryPoints)
inRadKdtBench nq r np =
bench ("np-" ++ show np ++ "-nq-" ++ show nq ++ "-r-" ++ show r) $
nf (map (KDT.inRadius (kdtN np) r)) (take nq queryPoints)
knnKdtBench nq k np =
bench ("np-" ++ show np ++ "-nq-" ++ show nq ++ "-k-" ++ show k) $
nf (map (KDT.kNearest (kdtN np) k)) (take nq queryPoints)
rangeKdtBench nq w np =
bench ("np-" ++ show np ++ "-nq-" ++ show nq ++ "-w-" ++ show w) $
nf (map $ rangeOfPointKdt (kdtN np) w) (take nq queryPoints)
nnLinearBench nq np =
bench ("np-" ++ show np ++ "-nq-" ++ show nq) $
nf (map (nearestLinear (take np treePoints))) (take nq queryPoints)
inRadLinearBench nq r np =
bench ("np-" ++ show np ++ "-nq-" ++ show nq ++ "-r-" ++ show r) $
nf (map $ pointsInRadiusLinear (take np treePoints) r) (take nq queryPoints)
rangeLinearBench nq w np =
bench ("np-" ++ show np ++ "-nq-" ++ show nq ++ "-w-" ++ show w) $
nf (map $ rangeOfPointLinear (take np treePoints) w) (take nq queryPoints)
knnLinearBench nq k np =
bench ("np-" ++ show np ++ "-nq-" ++ show nq ++ "-k-" ++ show k) $
nf (map $ kNearestNeighborsLinear (take np treePoints) k) (take nq queryPoints)
nniDkdtBench n =
bench ("n-" ++ show n) $
nf interleaveBuildQuery (zip (take n treePoints) (take n queryPoints))
numQueries = 100
pointSetSizes = [100, 1000, 10000, 100000]
radius = 0.05
numNeighbors = 10
rangeWidth = 0.05
in defaultMain [
bgroup "linear-nn" $ map (nnLinearBench numQueries) pointSetSizes,
bgroup "linear-rad" $ map (inRadLinearBench numQueries radius) pointSetSizes,
bgroup "linear-knn" $ map (knnLinearBench numQueries numNeighbors) pointSetSizes,
bgroup "linear-range" $ map (rangeLinearBench numQueries rangeWidth) pointSetSizes,
bgroup "kdt-build" $ map buildKdtBench pointSetSizes,
bgroup "kdt-nn" $ map (nnKdtBench numQueries) pointSetSizes,
bgroup "kdt-rad" $ map (inRadKdtBench numQueries radius) pointSetSizes,
bgroup "kdt-knn" $ map (knnKdtBench numQueries numNeighbors) pointSetSizes,
bgroup "kdt-range" $ map (rangeKdtBench numQueries rangeWidth) pointSetSizes,
bgroup "dkdt-nn" $ map nniDkdtBench pointSetSizes
]
|
ScrambledEggsOnToast/kdt
|
app-src/Benchmarks/KDTBenchmark.hs
|
mit
| 6,396 | 6 | 17 | 1,634 | 2,295 | 1,163 | 1,132 | 128 | 2 |
{-# LANGUAGE OverloadedStrings, ExistentialQuantification, ExtendedDefaultRules, FlexibleContexts, TemplateHaskell #-}
module Dashdo.Examples.TestDashdo where
import Numeric.Datasets.Iris
import Dashdo
import Dashdo.Types
import Dashdo.Serve
import Dashdo.Elements
import Dashdo.FlexibleInput
import Control.Monad
import Control.Monad.State.Strict
import Lucid
import Data.Monoid ((<>))
import Data.Text (Text, unpack, pack)
import Lens.Micro.Platform
import Graphics.Plotly (plotly, layout, title, Trace)
import Graphics.Plotly.Lucid
import Graphics.Plotly.GoG
import Graphics.Plotly.Histogram (histogram)
data Example = Example
{ _pname :: Text
, _isMale :: Bool
, _xaxis :: Tag (Iris -> Double)
, _yaxis :: Tag (Iris -> Double)
}
makeLenses ''Example
testDashdo = runDashdoIO $ Dashdo initv (example iris)
test :: SHtml IO Bool ()
test = do
b <- getValue
"The person is male: "
if b then "yes" else "no"
hello :: SHtml IO Text ()
hello = do
id <<~ textInput
br_ []
txt <- getValue
"Hello, " <> (toHtml txt) <> "!"
example :: [Iris] -> SHtml IO Example ()
example irisd = wrap plotlyCDN $ do
nm <- getValue
let trace :: Trace
trace = points (aes & x .~ (nm ^. xaxis . tagVal)
& y .~ (nm ^. yaxis . tagVal)) irisd
-- & marker ?~ (defMarker & markercolor ?~ catColors (map irisClass irisd))
h2_ "Testing Dashdo"
isMale <<~ select [("Male", True),("Female", False)]
br_ []
"Name input #1:"
pname <<~ textInput
br_ []
"Name input #2:"
pname <<~ textInput
br_ []
"Name input #3:"
pname <<~ textInput
br_ []
"Greetings using (#>):"
pname #> hello
br_ []
isMale #> test
br_ []
xaxis <<~ select axes
yaxis <<~ select axes
toHtml $ plotly "foo" [trace] & layout . title ?~ "my plot"
axes = [tagOpt "sepal length" sepalLength,
tagOpt "sepal width" sepalWidth,
tagOpt "petal length" petalLength,
tagOpt "petal width" petalWidth]
initv = Example "Simon" True (snd $ axes!!0) (snd $ axes!!1)
{-hbarData :: [(Text, Double)]
hbarData = [("Simon", 14.5), ("Joe", 18.9), ("Dorothy", 16.2)]
hbarsTrace = bars & y ?~ map fst hbarData & x ?~ map snd hbarData & orientation ?~ Horizontal -}
|
filopodia/open
|
dashdo-examples/lib/Dashdo/Examples/TestDashdo.hs
|
mit
| 2,237 | 0 | 18 | 485 | 661 | 342 | 319 | -1 | -1 |
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
module Qi.Config.AWS.ApiGw.ApiMethod.Accessors where
import Control.Lens
import Data.Char (isAlphaNum)
import qualified Data.HashMap.Strict as SHM
import Data.Text (Text)
import qualified Data.Text as T
import Protolude
import Qi.Config.AWS
import Qi.Config.AWS.ApiGw
import Qi.Config.Identifier
getLogicalName
:: ApiResource
-> ApiVerb
-> Text
getLogicalName apir verb = T.concat [makeAlphaNumeric $ apir^.arName, T.pack $ show verb]
|
qmuli/qmuli
|
library/Qi/Config/AWS/ApiGw/ApiMethod/Accessors.hs
|
mit
| 622 | 0 | 8 | 176 | 127 | 79 | 48 | 17 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
module Command.Types where
import qualified Data.Aeson as A
import Data.Aeson.Types (Parser)
import Data.Aeson ((.:), (.=))
import qualified Data.HashMap.Lazy as HM
import qualified Data.Map as M
import GHC.Generics (Generic)
import Network.Socket (Socket, PortNumber)
import Control.Concurrent.STM (atomically)
import Control.Concurrent.STM.TVar ( TVar, newTVarIO
, modifyTVar', readTVar, writeTVar)
import Control.Exception (bracketOnError)
import Network.Vanguard.Core
-- |A partial connection represents a connection which has been
-- started, but for which the details have yet to be finalized.
data PartialConnection = PC { connection :: CommVar Connection }
-- |PartialConnections will be stored in a Map with a unique
-- identifier.
type PendingM = HM.HashMap Int PartialConnection
-- |We wrap the PendingM and give it a basic API.
newtype Pending = Pending (TVar PendingM)
-- | Create a new Pending value.
newPending :: IO Pending
newPending = fmap Pending . newTVarIO $ HM.empty
-- | Add an entry to a collection of Pendings.
addBlankPending :: Pending -> Int -> IO (CommVar Connection)
addBlankPending (Pending p) uid = do
c <- newCommVar
atomically $ modifyTVar' p (HM.insert uid (PC c))
return c
errorBracketedPending :: Pending -> Int -> (CommVar Connection -> IO a) -> IO a
errorBracketedPending pen@(Pending p) uid action =
bracketOnError (addBlankPending pen uid)
                 (\_ -> atomically $ modifyTVar' p (HM.delete uid))
action
-- | Retrieve an entry from a Pending. This will delete it from the record.
retrievePending :: Pending -> Int -> IO (Maybe PartialConnection)
retrievePending (Pending p) uid = atomically $ do
map <- readTVar p
let (map', pc) = takeOut map uid
case pc of
(Just _) -> writeTVar p map' >> return pc
Nothing -> return Nothing
-- |Utility function to look up a value, and if it exists, remove it
-- and return it.
takeOut :: HM.HashMap Int a -> Int -> (HM.HashMap Int a, Maybe a)
takeOut m k = case (HM.lookup k m) of
Just x -> let m' = HM.delete k m in
(m', Just x)
Nothing -> (m, Nothing)
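-- | A minimal usage sketch ('pendingExample' is illustrative only): create a
-- 'Pending' store, register a blank pending connection under an identifier,
-- and later retrieve (and thereby remove) it.
pendingExample :: IO ()
pendingExample = do
  pending <- newPending
  _comm   <- addBlankPending pending 42
  found   <- retrievePending pending 42
  case found of
    Just _  -> putStrLn "pending connection 42 retrieved"
    Nothing -> putStrLn "no pending connection under 42"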
|
rlupton20/vanguard-dataplane
|
app/Command/Types.hs
|
gpl-3.0
| 2,229 | 0 | 13 | 470 | 609 | 329 | 280 | 42 | 2 |
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables #-}
module Pipes.Docker (dockerListener) where
import Prelude hiding (putStrLn, getLine, words, lines, concat, length, drop, dropWhile, null, lookup)
import Network.Socket hiding (recv)
-- import qualified GHC.IO.Exception as Ex
-- import Control.Exception (try, throwIO)
import qualified Data.HashMap.Strict as Map
import Control.Monad (forever)
import Network.Socket.ByteString (recv, sendAll)
import Data.Aeson (decodeStrict, eitherDecodeStrict)
import Data.Aeson.Types (Object)
import Data.Foldable (forM_)
import Data.Maybe (fromMaybe)
import Data.Either (rights)
import Data.ByteString (ByteString, concat)
import Data.ByteString.Char8 (pack, unpack, putStrLn)
import Data.HashMap.Strict (fromList)
import Data.Text (append, dropWhile, replace, splitOn)
import Data.Text.Encoding (encodeUtf8, decodeUtf8)
import Data.Text.Read (decimal)
import Text.Regex (mkRegex, splitRegex)
import Pipes (Consumer, Producer, Pipe, (>->), await, yield, runEffect, lift)
import Data.Docker (Event(..), StartResponse(..), StopResponse(..), StartNetworkSettings(..), StartConfig(..))
import Data.Consul (RegisterNode(..), DeregisterNode(..), Service(..), Datacenter(..))
import Network.Consul.DockerClient (registerNode, deregisterNode, mkConsulClient)
dockerListener :: IO ()
dockerListener = do
putStrLn "docker-listerner"
runEffect $ docker >-> json2event >-> event2id >-> id2container >-> container2consul >-> consul
type Status = ByteString
-- | @todo: handle error cases
docker :: Producer ByteString IO ()
docker = forever $ do
lift $ putStr "docker:"
s <- lift unixSocket
_ <- ($) forever $ lift (event s) >>= yield
lift $ sClose s
json2event :: Pipe ByteString Event IO ()
json2event = forever $ do
r <- await
let j = last $ filter (\c-> c /= "") $ (splitRegex (mkRegex "[ \t\r\n]+") r') where r' = unpack r
case (decodeStrict $ pack j) :: Maybe Event of
Just ev -> yield ev
Nothing -> lift $ putStr "error in json2event j=:" >> print j
event2id :: Pipe Event (ByteString, Status) IO ()
event2id = forever $ do
e <- await
lift $ putStr "--| EVENT2ID: " >> putStr "event2id: e=" >> print e
yield $ (encodeUtf8 $ _eId e, encodeUtf8 $ _eStatus e)
id2container :: Pipe (ByteString, Status) (ByteString, Status) IO ()
id2container = forever $ do
s <- lift $ unixSocket
_ <- ($) forever $ do
(eId, status) <- await
lift $ sendAll s $ concat ["GET /containers/", eId, "/json HTTP/1.1", "\r\n\r\n"]
r <- lift $ recv s 4096 -- r: http response
yield $ (r, status)
lift $ sClose s
container2consul :: Pipe (ByteString, Status) (ByteString, Status) IO ()
container2consul = forever $ do
(r, status) <- await -- r: http response
let json :: ByteString = pack $ last $ init $ filter (\c -> c /= "")
(splitRegex (mkRegex "[ \t\r\n]+") (unpack r))
yield (json, status)
consul :: Consumer (ByteString, Status) IO ()
consul = do
consulClient <- mkConsulClient
forever $ do
(json, status) <- await
lift $ putStr "consul: json=" >> putStrLn json
r <- case status of
"start" -> do
let nodes = mkRegisterNodes $ decodeStrict json
debug = eitherDecodeStrict json :: Either String StartResponse
res' = decodeStrict json :: Maybe StartResponse
lift $ putStr "consul: nodes=" >> print nodes >> putStr "DEBUG=" >> print debug
case res' of
Just r -> lift $ putStr "consul: Env=" >> print (_scEnv (_srConfig r))
Nothing -> lift $ putStrLn "Nothing"
case nodes of
Just ns -> forM_ ns (\n -> lift $ putStr "n=" >> print n >> registerNode consulClient n) >> return True
Nothing -> return False
"stop" -> do
let node = mkDeregisterNode $ decodeStrict json
lift $ putStr "consul: node=" >> print node
case node of
Just n -> lift $ deregisterNode consulClient n
Nothing -> return False
_ -> return False
return r
event :: Socket -> IO ByteString
event s = do
sendAll s "GET /events HTTP/1.1\r\nContent-Type: application/json\r\n\r\n"
recv s 4096
-- return j
unixSocket :: IO Socket
unixSocket = do
s <- socket AF_UNIX Stream defaultProtocol
connect s $ SockAddrUnix "/var/run/docker.sock"
return s
mkDeregisterNode :: Maybe StopResponse -> Maybe DeregisterNode
mkDeregisterNode (Just res) = let name = replace "_" "-" (dropWhile (\c -> c == '/') (_stName res))
in Just (DeregisterNode (Just $ Datacenter "dev") name)
mkDeregisterNode Nothing = Nothing
-- @todo: test docker containers with and without ports (external services)
mkRegisterNodes :: Maybe StartResponse -> Maybe [RegisterNode]
mkRegisterNodes (Just res) = let name = replace "_" "-" (dropWhile (\c -> c == '/') (_srName res))
dc = Just $ Datacenter "dev"
net = _srNetworkSettings res :: StartNetworkSettings
ip = _snsIPAddress net
ps = ports (_snsPorts net)
in case ps of
-- either hostname or name
Just ps' -> Just $ map (\port -> RegisterNode dc name ip (mkService res port) Nothing) ps'
Nothing -> Just [RegisterNode dc name ip (mkService res 0) Nothing]
mkRegisterNodes Nothing = Nothing
mkService :: StartResponse -> Int -> Maybe Service
mkService res port = let cid = _srId res
net = _srNetworkSettings res
name = replace "_" "-" (dropWhile (\c -> c == '/') (_srName res))
sid = append cid $ append "-" $ append name $ append "-" (decodeUtf8 (pack $ show port))
env = fromList $ map (\e -> let l = splitOn "=" e in (head l, last l)) $ _scEnv (_srConfig res)
service = fromMaybe name (Map.lookup "SERVICE" env)
ps = _snsPorts net
in case ps of
Just ps' -> Just (Service sid service (Map.keys ps') Nothing (Just port))
Nothing -> Just (Service sid service [] Nothing Nothing)
ports :: Maybe Object -> Maybe [Int]
ports o = case o of
Just obj -> let ports' = map (\p -> decimal p) $ map (\p -> head (splitOn "/" p)) $ Map.keys obj
in return $ map (\pair -> fst pair) (rights ports')
Nothing -> Nothing
|
metaml/dokreg
|
src/Pipes/Docker.hs
|
gpl-3.0
| 7,008 | 0 | 26 | 2,216 | 2,204 | 1,132 | 1,072 | 126 | 6 |
{-# language ExistentialQuantification, FunctionalDependencies, RecordWildCards,
NamedFieldPuns, FlexibleInstances, MultiParamTypeClasses,
DeriveDataTypeable, DeriveFunctor, DeriveFoldable, DeriveTraversable #-}
-- module for often used types (in one Base module, to avoid module import cycles.)
module Base.Types (
module Base.Types,
module Base.Types.Events,
module Base.Types.LevelMetaData,
Offset,
Seconds,
) where
import Data.Set hiding (size)
import Data.Indexable
import Data.Abelian
import Data.SelectTree
import Data.Typeable
import Data.Map hiding (size)
import Data.Data
import Data.Generics.Uniplate.Data
import Data.Accessor
import Data.IORef
import qualified Data.Binary as Binary
import qualified Data.Text as T
import Data.Version
import qualified Data.Strict as St
import Control.Monad.Reader
import Control.Monad.State.Strict
import Control.Monad.CatchState
import Control.Concurrent.MVar
import System.FilePath
import Physics.Chipmunk as CM
import Graphics.Qt as Qt
import Sound.SFML
import Utils
import Base.Constants
import Base.Configuration
import Base.Grounds
import Base.GameGrounds
import Base.Pixmap
import Base.Types.Events
import Base.Types.LevelMetaData
import StoryMode.Types
-- * type aliases
type ConfigurationReader = ReaderT Configuration IO
type RM = ConfigurationReader
type ConfigurationState = CatchState Configuration IO
type M = ConfigurationState
type GameMonad o = StateT GameState M o
type RetryLevel = AppState
data GameState = GameState {
cmSpace :: Space,
cameraStateRef :: IORef CameraState,
scene :: Scene Object_,
retryLevel :: RetryLevel
}
setScene :: Scene Object_ -> GameState -> GameState
setScene scene (GameState space camRef _ retryLevel) =
GameState space camRef scene retryLevel
-- * from Base.Application
data Application
= Application {
application :: Ptr QApplication,
window :: Ptr MainWindow,
keyPoller :: KeyPoller,
autoUpdateVersion :: MVar UpdateVersions,
storyModeAvailability :: MVar StoryModeAvailability,
getMainMenu_ :: Application -> AppState,
applicationPixmaps :: ApplicationPixmaps,
applicationSounds :: ApplicationSounds,
allSorts :: SelectTree Sort_
}
getMainMenu :: Application -> AppState
getMainMenu app = getMainMenu_ app app
data UpdateVersions = UpdateVersions {
gameNewVersion :: Maybe Version,
storyModeNewVersion :: Either String (Maybe Version)
}
data StoryModeAvailability
= NotAvailable
| Buyable
| Installed
hasUpdates :: UpdateVersions -> Bool
hasUpdates (UpdateVersions (Just _) _) = True
hasUpdates (UpdateVersions _ (Right (Just _))) = True
hasUpdates _ = False
data AppState
= AppState RenderableInstance (M AppState)
| AppStateLooped RenderableInstance (M AppState)
| NoGUIAppState (M AppState)
| GameAppState RenderableInstance (GameMonad AppState) GameState
| UnManagedAppState (M AppState) -- manages rendering by itself
| FinalAppState
type Parent = AppState
type Play = Parent -> LevelFile -> AppState
data ApplicationPixmaps = ApplicationPixmaps {
menuBackground :: [Pixmap],
menuBackgroundTransparent :: [Pixmap],
alphaNumericFont :: Font,
pixmapsDigitFont :: Font,
headerCubePixmaps :: HeaderCubePixmaps,
menuTitlePixmap :: Pixmap,
pausePixmap :: Pixmap,
successPixmap :: Pixmap,
failurePixmap :: Pixmap
}
data Font = Font {
colorVariants :: (Map Color ColorVariant)
}
-- | save pixmaps in one color on transparent background.
data ColorVariant = ColorVariant {
longest :: Int, -- length of the longest text for which a pixmap exists
glyphs :: Map T.Text Pixmap,
errorSymbol :: Pixmap
}
deriving Show
-- | a letter with its graphical representation
data Glyph
= Glyph {
character :: T.Text,
glyphPixmap :: Pixmap
}
| ErrorGlyph {glyphPixmap :: Pixmap}
deriving (Show)
data HeaderCubePixmaps
= HeaderCubePixmaps {
startCube :: Pixmap,
standardCube :: Pixmap,
spaceCube :: Pixmap,
endCube :: Pixmap
}
data ApplicationSounds = ApplicationSounds {
menuSelectSound :: PolySound,
menuConfirmSound :: PolySound,
menuCancelSound :: PolySound,
errorSound :: PolySound,
failureSound :: PolySound,
successSound :: PolySound
}
-- * Base.Renderable
class Renderable r where
render :: Ptr QPainter -> Application -> Configuration
-> Size Double -> r -> IO (Size Double, IO ())
label :: r -> String
-- for usage in menus
select :: r -> r
select = id
deselect :: r -> r
deselect = id
data RenderableInstance =
forall r . Renderable r => RenderableInstance r
renderable :: Renderable r => r -> RenderableInstance
renderable = RenderableInstance
-- * from Game.Scene
-- | representing the scene (both physical and graphical objects) during the game.
-- A value of this type gets passed from the logic thread to the rendering thread
data Scene object
= Scene {
levelFile :: LevelFile,
spaceTime_ :: Seconds,
objects_ :: GameGrounds object,
lowerLimit_ :: Maybe CpFloat,
batteryPower_ :: !(Pair Integer Integer), -- makes it possible to have REALLY BIG amounts of power :)
batteryMap :: Map Shape (Index, Chipmunk), -- saves the batteries for every battery shape (needed for removal)
switches_ :: !(Pair Int Int),
contactRef :: !(ContactRef Contacts),
contacts_ :: !Contacts,
mode_ :: Mode
}
deriving Show
spaceTime :: Accessor (Scene o) Seconds
spaceTime = accessor spaceTime_ (\ a r -> r{spaceTime_ = a})
objects :: Accessor (Scene o) (GameGrounds o)
objects = accessor objects_ (\ a r -> r{objects_ = a})
lowerLimit :: Accessor (Scene o) (Maybe CpFloat)
lowerLimit = accessor lowerLimit_ (\ a r -> r{lowerLimit_ = a})
batteryPower :: Accessor (Scene o) (Pair Integer Integer)
batteryPower = accessor batteryPower_ (\ a r -> r{batteryPower_ = a})
switches :: Accessor (Scene o) (Pair Int Int)
switches = accessor switches_ (\ a r -> r{switches_ = a})
contacts :: Accessor (Scene o) Contacts
contacts = accessor contacts_ (\ a r -> r{contacts_ = a})
mode :: Accessor (Scene o) Mode
mode = accessor mode_ (\ a r -> r{mode_ = a})
-- * getter
-- | returns the object currently controlled by the gamepad
getControlled :: Scene o -> Maybe o
getControlled s =
s |>
getControlledIndex |>
fmap (\ i -> s ^. mainLayerObjectA i)
-- | returns the controlled index in game mode
getControlledIndex :: Scene o -> Maybe Index
getControlledIndex scene =
case scene ^. mode of
NikkiMode{nikki} -> Just nikki
TerminalMode{terminal} -> Just terminal
RobotMode{robot} -> Just robot
LevelFinished{} -> Nothing
-- | accesses an object from the mainLayer
mainLayerObjectA :: Index -> Accessor (Scene o) o
mainLayerObjectA i =
objects .> gameMainLayer .> indexA i
data CameraState
= CS Index Vector
deriving Show
data Contacts
= Contacts {
nikkiCollisions :: [NikkiCollision],
nikkiTouchesDeadly :: !Bool,
triggers :: Set Shape,
terminals :: Set Shape,
batteries :: Set Shape,
fallingTiles :: Set Shape,
nearestSign :: Maybe (Shape, CpFloat)
}
deriving Show
data MyCollisionType
= NikkiHeadCT
| NikkiLegsCT
| NikkiGhostCT
| TileCT
| TerminalCT
| DeadlySolidCT
| DeadlyPermeableCT
| PermeableCT
| RobotCT
| TriggerCT
| BatteryCT
| SignCT
| FallingTileCT
deriving (Eq, Ord, Enum, Bounded, Show)
instance PP MyCollisionType where
pp = show
data NikkiCollision = NikkiCollision {
nikkiCollisionShape :: !Shape,
nikkiCollisionAngle :: !Angle,
nikkiCollisionPosition :: !Vector,
nikkiCollisionType :: !MyCollisionType
}
deriving (Show)
instance PP NikkiCollision where
pp (NikkiCollision a b c d) =
"NikkiCollision " ++ show a <~> pp b <~> pp c <~> pp d
-- * mode for the game scene
data Mode
= NikkiMode {
nikki :: Index
}
| TerminalMode {
nikki :: Index,
terminal :: Index
}
| RobotMode{
nikki :: Index,
terminal :: Index,
robot :: Index
}
| LevelFinished {
levelScore :: Score,
levelResult :: LevelResult
}
deriving Show
mkLevelFinished :: Scene o -> LevelResult -> Mode
mkLevelFinished scene result = LevelFinished
(mkScore result (scene ^. spaceTime) (St.fst (scene ^. batteryPower)))
result
-- | returns whether Nikki is controlled currently
isNikkiMode :: Mode -> Bool
isNikkiMode NikkiMode{} = True
isNikkiMode _ = False
-- | returns whether a robot is controlled currently
isRobotMode :: Mode -> Bool
isRobotMode RobotMode{} = True
isRobotMode _ = False
isTerminalMode :: Mode -> Bool
isTerminalMode TerminalMode{} = True
isTerminalMode _ = False
isLevelFinishedMode :: Mode -> Bool
isLevelFinishedMode LevelFinished{} = True
isLevelFinishedMode _ = False
isGameMode :: Mode -> Bool
isGameMode = not . isLevelFinishedMode
data LevelResult = Passed | Failed
deriving (Eq, Ord, Show)
-- | versioned type for scores
data Score
= Score_0 {
scoreTime_ :: Seconds,
scoreBatteryPower_ :: Integer
}
| Score_1_Tried -- played but not passed
| Score_1_Passed {
scoreTime_ :: Seconds,
scoreBatteryPower_ :: Integer
}
deriving (Eq, Show)
scoreTimeA :: Accessor Score Seconds
scoreTimeA = accessor scoreTime_ (\ a r -> r{scoreTime_ = a})
scoreBatteryPowerA :: Accessor Score Integer
scoreBatteryPowerA = accessor scoreBatteryPower_ (\ a r -> r{scoreBatteryPower_ = a})
toNewestScore :: Score -> Score
toNewestScore (Score_0 time batteries) = Score_1_Passed time batteries
toNewestScore x = x
isPassedScore :: Score -> Bool
isPassedScore Score_0{} = True
isPassedScore Score_1_Tried{} = False
isPassedScore Score_1_Passed{} = True
instance Binary.Binary Score where
put (Score_0 a b) = do
Binary.putWord8 0
Binary.put a
Binary.put b
put Score_1_Tried = Binary.putWord8 1
put (Score_1_Passed a b) = do
Binary.putWord8 2
Binary.put a
Binary.put b
get = toNewestScore <$> do
i <- Binary.getWord8
case i of
0 -> Score_0 <$> Binary.get <*> Binary.get
1 -> return Score_1_Tried
2 -> Score_1_Passed <$> Binary.get <*> Binary.get
mkScore :: LevelResult -> Seconds -> Integer -> Score
mkScore Passed t = Score_1_Passed (roundTime t)
where
roundTime :: Seconds -> Seconds
roundTime =
(* (10 ^ timeDigits)) >>>
ceiling >>>
fromIntegral >>>
(/ (10 ^ timeDigits))
mkScore Failed _ = const Score_1_Tried
-- * EditorScene types
data EditorScene sort
= EditorScene {
editorLevelFile :: LevelFile,
cursor :: EditorPosition,
cursorStep :: Maybe EditorPosition, -- if Nothing -> size of selected object
availableSorts_ :: SelectTree sort,
editorObjects_ :: Grounds (EditorObject sort),
selectedLayer_ :: GroundsIndex,
selected :: Maybe (GroundsIndex, Index),
-- index of the object that is in the scene and currently under the cursor
editorMode :: EditorMode,
clipBoard :: [EditorObject sort],
cachedTiles_ :: CachedTiles
}
deriving (Show, Typeable)
editorObjects :: Accessor (EditorScene sort) (Grounds (EditorObject sort))
editorObjects = accessor editorObjects_ (\ a r -> r{editorObjects_ = a})
selectedLayer :: Accessor (EditorScene sort) GroundsIndex
selectedLayer = accessor selectedLayer_ (\ a r -> r{selectedLayer_ = a})
availableSorts :: Accessor (EditorScene sort) (SelectTree sort)
availableSorts = accessor availableSorts_ (\ a r -> r{availableSorts_ = a})
cachedTiles :: Accessor (EditorScene sort) CachedTiles
cachedTiles = accessor cachedTiles_ (\ a r -> r{cachedTiles_ = a})
instance Show (EditorScene sort -> EditorPosition) where
show _ = "<EditorScene -> EditorPosition>"
type CachedTiles = Maybe [ShapeType]
data EditorMode
= NormalMode
| ObjectEditMode {
objectIndex :: Index
}
| SelectionMode {
endPosition :: EditorPosition
}
deriving (Eq, Show, Typeable)
toSelectionMode :: EditorScene s -> EditorScene s
toSelectionMode scene = scene{editorMode = SelectionMode (cursor scene)}
data EditorPosition = EditorPosition {
editorX :: Double,
editorY :: Double
}
deriving (Show, Read, Eq, Typeable, Data)
instance Abelian EditorPosition where
zero = EditorPosition 0 0
(EditorPosition a b) +~ (EditorPosition x y) =
EditorPosition (a + x) (b + y)
(EditorPosition a b) -~ (EditorPosition x y) =
EditorPosition (a - x) (b - y)
-- * Editor objects
data EditorObject sort
= EditorObject {
editorSort :: sort,
editorPosition_ :: EditorPosition,
editorOEMState_ :: Maybe OEMState
}
deriving (Show, Functor)
editorPosition :: Accessor (EditorObject sort) EditorPosition
editorPosition = accessor editorPosition_ (\ a r -> r{editorPosition_ = a})
editorOEMState :: Accessor (EditorObject s) (Maybe OEMState)
editorOEMState = accessor editorOEMState_ (\ a r -> r{editorOEMState_ = a})
-- | modifies all EditorPositions of the OEMState of EditorObjects
modifyOEMEditorPositions :: (EditorPosition -> EditorPosition)
-> EditorObject s -> EditorObject s
modifyOEMEditorPositions f o@EditorObject{editorOEMState_ = Nothing} = o
modifyOEMEditorPositions f o@EditorObject{editorOEMState_ = Just (OEMState state)} =
editorOEMState ^= (Just $ OEMState $ transformBi f state) $ o
-- * object edit mode
class (Typeable a, Data a) => IsOEMState a where
oemEnterMode :: Sort sort o => EditorScene sort -> a -> a
oemUpdate :: EditorScene sort -> Button -> a -> OEMUpdateMonad a
oemNormalize :: Sort sort o => EditorScene sort -> a -> a
oemRender :: Sort sort o => Ptr QPainter -> Application -> Configuration -> EditorScene sort -> a -> IO ()
oemPickle :: a -> String
-- phantom type
oemHelp :: a -> String
type OEMUpdateMonad a = Either OEMException a
oemNothing :: OEMUpdateMonad a
oemNothing = Left OEMNothing
oemError :: OEMUpdateMonad a
oemError = Left OEMError
data OEMException
= OEMNothing -- Nothing to be done, state is the same (help screen is shown?)
  | OEMError -- an error occurred (emit an error sound)
data OEMState = forall a . IsOEMState a => OEMState a
deriving Typeable
instance Show OEMState where
show = const "<OEMState>"
instance Data OEMState where
gfoldl = oemStateDataInstanceError
gunfold = oemStateDataInstanceError
toConstr = oemStateDataInstanceError
dataTypeOf = oemStateDataInstanceError
oemStateDataInstanceError = error "don't use Data instance of OEMState"
instance IsOEMState OEMState where
oemEnterMode scene (OEMState a) = OEMState $ oemEnterMode scene a
oemUpdate scene button (OEMState a) = fmap OEMState $ oemUpdate scene button a
oemNormalize scene (OEMState a) = OEMState $ oemNormalize scene a
oemRender ptr app config scene (OEMState a) = oemRender ptr app config scene a
oemPickle (OEMState a) = oemPickle a
oemHelp (OEMState a) = oemHelp a
data OEMMethods = OEMMethods {
oemInitialize :: EditorPosition -> OEMState,
oemUnpickle :: String -> Maybe OEMState
}
-- * Objects
newtype SortId = SortId {getSortId :: FilePath}
deriving (Show, Read, Eq)
data RenderMode
= Iconified
| InScene {
offset :: Qt.Position Double
}
-- * Sort class
-- | Class that every sort of objects has to implement. This is the interface between
-- the game and the implemented objects.
-- Minimal complete definition: 'sortId', 'size', 'renderIconified',
-- 'initialize', 'immutableCopy', 'chipmunks', 'isUpdating', 'renderObject'
class (Show sort, Typeable sort, Show object, Typeable object) =>
Sort sort object |
sort -> object, object -> sort where
sortId :: sort -> SortId
-- free memory for allocated resources
freeSort :: sort -> IO ()
freeSort = const $ return ()
size :: sort -> Size Double
-- Sorts that support an object edit mode have to return Just (initial, unpickle) here.
objectEditMode :: sort -> Maybe OEMMethods
objectEditMode _ = Nothing
renderIconified :: sort -> Ptr QPainter -> IO ()
renderEditorObject :: Ptr QPainter -> Offset Double
-> EditorObject sort -> IO ()
renderEditorObject ptr offset editorObject = do
resetMatrix ptr
translate ptr offset
let sort = editorSort editorObject
translate ptr (epToPosition (size sort) (editorObject ^. editorPosition))
renderIconified sort ptr
-- if Nothing is passed as space, this should be an object
-- that is not added to the chipmunk space (i.e. background tiles)
initialize :: Application -> LevelFile -> Maybe Space
-> sort -> EditorPosition -> Maybe OEMState -> CachedTiles -> RM object
freeObject :: object -> IO ()
freeObject = const $ return ()
immutableCopy :: object -> IO object
chipmunks :: object -> [Chipmunk]
-- | only implemented in Nikki and robots
getControlledChipmunk :: Scene Object_ -> object -> Chipmunk
getControlledChipmunk scene o = error ("please implement getControlledChipmunk in: " ++ show o)
startControl :: Seconds -> object -> object
startControl now = id
isUpdating :: object -> Bool -- phantom type
update :: sort -> Application -> Configuration -> Space -> Scene Object_ -> Seconds
-> Contacts -> (Bool, ControlData)
-> Index -> object -> StateT (Scene Object_ -> Scene Object_) IO object
update sort app config space scene now contacts cd i o =
io $ updateNoSceneChange sort app config space scene now contacts cd o
updateNoSceneChange :: sort -> Application -> Configuration -> Space -> Scene Object_
-> Seconds -> Contacts -> (Bool, ControlData)
-> object -> IO object
updateNoSceneChange _ _ _ _ _ _ _ _ o = return o
renderObject :: Application -> Configuration
-> object -> sort -> Ptr QPainter -> Offset Double -> Seconds -> IO [RenderPixmap]
pushSceneChange :: (Scene Object_ -> Scene Object_) -> StateT (Scene Object_ -> Scene Object_) IO ()
pushSceneChange f = modify (>>> f)
-- * position conversions
-- from lower left to upper left
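-- For instance (illustrative values only): with a size of 64x64, an
-- EditorPosition 10 100 maps to Qt.Position 10 36, i.e. only the y
-- coordinate is shifted, by the height of the object:
-- > epToPosition (Size 64 64) (EditorPosition 10 100)  -- ~> Position 10.0 36.0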
epToPosition :: Size Double -> EditorPosition -> Qt.Position Double
epToPosition size (EditorPosition x y) = Position x (y - height size)
epToCenterPosition :: Size Double -> EditorPosition -> Qt.Position Double
epToCenterPosition size ep = epToPosition size ep +~ fmap (/ 2) (size2position size)
epToCenterVector :: Size Double -> EditorPosition -> Vector
epToCenterVector size = position2vector . epToCenterPosition size
editorComponentWise :: (Double -> Double -> Double) -> EditorPosition -> EditorPosition -> EditorPosition
editorComponentWise (#) (EditorPosition a b) (EditorPosition x y) =
EditorPosition (a # x) (b # y)
-- * Sort class wrappers
data Sort_
= forall sort object .
(Sort sort object, Show sort, Typeable sort) =>
Sort_ sort
| DummySort -- used if the wrapper object (Object_) will find the sort.
deriving Typeable
data Object_
= forall sort object .
(Sort sort object,
Show sort, Typeable sort,
Show object, Typeable object) =>
Object_ sort object
deriving (Typeable)
instance Show Object_ where
show (Object_ s o) = "Object_ (" ++ show o ++ ")"
instance Show Sort_ where
show (Sort_ s) = "Sort_ (" ++ show s ++ ")"
instance Eq Sort_ where
a == b = sortId a == sortId b
instance Sort Sort_ Object_ where
sortId (Sort_ s) = sortId s
freeSort (Sort_ s) = freeSort s
size (Sort_ s) = size s
objectEditMode (Sort_ s) = objectEditMode s
renderIconified (Sort_ s) = renderIconified s
renderEditorObject ptr offset editorObject =
case editorSort editorObject of
(Sort_ innerSort) ->
renderEditorObject ptr offset editorObject{editorSort = innerSort}
initialize app file space (Sort_ sort) editorPosition state cachedTiles =
Object_ sort <$> initialize app file space sort editorPosition state cachedTiles
freeObject (Object_ _ o) = freeObject o
immutableCopy (Object_ s o) = Object_ s <$> Base.Types.immutableCopy o
chipmunks (Object_ _ o) = chipmunks o
getControlledChipmunk scene (Object_ _ o) = getControlledChipmunk scene o
startControl now (Object_ sort o) = Object_ sort $ startControl now o
isUpdating (Object_ _ o) = isUpdating o
update DummySort app controls space mode now contacts cd i (Object_ sort o) =
Object_ sort <$> Base.Types.update sort app controls space mode now contacts cd i o
updateNoSceneChange DummySort app controls space mode now contacts cd (Object_ sort o) =
Object_ sort <$> updateNoSceneChange sort app controls space mode now contacts cd o
renderObject = error "Don't use this function, use render_ instead (that's type safe)"
sort_ :: Object_ -> Sort_
sort_ (Object_ sort _) = Sort_ sort
-- * level files
data LevelFile
= StandardLevel {
levelPath :: FilePath
, levelPackage :: FilePath
, levelFileName :: FilePath
, levelMetaData_ :: LevelMetaData
}
| UserLevel {
levelPath :: FilePath
, levelPackage :: FilePath
, levelFileName :: FilePath
, levelMetaData_ :: LevelMetaData
}
| EpisodeLevel {
levelEpisode :: Episode LevelFile
, levelPath :: FilePath
, levelPackage :: FilePath
, levelFileName :: FilePath
, levelMetaData_ :: LevelMetaData
}
| TemplateLevel {levelFilePath :: FilePath}
| UnknownLevelType {levelFilePath :: FilePath}
deriving (Show)
type LevelUID = String
-- | unique ID of a level
levelUID :: LevelFile -> LevelUID
levelUID (StandardLevel dir package file meta) =
"standardLevels" <//> package <//> file
levelUID (UserLevel dir package file meta) =
"userLevels" <//> package <//> file
levelUID (EpisodeLevel _ dir package file meta) =
"storyModeLevels" <//> package <//> file
levelUID (TemplateLevel path) =
"templateLevels" <//> path
levelUID (UnknownLevelType path) =
"unknownLevels" <//> path
getAbsoluteFilePath :: LevelFile -> FilePath
getAbsoluteFilePath (TemplateLevel p) = p
getAbsoluteFilePath (UnknownLevelType p) = p
getAbsoluteFilePath x = levelPath x </> levelPackage x </> levelFileName x
levelMetaData :: LevelFile -> LevelMetaData
levelMetaData StandardLevel{..} = levelMetaData_
levelMetaData UserLevel{..} = levelMetaData_
levelMetaData EpisodeLevel{..} = levelMetaData_
levelMetaData file =
LevelMetaData (guessName $ getAbsoluteFilePath file) Nothing Nothing Nothing Nothing
|
geocurnoff/nikki
|
src/Base/Types.hs
|
lgpl-3.0
| 23,024 | 0 | 20 | 5,265 | 6,153 | 3,302 | 2,851 | 548 | 4 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
module Test.Async.Common
( value
, TestException(..)
, module X
) where
import Data.Typeable
import Control.Exception.Lifted
import Test.Tasty as X
import Test.Tasty.HUnit as X
import Test.Tasty.TH as X
value :: Int
value = 42
data TestException = TestException
deriving (Eq, Show, Typeable)
instance Exception TestException
|
dmjio/lifted-async
|
tests/Test/Async/Common.hs
|
bsd-3-clause
| 410 | 0 | 6 | 67 | 98 | 63 | 35 | 16 | 1 |
{-# OPTIONS_GHC -Wall #-}
module Transform.Expression (crawlLet, checkPorts) where
import Control.Applicative ((<$>),(<*>))
import AST.Annotation ( Annotated(A) )
import AST.Expression.General
import qualified AST.Expression.Canonical as Canonical
import AST.Type (Type, CanonicalType)
crawlLet
:: ([def] -> Either a [def'])
-> Expr ann def var
-> Either a (Expr ann def' var)
crawlLet =
crawl (\_ _ -> return ()) (\_ _ -> return ())
checkPorts
:: (String -> CanonicalType -> Either a ())
-> (String -> CanonicalType -> Either a ())
-> Canonical.Expr
-> Either a Canonical.Expr
checkPorts inCheck outCheck expr =
crawl inCheck outCheck (mapM checkDef) expr
where
checkDef def@(Canonical.Definition _ body _) =
do _ <- checkPorts inCheck outCheck body
return def
crawl
:: (String -> Type var -> Either a ())
-> (String -> Type var -> Either a ())
-> ([def] -> Either a [def'])
-> Expr ann def var
-> Either a (Expr ann def' var)
crawl portInCheck portOutCheck defsTransform =
go
where
go (A srcSpan expr) =
A srcSpan <$>
case expr of
Var x ->
return (Var x)
Lambda p e ->
Lambda p <$> go e
Binop op e1 e2 ->
Binop op <$> go e1 <*> go e2
Case e cases ->
Case <$> go e <*> mapM (\(p,b) -> (,) p <$> go b) cases
Data name es ->
Data name <$> mapM go es
Literal lit ->
return (Literal lit)
Range e1 e2 ->
Range <$> go e1 <*> go e2
ExplicitList es ->
ExplicitList <$> mapM go es
App e1 e2 ->
App <$> go e1 <*> go e2
MultiIf branches ->
MultiIf <$> mapM (\(b,e) -> (,) <$> go b <*> go e) branches
Access e lbl ->
Access <$> go e <*> return lbl
Remove e lbl ->
Remove <$> go e <*> return lbl
Insert e lbl v ->
Insert <$> go e <*> return lbl <*> go v
Modify e fields ->
Modify <$> go e <*> mapM (\(k,v) -> (,) k <$> go v) fields
Record fields ->
Record <$> mapM (\(k,v) -> (,) k <$> go v) fields
Let defs body ->
Let <$> defsTransform defs <*> go body
GLShader uid src gltipe ->
return $ GLShader uid src gltipe
PortIn name st ->
do portInCheck name st
return $ PortIn name st
PortOut name st signal ->
do portOutCheck name st
PortOut name st <$> go signal
|
JoeyEremondi/utrecht-apa-p1
|
src/Transform/Expression.hs
|
bsd-3-clause
| 2,677 | 0 | 17 | 1,021 | 1,025 | 501 | 524 | 74 | 19 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Program.HcPkg
-- Copyright : Duncan Coutts 2009, 2013
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- This module provides an library interface to the @hc-pkg@ program.
-- Currently only GHC, GHCJS and LHC have hc-pkg programs.
module Distribution.Simple.Program.HcPkg (
HcPkgInfo(..),
init,
invoke,
register,
reregister,
registerMultiInstance,
unregister,
recache,
expose,
hide,
dump,
describe,
list,
-- * Program invocations
initInvocation,
registerInvocation,
reregisterInvocation,
registerMultiInstanceInvocation,
unregisterInvocation,
recacheInvocation,
exposeInvocation,
hideInvocation,
dumpInvocation,
describeInvocation,
listInvocation,
) where
import Prelude hiding (init)
import Distribution.Package
( PackageId, ComponentId(..) )
import Distribution.InstalledPackageInfo
( InstalledPackageInfo, InstalledPackageInfo(..)
, showInstalledPackageInfo
, emptyInstalledPackageInfo, fieldsInstalledPackageInfo )
import Distribution.ParseUtils
import Distribution.Simple.Compiler
( PackageDB(..), PackageDBStack )
import Distribution.Simple.Program.Types
( ConfiguredProgram(programId) )
import Distribution.Simple.Program.Run
( ProgramInvocation(..), IOEncoding(..), programInvocation
, runProgramInvocation, getProgramInvocationOutput )
import Distribution.Text
( display, simpleParse )
import Distribution.Simple.Utils
( die, writeUTF8File )
import Distribution.Verbosity
( Verbosity, deafening, silent )
import Distribution.Compat.Exception
( catchIO )
import Data.Char
( isSpace )
import Data.List
( stripPrefix )
import System.FilePath as FilePath
( (</>), (<.>)
, splitPath, splitDirectories, joinPath, isPathSeparator )
import qualified System.FilePath.Posix as FilePath.Posix
-- | Information about the features and capabilities of an @hc-pkg@
-- program.
--
data HcPkgInfo = HcPkgInfo
{ hcPkgProgram :: ConfiguredProgram
, noPkgDbStack :: Bool -- ^ no package DB stack supported
, noVerboseFlag :: Bool -- ^ hc-pkg does not support verbosity flags
, flagPackageConf :: Bool -- ^ use package-conf option instead of package-db
, supportsDirDbs :: Bool -- ^ supports directory style package databases
, requiresDirDbs :: Bool -- ^ requires directory style package databases
, nativeMultiInstance :: Bool -- ^ supports --enable-multi-instance flag
, recacheMultiInstance :: Bool -- ^ supports multi-instance via recache
}
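-- A rough sketch of how a compiler's support code might fill this in
-- (illustrative values only, not authoritative):
--
-- > exampleHcPkgInfo :: ConfiguredProgram -> HcPkgInfo
-- > exampleHcPkgInfo prog = HcPkgInfo
-- >   { hcPkgProgram         = prog
-- >   , noPkgDbStack         = False
-- >   , noVerboseFlag        = False
-- >   , flagPackageConf      = False
-- >   , supportsDirDbs       = True
-- >   , requiresDirDbs       = True
-- >   , nativeMultiInstance  = True
-- >   , recacheMultiInstance = True
-- >   }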
-- | Call @hc-pkg@ to initialise a package database at the location {path}.
--
-- > hc-pkg init {path}
--
init :: HcPkgInfo -> Verbosity -> Bool -> FilePath -> IO ()
init hpi verbosity preferCompat path
| not (supportsDirDbs hpi)
|| (not (requiresDirDbs hpi) && preferCompat)
= writeFile path "[]"
| otherwise
= runProgramInvocation verbosity (initInvocation hpi verbosity path)
-- | Run @hc-pkg@ using a given package DB stack, directly forwarding the
-- provided command-line arguments to it.
invoke :: HcPkgInfo -> Verbosity -> PackageDBStack -> [String] -> IO ()
invoke hpi verbosity dbStack extraArgs =
runProgramInvocation verbosity invocation
where
args = packageDbStackOpts hpi dbStack ++ extraArgs
invocation = programInvocation (hcPkgProgram hpi) args
-- | Call @hc-pkg@ to register a package.
--
-- > hc-pkg register {filename | -} [--user | --global | --package-db]
--
register :: HcPkgInfo -> Verbosity -> PackageDBStack
-> Either FilePath
InstalledPackageInfo
-> IO ()
register hpi verbosity packagedb pkgFile =
runProgramInvocation verbosity
(registerInvocation hpi verbosity packagedb pkgFile)
-- | Call @hc-pkg@ to re-register a package.
--
-- > hc-pkg register {filename | -} [--user | --global | --package-db]
--
reregister :: HcPkgInfo -> Verbosity -> PackageDBStack
-> Either FilePath
InstalledPackageInfo
-> IO ()
reregister hpi verbosity packagedb pkgFile =
runProgramInvocation verbosity
(reregisterInvocation hpi verbosity packagedb pkgFile)
registerMultiInstance :: HcPkgInfo -> Verbosity
-> PackageDBStack
-> InstalledPackageInfo
-> IO ()
registerMultiInstance hpi verbosity packagedbs pkgInfo
| nativeMultiInstance hpi
= runProgramInvocation verbosity
(registerMultiInstanceInvocation hpi verbosity packagedbs (Right pkgInfo))
-- This is a trick. Older versions of GHC do not support the
-- --enable-multi-instance flag for ghc-pkg register but it turns out that
-- the same ability is available by using ghc-pkg recache. The recache
-- command is there to support distro package managers that like to work
-- by just installing files and running update commands, rather than
-- special add/remove commands. So the way to register by this method is
-- to write the package registration file directly into the package db and
-- then call hc-pkg recache.
--
| recacheMultiInstance hpi
= do let pkgdb = last packagedbs
writeRegistrationFileDirectly hpi pkgdb pkgInfo
recache hpi verbosity pkgdb
| otherwise
= die $ "HcPkg.registerMultiInstance: the compiler does not support "
++ "registering multiple instances of packages."
writeRegistrationFileDirectly :: HcPkgInfo
-> PackageDB
-> InstalledPackageInfo
-> IO ()
writeRegistrationFileDirectly hpi (SpecificPackageDB dir) pkgInfo
| supportsDirDbs hpi
= do let pkgfile = dir </> display (installedComponentId pkgInfo) <.> "conf"
writeUTF8File pkgfile (showInstalledPackageInfo pkgInfo)
| otherwise
= die $ "HcPkg.writeRegistrationFileDirectly: compiler does not support dir style package dbs"
writeRegistrationFileDirectly _ _ _ =
  -- We don't know here what the dirs for the global or user dbs are;
  -- if that's needed it'll require a bit more plumbing to support.
die $ "HcPkg.writeRegistrationFileDirectly: only supports SpecificPackageDB for now"
-- | Call @hc-pkg@ to unregister a package
--
-- > hc-pkg unregister [pkgid] [--user | --global | --package-db]
--
unregister :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId -> IO ()
unregister hpi verbosity packagedb pkgid =
runProgramInvocation verbosity
(unregisterInvocation hpi verbosity packagedb pkgid)
-- | Call @hc-pkg@ to recache the registered packages.
--
-- > hc-pkg recache [--user | --global | --package-db]
--
recache :: HcPkgInfo -> Verbosity -> PackageDB -> IO ()
recache hpi verbosity packagedb =
runProgramInvocation verbosity
(recacheInvocation hpi verbosity packagedb)
-- | Call @hc-pkg@ to expose a package.
--
-- > hc-pkg expose [pkgid] [--user | --global | --package-db]
--
expose :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId -> IO ()
expose hpi verbosity packagedb pkgid =
runProgramInvocation verbosity
(exposeInvocation hpi verbosity packagedb pkgid)
-- | Call @hc-pkg@ to retrieve a specific package
--
-- > hc-pkg describe [pkgid] [--user | --global | --package-db]
--
describe :: HcPkgInfo -> Verbosity -> PackageDBStack -> PackageId -> IO [InstalledPackageInfo]
describe hpi verbosity packagedb pid = do
output <- getProgramInvocationOutput verbosity
(describeInvocation hpi verbosity packagedb pid)
`catchIO` \_ -> return ""
case parsePackages output of
Left ok -> return ok
_ -> die $ "failed to parse output of '"
++ programId (hcPkgProgram hpi) ++ " describe " ++ display pid ++ "'"
-- | Call @hc-pkg@ to hide a package.
--
-- > hc-pkg hide [pkgid] [--user | --global | --package-db]
--
hide :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId -> IO ()
hide hpi verbosity packagedb pkgid =
runProgramInvocation verbosity
(hideInvocation hpi verbosity packagedb pkgid)
-- | Call @hc-pkg@ to get all the details of all the packages in the given
-- package database.
--
dump :: HcPkgInfo -> Verbosity -> PackageDB -> IO [InstalledPackageInfo]
dump hpi verbosity packagedb = do
output <- getProgramInvocationOutput verbosity
(dumpInvocation hpi verbosity packagedb)
`catchIO` \_ -> die $ programId (hcPkgProgram hpi) ++ " dump failed"
case parsePackages output of
Left ok -> return ok
_ -> die $ "failed to parse output of '"
++ programId (hcPkgProgram hpi) ++ " dump'"
parsePackages :: String -> Either [InstalledPackageInfo] [PError]
parsePackages str =
let parsed = map parseInstalledPackageInfo' (splitPkgs str)
in case [ msg | ParseFailed msg <- parsed ] of
[] -> Left [ setComponentId
. maybe id mungePackagePaths (pkgRoot pkg)
$ pkg
| ParseOk _ pkg <- parsed ]
msgs -> Right msgs
where
parseInstalledPackageInfo' =
parseFieldsFlat fieldsInstalledPackageInfo emptyInstalledPackageInfo
--TODO: this could be a lot faster. We're doing normaliseLineEndings twice
-- and converting back and forth with lines/unlines.
splitPkgs :: String -> [String]
splitPkgs = checkEmpty . map unlines . splitWith ("---" ==) . lines
where
-- Handle the case of there being no packages at all.
checkEmpty [s] | all isSpace s = []
checkEmpty ss = ss
splitWith :: (a -> Bool) -> [a] -> [[a]]
splitWith p xs = ys : case zs of
[] -> []
_:ws -> splitWith p ws
where (ys,zs) = break p xs
mungePackagePaths :: FilePath -> InstalledPackageInfo -> InstalledPackageInfo
-- Perform path/URL variable substitution as per the Cabal ${pkgroot} spec
-- (http://www.haskell.org/pipermail/libraries/2009-May/011772.html)
-- Paths/URLs can be relative to ${pkgroot} or ${pkgrooturl}.
-- The "pkgroot" is the directory containing the package database.
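-- A sketch of the intended effect (hypothetical values): with a pkgroot of
-- /opt/pkgdb, a registration field such as
--
-- > import-dirs: ${pkgroot}/foo-1.0/lib
--
-- is rewritten to /opt/pkgdb/foo-1.0/lib, while ${pkgrooturl}-prefixed fields
-- (e.g. haddock-html) are rewritten to file: URLs rooted at the same place,
-- always using posix '/' separators.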
mungePackagePaths pkgroot pkginfo =
pkginfo {
importDirs = mungePaths (importDirs pkginfo),
includeDirs = mungePaths (includeDirs pkginfo),
libraryDirs = mungePaths (libraryDirs pkginfo),
frameworkDirs = mungePaths (frameworkDirs pkginfo),
haddockInterfaces = mungePaths (haddockInterfaces pkginfo),
haddockHTMLs = mungeUrls (haddockHTMLs pkginfo)
}
where
mungePaths = map mungePath
mungeUrls = map mungeUrl
mungePath p = case stripVarPrefix "${pkgroot}" p of
Just p' -> pkgroot </> p'
Nothing -> p
mungeUrl p = case stripVarPrefix "${pkgrooturl}" p of
Just p' -> toUrlPath pkgroot p'
Nothing -> p
toUrlPath r p = "file:///"
-- URLs always use posix style '/' separators:
++ FilePath.Posix.joinPath (r : FilePath.splitDirectories p)
stripVarPrefix var p =
case splitPath p of
(root:path') -> case stripPrefix var root of
Just [sep] | isPathSeparator sep -> Just (joinPath path')
_ -> Nothing
_ -> Nothing
-- Older installed package info files did not have the installedComponentId
-- field, so if it is missing then we fill it as the source package ID.
setComponentId :: InstalledPackageInfo -> InstalledPackageInfo
setComponentId pkginfo@InstalledPackageInfo {
installedComponentId = ComponentId "",
sourcePackageId = pkgid
}
= pkginfo {
--TODO use a proper named function for the conversion
-- from source package id to installed package id
installedComponentId = ComponentId (display pkgid)
}
setComponentId pkginfo = pkginfo
-- | Call @hc-pkg@ to get the source package Id of all the packages in the
-- given package database.
--
-- This is much less information than with 'dump', but also rather quicker.
-- Note in particular that it does not include the 'ComponentId', just
-- the source 'PackageId' which is not necessarily unique in any package db.
--
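-- With @--simple-output@ the raw output is just whitespace-separated package
-- ids (roughly @"base-4.8.2.0 ghc-prim-0.4.0.0"@, values illustrative), which
-- is why 'parsePackageIds' below can simply map 'simpleParse' over 'words'.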
list :: HcPkgInfo -> Verbosity -> PackageDB
-> IO [PackageId]
list hpi verbosity packagedb = do
output <- getProgramInvocationOutput verbosity
(listInvocation hpi verbosity packagedb)
`catchIO` \_ -> die $ programId (hcPkgProgram hpi) ++ " list failed"
case parsePackageIds output of
Just ok -> return ok
_ -> die $ "failed to parse output of '"
++ programId (hcPkgProgram hpi) ++ " list'"
where
parsePackageIds = sequence . map simpleParse . words
--------------------------
-- The program invocations
--
initInvocation :: HcPkgInfo -> Verbosity -> FilePath -> ProgramInvocation
initInvocation hpi verbosity path =
programInvocation (hcPkgProgram hpi) args
where
args = ["init", path]
++ verbosityOpts hpi verbosity
registerInvocation, reregisterInvocation, registerMultiInstanceInvocation
:: HcPkgInfo -> Verbosity -> PackageDBStack
-> Either FilePath InstalledPackageInfo
-> ProgramInvocation
registerInvocation = registerInvocation' "register" False
reregisterInvocation = registerInvocation' "update" False
registerMultiInstanceInvocation = registerInvocation' "update" True
registerInvocation' :: String -> Bool
-> HcPkgInfo -> Verbosity -> PackageDBStack
-> Either FilePath InstalledPackageInfo
-> ProgramInvocation
registerInvocation' cmdname multiInstance hpi
verbosity packagedbs pkgFileOrInfo =
case pkgFileOrInfo of
Left pkgFile ->
programInvocation (hcPkgProgram hpi) (args pkgFile)
Right pkgInfo ->
(programInvocation (hcPkgProgram hpi) (args "-")) {
progInvokeInput = Just (showInstalledPackageInfo pkgInfo),
progInvokeInputEncoding = IOEncodingUTF8
}
where
args file = [cmdname, file]
++ (if noPkgDbStack hpi
then [packageDbOpts hpi (last packagedbs)]
else packageDbStackOpts hpi packagedbs)
++ [ "--enable-multi-instance" | multiInstance ]
++ verbosityOpts hpi verbosity
unregisterInvocation :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId
-> ProgramInvocation
unregisterInvocation hpi verbosity packagedb pkgid =
programInvocation (hcPkgProgram hpi) $
["unregister", packageDbOpts hpi packagedb, display pkgid]
++ verbosityOpts hpi verbosity
recacheInvocation :: HcPkgInfo -> Verbosity -> PackageDB
-> ProgramInvocation
recacheInvocation hpi verbosity packagedb =
programInvocation (hcPkgProgram hpi) $
["recache", packageDbOpts hpi packagedb]
++ verbosityOpts hpi verbosity
exposeInvocation :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId
-> ProgramInvocation
exposeInvocation hpi verbosity packagedb pkgid =
programInvocation (hcPkgProgram hpi) $
["expose", packageDbOpts hpi packagedb, display pkgid]
++ verbosityOpts hpi verbosity
describeInvocation :: HcPkgInfo -> Verbosity -> PackageDBStack -> PackageId
-> ProgramInvocation
describeInvocation hpi verbosity packagedbs pkgid =
programInvocation (hcPkgProgram hpi) $
["describe", display pkgid]
++ (if noPkgDbStack hpi
then [packageDbOpts hpi (last packagedbs)]
else packageDbStackOpts hpi packagedbs)
++ verbosityOpts hpi verbosity
hideInvocation :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId
-> ProgramInvocation
hideInvocation hpi verbosity packagedb pkgid =
programInvocation (hcPkgProgram hpi) $
["hide", packageDbOpts hpi packagedb, display pkgid]
++ verbosityOpts hpi verbosity
dumpInvocation :: HcPkgInfo -> Verbosity -> PackageDB -> ProgramInvocation
dumpInvocation hpi _verbosity packagedb =
(programInvocation (hcPkgProgram hpi) args) {
progInvokeOutputEncoding = IOEncodingUTF8
}
where
args = ["dump", packageDbOpts hpi packagedb]
++ verbosityOpts hpi silent
-- We use verbosity level 'silent' because it is important that we
-- do not contaminate the output with info/debug messages.
listInvocation :: HcPkgInfo -> Verbosity -> PackageDB -> ProgramInvocation
listInvocation hpi _verbosity packagedb =
(programInvocation (hcPkgProgram hpi) args) {
progInvokeOutputEncoding = IOEncodingUTF8
}
where
args = ["list", "--simple-output", packageDbOpts hpi packagedb]
++ verbosityOpts hpi silent
-- We use verbosity level 'silent' because it is important that we
-- do not contaminate the output with info/debug messages.
packageDbStackOpts :: HcPkgInfo -> PackageDBStack -> [String]
packageDbStackOpts hpi dbstack = case dbstack of
(GlobalPackageDB:UserPackageDB:dbs) -> "--global"
: "--user"
: map specific dbs
(GlobalPackageDB:dbs) -> "--global"
: ("--no-user-" ++ packageDbFlag hpi)
: map specific dbs
_ -> ierror
where
specific (SpecificPackageDB db) = "--" ++ packageDbFlag hpi ++ "=" ++ db
specific _ = ierror
ierror :: a
ierror = error ("internal error: unexpected package db stack: " ++ show dbstack)
packageDbFlag :: HcPkgInfo -> String
packageDbFlag hpi
| flagPackageConf hpi
= "package-conf"
| otherwise
= "package-db"
packageDbOpts :: HcPkgInfo -> PackageDB -> String
packageDbOpts _ GlobalPackageDB = "--global"
packageDbOpts _ UserPackageDB = "--user"
packageDbOpts hpi (SpecificPackageDB db) = "--" ++ packageDbFlag hpi ++ "=" ++ db
verbosityOpts :: HcPkgInfo -> Verbosity -> [String]
verbosityOpts hpi v
| noVerboseFlag hpi
= []
| v >= deafening = ["-v2"]
| v == silent = ["-v0"]
| otherwise = []
|
randen/cabal
|
Cabal/Distribution/Simple/Program/HcPkg.hs
|
bsd-3-clause
| 18,337 | 0 | 16 | 4,572 | 3,583 | 1,888 | 1,695 | 321 | 5 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[RnPat]{Renaming of patterns}
Basically dependency analysis.
Handles @Match@, @GRHSs@, @HsExpr@, and @Qualifier@ datatypes. In
general, all of these functions return a renamed thing, and a set of
free variables.
-}
{-# LANGUAGE RankNTypes, ScopedTypeVariables, CPP #-}
module ETA.Rename.RnPat (-- main entry points
rnPat, rnPats, rnBindPat, rnPatAndThen,
NameMaker, applyNameMaker, -- a utility for making names:
localRecNameMaker, topRecNameMaker, -- sometimes we want to make local names,
-- sometimes we want to make top (qualified) names.
isTopRecNameMaker,
rnHsRecFields, HsRecFieldContext(..),
-- CpsRn monad
CpsRn, liftCps,
-- Literals
rnLit, rnOverLit,
-- Pattern Error messages that are also used elsewhere
checkTupSize, patSigErr
) where
-- ENH: thin imports to only what is necessary for patterns
import {-# SOURCE #-} ETA.Rename.RnExpr ( rnLExpr )
-- import {-# SOURCE #-} ETA.Rename.RnSplice ( rnSplicePat )
import ETA.Rename.RnSplice ( rnSplicePat )
-- import {-# SOURCE #-} ETA.TypeCheck.TcSplice ( runQuasiQuotePat )
import ETA.TypeCheck.TcSplice ( runQuasiQuotePat )
import ETA.HsSyn.HsSyn
import ETA.TypeCheck.TcRnMonad
import ETA.TypeCheck.TcHsSyn ( hsOverLitName )
import ETA.Rename.RnEnv
import ETA.Rename.RnTypes
import ETA.Main.DynFlags
import ETA.Prelude.PrelNames
import ETA.Types.TyCon ( tyConName )
import ETA.BasicTypes.ConLike
import ETA.BasicTypes.DataCon ( dataConTyCon )
import ETA.Types.TypeRep ( TyThing(..) )
import ETA.BasicTypes.Name
import ETA.BasicTypes.NameSet
import ETA.BasicTypes.RdrName
import ETA.BasicTypes.BasicTypes
import ETA.Utils.Util
import ETA.Utils.ListSetOps ( removeDups )
import ETA.Utils.Outputable
import ETA.BasicTypes.SrcLoc
import ETA.Utils.FastString
import ETA.BasicTypes.Literal ( inCharRange )
import ETA.Prelude.TysWiredIn ( nilDataCon )
import ETA.BasicTypes.DataCon ( dataConName )
import Control.Monad ( when, liftM, ap )
import Data.Ratio
#include "HsVersions.h"
{-
*********************************************************
* *
The CpsRn Monad
* *
*********************************************************
Note [CpsRn monad]
~~~~~~~~~~~~~~~~~~
The CpsRn monad uses continuation-passing style to support this
style of programming:
do { ...
; ns <- bindNames rs
; ...blah... }
where rs::[RdrName], ns::[Name]
The idea is that '...blah...'
a) sees the bindings of ns
b) returns the free variables it mentions
so that bindNames can report unused ones
In particular,
mapM rnPatAndThen [p1, p2, p3]
has a *left-to-right* scoping: it makes the binders in
p1 scope over p2,p3.
-}
newtype CpsRn b = CpsRn { unCpsRn :: forall r. (b -> RnM (r, FreeVars))
-> RnM (r, FreeVars) }
-- See Note [CpsRn monad]
instance Functor CpsRn where
fmap = liftM
instance Applicative CpsRn where
pure = return
(<*>) = ap
instance Monad CpsRn where
return x = CpsRn (\k -> k x)
(CpsRn m) >>= mk = CpsRn (\k -> m (\v -> unCpsRn (mk v) k))
runCps :: CpsRn a -> RnM (a, FreeVars)
runCps (CpsRn m) = m (\r -> return (r, emptyFVs))
liftCps :: RnM a -> CpsRn a
liftCps rn_thing = CpsRn (\k -> rn_thing >>= k)
liftCpsFV :: RnM (a, FreeVars) -> CpsRn a
liftCpsFV rn_thing = CpsRn (\k -> do { (v,fvs1) <- rn_thing
; (r,fvs2) <- k v
; return (r, fvs1 `plusFV` fvs2) })
wrapSrcSpanCps :: (a -> CpsRn b) -> Located a -> CpsRn (Located b)
-- Set the location, and also wrap it around the value returned
wrapSrcSpanCps fn (L loc a)
= CpsRn (\k -> setSrcSpan loc $
unCpsRn (fn a) $ \v ->
k (L loc v))
lookupConCps :: Located RdrName -> CpsRn (Located Name)
lookupConCps con_rdr
= CpsRn (\k -> do { con_name <- lookupLocatedOccRn con_rdr
; (r, fvs) <- k con_name
; return (r, addOneFV fvs (unLoc con_name)) })
-- We add the constructor name to the free vars
-- See Note [Patterns are uses]
{-
Note [Patterns are uses]
~~~~~~~~~~~~~~~~~~~~~~~~
Consider
module Foo( f, g ) where
data T = T1 | T2
f T1 = True
f T2 = False
g _ = T1
Arguably we should report T2 as unused, even though it appears in a
pattern, because it never occurs in a constructed position. See
Trac #7336.
However, implementing this in the face of pattern synonyms would be
less straightforward, since given two pattern synonyms
pattern P1 <- P2
pattern P2 <- ()
we need to observe the dependency between P1 and P2 so that type
checking can be done in the correct order (just like for value
bindings). Dependencies between bindings is analyzed in the renamer,
where we don't know yet whether P2 is a constructor or a pattern
synonym. So for now, we do report conid occurrences in patterns as
uses.
*********************************************************
* *
Name makers
* *
*********************************************************
Externally abstract type of name makers,
which is how you go from a RdrName to a Name
-}
data NameMaker
= LamMk -- Lambdas
Bool -- True <=> report unused bindings
-- (even if True, the warning only comes out
-- if -fwarn-unused-matches is on)
| LetMk -- Let bindings, incl top level
-- Do *not* check for unused bindings
TopLevelFlag
MiniFixityEnv
topRecNameMaker :: MiniFixityEnv -> NameMaker
topRecNameMaker fix_env = LetMk TopLevel fix_env
isTopRecNameMaker :: NameMaker -> Bool
isTopRecNameMaker (LetMk TopLevel _) = True
isTopRecNameMaker _ = False
localRecNameMaker :: MiniFixityEnv -> NameMaker
localRecNameMaker fix_env = LetMk NotTopLevel fix_env
matchNameMaker :: HsMatchContext a -> NameMaker
matchNameMaker ctxt = LamMk report_unused
where
-- Do not report unused names in interactive contexts
-- i.e. when you type 'x <- e' at the GHCi prompt
report_unused = case ctxt of
StmtCtxt GhciStmtCtxt -> False
-- also, don't warn in pattern quotes, as there
-- is no RHS where the variables can be used!
ThPatQuote -> False
_ -> True
rnHsSigCps :: HsWithBndrs RdrName (LHsType RdrName)
-> CpsRn (HsWithBndrs Name (LHsType Name))
rnHsSigCps sig
= CpsRn (rnHsBndrSig PatCtx sig)
newPatLName :: NameMaker -> Located RdrName -> CpsRn (Located Name)
newPatLName name_maker rdr_name@(L loc _)
= do { name <- newPatName name_maker rdr_name
; return (L loc name) }
newPatName :: NameMaker -> Located RdrName -> CpsRn Name
newPatName (LamMk report_unused) rdr_name
= CpsRn (\ thing_inside ->
do { name <- newLocalBndrRn rdr_name
; (res, fvs) <- bindLocalNames [name] (thing_inside name)
; when report_unused $ warnUnusedMatches [name] fvs
; return (res, name `delFV` fvs) })
newPatName (LetMk is_top fix_env) rdr_name
= CpsRn (\ thing_inside ->
do { name <- case is_top of
NotTopLevel -> newLocalBndrRn rdr_name
TopLevel -> newTopSrcBinder rdr_name
; bindLocalNames [name] $ -- Do *not* use bindLocalNameFV here
-- See Note [View pattern usage]
addLocalFixities fix_env [name] $
thing_inside name })
-- Note: the bindLocalNames is somewhat suspicious
-- because it binds a top-level name as a local name.
-- however, this binding seems to work, and it only exists for
-- the duration of the patterns and the continuation;
-- then the top-level name is added to the global env
-- before going on to the RHSes (see RnSource.lhs).
{-
Note [View pattern usage]
~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
let (r, (r -> x)) = x in ...
Here the pattern binds 'r', and then uses it *only* in the view pattern.
We want to "see" this use, and in let-bindings we collect all uses and
report unused variables at the binding level. So we must use bindLocalNames
here, *not* bindLocalNameFV. Trac #3943.
*********************************************************
* *
External entry points
* *
*********************************************************
There are various entry points to renaming patterns, depending on
(1) whether the names created should be top-level names or local names
(2) whether the scope of the names is entirely given in a continuation
(e.g., in a case or lambda, but not in a let or at the top-level,
because of the way mutually recursive bindings are handled)
(3) whether a type signature in the pattern can bind
lexically-scoped type variables (for unpacking existential
type vars in data constructors)
(4) whether we do duplicate and unused variable checking
(5) whether there are fixity declarations associated with the names
bound by the patterns that need to be brought into scope with them.
Rather than burdening the clients of this module with all of these choices,
we export the three points in this design space that we actually need:
-}
-- ----------- Entry point 1: rnPats -------------------
-- Binds local names; the scope of the bindings is entirely in the thing_inside
-- * allows type sigs to bind type vars
-- * local namemaker
-- * unused and duplicate checking
-- * no fixities
rnPats :: HsMatchContext Name -- for error messages
-> [LPat RdrName]
-> ([LPat Name] -> RnM (a, FreeVars))
-> RnM (a, FreeVars)
rnPats ctxt pats thing_inside
= do { envs_before <- getRdrEnvs
-- (1) rename the patterns, bringing into scope all of the term variables
-- (2) then do the thing inside.
; unCpsRn (rnLPatsAndThen (matchNameMaker ctxt) pats) $ \ pats' -> do
{ -- Check for duplicated and shadowed names
-- Must do this *after* renaming the patterns
-- See Note [Collect binders only after renaming] in HsUtils
-- Because we don't bind the vars all at once, we can't
-- check incrementally for duplicates;
-- Nor can we check incrementally for shadowing, else we'll
-- complain *twice* about duplicates e.g. f (x,x) = ...
; addErrCtxt doc_pat $
checkDupAndShadowedNames envs_before $
collectPatsBinders pats'
; thing_inside pats' } }
where
doc_pat = ptext (sLit "In") <+> pprMatchContext ctxt
rnPat :: HsMatchContext Name -- for error messages
-> LPat RdrName
-> (LPat Name -> RnM (a, FreeVars))
-> RnM (a, FreeVars) -- Variables bound by pattern do not
-- appear in the result FreeVars
rnPat ctxt pat thing_inside
= rnPats ctxt [pat] (\pats' -> let [pat'] = pats' in thing_inside pat')
applyNameMaker :: NameMaker -> Located RdrName -> RnM (Located Name)
applyNameMaker mk rdr = do { (n, _fvs) <- runCps (newPatLName mk rdr)
; return n }
-- ----------- Entry point 2: rnBindPat -------------------
-- Binds local names; in a recursive scope that involves other bound vars
-- e.g let { (x, Just y) = e1; ... } in ...
-- * does NOT allows type sig to bind type vars
-- * local namemaker
-- * no unused and duplicate checking
-- * fixities might be coming in
rnBindPat :: NameMaker
-> LPat RdrName
-> RnM (LPat Name, FreeVars)
-- Returned FreeVars are the free variables of the pattern,
-- of course excluding variables bound by this pattern
rnBindPat name_maker pat = runCps (rnLPatAndThen name_maker pat)
{-
*********************************************************
* *
The main event
* *
*********************************************************
-}
-- ----------- Entry point 3: rnLPatAndThen -------------------
-- General version: parametrized by how you make new names
rnLPatsAndThen :: NameMaker -> [LPat RdrName] -> CpsRn [LPat Name]
rnLPatsAndThen mk = mapM (rnLPatAndThen mk)
-- Despite the map, the monad ensures that each pattern binds
-- variables that may be mentioned in subsequent patterns in the list
--------------------
-- The workhorse
rnLPatAndThen :: NameMaker -> LPat RdrName -> CpsRn (LPat Name)
rnLPatAndThen nm lpat = wrapSrcSpanCps (rnPatAndThen nm) lpat
rnPatAndThen :: NameMaker -> Pat RdrName -> CpsRn (Pat Name)
rnPatAndThen _ (WildPat _) = return (WildPat placeHolderType)
rnPatAndThen mk (ParPat pat) = do { pat' <- rnLPatAndThen mk pat; return (ParPat pat') }
rnPatAndThen mk (LazyPat pat) = do { pat' <- rnLPatAndThen mk pat; return (LazyPat pat') }
rnPatAndThen mk (BangPat pat) = do { pat' <- rnLPatAndThen mk pat; return (BangPat pat') }
rnPatAndThen mk (VarPat rdr) = do { loc <- liftCps getSrcSpanM
; name <- newPatName mk (L loc rdr)
; return (VarPat name) }
-- we need to bind pattern variables for view pattern expressions
-- (e.g. in the pattern (x, x -> y) x needs to be bound in the rhs of the tuple)
rnPatAndThen mk (SigPatIn pat sig)
-- When renaming a pattern type signature (e.g. f (a :: T) = ...), it is
-- important to rename its type signature _before_ renaming the rest of the
-- pattern, so that type variables are first bound by the _outermost_ pattern
-- type signature they occur in. This keeps the type checker happy when
-- pattern type signatures happen to be nested (#7827)
--
-- f ((Just (x :: a) :: Maybe a)
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~^ `a' is first bound here
-- ~~~~~~~~~~~~~~~^ the same `a' then used here
= do { sig' <- rnHsSigCps sig
; pat' <- rnLPatAndThen mk pat
; return (SigPatIn pat' sig') }
rnPatAndThen mk (LitPat lit)
| HsString src s <- lit
= do { ovlStr <- liftCps (xoptM Opt_OverloadedStrings)
; if ovlStr
then rnPatAndThen mk
(mkNPat (noLoc (mkHsIsString src s placeHolderType))
Nothing)
else normal_lit }
| otherwise = normal_lit
where
normal_lit = do { liftCps (rnLit lit); return (LitPat lit) }
rnPatAndThen _ (NPat (L l lit) mb_neg _eq)
= do { lit' <- liftCpsFV $ rnOverLit lit
; mb_neg' <- liftCpsFV $ case mb_neg of
Nothing -> return (Nothing, emptyFVs)
Just _ -> do { (neg, fvs) <- lookupSyntaxName negateName
; return (Just neg, fvs) }
; eq' <- liftCpsFV $ lookupSyntaxName eqName
; return (NPat (L l lit') mb_neg' eq') }
rnPatAndThen mk (NPlusKPat rdr (L l lit) _ _)
= do { new_name <- newPatName mk rdr
; lit' <- liftCpsFV $ rnOverLit lit
; minus <- liftCpsFV $ lookupSyntaxName minusName
; ge <- liftCpsFV $ lookupSyntaxName geName
; return (NPlusKPat (L (nameSrcSpan new_name) new_name)
(L l lit') ge minus) }
-- The Report says that n+k patterns must be in Integral
rnPatAndThen mk (AsPat rdr pat)
= do { new_name <- newPatLName mk rdr
; pat' <- rnLPatAndThen mk pat
; return (AsPat new_name pat') }
rnPatAndThen mk p@(ViewPat expr pat _ty)
= do { liftCps $ do { vp_flag <- xoptM Opt_ViewPatterns
; checkErr vp_flag (badViewPat p) }
-- Because of the way we're arranging the recursive calls,
-- this will be in the right context
; expr' <- liftCpsFV $ rnLExpr expr
; pat' <- rnLPatAndThen mk pat
-- Note: at this point the PreTcType in ty can only be a placeHolder
-- ; return (ViewPat expr' pat' ty) }
; return (ViewPat expr' pat' placeHolderType) }
rnPatAndThen mk (ConPatIn con stuff)
-- rnConPatAndThen takes care of reconstructing the pattern
-- The pattern for the empty list needs to be replaced by an empty explicit list pattern when overloaded lists is turned on.
= case unLoc con == nameRdrName (dataConName nilDataCon) of
True -> do { ol_flag <- liftCps $ xoptM Opt_OverloadedLists
; if ol_flag then rnPatAndThen mk (ListPat [] placeHolderType Nothing)
else rnConPatAndThen mk con stuff}
False -> rnConPatAndThen mk con stuff
rnPatAndThen mk (ListPat pats _ _)
= do { opt_OverloadedLists <- liftCps $ xoptM Opt_OverloadedLists
; pats' <- rnLPatsAndThen mk pats
; case opt_OverloadedLists of
True -> do { (to_list_name,_) <- liftCps $ lookupSyntaxName toListName
; return (ListPat pats' placeHolderType
(Just (placeHolderType, to_list_name)))}
False -> return (ListPat pats' placeHolderType Nothing) }
rnPatAndThen mk (PArrPat pats _)
= do { pats' <- rnLPatsAndThen mk pats
; return (PArrPat pats' placeHolderType) }
rnPatAndThen mk (TuplePat pats boxed _)
= do { liftCps $ checkTupSize (length pats)
; pats' <- rnLPatsAndThen mk pats
; return (TuplePat pats' boxed []) }
rnPatAndThen mk (SplicePat splice)
= do { eith <- liftCpsFV $ rnSplicePat splice
; case eith of -- See Note [rnSplicePat] in RnSplice
Left not_yet_renamed -> rnPatAndThen mk not_yet_renamed
Right already_renamed -> return already_renamed }
rnPatAndThen mk (QuasiQuotePat qq)
= do { pat <- liftCps $ runQuasiQuotePat qq
-- Wrap the result of the quasi-quoter in parens so that we don't
-- lose the outermost location set by runQuasiQuote (#7918)
; rnPatAndThen mk (ParPat pat) }
rnPatAndThen _ pat = pprPanic "rnLPatAndThen" (ppr pat)
--------------------
rnConPatAndThen :: NameMaker
-> Located RdrName -- the constructor
-> HsConPatDetails RdrName
-> CpsRn (Pat Name)
rnConPatAndThen mk con (PrefixCon pats)
= do { con' <- lookupConCps con
; pats' <- rnLPatsAndThen mk pats
; return (ConPatIn con' (PrefixCon pats')) }
rnConPatAndThen mk con (InfixCon pat1 pat2)
= do { con' <- lookupConCps con
; pat1' <- rnLPatAndThen mk pat1
; pat2' <- rnLPatAndThen mk pat2
; fixity <- liftCps $ lookupFixityRn (unLoc con')
; liftCps $ mkConOpPatRn con' fixity pat1' pat2' }
rnConPatAndThen mk con (RecCon rpats)
= do { con' <- lookupConCps con
; rpats' <- rnHsRecPatsAndThen mk con' rpats
; return (ConPatIn con' (RecCon rpats')) }
--------------------
rnHsRecPatsAndThen :: NameMaker
-> Located Name -- Constructor
-> HsRecFields RdrName (LPat RdrName)
-> CpsRn (HsRecFields Name (LPat Name))
rnHsRecPatsAndThen mk (L _ con) hs_rec_fields@(HsRecFields { rec_dotdot = dd })
= do { flds <- liftCpsFV $ rnHsRecFields (HsRecFieldPat con) VarPat hs_rec_fields
; flds' <- mapM rn_field (flds `zip` [1..])
; return (HsRecFields { rec_flds = flds', rec_dotdot = dd }) }
where
rn_field (L l fld, n') = do { arg' <- rnLPatAndThen (nested_mk dd mk n')
(hsRecFieldArg fld)
; return (L l (fld { hsRecFieldArg = arg' })) }
-- Suppress unused-match reporting for fields introduced by ".."
nested_mk Nothing mk _ = mk
nested_mk (Just _) mk@(LetMk {}) _ = mk
nested_mk (Just n) (LamMk report_unused) n' = LamMk (report_unused && (n' <= n))
{-
************************************************************************
* *
Record fields
* *
************************************************************************
-}
data HsRecFieldContext
= HsRecFieldCon Name
| HsRecFieldPat Name
| HsRecFieldUpd
rnHsRecFields
:: forall arg.
HsRecFieldContext
-> (RdrName -> arg) -- When punning, use this to build a new field
-> HsRecFields RdrName (Located arg)
-> RnM ([LHsRecField Name (Located arg)], FreeVars)
-- This surprisingly complicated pass
-- a) looks up the field name (possibly using disambiguation)
-- b) fills in puns and dot-dot stuff
-- When we've finished, we've renamed the LHS, but not the RHS,
-- of each x=e binding
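-- A hedged illustration of (b), using a made-up record type (not one from
-- this module):
--     data R = R { x, y :: Int }
--     R { x }          is filled in as   R { x = x }          (NamedFieldPuns)
--     R { x = 1, .. }  is filled in as   R { x = 1, y = y }   (RecordWildCards,
--                                        provided y is in scope)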
rnHsRecFields ctxt mk_arg (HsRecFields { rec_flds = flds, rec_dotdot = dotdot })
= do { pun_ok <- xoptM Opt_RecordPuns
; disambig_ok <- xoptM Opt_DisambiguateRecordFields
; parent <- check_disambiguation disambig_ok mb_con
; flds1 <- mapM (rn_fld pun_ok parent) flds
; mapM_ (addErr . dupFieldErr ctxt) dup_flds
; dotdot_flds <- rn_dotdot dotdot mb_con flds1
-- Check for an empty record update e {}
-- NB: don't complain about e { .. }, because rn_dotdot has done that already
; case ctxt of
HsRecFieldUpd | Nothing <- dotdot
, null flds
-> addErr emptyUpdateErr
_ -> return ()
; let all_flds | null dotdot_flds = flds1
| otherwise = flds1 ++ dotdot_flds
; return (all_flds, mkFVs (getFieldIds all_flds)) }
where
mb_con = case ctxt of
HsRecFieldCon con | not (isUnboundName con) -> Just con
HsRecFieldPat con | not (isUnboundName con) -> Just con
_ {- update or isUnboundName con -} -> Nothing
-- The unbound name test is because if the constructor
           -- isn't in scope the constructor lookup will add an error,
           -- but still return an unbound name.
-- We don't want that to screw up the dot-dot fill-in stuff.
doc = case mb_con of
Nothing -> ptext (sLit "constructor field name")
Just con -> ptext (sLit "field of constructor") <+> quotes (ppr con)
rn_fld pun_ok parent (L l (HsRecField { hsRecFieldId = fld
, hsRecFieldArg = arg
, hsRecPun = pun }))
= do { fld'@(L loc fld_nm) <- wrapLocM (lookupSubBndrOcc True parent doc) fld
; arg' <- if pun
then do { checkErr pun_ok (badPun fld)
; return (L loc (mk_arg (mkRdrUnqual (nameOccName fld_nm)))) }
else return arg
; return (L l (HsRecField { hsRecFieldId = fld'
, hsRecFieldArg = arg'
, hsRecPun = pun })) }
rn_dotdot :: Maybe Int -- See Note [DotDot fields] in HsPat
-> Maybe Name -- The constructor (Nothing for an update
-- or out of scope constructor)
-> [LHsRecField Name (Located arg)] -- Explicit fields
-> RnM [LHsRecField Name (Located arg)] -- Filled in .. fields
rn_dotdot Nothing _mb_con _flds -- No ".." at all
= return []
rn_dotdot (Just {}) Nothing _flds -- ".." on record update
= do { case ctxt of
HsRecFieldUpd -> addErr badDotDotUpd
_ -> return ()
; return [] }
rn_dotdot (Just n) (Just con) flds -- ".." on record construction / pat match
= ASSERT( n == length flds )
do { loc <- getSrcSpanM -- Rather approximate
; dd_flag <- xoptM Opt_RecordWildCards
; checkErr dd_flag (needFlagDotDot ctxt)
; (rdr_env, lcl_env) <- getRdrEnvs
; con_fields <- lookupConstructorFields con
; when (null con_fields) (addErr (badDotDotCon con))
; let present_flds = getFieldIds flds
parent_tc = find_tycon rdr_env con
-- For constructor uses (but not patterns)
-- the arg should be in scope (unqualified)
-- ignoring the record field itself
-- Eg. data R = R { x,y :: Int }
-- f x = R { .. } -- Should expand to R {x=x}, not R{x=x,y=y}
arg_in_scope fld
= rdr `elemLocalRdrEnv` lcl_env
|| notNull [ gre | gre <- lookupGRE_RdrName rdr rdr_env
, case gre_par gre of
ParentIs p -> p /= parent_tc
_ -> True ]
where
rdr = mkRdrUnqual (nameOccName fld)
dot_dot_gres = [ head gres
| fld <- con_fields
, not (fld `elem` present_flds)
, let gres = lookupGRE_Name rdr_env fld
, not (null gres) -- Check field is in scope
, case ctxt of
HsRecFieldCon {} -> arg_in_scope fld
_other -> True ]
; addUsedRdrNames (map greRdrName dot_dot_gres)
; return [ L loc (HsRecField
{ hsRecFieldId = L loc fld
, hsRecFieldArg = L loc (mk_arg arg_rdr)
, hsRecPun = False })
| gre <- dot_dot_gres
, let fld = gre_name gre
arg_rdr = mkRdrUnqual (nameOccName fld) ] }
check_disambiguation :: Bool -> Maybe Name -> RnM Parent
    -- When disambiguation is on and the constructor is known, return its
    -- parent type constructor; field names are then looked up within it.
check_disambiguation disambig_ok mb_con
| disambig_ok, Just con <- mb_con
= do { env <- getGlobalRdrEnv; return (ParentIs (find_tycon env con)) }
| otherwise = return NoParent
find_tycon :: GlobalRdrEnv -> Name {- DataCon -} -> Name {- TyCon -}
-- Return the parent *type constructor* of the data constructor
-- That is, the parent of the data constructor.
-- That's the parent to use for looking up record fields.
find_tycon env con
| Just (AConLike (RealDataCon dc)) <- wiredInNameTyThing_maybe con
= tyConName (dataConTyCon dc) -- Special case for [], which is built-in syntax
-- and not in the GlobalRdrEnv (Trac #8448)
| [GRE { gre_par = ParentIs p }] <- lookupGRE_Name env con
= p
| otherwise
= pprPanic "find_tycon" (ppr con $$ ppr (lookupGRE_Name env con))
dup_flds :: [[RdrName]]
-- Each list represents a RdrName that occurred more than once
-- (the list contains all occurrences)
-- Each list in dup_fields is non-empty
(_, dup_flds) = removeDups compare (getFieldIds flds)
getFieldIds :: [LHsRecField id arg] -> [id]
getFieldIds flds = map (unLoc . hsRecFieldId . unLoc) flds
needFlagDotDot :: HsRecFieldContext -> SDoc
needFlagDotDot ctxt = vcat [ptext (sLit "Illegal `..' in record") <+> pprRFC ctxt,
ptext (sLit "Use RecordWildCards to permit this")]
badDotDotCon :: Name -> SDoc
badDotDotCon con
= vcat [ ptext (sLit "Illegal `..' notation for constructor") <+> quotes (ppr con)
, nest 2 (ptext (sLit "The constructor has no labelled fields")) ]
badDotDotUpd :: SDoc
badDotDotUpd = ptext (sLit "You cannot use `..' in a record update")
emptyUpdateErr :: SDoc
emptyUpdateErr = ptext (sLit "Empty record update")
badPun :: Located RdrName -> SDoc
badPun fld = vcat [ptext (sLit "Illegal use of punning for field") <+> quotes (ppr fld),
ptext (sLit "Use NamedFieldPuns to permit this")]
dupFieldErr :: HsRecFieldContext -> [RdrName] -> SDoc
dupFieldErr ctxt dups
= hsep [ptext (sLit "duplicate field name"),
quotes (ppr (head dups)),
ptext (sLit "in record"), pprRFC ctxt]
pprRFC :: HsRecFieldContext -> SDoc
pprRFC (HsRecFieldCon {}) = ptext (sLit "construction")
pprRFC (HsRecFieldPat {}) = ptext (sLit "pattern")
pprRFC (HsRecFieldUpd {}) = ptext (sLit "update")
{-
************************************************************************
* *
\subsubsection{Literals}
* *
************************************************************************
When literals occur we have to make sure
that the types and classes they involve
are made available.
-}
rnLit :: HsLit -> RnM ()
rnLit (HsChar _ c) = checkErr (inCharRange c) (bogusCharError c)
rnLit _ = return ()
-- Turn a Fractional-looking literal which happens to be an integer into an
-- Integer-looking literal.
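-- For instance, with -XNumDecimals the literal 1.2e6 is parsed as a
-- fractional literal whose value is 1200000 % 1; generalizeOverLitVal then
-- turns it into the integral literal 1200000.  (A sketch of the intent only.)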
generalizeOverLitVal :: OverLitVal -> OverLitVal
generalizeOverLitVal (HsFractional (FL {fl_text=src,fl_value=val}))
| denominator val == 1 = HsIntegral src (numerator val)
generalizeOverLitVal lit = lit
rnOverLit :: HsOverLit t -> RnM (HsOverLit Name, FreeVars)
rnOverLit origLit
= do { opt_NumDecimals <- xoptM Opt_NumDecimals
; let { lit@(OverLit {ol_val=val})
| opt_NumDecimals = origLit {ol_val = generalizeOverLitVal (ol_val origLit)}
| otherwise = origLit
}
; let std_name = hsOverLitName val
; (from_thing_name, fvs) <- lookupSyntaxName std_name
; let rebindable = case from_thing_name of
HsVar v -> v /= std_name
_ -> panic "rnOverLit"
; return (lit { ol_witness = from_thing_name
, ol_rebindable = rebindable
, ol_type = placeHolderType }, fvs) }
{-
************************************************************************
* *
\subsubsection{Errors}
* *
************************************************************************
-}
patSigErr :: Outputable a => a -> SDoc
patSigErr ty
= (ptext (sLit "Illegal signature in pattern:") <+> ppr ty)
$$ nest 4 (ptext (sLit "Use ScopedTypeVariables to permit it"))
bogusCharError :: Char -> SDoc
bogusCharError c
= ptext (sLit "character literal out of range: '\\") <> char c <> char '\''
badViewPat :: Pat RdrName -> SDoc
badViewPat pat = vcat [ptext (sLit "Illegal view pattern: ") <+> ppr pat,
ptext (sLit "Use ViewPatterns to enable view patterns")]
|
pparkkin/eta
|
compiler/ETA/Rename/RnPat.hs
|
bsd-3-clause
| 31,249 | 9 | 21 | 9,775 | 6,202 | 3,212 | 2,990 | -1 | -1 |
<?xml version='1.0' encoding='ISO-8859-1' ?>
<!DOCTYPE helpset
PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN"
"../dtd/helpset_2_0.dtd">
<helpset version="1.0">
<!-- title -->
<title>LightZone - Help</title>
<!-- maps -->
<maps>
<homeID>top</homeID>
<mapref location="English/Map.jhm"/>
</maps>
<!-- views -->
<view>
<name>TOC</name>
<label>Table Of Contents</label>
<type>javax.help.TOCView</type>
<data>English/LightZoneTOC.xml</data>
</view>
<!--
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>English/LightZoneIndex.xml</data>
</view>
-->
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
English/JavaHelpSearch
</data>
</view>
<!--
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
-->
<presentation default="true" displayviewimages="false">
<name>main window</name>
<size width="700" height="400" />
<location x="200" y="200" />
<title>LightZone - Online Help</title>
<toolbar>
<helpaction>javax.help.BackAction</helpaction>
<helpaction>javax.help.ForwardAction</helpaction>
<helpaction>javax.help.SeparatorAction</helpaction>
<helpaction>javax.help.HomeAction</helpaction>
<helpaction>javax.help.ReloadAction</helpaction>
<helpaction>javax.help.SeparatorAction</helpaction>
<helpaction>javax.help.PrintAction</helpaction>
<helpaction>javax.help.PrintSetupAction</helpaction>
</toolbar>
</presentation>
<presentation>
<name>main</name>
<size width="400" height="400" />
<location x="200" y="200" />
<title>LightZone - Online Help</title>
</presentation>
</helpset>
|
MarinnaCole/LightZone
|
linux/help/LightZone.hs
|
bsd-3-clause
| 1,928 | 150 | 118 | 357 | 794 | 403 | 391 | -1 | -1 |
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ImpredicativeTypes #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeSynonymInstances #-}
module Web.Offset.Types (
attrToTaxSpecList
, CacheResult(..)
, CatType
, decodeWPResponseBody
, Filter(..)
, Requester(..)
, H.ResponseHeaders
, StatusCode
, TagType
, TaxDict(..)
, TaxonomyName
, TaxRes(..)
, TaxSpec(..)
, TaxSpecId(..)
, TaxSpecList(..)
, UserPassword
, Wordpress(..)
, WordpressConfig(..)
, WordpressInt(..)
, WPKey(..)
, WPLens
, WPOrdering(..)
, WPPostStatus(..)
, WPQuery(..)
, WPResponse(..)
) where
import qualified Control.Concurrent.MVar as M
import Control.Lens hiding (children)
import Control.Monad.State
import Data.Aeson (FromJSON, Value (..), parseJSON, (.:), (.:?), (.!=), ToJSON(..))
import Data.Default
import qualified Data.ByteString as BS
import GHC.Generics
import qualified Data.CaseInsensitive as CI
import Data.Char (toUpper)
import Data.IntSet (IntSet)
import Data.List (intercalate)
import qualified Data.Map as M
import Data.Maybe (catMaybes, isJust)
import Data.Monoid ((<>))
import qualified Data.Semigroup as SG
import Data.Set (Set)
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Time.Clock (UTCTime)
import qualified Network.HTTP.Types.Header as H
import Web.Offset.Cache.Types
import Web.Offset.Field
import Web.Offset.Utils
data Wordpress b =
Wordpress { wpExpireAggregates :: IO Bool
, wpExpirePost :: WPKey -> IO Bool
, cachingGet :: WPKey -> IO (CacheResult WPResponse)
, cachingGetRetry :: WPKey -> IO (Either StatusCode WPResponse)
, cachingGetError :: WPKey -> IO (Either StatusCode WPResponse)
, wpLogger :: Text -> IO ()
, cacheInternals :: WordpressInt (StateT b IO Text)
}
type WPLens b s = Lens' s (Wordpress b)
type UserPassword = (Text, Text)
data WPResponse = WPResponse { wpHeaders :: H.ResponseHeaders
, wpBody :: Text } deriving (Eq, Show, Generic)
decodeWPResponseBody :: FromJSON a => WPResponse -> Maybe a
decodeWPResponseBody (WPResponse _ body) = decodeJson body
instance ToJSON WPResponse
instance FromJSON WPResponse where
parseJSON (Object v) = do
headers <- v .:? "wpHeaders" .!= mempty
body <- v .:? "wpBody" .!= encode v
return $ WPResponse headers body
parseJSON v = WPResponse
<$> (return mempty)
<*> return (encode v)
instance ToJSON (CI.CI BS.ByteString) where
toJSON str = toJSON $ T.toLower $ T.decodeUtf8 $ CI.original str
instance FromJSON (CI.CI BS.ByteString) where
parseJSON (String str) = return $ CI.mk $ T.encodeUtf8 str
parseJSON _ = mzero
instance ToJSON BS.ByteString where
toJSON str = toJSON $ T.decodeUtf8 str
instance FromJSON BS.ByteString where
parseJSON (String str) = return $ T.encodeUtf8 str
newtype Headers = Headers { unHeaders :: M.Map Text Text} deriving (Show, Eq, Generic)
instance SG.Semigroup Headers where
(Headers h1) <> (Headers h2) = Headers (h1 SG.<> h2)
instance Monoid Headers where
mempty = Headers mempty
mappend h1 h2 = h1 SG.<> h2
instance ToJSON Headers
newtype Requester = Requester { unRequester :: Text
-> [(Text, Text)]
-> IO (Either Int WPResponse) }
data WordpressConfig m =
WordpressConfig { wpConfEndpoint :: Text
, wpConfRequester :: Either UserPassword Requester
, wpConfCacheBehavior :: CacheBehavior
, wpConfExtraFields :: [Field m]
, wpConfLogger :: Maybe (Text -> IO ())
}
instance Default (WordpressConfig m) where
def = WordpressConfig "http://127.0.0.1:8080/wp-json"
(Left ("offset", "111"))
(CacheSeconds 600)
[]
Nothing
data WordpressInt b =
WordpressInt { wpCacheGet :: WPKey -> IO (Maybe Text)
, wpCacheSet :: WPKey -> Text -> IO ()
, startReqMutex :: WPKey -> IO Bool
, wpRequest :: WPKey -> IO (Either StatusCode WPResponse)
, stopReqMutex :: WPKey -> IO ()
, runRedis :: RunRedis
}
data TaxSpec = TaxPlus Text | TaxMinus Text deriving (Eq, Ord)
data TaxSpecId = TaxPlusId Int | TaxMinusId Int deriving (Eq, Show, Ord)
data CatType
data TagType
type CustomType = Text
instance Show TaxSpec where
show (TaxPlus t) = '+' : T.unpack t
show (TaxMinus t) = '-' : T.unpack t
newtype TaxRes = TaxRes (Int, Text) deriving (Show)
instance FromJSON TaxRes where
parseJSON (Object o) = TaxRes <$> ((,) <$> o .: "id" <*> o .: "slug")
parseJSON _ = mzero
data TaxDict = TaxDict { dict :: [TaxRes]
, desc :: Text} deriving (Show)
type Year = Text
type Month = Text
type Slug = Text
type TaxonomyName = Text
data Filter = TaxFilter TaxonomyName TaxSpecId
| NumFilter Int
| OffsetFilter Int
| OrderFilter WPOrdering
| OrderByFilter Text
| PageFilter Int
| SearchFilter Text
| BeforeFilter UTCTime
| AfterFilter UTCTime
| StatusFilter WPPostStatus
| StickyFilter Bool
| UserFilter Text
deriving (Eq, Ord)
instance Show Filter where
show (TaxFilter n t) = show n ++ "_" ++ show t
show (NumFilter n) = "num_" ++ show n
show (OffsetFilter n) = "offset_" ++ show n
show (OrderFilter ordering) = "order_" ++ show ordering
show (OrderByFilter orderby) = "orderby_" ++ T.unpack orderby
show (PageFilter n) = "page_" ++ show n
show (SearchFilter search) = "search_" ++ T.unpack search
show (BeforeFilter before) = "before_" ++ show before
show (AfterFilter after) = "after_" ++ show after
show (StatusFilter status) = "status_" ++ show status
show (StickyFilter sticky) = "sticky_" ++ show sticky
show (UserFilter u) = T.unpack $ "user_" <> u
data WPKey = PostKey Int
| PostByPermalinkKey Year Month Slug
| PostsKey (Set Filter)
| PageKey Text
| AuthorKey Int
| TaxDictKey Text
| TaxSlugKey TaxonomyName Slug
| EndpointKey Text [(Text, Text)]
deriving (Eq, Show, Ord)
tagChars :: String
tagChars = ['a'..'z'] ++ "-" ++ digitChars
digitChars :: String
digitChars = ['0'..'9']
instance Read TaxSpec where
readsPrec _ ('+':cs) | not (null cs) && all (`elem` tagChars) cs = [(TaxPlus (T.pack cs), "")]
readsPrec _ ('-':cs) | not (null cs) && all (`elem` tagChars) cs = [(TaxMinus (T.pack cs), "")]
readsPrec _ cs | not (null cs) && all (`elem` tagChars) cs = [(TaxPlus (T.pack cs), "")]
readsPrec _ _ = []
instance Read TaxSpecId where
readsPrec _ ('+':cs) | not (null cs) && all (`elem` digitChars) cs = [(TaxPlusId (read cs), "")]
readsPrec _ ('-':cs) | not (null cs) && all (`elem` digitChars) cs = [(TaxMinusId (read cs), "")]
readsPrec _ cs | not (null cs) && all (`elem` digitChars) cs = [(TaxPlusId (read cs), "")]
readsPrec _ _ = []
data TaxSpecList = TaxSpecList { taxName :: TaxonomyName
, taxList :: [TaxSpec]} deriving (Eq, Ord)
instance Show TaxSpecList where
show (TaxSpecList n ts) = T.unpack n ++ ": " ++ intercalate "," (map show ts)
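-- A rough illustration of the parsing done by 'attrToTaxSpecList' below
-- (assuming 'readSafe' from Web.Offset.Utils is a 'read' that returns Maybe):
--   attrToTaxSpecList ("tags", "+haskell,-meta")
--     == TaxSpecList "tags" [TaxPlus "haskell", TaxMinus "meta"]
--   attrToTaxSpecList ("tags", "+haskell,??")
--     == TaxSpecList "tags" []    -- one unparsable entry discards them all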
attrToTaxSpecList :: (Text, Text) -> TaxSpecList
attrToTaxSpecList (k, ts) =
let vs = map readSafe $ T.splitOn "," ts in
if all isJust vs
then TaxSpecList k (catMaybes vs)
else TaxSpecList k []
data WPQuery = WPPostsQuery{ qlimit :: Maybe Int
, qnum :: Maybe Int
, qoffset :: Maybe Int
, qpage :: Maybe Int
, qorder :: Maybe WPOrdering
, qorderby :: Maybe Text
, qsearch :: Maybe Text
, qbefore :: Maybe UTCTime
, qafter :: Maybe UTCTime
, qstatus :: Maybe WPPostStatus
, qsticky :: Maybe Bool
, quser :: Maybe Text
, qtaxes :: [TaxSpecList]
} deriving (Show)
data WPOrdering = Asc | Desc deriving (Eq, Show, Read, Ord)
data WPPostStatus = Publish | Future | Draft | Pending | Private deriving (Eq, Show, Read, Ord)
type StatusCode = Int
data CacheResult a = Successful a -- cache worked as expected
| Retry -- cache didn't work, but keep trying
| Abort StatusCode -- we got a 404 or something, no need to retry
deriving (Show, Functor)
|
dbp/snaplet-wordpress
|
src/Web/Offset/Types.hs
|
bsd-3-clause
| 9,587 | 0 | 13 | 3,134 | 2,838 | 1,572 | 1,266 | -1 | -1 |
{-# OPTIONS_GHC -fglasgow-exts #-} -- fix this later
{-# LANGUAGE FlexibleInstances, PatternGuards #-}
-- | This marvellous module contributed by Thomas Jäger
module Plugin.Pl.RuleLib
( -- Using rules
RewriteRule(..), fire
, -- Defining rules
rr,rr0,rr1,rr2,up,down
) where
import Plugin.Pl.Common
import Plugin.Pl.Names
import Data.Array
import qualified Data.Set as S
import Control.Monad.Fix (fix)
-- Next time I do something like this, I'll actually think about the combinator
-- language before, instead of producing something ad-hoc like this:
data RewriteRule
= RR Rewrite Rewrite -- ^ A 'Rewrite' rule, rewrite the first to the second
-- 'Rewrite's can contain 'Hole's
| CRR (Expr -> Maybe Expr) -- ^ Haskell function as a rule, applied to subexpressions
| Down RewriteRule RewriteRule -- ^ Like Up, but applied to subexpressions
| Up RewriteRule RewriteRule -- ^ Apply the first rule, then try the second rule on the first result
-- if it fails, returns the result of the first rule
| Or [RewriteRule] -- ^ Use all rules
| OrElse RewriteRule RewriteRule -- ^ Try the first rule, if it fails use the second rule
| Then RewriteRule RewriteRule -- ^ Apply the first rule, apply the second rule to the result
| Opt RewriteRule -- ^ Optionally apply the rewrite rule, Opt x == Or [identity,x]
| If RewriteRule RewriteRule -- ^ Apply the second rule only if the first rule has some results
| Hard RewriteRule -- ^ Apply the rule only in the first pass
-- | An expression with holes to match or replace
data Rewrite = Rewrite {
holes :: MExpr, -- ^ Expression with holes
rid :: Int -- ^ Number of holes
}
-- What are you gonna do when no recursive modules are possible?
class RewriteC a where
getRewrite :: a -> Rewrite
instance RewriteC MExpr where
getRewrite rule = Rewrite {
holes = rule,
rid = 0
}
-- lift functions to rewrite rules
instance RewriteC a => RewriteC (MExpr -> a) where
getRewrite rule = Rewrite {
holes = holes . getRewrite . rule . Hole $ pid,
rid = pid + 1
} where
pid = rid $ getRewrite (undefined :: a)
----------------------------------------------------------------------------------------
-- Applying/matching Rewrites
type ExprArr = Array Int Expr
-- | Fill in the holes in a 'MExpr'
myFire :: ExprArr -> MExpr -> MExpr
myFire xs (MApp e1 e2) = MApp (myFire xs e1) (myFire xs e2)
myFire xs (Hole h) = Quote $ xs ! h
myFire _ me = me
nub' :: Ord a => [a] -> [a]
nub' = S.toList . S.fromList
-- | Create an array, only if the keys in 'lst' are unique and all keys [0..n-1] are given
uniqueArray :: Ord v => Int -> [(Int, v)] -> Maybe (Array Int v)
uniqueArray n lst
| length (nub' lst) == n = Just $ array (0,n-1) lst
| otherwise = Nothing
-- | Try to match a Rewrite to an expression,
-- if there is a match, returns the expressions in the holes
match :: Rewrite -> Expr -> Maybe ExprArr
match (Rewrite hl rid') e = uniqueArray rid' =<< matchWith hl e
-- | Fill in the holes in a 'Rewrite'
fire' :: Rewrite -> ExprArr -> MExpr
fire' (Rewrite hl _) = (`myFire` hl)
fire :: Rewrite -> Rewrite -> Expr -> Maybe Expr
fire r1 r2 e = (fromMExpr . fire' r2) `fmap` match r1 e
-- | Match an Expr to a MExpr template, return the values used in the holes
matchWith :: MExpr -> Expr -> Maybe [(Int, Expr)]
matchWith (MApp e1 e2) (App e1' e2') =
liftM2 (++) (matchWith e1 e1') (matchWith e2 e2')
matchWith (Quote e) e' = if e == e' then Just [] else Nothing
matchWith (Hole k) e = Just [(k,e)]
matchWith _ _ = Nothing
fromMExpr :: MExpr -> Expr
fromMExpr (MApp e1 e2) = App (fromMExpr e1) (fromMExpr e2)
fromMExpr (Hole _) = Var Pref "Hole" -- error "Hole in MExpr"
fromMExpr (Quote e) = e
----------------------------------------------------------------------------------------
-- Defining rules
-- | Yet another pointless transformation:
-- Bring an MExpr to (more pointless) form by seeing it as a function
-- \hole_n -> ...
-- and writing that in pointless form
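-- A few informal examples (f and g stand for Quote'd expressions that do not
-- mention the hole):
--   transformM 0 (Hole 0)                      ~>  id
--   transformM 0 (f `MApp` Hole 0)             ~>  f
--   transformM 0 (g `MApp` (f `MApp` Hole 0))  ~>  g . f
--   transformM 0 f                             ~>  const f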
transformM :: Int -> MExpr -> MExpr
transformM _ (Quote e) = constE `a` Quote e
transformM n (Hole n') = if n == n' then idE else constE `a` Hole n'
transformM n (Quote (Var _ ".") `MApp` e1 `MApp` e2)
| e1 `hasHole` n && not (e2 `hasHole` n)
= flipE `a` compE `a` e2 `c` transformM n e1
transformM n e@(MApp e1 e2)
| fr1 && fr2 = sE `a` transformM n e1 `a` transformM n e2
| fr1 = flipE `a` transformM n e1 `a` e2
| fr2, Hole n' <- e2, n' == n = e1
| fr2 = e1 `c` transformM n e2
| otherwise = constE `a` e
where
fr1 = e1 `hasHole` n
fr2 = e2 `hasHole` n
-- | Is there a (Hole n) in an expression?
hasHole :: MExpr -> Int -> Bool
hasHole (MApp e1 e2) n = e1 `hasHole` n || e2 `hasHole` n
hasHole (Quote _) _ = False
hasHole (Hole n') n = n == n'
-- | Variants of a rewrite rule: fill in (some of) the holes
--
-- haddock doesn't like n+k patterns, so rewrite them
--
getVariants, getVariants' :: Rewrite -> [Rewrite]
getVariants' r@(Rewrite _ 0) = [r]
getVariants' r@(Rewrite e nk)
| nk >= 1 = r : getVariants (Rewrite e' (nk-1))
| otherwise = error "getVariants' : nk went negative"
where
e' = decHoles $ transformM 0 e
-- decrement all hole numbers
decHoles (Hole n') = Hole (n'-1)
decHoles (MApp e1 e2) = decHoles e1 `MApp` decHoles e2
decHoles me = me
getVariants = getVariants' -- r = trace (show vs) vs where vs = getVariants' r
-- | Use this rewrite rule and rewrite rules derived from it by iterated
-- pointless transformation
rrList :: RewriteC a => a -> a -> [RewriteRule]
rrList r1 r2 = zipWith RR (getVariants r1') (getVariants r2') where
r1' = getRewrite r1
r2' = getRewrite r2
-- | Construct a 'RR' rewrite rule
rr, rr0, rr1, rr2 :: RewriteC a => a -> a -> RewriteRule
rr r1 r2 = Or $ rrList r1 r2
rr1 r1 r2 = Or . take 2 $ rrList r1 r2
rr2 r1 r2 = Or . take 3 $ rrList r1 r2
-- use only this rewrite rule, no variants
rr0 r1 r2 = RR r1' r2' where
r1' = getRewrite r1
r2' = getRewrite r2
-- | Apply Down/Up repeatedly
down, up :: RewriteRule -> RewriteRule
down = fix . Down
up = fix . Up
|
zeekay/lambdabot
|
Plugin/Pl/RuleLib.hs
|
mit
| 6,364 | 0 | 11 | 1,596 | 1,762 | 958 | 804 | 105 | 3 |
{-
Copyright (C) 2006-7 John MacFarlane <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.Writers.RST
Copyright : Copyright (C) 2006-7 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <[email protected]>
Stability : alpha
Portability : portable
Conversion of 'Pandoc' documents to reStructuredText.
reStructuredText: <http://docutils.sourceforge.net/rst.html>
-}
module Text.Pandoc.Writers.RST ( writeRST) where
import Text.Pandoc.Definition
import Text.Pandoc.Shared
import Text.Pandoc.Blocks
import Text.Pandoc.Templates (renderTemplate)
import Data.List ( isPrefixOf, isSuffixOf, intersperse, transpose )
import Text.PrettyPrint.HughesPJ hiding ( Str )
import Control.Monad.State
import Control.Applicative ( (<$>) )
data WriterState =
WriterState { stNotes :: [[Block]]
, stLinks :: KeyTable
, stImages :: KeyTable
, stHasMath :: Bool
, stOptions :: WriterOptions
}
-- | Convert Pandoc to RST.
writeRST :: WriterOptions -> Pandoc -> String
writeRST opts document =
let st = WriterState { stNotes = [], stLinks = [],
stImages = [], stHasMath = False,
stOptions = opts }
in evalState (pandocToRST document) st
-- | Return RST representation of document.
pandocToRST :: Pandoc -> State WriterState String
pandocToRST (Pandoc (Meta tit auth dat) blocks) = do
opts <- liftM stOptions get
title <- titleToRST tit
authors <- mapM inlineListToRST auth
date <- inlineListToRST dat
body <- blockListToRST blocks
notes <- liftM (reverse . stNotes) get >>= notesToRST
-- note that the notes may contain refs, so we do them first
refs <- liftM (reverse . stLinks) get >>= keyTableToRST
pics <- liftM (reverse . stImages) get >>= pictTableToRST
hasMath <- liftM stHasMath get
let main = render $ body $+$ notes $+$ text "" $+$ refs $+$ pics
let context = writerVariables opts ++
[ ("body", main)
, ("title", render title)
, ("date", render date) ] ++
[ ("math", "yes") | hasMath ] ++
[ ("author", render a) | a <- authors ]
if writerStandalone opts
then return $ renderTemplate context $ writerTemplate opts
else return main
-- | Return RST representation of reference key table.
keyTableToRST :: KeyTable -> State WriterState Doc
keyTableToRST refs = mapM keyToRST refs >>= return . vcat
-- | Return RST representation of a reference key.
keyToRST :: ([Inline], (String, String))
-> State WriterState Doc
keyToRST (label, (src, _)) = do
label' <- inlineListToRST label
let label'' = if ':' `elem` (render label')
then char '`' <> label' <> char '`'
else label'
return $ text ".. _" <> label'' <> text ": " <> text src
-- | Return RST representation of notes.
notesToRST :: [[Block]] -> State WriterState Doc
notesToRST notes =
mapM (\(num, note) -> noteToRST num note) (zip [1..] notes) >>=
return . vcat
-- | Return RST representation of a note.
noteToRST :: Int -> [Block] -> State WriterState Doc
noteToRST num note = do
contents <- blockListToRST note
let marker = text ".. [" <> text (show num) <> text "]"
return $ marker $$ nest 3 contents
-- | Return RST representation of picture reference table.
pictTableToRST :: KeyTable -> State WriterState Doc
pictTableToRST refs = mapM pictToRST refs >>= return . vcat
-- | Return RST representation of a picture substitution reference.
pictToRST :: ([Inline], (String, String))
-> State WriterState Doc
pictToRST (label, (src, _)) = do
label' <- inlineListToRST label
return $ text ".. " <> char '|' <> label' <> char '|' <> text " image:: " <>
text src
-- | Take list of inline elements and return wrapped doc.
wrappedRST :: WriterOptions -> [Inline] -> State WriterState Doc
wrappedRST opts inlines = do
lineBreakDoc <- inlineToRST LineBreak
chunks <- mapM (wrapIfNeeded opts inlineListToRST)
(splitBy LineBreak inlines)
return $ vcat $ intersperse lineBreakDoc chunks
-- | Escape special characters for RST.
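-- For example (assuming 'backslashEscapes' simply prefixes each listed
-- character with a backslash):  escapeString "*foo*_bar_" == "\\*foo\\*\\_bar\\_"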
escapeString :: String -> String
escapeString = escapeStringUsing (backslashEscapes "`\\|*_")
titleToRST :: [Inline] -> State WriterState Doc
titleToRST [] = return empty
titleToRST lst = do
contents <- inlineListToRST lst
let titleLength = length $ render contents
let border = text (replicate titleLength '=')
return $ border $+$ contents $+$ border
-- | Convert Pandoc block element to RST.
blockToRST :: Block -- ^ Block element
-> State WriterState Doc
blockToRST Null = return empty
blockToRST (Plain inlines) = do
opts <- get >>= (return . stOptions)
wrappedRST opts inlines
blockToRST (Para [Image txt (src,tit)]) = do
capt <- inlineListToRST txt
let fig = text "figure:: " <> text src
let align = text ":align: center"
let alt = text ":alt: " <> if null tit then capt else text tit
return $ (text ".. " <> (fig $$ align $$ alt $$ text "" $$ capt)) $$ text ""
blockToRST (Para inlines) = do
opts <- get >>= (return . stOptions)
contents <- wrappedRST opts inlines
return $ contents <> text "\n"
blockToRST (RawHtml str) =
let str' = if "\n" `isSuffixOf` str then str ++ "\n" else str ++ "\n\n" in
return $ (text "\n.. raw:: html\n") $$ (nest 3 $ vcat $ map text (lines str'))
blockToRST HorizontalRule = return $ text "--------------\n"
blockToRST (Header level inlines) = do
contents <- inlineListToRST inlines
let headerLength = length $ render contents
let headerChar = if level > 5 then ' ' else "=-~^'" !! (level - 1)
let border = text $ replicate headerLength headerChar
return $ contents $+$ border <> text "\n"
blockToRST (CodeBlock (_,classes,_) str) = do
opts <- stOptions <$> get
let tabstop = writerTabStop opts
if "haskell" `elem` classes && "literate" `elem` classes &&
writerLiterateHaskell opts
then return $ (vcat $ map (text "> " <>) $ map text (lines str)) <> text "\n"
else return $ (text "::\n") $+$
(nest tabstop $ vcat $ map text (lines str)) <> text "\n"
blockToRST (BlockQuote blocks) = do
tabstop <- get >>= (return . writerTabStop . stOptions)
contents <- blockListToRST blocks
return $ (nest tabstop contents) <> text "\n"
blockToRST (Table caption _ widths headers rows) = do
caption' <- inlineListToRST caption
let caption'' = if null caption
then empty
else text "" $+$ (text "Table: " <> caption')
headers' <- mapM blockListToRST headers
rawRows <- mapM (mapM blockListToRST) rows
let isSimple = all (==0) widths && all (all (\bs -> length bs == 1)) rows
let numChars = maximum . map (length . render)
let widthsInChars =
if isSimple
then map ((+2) . numChars) $ transpose (headers' : rawRows)
else map (floor . (78 *)) widths
let hpipeBlocks blocks = hcatBlocks [beg, middle, end]
where height = maximum (map heightOfBlock blocks)
sep' = TextBlock 3 height (replicate height " | ")
beg = TextBlock 2 height (replicate height "| ")
end = TextBlock 2 height (replicate height " |")
middle = hcatBlocks $ intersperse sep' blocks
let makeRow = hpipeBlocks . zipWith docToBlock widthsInChars
let head' = makeRow headers'
rows' <- mapM (\row -> do cols <- mapM blockListToRST row
return $ makeRow cols) rows
let border ch = char '+' <> char ch <>
(hcat $ intersperse (char ch <> char '+' <> char ch) $
map (\l -> text $ replicate l ch) widthsInChars) <>
char ch <> char '+'
let body = vcat $ intersperse (border '-') $ map blockToDoc rows'
let head'' = if all null headers
then empty
else blockToDoc head' $+$ border '='
return $ border '-' $+$ head'' $+$ body $+$ border '-' $$ caption'' $$ text ""
blockToRST (BulletList items) = do
contents <- mapM bulletListItemToRST items
-- ensure that sublists have preceding blank line
return $ text "" $+$ vcat contents <> text "\n"
blockToRST (OrderedList (start, style', delim) items) = do
let markers = if start == 1 && style' == DefaultStyle && delim == DefaultDelim
then take (length items) $ repeat "#."
else take (length items) $ orderedListMarkers
(start, style', delim)
let maxMarkerLength = maximum $ map length markers
let markers' = map (\m -> let s = maxMarkerLength - length m
in m ++ replicate s ' ') markers
contents <- mapM (\(item, num) -> orderedListItemToRST item num) $
zip markers' items
-- ensure that sublists have preceding blank line
return $ text "" $+$ vcat contents <> text "\n"
blockToRST (DefinitionList items) = do
contents <- mapM definitionListItemToRST items
return $ (vcat contents) <> text "\n"
-- | Convert bullet list item (list of blocks) to RST.
bulletListItemToRST :: [Block] -> State WriterState Doc
bulletListItemToRST items = do
contents <- blockListToRST items
return $ (text "- ") <> contents
-- | Convert ordered list item (a list of blocks) to RST.
orderedListItemToRST :: String -- ^ marker for list item
-> [Block] -- ^ list item (list of blocks)
-> State WriterState Doc
orderedListItemToRST marker items = do
contents <- blockListToRST items
return $ (text marker <> char ' ') <> contents
-- | Convert definition list item (label, list of blocks) to RST.
definitionListItemToRST :: ([Inline], [[Block]]) -> State WriterState Doc
definitionListItemToRST (label, defs) = do
label' <- inlineListToRST label
contents <- liftM vcat $ mapM blockListToRST defs
tabstop <- get >>= (return . writerTabStop . stOptions)
return $ label' $+$ nest tabstop contents
-- | Convert list of Pandoc block elements to RST.
blockListToRST :: [Block] -- ^ List of block elements
-> State WriterState Doc
blockListToRST blocks = mapM blockToRST blocks >>= return . vcat
-- | Convert list of Pandoc inline elements to RST.
inlineListToRST :: [Inline] -> State WriterState Doc
inlineListToRST lst = mapM inlineToRST lst >>= return . hcat
-- | Convert Pandoc inline element to RST.
inlineToRST :: Inline -> State WriterState Doc
inlineToRST (Emph lst) = do
contents <- inlineListToRST lst
return $ char '*' <> contents <> char '*'
inlineToRST (Strong lst) = do
contents <- inlineListToRST lst
return $ text "**" <> contents <> text "**"
inlineToRST (Strikeout lst) = do
contents <- inlineListToRST lst
return $ text "[STRIKEOUT:" <> contents <> char ']'
inlineToRST (Superscript lst) = do
contents <- inlineListToRST lst
return $ text "\\ :sup:`" <> contents <> text "`\\ "
inlineToRST (Subscript lst) = do
contents <- inlineListToRST lst
return $ text "\\ :sub:`" <> contents <> text "`\\ "
inlineToRST (SmallCaps lst) = inlineListToRST lst
inlineToRST (Quoted SingleQuote lst) = do
contents <- inlineListToRST lst
return $ char '\'' <> contents <> char '\''
inlineToRST (Quoted DoubleQuote lst) = do
contents <- inlineListToRST lst
return $ char '"' <> contents <> char '"'
inlineToRST (Cite _ lst) =
inlineListToRST lst
inlineToRST EmDash = return $ text "--"
inlineToRST EnDash = return $ char '-'
inlineToRST Apostrophe = return $ char '\''
inlineToRST Ellipses = return $ text "..."
inlineToRST (Code str) = return $ text $ "``" ++ str ++ "``"
inlineToRST (Str str) = return $ text $ escapeString str
inlineToRST (Math t str) = do
modify $ \st -> st{ stHasMath = True }
return $ if t == InlineMath
then text $ ":math:`$" ++ str ++ "$`"
else text $ ":math:`$$" ++ str ++ "$$`"
inlineToRST (TeX _) = return empty
inlineToRST (HtmlInline _) = return empty
inlineToRST (LineBreak) = do
return $ empty -- there's no line break in RST
inlineToRST Space = return $ char ' '
inlineToRST (Link [Code str] (src, _)) | src == str ||
src == "mailto:" ++ str = do
let srcSuffix = if isPrefixOf "mailto:" src then drop 7 src else src
return $ text srcSuffix
inlineToRST (Link txt (src, tit)) = do
useReferenceLinks <- get >>= (return . writerReferenceLinks . stOptions)
linktext <- inlineListToRST $ normalizeSpaces txt
if useReferenceLinks
then do refs <- get >>= (return . stLinks)
let refs' = if (txt, (src, tit)) `elem` refs
then refs
else (txt, (src, tit)):refs
modify $ \st -> st { stLinks = refs' }
return $ char '`' <> linktext <> text "`_"
else return $ char '`' <> linktext <> text " <" <> text src <> text ">`_"
inlineToRST (Image alternate (source, tit)) = do
pics <- get >>= (return . stImages)
let labelsUsed = map fst pics
let txt = if null alternate || alternate == [Str ""] ||
alternate `elem` labelsUsed
then [Str $ "image" ++ show (length pics)]
else alternate
let pics' = if (txt, (source, tit)) `elem` pics
then pics
else (txt, (source, tit)):pics
modify $ \st -> st { stImages = pics' }
label <- inlineListToRST txt
return $ char '|' <> label <> char '|'
inlineToRST (Note contents) = do
-- add to notes in state
notes <- get >>= (return . stNotes)
modify $ \st -> st { stNotes = contents:notes }
let ref = show $ (length notes) + 1
return $ text " [" <> text ref <> text "]_"
|
yxm4109/pandoc
|
src/Text/Pandoc/Writers/RST.hs
|
gpl-2.0
| 14,380 | 0 | 20 | 3,616 | 4,457 | 2,206 | 2,251 | 266 | 9 |
module IrrefError where
import Prelude
topEntity :: Maybe Int -> Int
topEntity ~(Just x) = x
|
christiaanb/clash-compiler
|
tests/shouldwork/Basic/IrrefError.hs
|
bsd-2-clause
| 95 | 0 | 8 | 18 | 34 | 18 | 16 | 4 | 1 |
yes = baz baz >> return ()
|
mpickering/hlint-refactor
|
tests/examples/Default93.hs
|
bsd-3-clause
| 26 | 0 | 7 | 6 | 18 | 8 | 10 | 1 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sq-AL">
<title>Tips and Tricks | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
kingthorin/zap-extensions
|
addOns/tips/src/main/javahelp/org/zaproxy/zap/extension/tips/resources/help_sq_AL/helpset_sq_AL.hs
|
apache-2.0
| 976 | 78 | 66 | 160 | 415 | 210 | 205 | -1 | -1 |
import qualified Data.Vector as U
main = print (U.length (U.replicate 1 (pi :: Float)))
|
dolio/vector
|
old-testsuite/microsuite/length-float.hs
|
bsd-3-clause
| 89 | 0 | 10 | 15 | 41 | 23 | 18 | 2 | 1 |
import Test.Cabal.Prelude
-- Test that we can resolve a module name ambiguity when reexporting
-- by explicitly specifying what package we want.
main = setupAndCabalTest $ do
skipUnless =<< ghcVersionIs (>= mkVersion [7,9])
withPackageDb $ do
withDirectory "p" $ setup_install []
withDirectory "q" $ setup_install []
withDirectory "reexport" $ setup_install []
withDirectory "reexport-test" $ do
setup_build []
runExe' "reexport-test" [] >>= assertOutputContains "p q"
|
mydaum/cabal
|
cabal-testsuite/PackageTests/Ambiguity/setup-reexport.test.hs
|
bsd-3-clause
| 533 | 0 | 16 | 128 | 128 | 58 | 70 | 10 | 1 |
{-# LANGUAGE GADTs, ScopedTypeVariables #-}
-- Supplied by Henrik Nilsson, showed up a bug in GADTs
module Nilsson where
data Event a = NoEvent | Event a
fromEvent :: Event a -> a
fromEvent = undefined
usrErr :: String -> String -> String -> a
usrErr = undefined
type DTime = Double -- [s]
data SF a b = SF {sfTF :: a -> Transition a b}
data SF' a b where
SFArr :: (DTime -> a -> Transition a b) -> FunDesc a b -> SF' a b
SFAcc :: (DTime -> Event a -> Transition (Event a) b)
-> (c -> a -> (c, b)) -> c -> b
-> SF' (Event a) b
SFCpAXA :: (DTime -> a -> Transition a d)
-> FunDesc a b -> SF' b c -> FunDesc c d
-> SF' a d
SF' :: (DTime -> a -> Transition a b) -> SF' a b
-- A transition is a pair of the next state (in the form of a signal
-- function) and the output at the present time step.
type Transition a b = (SF' a b, b)
sfTF' :: SF' a b -> (DTime -> a -> Transition a b)
sfTF' (SFArr tf _) = tf
sfTF' (SFAcc tf _ _ _) = tf
-- sfTF' (SFSScan ...)
sfTF' (SFCpAXA tf _ _ _) = tf
sfTF' (SF' tf) = tf
-- "Smart" constructors. The corresponding "raw" constructors should not
-- be used directly for construction.
sfArr :: FunDesc a b -> SF' a b
sfArr FDI = sfId
sfArr (FDC b) = sfConst b
sfArr (FDE f fne) = sfArrE f fne
sfArr (FDG f) = sfArrG f
sfId :: SF' a a
sfId = sf
where
sf = SFArr (\_ a -> (sf, a)) FDI
sfConst :: b -> SF' a b
sfConst b = sf
where
sf = SFArr (\_ _ -> (sf, b)) (FDC b)
sfNever :: SF' a (Event b)
sfNever = sfConst NoEvent
-- Assumption: fne = f NoEvent
sfArrE :: (Event a -> b) -> b -> SF' (Event a) b
sfArrE f fne = sf
where
sf = SFArr (\_ ea -> (sf, case ea of NoEvent -> fne ; _ -> f ea))
(FDE f fne)
sfArrG :: (a -> b) -> SF' a b
sfArrG f = sf
where
sf = SFArr (\_ a -> (sf, f a)) (FDG f)
sfAcc :: (c -> a -> (c, b)) -> c -> b -> SF' (Event a) b
sfAcc f c bne = sf
where
sf = SFAcc (\dt ea -> case ea of
NoEvent -> (sf, bne)
Event a -> let
(c', b) = f c a
in
(sfAcc f c' bne, b))
f
c
bne
-- sfAccHld would be very similar. The only difference is that
-- what's now called "bne" would be updated at each event.
--
-- So maybe one could use the SAME constructor, just different
-- transition functions? It really depends on what assumptions
-- one need to make when optimizing.
-- Motivation for event-processing function type
-- (alternative would be function of type a->b plus ensuring that it
-- only ever gets invoked on events):
-- * Now we need to be consistent with other kinds of arrows.
-- * We still want to be able to get hold of the original function.
data FunDesc a b where
FDI :: FunDesc a a -- Identity function
FDC :: b -> FunDesc a b -- Constant function
FDE :: (Event a -> b) -> b -> FunDesc (Event a) b -- Event-processing fun
FDG :: (a -> b) -> FunDesc a b -- General function
fdFun :: FunDesc a b -> (a -> b)
fdFun FDI = id
fdFun (FDC b) = const b
fdFun (FDE f _) = f
fdFun (FDG f) = f
fdComp :: FunDesc a b -> FunDesc b c -> FunDesc a c
fdComp FDI fd2 = fd2
fdComp fd1 FDI = fd1
fdComp (FDC b) fd2 = FDC ((fdFun fd2) b)
fdComp _ (FDC c) = FDC c
fdComp (FDE f1 f1ne) fd2 = FDE (f2 . f1) (f2 f1ne)
where
f2 = fdFun fd2
fdComp (FDG f1) (FDE f2 f2ne) = FDG f
where
f a = case f1 a of
NoEvent -> f2ne
f1a -> f2 f1a
fdComp (FDG f1) fd2 = FDG (fdFun fd2 . f1)
-- Verifies that the first argument is NoEvent. Returns the value of the
-- second argument that is the case. Raises an error otherwise.
-- Used to check that functions on events do not map NoEvent to Event
-- wherever that assumption is exploited.
vfyNoEv :: Event a -> b -> b
vfyNoEv NoEvent b = b
vfyNoEv _ _ = usrErr "AFRP" "vfyNoEv"
"Assertion failed: Functions on events must not \
\map NoEvent to Event."
compPrim :: SF a b -> SF b c -> SF a c
compPrim (SF {sfTF = tf10}) (SF {sfTF = tf20}) = SF {sfTF = tf0}
where
tf0 a0 = (cpXX sf1 sf2, c0)
where
(sf1, b0) = tf10 a0
(sf2, c0) = tf20 b0
-- Naming convention: cp<X><Y> where <X> and <Y> is one of:
-- X - arbitrary signal function
-- A - arbitrary pure arrow
-- C - constant arrow
-- E - event-processing arrow
-- G - arrow known not to be identity, constant (C) or
-- event-processing (E).
cpXX :: SF' a b -> SF' b c -> SF' a c
cpXX (SFArr _ fd1) sf2 = cpAX fd1 sf2
cpXX sf1 (SFArr _ fd2) = cpXA sf1 fd2
cpXX (SFAcc _ f1 s1 bne) (SFAcc _ f2 s2 cne) =
sfAcc f (s1, s2) (vfyNoEv bne cne)
where
f (s1, s2) a =
case f1 s1 a of
(s1', NoEvent) -> ((s1', s2), cne)
(s1', Event b) ->
let (s2', c) = f2 s2 b in ((s1', s2'), c)
cpXX (SFCpAXA _ fd11 sf12 fd13) (SFCpAXA _ fd21 sf22 fd23) =
cpAXA fd11 (cpXX (cpXA sf12 (fdComp fd13 fd21)) sf22) fd23
cpXX sf1 sf2 = SF' tf
where
tf dt a = (cpXX sf1' sf2', c)
where
(sf1', b) = (sfTF' sf1) dt a
(sf2', c) = (sfTF' sf2) dt b
cpAXA :: FunDesc a b -> SF' b c -> FunDesc c d -> SF' a d
cpAXA FDI sf2 fd3 = cpXA sf2 fd3
cpAXA fd1 sf2 FDI = cpAX fd1 sf2
cpAXA (FDC b) sf2 fd3 = cpCXA b sf2 fd3
cpAXA fd1 sf2 (FDC d) = sfConst d
cpAXA fd1 (SFArr _ fd2) fd3 = sfArr (fdComp (fdComp fd1 fd2) fd3)
cpAX :: FunDesc a b -> SF' b c -> SF' a c
cpAX FDI sf2 = sf2
cpAX (FDC b) sf2 = cpCX b sf2
cpAX (FDE f1 f1ne) sf2 = cpEX f1 f1ne sf2
cpAX (FDG f1) sf2 = cpGX f1 sf2
cpXA :: SF' a b -> FunDesc b c -> SF' a c
cpXA sf1 FDI = sf1
cpXA sf1 (FDC c) = sfConst c
cpXA sf1 (FDE f2 f2ne) = cpXE sf1 f2 f2ne
cpXA sf1 (FDG f2) = cpXG sf1 f2
cpCX :: b -> SF' b c -> SF' a c
cpCX b (SFArr _ fd2) = sfConst ((fdFun fd2) b)
cpCX b (SFAcc _ _ _ cne) = sfConst (vfyNoEv b cne)
cpCX b (SFCpAXA _ fd21 sf22 fd23) =
cpCXA ((fdFun fd21) b) sf22 fd23
cpCX b sf2 = SFCpAXA tf (FDC b) sf2 FDI
where
tf dt _ = (cpCX b sf2', c)
where
(sf2', c) = (sfTF' sf2) dt b
-- For SPJ: The following version did not work.
        -- The commented out one below did work, by lambda-lifting cpCXAAux
cpCXA :: b -> SF' b c -> FunDesc c d -> SF' a d
cpCXA b sf2 FDI = cpCX b sf2
cpCXA _ _ (FDC c) = sfConst c
cpCXA b (sf2 :: SF' b c) (fd3 :: FunDesc c d) = cpCXAAux sf2
where
f3 = fdFun fd3
cpCXAAux :: SF' b c -> SF' a d
cpCXAAux (SFArr _ fd2) = sfConst (f3 ((fdFun fd2) b))
cpCXAAux (SFAcc _ _ _ cne) = sfConst (vfyNoEv b (f3 cne))
cpCXAAux (SFCpAXA _ fd21 sf22 fd23) = cpCXA ((fdFun fd21) b) sf22 (fdComp fd23 fd3)
{- -- For SPJ: This version works
cpCXA :: b -> SF' b c -> FunDesc c d -> SF' a d
cpCXA b sf2 FDI = cpCX b sf2
cpCXA _ _ (FDC c) = sfConst c
cpCXA b sf2 fd3 = cpCXAAux b fd3 (fdFun fd3) sf2
where
-- f3 = fdFun fd3
-- Really something like: cpCXAAux :: SF' b c -> SF' a d
cpCXAAux :: b -> FunDesc c d -> (c -> d) -> SF' b c -> SF' a d
cpCXAAux b fd3 f3 (SFArr _ fd2) = sfConst (f3 ((fdFun fd2) b))
cpCXAAux b fd3 f3 (SFAcc _ _ _ cne) = sfConst (vfyNoEv b (f3 cne))
cpCXAAux b fd3 f3 (SFCpAXA _ fd21 sf22 fd23) = cpCXA ((fdFun fd21) b) sf22 (fdComp fd23 fd3)
-}
cpGX :: (a -> b) -> SF' b c -> SF' a c
cpGX f1 (SFArr _ fd2) = sfArr (fdComp (FDG f1) fd2)
cpGX f1 (SFCpAXA _ fd21 sf22 fd23) =
cpAXA (fdComp (FDG f1) fd21) sf22 fd23
cpGX f1 sf2 = SFCpAXA tf (FDG f1) sf2 FDI
where
tf dt a = (cpGX f1 sf2', c)
where
(sf2', c) = (sfTF' sf2) dt (f1 a)
cpXG :: SF' a b -> (b -> c) -> SF' a c
cpXG (SFArr _ fd1) f2 = sfArr (fdComp fd1 (FDG f2))
cpXG (SFAcc _ f1 s bne) f2 = sfAcc f s (f2 bne)
where
f s a = let (s', b) = f1 s a in (s', f2 b)
cpXG (SFCpAXA _ fd11 sf12 fd22) f2 =
cpAXA fd11 sf12 (fdComp fd22 (FDG f2))
cpXG sf1 f2 = SFCpAXA tf FDI sf1 (FDG f2)
where
tf dt a = (cpXG sf1' f2, f2 b)
where
(sf1', b) = (sfTF' sf1) dt a
cpEX :: (Event a -> b) -> b -> SF' b c -> SF' (Event a) c
cpEX f1 f1ne (SFArr _ fd2) = sfArr (fdComp (FDE f1 f1ne) fd2)
cpEX f1 f1ne (SFAcc _ f2 s cne) = sfAcc f s (vfyNoEv f1ne cne)
where
f s a = f2 s (fromEvent (f1 (Event a)))
cpEX f1 f1ne (SFCpAXA _ fd21 sf22 fd23) =
cpAXA (fdComp (FDE f1 f1ne) fd21) sf22 fd23
cpEX f1 f1ne sf2 = SFCpAXA tf (FDE f1 f1ne) sf2 FDI
where
tf dt ea = (cpEX f1 f1ne sf2', c)
where
(sf2', c) = case ea of
NoEvent -> (sfTF' sf2) dt f1ne
_ -> (sfTF' sf2) dt (f1 ea)
cpXE :: SF' a (Event b) -> (Event b -> c) -> c -> SF' a c
cpXE (SFArr _ fd1) f2 f2ne = sfArr (fdComp fd1 (FDE f2 f2ne))
cpXE (SFAcc _ f1 s bne) f2 f2ne = sfAcc f s (vfyNoEv bne f2ne)
where
f s a = let (s', eb) = f1 s a
in
case eb of NoEvent -> (s', f2ne); _ -> (s', f2 eb)
cpXE (SFCpAXA _ fd11 sf12 fd13) f2 f2ne =
cpAXA fd11 sf12 (fdComp fd13 (FDE f2 f2ne))
cpXE sf1 f2 f2ne = SFCpAXA tf FDI sf1 (FDE f2 f2ne)
where
tf dt a = (cpXE sf1' f2 f2ne,
case eb of NoEvent -> f2ne; _ -> f2 eb)
where
(sf1', eb) = (sfTF' sf1) dt a
|
olsner/ghc
|
testsuite/tests/gadt/Nilsson.hs
|
bsd-3-clause
| 10,900 | 0 | 17 | 4,524 | 3,829 | 1,940 | 1,889 | 172 | 37 |
import System.Environment
import Control.Monad
import Control.Concurrent
main = do
[n] <- map read <$> getArgs
mvars <- replicateM n newEmptyMVar
sequence_ [ forkIO $ putMVar m $! nsoln n
| (m,n) <- zip mvars (repeat 9) ]
mapM_ takeMVar mvars
nsoln nq = length (gen nq)
where
safe :: Int -> Int -> [Int] -> Bool
safe x d [] = True
safe x d (q:l) = x /= q && x /= q+d && x /= q-d && safe x (d+1) l
gen :: Int -> [[Int]]
gen 0 = [[]]
gen n = [ (q:b) | b <- gen (n-1), q <- [1..nq], safe q 1 b]
|
ezyang/ghc
|
testsuite/tests/rts/numa001.hs
|
bsd-3-clause
| 543 | 0 | 14 | 160 | 312 | 158 | 154 | 16 | 3 |
import System
import Foreign
import qualified Data.ByteString as B
main = do
w <- getArgs >>= readIO . head
let n = w `div` 8
loop_y = B.unfoldrN n (next_x w (2/fromIntegral w) n)
unfold x = case loop_y x of
(s, Nothing) -> B.putStr s
(s, Just x) -> B.putStr s >> unfold x
putStrLn ("P4\n"++show w++" "++show w)
unfold (T 1 0 0 (-1))
data T = T !Int !Int !Int !Double
next_x !w !iw !bw (T bx x y ci)
| y == w = Nothing
| bx == bw = Just (loop_x w x 8 iw ci 0, T 1 0 (y+1) (iw+ci))
| otherwise = Just (loop_x w x 8 iw ci 0, T (bx+1) (x+8) y ci)
loop_x !w !x !n !iw !ci !b
| x < w = if n == 0
then b
else loop_x w (x+1) (n-1) iw ci (b+b+v)
| otherwise = b `shiftL` n
where
v = fractal 0 0 (fromIntegral x * iw - 1.5) ci 50
|
stayradiated/terminal.sexy
|
templates/vim/vivify/haskell.hs
|
mit
| 880 | 0 | 15 | 328 | 492 | 241 | 251 | -1 | -1 |
{-# LANGUAGE CPP #-}
-- Haskelly Test Script
-- Written by Liam O'Connor-Davis for comp3161 10s2
-- BSD3
-- Copyright (C) Liam O'Connor-Davis 2010
-- #define NOCOLOR
import Control.Exception
import System.Directory
import Control.Applicative((<$>))
import System.FilePath
import System.Environment
import Data.List
import Control.Monad
import Diff
import System.Process
import System.Exit
import Data.Char
#ifdef NOCOLOR
color v c = c
#else
color v c = v ++ c ++ "\ESC[0m"
#endif
brightWhite = "\ESC[1;97m"
darkWhite = "\ESC[37m"
darkRed = "\ESC[31m"
brightRed = "\ESC[1;91m"
brightGreen = "\ESC[1;92m"
darkYellow = "\ESC[33m"
traverse :: String -> IO [String]
traverse path = do
contents <- getDirectoryContents path
let sanitizedContents = map (path </>) $ contents \\ ["..","."]
directories <- filterM doesDirectoryExist sanitizedContents
files <- filterM doesFileExist sanitizedContents
if null directories
then return files
else do
traversal <- concat <$> mapM traverse directories
return $ traversal ++ files
foreach = flip mapM
showSummary marks = color brightWhite $ if length marks > 0 then "Passed " ++ show (length $ filter (/= 0) marks)
++ " out of " ++ show(length marks)
++ " tests: " ++ show(((length $ filter (/= 0) marks) * 100) `div` length marks)
++ "% Correct. Total of " ++ show (sum marks) ++ " marks."
else "No tests run."
getSkips skips = concat <$> (foreach skips $ \skip -> map (<.> ".mhs") . map (takeDirectory skip </>) . lines <$> readFile skip)
runTests exe testdir = do
files <- traverse $ testdir
let tests' = filter ((".mhs" ==) . takeExtension) files
let skips = filter (("Skip" ==) . takeBaseName) files
tests <- (tests' \\) <$> getSkips skips
marks <- foreach tests $ (\test -> do
(expect_fail, flags) <- getFlags (test `replaceFileName` "Flag")
mark <- getMarks (test `replaceFileName` "Marks")
putStr $ color brightWhite ("Running test: ") ++ color darkWhite (makeRelative testdir test) ++ color brightWhite (" (worth " ++ show mark ++ ") :- ")
(exit, out, err) <- readProcessWithExitCode exe (flags ++ ["--no-colour", test]) ""
let check = do r1 <- doCheck ".out" "Stdout" test out
r2 <- doCheck ".err" "Stderr" test err
return $ r1 * r2 * mark
case exit of
ExitFailure i -> if expect_fail then check
else do putStrLn $ color darkRed ("Executable returned non-zero exit code(" ++ show i ++ ").")
dumpOutput err out
ExitSuccess -> if not expect_fail then check
else do putStrLn $ color darkRed ("Expected program failure, but it unexpectedly succeeded.")
dumpOutput err out)
putStrLn $ showSummary marks
where
dumpOutput err out = do
putStrLn $ color darkRed ("Stderr was:")
putStrLn err
putStrLn $ color darkRed ("Stdout was:")
putStrLn out
return 0
doCheck ext name test out = do
v <- doesFileExist (test `replaceExtension` ext)
if v
then do
     diff <- getDiff (filter (not . all isSpace) $ lines out) <$> filter (not . all isSpace) . lines <$> readFile (test `replaceExtension` ext)
if all (== B) $ map fst diff
then putStrLn (color brightGreen $ name ++ " Check Passed!") >> return 1
else do putStrLn $ (color brightRed $ name ++ " Check Failed") ++ ":\n" ++ showDiff diff; return 0
else if (not $ all isSpace out)
then do
putStrLn $ color darkYellow $ "No " ++ ext ++ " file found. Printing output..."
putStr out
return 1
else return 1
getFlags filename = do
v <- doesFileExist filename
if v then do
str <- lines <$> readFile filename
return ("expect-fail" `elem` str, delete "expect-fail" str)
else return (False, [])
getMarks filename = let readInteger s = case reads s of
[(a,b)] -> a
_ -> 1
in do v <- doesFileExist filename
if v then readInteger <$> readFile filename
else return 1
main = do
cd <- getCurrentDirectory
v <- getArgs
when (v == [ "--help" ]) $ do
putStrLn $ "Liam's Haskelly Test Runner v0.1. \n" ++
"This program is usually accompanied by a runner shell script.\n" ++
" Usage: ./run_tests.sh [--no-color] [program_to_test] [test_folder_location]\n\n" ++
"If no shell script is available, it can be run easily via runhaskell:\n" ++
" Usage: runhaskell -i./tests/driver -cpp [-DNOCOLOR] ./tests/driver/Check.hs [program_to_test] [test_folder_location]"
exitSuccess
let (dir, exe) = case v of
[ filename ] -> (cd </> "tests", filename)
[ filename, tests ] -> (tests, filename)
[] -> (cd </> "tests", cd </> "dist" </> "build" </> "minhs-1" </> "minhs-1")
de <- doesDirectoryExist $ cd </> "tests"
fe <- doesFileExist $ exe
when (not fe) $ error $ "I cannot find an executable. I tried:" ++ exe
when (not de) $ error "I cannot find a `tests' directory. Exiting"
runTests exe dir
showDiff :: [(DI,String)] -> String
showDiff diff = unlines $ map (\(a,b) -> color (colorDI a) (showDI a ++ b )) diff
where showDI F = "+"
showDI S = "-"
showDI B = " "
colorDI F = darkRed
colorDI S = darkRed
colorDI B = darkWhite
|
pierzchalski/cs3161a1
|
tests/driver/Check.hs
|
mit
| 5,865 | 0 | 24 | 1,891 | 1,687 | 843 | 844 | 117 | 7 |
{-# LANGUAGE RecordWildCards #-}
{-
The Parser is responsible for transforming a 'String' into an AST.
-}
module SuperUserSpark.Parser where
import Import
import Control.Exception (try)
import SuperUserSpark.Language.Types
import SuperUserSpark.OptParse.Types
import SuperUserSpark.Parser.Internal
import SuperUserSpark.Parser.Types
import SuperUserSpark.Utils
parseFromArgs :: ParseArgs -> IO ()
parseFromArgs pa = do
errOrAss <- parseAssignment pa
case errOrAss of
Left err -> die $ unwords ["Unable to build parse assignment:", err]
Right ass -> parse ass
parseAssignment :: ParseArgs -> IO (Either String ParseAssignment)
parseAssignment ParseArgs {..} =
ParseAssignment <$$>
((left (show :: PathParseException -> String)) <$>
try (resolveFile' parseFilePath))
parse :: ParseAssignment -> IO ()
parse ParseAssignment {..} = do
errOrFile <- parseFile fileToParse
case errOrFile of
Left err -> die $ formatParseError err
Right _ -> pure ()
formatParseError :: ParseError -> String
formatParseError (ParseError pe) = show pe
parseFile :: Path Abs File -> IO (Either ParseError SparkFile)
parseFile file = (left ParseError . parseCardFile file) <$> readFile (toFilePath file)
|
NorfairKing/super-user-spark
|
src/SuperUserSpark/Parser.hs
|
mit
| 1,250 | 0 | 12 | 230 | 353 | 178 | 175 | 30 | 2 |
{-# LANGUAGE ScopedTypeVariables
, GADTs
#-}
-- ============================================================================
-- ----------------------------------------------------------------------------
{-|
Module : Game.Make10Spec
Description : puzzle game
Copyright : (c) hanepjiv, 2015
License : MIT
Maintainer : [email protected]
Stability : experimental
Portability : portable
make10, 10-puzzle
-}
module Game.Make10Spec(spec) where
-- ============================================================================
-- ----------------------------------------------------------------------------
import Prelude
import Test.Hspec
--import Test.QuickCheck
-- ============================================================================
-- ----------------------------------------------------------------------------
spec :: Spec
spec = return ()
{--
describe "select" $
it "select" $ property $
\ xs -> length (xs :: [Int]) == length (select xs)
--}
|
hanepjiv/make10_hs
|
test/spec/Game/Make10Spec.hs
|
mit
| 1,018 | 0 | 6 | 148 | 44 | 30 | 14 | 7 | 1 |
module Y2016.M11.D21.Exercise where
{--
Let's say you've joined the National November Writing Month challenge to write
a novel of 50,000 words during the month of November. Let's say you've ...
'kinda' started that novel, and it's here in this directory at foo.txt or at
the URL:
https://raw.githubusercontent.com/geophf/1HaskellADay/master/exercises/HAD/Y2016/M11/D21/foo.txt
And let's say today is today, and the novel of at least 50,000 words is due by
the end of the month, because I love deadlines: I love the sound they make as
they whoosh by!
Douglas Adams.
So, today's Haskell exercise.
1) how many words are in this novel so far
--}
wordCount :: FilePath -> IO WordsWritten
wordCount = undefined
-- 2) How many days until the deadline, the end of the month
daysLeft :: IO DaysLeft -- from 'today.' Where today is defined as today.
daysLeft = undefined
-- hint: maybe use something in the Data.Time module set.
-- 3) How many words per day must be written to complete the novel (or at least
-- the first 50,000 words of it) in the time remaining?
type DaysLeft = Integer
type WordsPerDay = Integer
type WordsWritten = Int
wordsPerDay :: DaysLeft -> WordsWritten -> WordsPerDay
wordsPerDay daysLeft wordsAlreadyWritten = undefined
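-- A worked example of the intended arithmetic (numbers are illustrative):
-- with 9 days left and 3,200 words already written, the remaining
-- 50,000 - 3,200 = 46,800 words require 46,800 / 9 = 5,200 words per day.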
|
geophf/1HaskellADay
|
exercises/HAD/Y2016/M11/D21/Exercise.hs
|
mit
| 1,255 | 0 | 6 | 217 | 85 | 52 | 33 | 10 | 1 |
module MLUtil.LabelledMatrix
( LabelId
, LabelledMatrix (..)
, mkLabelledMatrix
, readLabelledMatrix
) where
import Control.Exception
import Control.Monad
import Control.Monad.ST
import Data.List.Split
import qualified Data.Map as M
import qualified Data.Vector.Storable as VS
import qualified Data.Vector.Storable.Mutable as VSM
import qualified Data.Vector.Unboxed as VU
import qualified Data.Vector.Unboxed.Mutable as VUM
import MLUtil.Imports
import MLUtil.Util
type LabelId = Int
data LabelledMatrix = LabelledMatrix
{ lmValues :: Matrix
, lmLabelIds :: VU.Vector LabelId
, lmLabelIdMap :: M.Map String LabelId
, lmLabelMap :: M.Map LabelId String
} deriving Show
splitLine :: String -> [String]
splitLine = splitOneOf [' ', '\t']
swapPair :: (a, b) -> (b, a)
swapPair (a, b) = (b, a)
swapMap :: (Ord k, Ord v) => M.Map k v -> M.Map v k
swapMap = M.fromList . map swapPair . M.toList
-- cf kNN.file2matrix
readLabelledMatrix :: FilePath -> IO (Maybe LabelledMatrix)
readLabelledMatrix path = (mkLabelledMatrix . lines) <$> readFile path
-- cf kNN.file2matrix
mkLabelledMatrix :: [String] -> Maybe LabelledMatrix
mkLabelledMatrix [] = Nothing
mkLabelledMatrix ls =
let rowCount = length ls
tokenCount = length $ splitLine (head ls)
columnCount = tokenCount - 1
(valuesV, labelIds, labels) = runST $ do
mValues <- VSM.new (rowCount * columnCount)
mLabelIds <- VUM.new rowCount
labels' <- forFoldM M.empty (zip [0..] ls) $ \labels'' (r, l) -> do
let allTokens = splitLine l
labelToken = last allTokens
valueTokens = init $ allTokens
(labels''', labelId) = case M.lookup labelToken labels'' of
Just labelId -> (labels'', labelId)
Nothing ->
let newLabelId = length labels''
in (M.insert labelToken newLabelId labels'', newLabelId)
VUM.unsafeWrite mLabelIds r labelId
forM_ (zip [0..] valueTokens) $ \(c, valueToken) ->
VSM.unsafeWrite mValues (r * columnCount + c) (read valueToken)
return labels'''
values' <- VS.unsafeFreeze mValues
labelIds' <- VU.unsafeFreeze mLabelIds
return (values', labelIds', labels')
values = matrixFromVector RowMajor rowCount columnCount valuesV
in Just $ LabelledMatrix values labelIds labels (swapMap labels)
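-- A minimal usage sketch (hypothetical input lines, not from the source):
-- >>> let Just m = mkLabelledMatrix ["1.0 2.0 largeDoses", "3.0 4.0 smallDoses"]
-- >>> lmLabelIdMap m
-- fromList [("largeDoses",0),("smallDoses",1)]
-- >>> lmLabelIds m
-- [0,1]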
|
rcook/mlutil
|
mlutil/src/MLUtil/LabelledMatrix.hs
|
mit
| 2,616 | 7 | 12 | 776 | 718 | 404 | 314 | 58 | 2 |
import Control.Arrow ((***))
import Data.Set (Set)
import qualified Data.Set as S
import Common (bothF, bothV)
main :: IO ()
main = do
partOne >>= print
partTwo >>= print
partOne :: IO Int
partOne = process stepOne
partTwo :: IO Int
partTwo = process stepTwo
process :: (String -> Int) -> IO Int
process f = f <$> readFile "../input/03.txt"
stepOne :: String -> Int
stepOne = S.size . moveSet
stepTwo :: String -> Int
stepTwo xs = S.size $ a `S.union` b
where (a,b) = moveSet `bothF` splitOddEven xs
type Position = (Int,Int)
stepMove :: Char -> Position -> Position
stepMove '^' (x,y) = (x, succ y)
stepMove 'v' (x,y) = (x, pred y)
stepMove '>' (x,y) = (succ x, y)
stepMove '<' (x,y) = (pred x, y)
stepMove c _ = error $ "stepMove: unknown direction " ++ show c
-- |
-- >>> S.size $ moveSet ">"
-- 2
-- >>> S.size $ moveSet "^>v<"
-- 4
-- >>> S.size $ moveSet "^v^v^v^v^v"
-- 2
moveSet :: String -> Set Position
moveSet xs = S.singleton (0,0) `S.union` a `S.union` b
where (a,b) = (***) id S.singleton $ moveSet' xs
moveSet' :: String -> (Set Position, Position)
moveSet' = foldl f (S.empty, (0,0))
f :: (Set Position, Position) -> Char -> (Set Position, Position)
f (s,p) c = stepMove c p `bothV` (S.union s . S.singleton, id)
-- | split a list into two list by its index number
-- https://stackoverflow.com/a/36058429/1664572
-- >>> splitOddEven [1..10]
-- ([1,3,5,7,9],[2,4,6,8,10])
--
splitOddEven :: [a] -> ([a],[a])
splitOddEven = foldr (\x ~(y2,y1) -> (x:y1, y2)) ([],[])
|
wizzup/advent_of_code
|
2015/haskell/exe/Day03.hs
|
mit
| 1,537 | 2 | 9 | 320 | 700 | 368 | 332 | 35 | 1 |
{-# LANGUAGE CPP #-}
module GHCJS.DOM.MediaList (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
module GHCJS.DOM.JSFFI.Generated.MediaList
#else
module Graphics.UI.Gtk.WebKit.DOM.MediaList
#endif
) where
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.MediaList
#else
import Graphics.UI.Gtk.WebKit.DOM.MediaList
#endif
|
plow-technologies/ghcjs-dom
|
src/GHCJS/DOM/MediaList.hs
|
mit
| 430 | 0 | 5 | 39 | 33 | 26 | 7 | 4 | 0 |
module Main where
import Control.Exception
import Data.Typeable
handler :: SomeException -> IO ()
handler (SomeException e) = do
print (typeOf e)
putStrLn ("We errored! It was: " ++ show e)
-- didn't compile: "Couldn't match expected type ‘e’ with actual type ‘AsyncException’"
-- handler :: SomeException -> IO ()
-- handler (SomeException StackOverflow) = putStrLn "We errored! It was: StackOverflow"
-- handler (SomeException _) = putStrLn "We errored! It wasn't: StackOverflow"
-- compiles but doesn't catch the exception.
-- handler :: AsyncException -> IO ()
-- handler StackOverflow = putStrLn "We errored!, StackOverflow"
-- handler _ = putStrLn "We errored!, something other than StackOverflow"
main = do
writeFile "zzz" "hi" `catch` handler
putStrLn "wrote to a file"
|
NickAger/LearningHaskell
|
HaskellProgrammingFromFirstPrinciples/Chapter30/writePls.hs
|
mit
| 818 | 0 | 10 | 149 | 103 | 55 | 48 | 10 | 1 |
-- Since July 20 2013
module Main where
import System.IO
main = do
putStrLn "What's your name?"
name <- getLine
tell name
tell_case name
tell name = do
putStrLn (
if elem name ["Simon", "John", "Phil"] then
"I think Haskell is a great programming language."
else if name == "Koen" then
"I think debugging Haskell is fun."
else
"I don't know who you are."
)
tell_case name = do
case name of
"Simon" ->
putStrLn "I think Haskell is a great programming language."
"John" ->
putStrLn "I think Haskell is a great programming language."
"Phil" ->
putStrLn "I think Haskell is a great programming language."
"Koen" ->
putStrLn "I think debugging Haskell is fun."
_ ->
putStrLn "I don't know who you are."
|
fossilet/yaht
|
ex_5.1.hs
|
mit
| 898 | 0 | 11 | 323 | 151 | 74 | 77 | 26 | 5 |
{-# LANGUAGE PackageImports #-}
{-# OPTIONS_GHC -fno-warn-dodgy-exports -fno-warn-unused-imports #-}
-- | Reexports "System.Environment.Compat"
-- from a globally unique namespace.
module System.Environment.Compat.Repl (
module System.Environment.Compat
) where
import "this" System.Environment.Compat
|
haskell-compat/base-compat
|
base-compat/src/System/Environment/Compat/Repl.hs
|
mit
| 304 | 0 | 5 | 31 | 28 | 21 | 7 | 5 | 0 |
{-# LANGUAGE OverloadedStrings #-}
import Control.Applicative ((<$>))
import Data.Binary (encode)
import qualified Data.ByteString.Char8 as Char
import Data.Global (declareIORef)
import Data.IORef (IORef, readIORef, modifyIORef)
import Data.Maybe (fromJust)
import Data.Text as Text
import Network.HTTP.Types (status201, status302, status404)
import Network.Wai (Request, Response, pathInfo, responseLBS, queryString, ResponseReceived)
import Network.Wai.Handler.Warp (run)
import System.Environment (getEnvironment)
import Shortener
world :: IORef World
world = declareIORef "world" initialWorld
main :: IO ()
main = do
env <- getEnvironment
let port = maybe 8080 read $ lookup "PORT" env
run port app
app :: Request -> (Response -> IO ResponseReceived) -> IO ResponseReceived
app request respond = do
response <- dispatch request
respond response
dispatch :: Request -> IO Response
dispatch request = case pathInfo request of
[] -> return indexHandler
["shorten"] -> shortenHandler request
_ -> expandHandler request
indexHandler :: Response
indexHandler = redirectTo "https://github.com/justincampbell/url-shorteners"
redirectTo :: Char.ByteString -> Response
redirectTo url = responseLBS status302 [("Location", url)] ""
shortenHandler :: Request -> IO Response
shortenHandler request = do
let
url = extractUrl request
headers = []
_ <- updateShortenIORef url
token <- lastToken <$>
readIORef world
let
body = encode $ "/" ++ token
return $ responseLBS status201 headers body
extractUrl :: Request -> Url
extractUrl request = case url of
Just url' -> Char.unpack $ fromJust url'
Nothing -> ""
where url = lookup "url" $ queryString request
expandHandler :: Request -> IO Response
expandHandler request = do
url <- expandTokenIORef $ extractToken request
case url of
Nothing -> return $ responseLBS status404 [] ""
Just url' -> return $ redirectTo $ Char.pack url'
extractToken :: Request -> Token
extractToken request =
case pathInfo request of
[] -> Text.unpack ""
[a] -> Text.unpack a
_ -> ""
updateShortenIORef :: Url -> IO ()
updateShortenIORef url = modifyIORef world (shorten url)
expandTokenIORef :: Token -> IO (Maybe Url)
expandTokenIORef token = do
currentWorld <- readIORef world
return $ expand token currentWorld
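-- Route summary (derived from 'dispatch' above):
--   /                  -> 302 redirect to the project page
--   /shorten?url=<url> -> 201; the body carries the new "/<token>" path
--   /<token>           -> 302 redirect to the stored URL, or 404 if unknown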
|
justincampbell/url-shortener-haskell
|
Main.hs
|
mit
| 2,503 | 0 | 12 | 586 | 789 | 390 | 399 | 67 | 3 |
module Network.IPFS where
import Control.Applicative ((<$>))
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Base58 as B58
import qualified Data.ByteString.Char8 as C
import Data.Maybe (fromJust)
import Data.Foldable (toList)
import Text.ProtocolBuffers.WireMessage (messageGet)
import Text.ProtocolBuffers.Basic (uToString)
import qualified Network.IPFS.API as API
import qualified Network.IPFS.MerkleDAG.PBNode as PBN
import qualified Network.IPFS.MerkleDAG.PBLink as PBL
type Hash = B.ByteString -- TODO use multihash library
type Data = B.ByteString
-- newtype the links, hash, and data.
data Object = Object { hash :: Hash
, payload :: Data
, links :: [(String, Object)]
} deriving (Show)
-- newtype the String
cat :: API.Endpoint -> String -> IO BL.ByteString
cat endpoint path = API.call endpoint ["cat"] [] [path]
getPBNode :: API.Endpoint -> Hash -> IO PBN.PBNode
getPBNode endpoint digest = do
resp <- API.call endpoint
["object", "get"] [("encoding", "protobuf")]
[C.unpack $ B58.encodeBase58 B58.bitcoinAlphabet digest]
return $ case messageGet resp of
Right (node, _) -> node
Left err -> error err
getObject :: API.Endpoint -> Hash -> IO Object
getObject endpoint digest = do
pbnode <- getPBNode endpoint digest
let links' = toList $ PBN.links pbnode
names = uToString . fromJust . PBL.name <$> links'
data' = BL.toStrict . fromJust $ PBN.data' pbnode
children <- mapM resolveLink links'
return (Object digest data' $ zip names children)
where resolveLink = getObject endpoint . BL.toStrict . fromJust . PBL.hash
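-- Usage sketch (the endpoint and hash are placeholders, not from the source;
-- constructing an 'API.Endpoint' is left to Network.IPFS.API):
-- example :: API.Endpoint -> String -> IO ()
-- example ep b58 =
--   case B58.decodeBase58 B58.bitcoinAlphabet (C.pack b58) of
--     Nothing -> putStrLn "not a valid base58 hash"
--     Just digest -> getObject ep digest >>= print . map fst . links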
|
bitemyapp/hs-ipfs-api
|
src/Network/IPFS.hs
|
mit
| 1,736 | 0 | 12 | 365 | 515 | 289 | 226 | 38 | 2 |
module Categories where
categories :: Directory -> Map String File
|
Soares/tagwiki
|
src/Categories.hs
|
mit
| 68 | 0 | 6 | 11 | 18 | 10 | 8 | 2 | 0 |
module NetHack.Control.Farlook
(farLook)
where
import Control.Monad(when)
import Control.Monad.IO.Class(liftIO)
import NetHack.Monad.NHAction
import NetHack.Control.Screen
import NetHack.Data.Messages(trim)
import qualified Terminal.Data as T
import qualified Terminal.Terminal as T
farLook :: (Int, Int) -> NHAction String
farLook (x, y) = do
t <- getTerminalM
let (cx, cy) = (T.cursorX t, T.cursorY t)
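  -- Build the farlook command: ';' enters farlook mode, the h/j/k/l steps
  -- move the cursor from (cx, cy) to the target (x, y), and '.' selects it.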
let str = ";" ++
replicate (x - cx) 'l' ++
replicate (cx - x) 'h' ++
replicate (y - cy) 'j' ++
replicate (cy - y) 'k' ++ "."
answer str
str <- farLookResult
answer ' '
return str
farLookResult :: NHAction String
farLookResult = do
t <- getTerminalM
case T.captureString "\\((.+)\\)" (1, 1) (80, 2) t of
Just r -> return r
Nothing ->
case T.captureString " an? (.+) *$" (1, 1) (80, 1) t of
Just r -> return $ trim r
Nothing -> return ""
|
Noeda/Megaman
|
src/NetHack/Control/Farlook.hs
|
mit
| 971 | 0 | 17 | 267 | 366 | 193 | 173 | 31 | 3 |
{-# LANGUAGE OverloadedStrings #-}
-- | This module holds all of the logic used to build queries.
module Docker.QueryBuilder ( QueryBuilder
, isAllowedQS
, param
, stringParam
, noParams
, compileQuery
) where
import Control.Monad.Writer.Strict
import qualified Data.ByteString.Char8 as BS
import Data.Char (isAlphaNum)
import Data.List (intersperse)
import Network.URI (escapeURIString)
-- | This type is used to build query parameters.
type QueryBuilder a = Writer [BS.ByteString] a
-- | Add a query parameter.
param :: Show a => BS.ByteString -> a -> QueryBuilder ()
param key = stringParam key . BS.pack . show
-- | Test if the given 'Char' is allowed in the query.
isAllowedQS :: Char -> Bool
isAllowedQS char = elem char "-_.~" || isAlphaNum char
-- | Add a 'BS.ByteString' query parameter.
stringParam :: BS.ByteString -> BS.ByteString -> QueryBuilder ()
stringParam key val = tell . return $ key' `BS.append` ('=' `BS.cons` val')
where key' = escp key
val' = escp val
escp = BS.pack . escapeURIString isAllowedQS . BS.unpack
-- | Convenience method to show that there are no parameters.
noParams :: QueryBuilder ()
noParams = return ()
-- | Turn the 'QueryBuilder' into a 'BS.ByteString'.
compileQuery :: QueryBuilder a -> BS.ByteString
compileQuery query =
case execWriter query of
[] -> ""
qs -> '?' `BS.cons` mconcat (intersperse "&" qs)
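-- A usage sketch (parameter names and values are illustrative):
-- >>> compileQuery (param "all" True >> stringParam "filter" "name=foo")
-- "?all=True&filter=name%3Dfoo"
-- >>> compileQuery noParams
-- ""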
|
nahiluhmot/docker-hs
|
src/Docker/QueryBuilder.hs
|
mit
| 1,563 | 0 | 11 | 428 | 353 | 194 | 159 | 29 | 2 |
main :: IO ()
main = print $ foldl1 lcm [1..20]
|
tamasgal/haskell_exercises
|
ProjectEuler/p005.hs
|
mit
| 21 | 0 | 6 | 3 | 17 | 8 | 9 | -1 | -1 |
module PlotLab.Events (attachHandlers) where
--------------------------------------------------------------------------------
import PlotLab.Figure (figurePlot)
import PlotLab.Settings
--------------------------------------------------------------------------------
import Data.Char (toLower)
import Data.IORef (IORef, modifyIORef, readIORef)
import Data.Text (unpack)
import Graphics.Rendering.Plot
import Graphics.UI.Gtk hiding (Circle, Color, Cross)
--------------------------------------------------------------------------------
attachHandlers :: (ComboBoxClass cbox) =>
Builder
-> IORef FigureSettings
-> [Adjustment]
-> (DrawingArea -> IORef FigureSettings
-> [Adjustment]
-> IO ())
-> ([Double] -> Double -> Double)
-> cbox
-> IO ()
attachHandlers builder iofset adjs updateCanvas g combo = do
-- Canvas
canvas <- builderGetObject builder castToDrawingArea "Plotting Canvas"
let redraw = updateCanvas canvas iofset adjs
_ <- onExpose canvas $ \_ -> redraw >> return False
-- Adjustments for parameter sliders
mapM_ (`onValueChanged` redraw) adjs
-- Plot-Title Entry
titleEntry <- builderGetObject builder castToEntry "Plot Title Entry"
_ <- onEntryActivate titleEntry $ do
titleNew <- entryGetText titleEntry
modifyIORef iofset $ \f -> f { plotTitle = Just titleNew }
redraw
-- Plot-Title Font Size
titleSize <- builderGetObject builder castToAdjustment "Title Font Size"
_ <- onValueChanged titleSize $ do
size <- adjustmentGetValue titleSize
modifyIORef iofset $ \f -> f { plotTitleSize = size }
redraw
-- Subtitle Entry
subEntry <- builderGetObject builder castToEntry "Subtitle Entry"
_ <- onEntryActivate subEntry $ do
titleNew <- entryGetText subEntry
modifyIORef iofset $ \f -> f { subTitle = Just titleNew }
redraw
-- Subtitle Font Size
subSize <- builderGetObject builder castToAdjustment "Subtitle Font Size"
_ <- onValueChanged subSize $ do
size <- adjustmentGetValue subSize
modifyIORef iofset $ \f -> f { subTitleSize = size }
redraw
-- Show X-Axis CheckButton
showX <- builderGetObject builder castToCheckButton "X-Axis Check"
_ <- onToggled showX $ do
state <- toggleButtonGetActive showX
modifyIORef iofset
(\f -> f { showXAxis = state })
redraw
-- Show Y-Axis CheckButton
showY <- builderGetObject builder castToCheckButton "Y-Axis Check"
_ <- onToggled showY $ do
state <- toggleButtonGetActive showY
modifyIORef iofset
(\f -> f { showYAxis = state })
redraw
let adjustmentPairValues (a1, a2) = do v1 <- adjustmentGetValue a1
v2 <- adjustmentGetValue a2
return (v1, v2)
-- X-Axis Range
xLower <- builderGetObject builder castToAdjustment "X-Lower"
xUpper <- builderGetObject builder castToAdjustment "X-Upper"
let updateX x = onValueChanged x $ do
(xl, xu) <- adjustmentPairValues (xLower, xUpper)
modifyIORef iofset $ \f -> f { xRange = Just (xl, xu) }
redraw
in mapM_ updateX [xLower, xUpper]
-- X-Range Auto-determination CheckBox
xRangeEntries <- builderGetObject builder castToHBox "X-Range Entries"
autoXCheck <- builderGetObject builder castToCheckButton "X-Range Check"
_ <- onToggled autoXCheck $ do
state <- toggleButtonGetActive autoXCheck
if state
          then do modifyIORef iofset $ \f -> f { xRange = Nothing }
                  widgetHideAll xRangeEntries
else do lower <- builderGetObject builder castToAdjustment "X-Lower"
upper <- builderGetObject builder castToAdjustment "X-Upper"
(l, u) <- adjustmentPairValues (lower, upper)
widgetShowAll xRangeEntries
modifyIORef iofset $ \f -> f { xRange = Just (l, u) }
redraw
-- Y-Axis Range
yLower <- builderGetObject builder castToAdjustment "Y-Lower"
yUpper <- builderGetObject builder castToAdjustment "Y-Upper"
let updateY y = onValueChanged y $ do
(yl, yu) <- adjustmentPairValues (yLower, yUpper)
modifyIORef iofset $ \f -> f { yRange = Just (yl, yu) }
redraw
in mapM_ updateY [yLower, yUpper]
-- Y-Range Auto-determination CheckBox
yRangeEntries <- builderGetObject builder castToHBox "Y-Range Entries"
autoYCheck <- builderGetObject builder castToCheckButton "Y-Range Check"
_ <- onToggled autoYCheck $ do
state <- toggleButtonGetActive autoYCheck
if state
then do modifyIORef iofset $ \f -> f { yRange = Nothing }
widgetHideAll yRangeEntries
else do lower <- builderGetObject builder castToAdjustment "Y-Lower"
upper <- builderGetObject builder castToAdjustment "Y-Upper"
(l, u) <- adjustmentPairValues (lower, upper)
widgetShowAll yRangeEntries
modifyIORef iofset $ \f -> f { yRange = Just (l, u) }
redraw
-- Sampling Rate
sampleRate <- builderGetObject builder castToAdjustment "Sampling Adj"
_ <- onValueChanged sampleRate $ do
rate <- adjustmentGetValue sampleRate
modifyIORef iofset $ \f -> f { samplingRate = rate }
redraw
-- X-Label Entry
xLabelEntry <- builderGetObject builder castToEntry "X-Label Entry"
_ <- onEntryActivate xLabelEntry $ do
label' <- entryGetText xLabelEntry
modifyIORef iofset $ \f -> f { xLabel = Just label' }
redraw
-- X-Label Size
xlSize <- builderGetObject builder castToAdjustment "X-Label Size"
_ <- onValueChanged xlSize $ do
size <- adjustmentGetValue xlSize
modifyIORef iofset $ \f -> f { xLabelSize = size }
redraw
-- Y-Label Entry
yLabelEntry <- builderGetObject builder castToEntry "Y-Label Entry"
_ <- onEntryActivate yLabelEntry $ do
label' <- entryGetText yLabelEntry
modifyIORef iofset $ \f -> f { yLabel = Just label' }
redraw
ylSize <- builderGetObject builder castToAdjustment "Y-Label Size"
_ <- onValueChanged ylSize $ do
size <- adjustmentGetValue ylSize
modifyIORef iofset $ \f -> f { yLabelSize = size }
redraw
  -- Export Filename Entry
-- Export Button
export <- builderGetObject builder castToButton "Export Button"
_ <- onClicked export $ do
fset <- readIORef iofset
comboText <- comboBoxGetActiveText combo
nameEntry <- builderGetObject builder castToEntry "File Entry"
nameText <- entryGetText nameEntry
modifyIORef iofset $ \f -> f { fileName = nameText }
figure <- figurePlot g iofset adjs
let dimensions = exportSize fset
parseType txt = case txt of
"PNG" -> PNG
"SVG" -> SVG
"PS" -> PS
"PDF" -> PDF
_ -> error "Invalid FileType!"
filetype = parseType $ maybe (error "Invalid Type!") unpack comboText
parseExt fileType = case fileType of
PNG -> "PNG"
SVG -> "SVG"
PS -> "PS"
PDF -> "PDF"
filename = nameText ++ "." ++ map toLower (parseExt filetype)
writeFigure filetype filename dimensions figure
return ()
--------------------------------------------------------------------------------
|
sumitsahrawat/plot-lab
|
src/PlotLab/Events.hs
|
gpl-2.0
| 7,628 | 0 | 19 | 2,154 | 1,903 | 913 | 990 | 146 | 10 |
{-# LANGUAGE TypeFamilies #-}
-- |
-- Module: Instruments.Instrument
-- Copyright: (c) Johan Astborg, Andreas Bock
-- License: BSD-3
-- Maintainer: Andreas Bock <[email protected]>
-- Stability: experimental
-- Portability: portable
--
-- Top-level class for financial instruments
module Instruments.Instrument where
import Utils.Currency
-- | Instrument is a base class for financial instruments
class Instrument i where
data PricingEngine i :: *
expired :: i -> IO Bool
pv :: i -> PricingEngine i -> IO Cash
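-- A hypothetical instance sketch (not part of the source; the bond type and
-- its fields are illustrative only), showing how the associated
-- 'PricingEngine' data family is filled in per instrument:
-- instance Instrument SomeBond where
--   data PricingEngine SomeBond = FlatDiscount Double
--   expired bond = ...
--   pv bond (FlatDiscount rate) = ...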
|
andreasbock/hql
|
src/Instruments/Instrument.hs
|
gpl-2.0
| 538 | 0 | 9 | 102 | 69 | 42 | 27 | 7 | 0 |
{-# LANGUAGE Arrows, NoMonomorphismRestriction, TypeOperators #-}
{- The Robot example in circuits.
- Copyright : (C)opyright 2006, 2009-2011 peteg42 at gmail dot com
- License : GPL (see COPYING for details)
-
- ghc -O -main-is Robot_spr_broadcast.main -rtsopts -prof -package ADHOC Robot_spr_broadcast.hs
- ghci -package ADHOC Robot_spr_broadcast.hs
- mapM minimize kautos >>= dot
-}
module Robot_spr_broadcast where
-------------------------------------------------------------------
-- Dependencies.
-------------------------------------------------------------------
import Prelude hiding ( id )
import ADHOC
import ADHOC.NonDet
import ADHOC.Data.Arithmetic
import ADHOC.Patterns
import ADHOC.ModelChecker.CTL
import ADHOC.Knowledge
-------------------------------------------------------------------
-- Parameters.
-------------------------------------------------------------------
goalRegionCentre :: ArrowNum (~>) n => () ~> n
goalRegionCentre = fromIntegerA 3
-------------------------------------------------------------------
-- An implementation of the environment.
--
-- Non-deterministically move right or stay still, unless the robot
-- has signalled it wishes to stop. Update the sensor with a reading
-- within 1 unit of the present position.
--
-- - Need to have the robot's halt instantaneously responded to.
-------------------------------------------------------------------
environment = proc halt ->
do rec pos <- (| delayAC
(fromIntegerA 0 -< ())
(| muxAC (returnA -< halt)
(returnA -< pos)
(| nondetFairAC (returnA -< pos)
(incA -< pos) |) |) |)
(posP, posS) <- decA &&& incA -< pos
sensor <- nondetListA 3 -< [posP, pos, posS]
returnA -< (pos, sensor)
-------------------------------------------------------------------
-- KBP.
-------------------------------------------------------------------
-- | The @Robot@ agent running a KBP.
robot = broadcast
(mkSizedListSingletonA ("Robot", id, kTest ("Robot" `knows` probe "inGoal")))
id
id
-------------------------------------------------------------------
-- Robot top-level.
-------------------------------------------------------------------
robotTop = proc () ->
do rec [halted] <- unSizedListA <<< robot -< sensor
(pos, sensor) <- environment -< halted
natA (undefined :: Three) -< pos
returnA -< (halted, pos, sensor)
-------------------------------------------------------------------
-- Propositions.
-------------------------------------------------------------------
haltedp = arr (\(halted, _pos, _sensor) -> halted)
initiallySanep = proc (halted, pos, sensor) ->
do a <- eqA <<< second (fromIntegerA 0) -< (pos, ())
b <- orA <<< (eqA <<< second (fromIntegerA 0))
&&& (eqA <<< second (fromIntegerA 1)) -< (sensor, ())
c <- notA -< halted
conjoinA 3 -< [a, b, c]
inGoalp = proc (_halted, pos, _sensor) ->
do grcv <- goalRegionCentre -< ()
(grlv, grrv) <- decA &&& incA -< grcv
disjoinA 3 <<< mapA 3 eqA -< zip (repeat pos) [grlv, grcv, grrv]
pastGoalp = proc (_halted, pos, _sensor) ->
do grrv <- incA <<< goalRegionCentre -< ()
gtA -< (pos, grrv)
props = proc x ->
do initiallySanev <- initiallySanep -< x
haltedv <- haltedp -< x
inGoalv <- probeA "inGoal" <<< inGoalp -< x
pastGoalv <- pastGoalp -< x
returnA -< (initiallySanev, haltedv, inGoalv, pastGoalv)
cprops = robotTop >>> props
-------------------------------------------------------------------
-- Model checking.
-------------------------------------------------------------------
Just (kautos, m, (initiallySane, halted, inGoal, pastGoal)) = broadcastSprSynth MinSTAMINA cprops
ctlM = mkCTLModel m
-- Sanity check the initial state.
test_initially_sane = isOK (mc ctlM (prop initiallySane))
-- Due to fairness we halt on all runs.
test_af_halt = isOK (mc ctlM (af (prop halted)))
-- At every instant it is still possible we might halt.
test_ag_ef_halt = isOK (mc ctlM (ag (ef (prop halted))))
test_neg_ag_ef_halt = not (isOK (mc ctlM (neg (ag (ef (prop halted))))))
-- We never make it past the goal region.
test_ag_neg_pastGoal = isOK (mc ctlM (ag (neg (prop pastGoal))))
-- If the robot halts, then it is in the goal region.
test_if_halts_in_goal = isOK (mc ctlM (ag (prop halted --> prop inGoal)))
test_not_if_in_goal_halted = not (isOK (mc ctlM (ag (prop halted <-- prop inGoal))))
-- 'halt' cannot stutter.
test_halt_stutter = not (isOK (mc ctlM (ef (prop halted /\ ef (neg (prop halted))))))
-- The robot does nothing until it halts for all time. The eventuality
-- holds due to fairness.
test_halt_permanent = isOK (mc ctlM ( (neg (prop halted) `au` ag (prop halted)) ))
-- ok_netlist = runNL cprops
-------------------------------------------------------------------
-- Profiling top-level.
-------------------------------------------------------------------
-- main = let ([bm], m') = broadcastSprSynth m in minimize bm >>= dot m
|
peteg/ADHOC
|
Apps/Robot/Robot_spr_broadcast.hs
|
gpl-2.0
| 5,182 | 12 | 18 | 1,020 | 1,184 | 635 | 549 | -1 | -1 |
module HEP.Automation.MadGraph.Dataset.Set20110516set1 where
import HEP.Storage.WebDAV
import HEP.Automation.MadGraph.Model
import HEP.Automation.MadGraph.Machine
import HEP.Automation.MadGraph.UserCut
import HEP.Automation.MadGraph.SetupType
import HEP.Automation.MadGraph.Model.ZpHFull
import HEP.Automation.MadGraph.Dataset.Common
import HEP.Automation.MadGraph.Dataset.Processes
import qualified Data.ByteString as B
psetup_zphfull_WBZprimeDecay :: ProcessSetup ZpHFull
psetup_zphfull_WBZprimeDecay = PS {
mversion = MadGraph4
, model = ZpHFull
, process = preDefProcess WBZprimeDecay
, processBrief = "WBZpDecay"
, workname = "516ZpH_WBZprimeDecay"
}
zpHFullParamSet :: [ModelParam ZpHFull]
zpHFullParamSet = [ ZpHFullParam m g (0.28)
                  | m <- [150.0]
, g <- [1.0] ]
psetuplist :: [ProcessSetup ZpHFull]
psetuplist = [ psetup_zphfull_WBZprimeDecay ]
sets :: [Int]
sets = [1..20]
zptasklist :: ScriptSetup -> ClusterSetup ZpHFull -> [WorkSetup ZpHFull]
zptasklist ssetup csetup =
[ WS ssetup (psetup_zphfull_WBZprimeDecay )
(RS { param = p
, numevent = 10000
, machine = TeVatron
, rgrun = Fixed
, rgscale = 200.0
, match = NoMatch
, cut = DefCut
, pythia = RunPYTHIA
, usercut = NoUserCutDef
, pgs = RunPGSNoTau
, setnum = num
})
csetup
(WebDAVRemoteDir "mc/TeVatronFor3/ZpHFull0516Big")
| p <- zpHFullParamSet , num <- sets ]
totaltasklistEvery :: Int -> Int -> ScriptSetup -> ClusterSetup ZpHFull -> [WorkSetup ZpHFull]
totaltasklistEvery n r ssetup csetup =
let lst = zip [1..] (zptasklist ssetup csetup)
in map snd . filter (\(x,_)-> x `mod` n == r) $ lst
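-- e.g. 'totaltasklistEvery 3 1 ssetup csetup' keeps the tasks numbered
-- 1, 4, 7, ... (the indices with x `mod` 3 == 1).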
|
wavewave/madgraph-auto-dataset
|
src/HEP/Automation/MadGraph/Dataset/Set20110516set1.hs
|
gpl-3.0
| 1,829 | 0 | 13 | 470 | 462 | 273 | 189 | 46 | 1 |
module Extension.Data.Accessor (
-- * Containers
mapDirect
) where
import qualified Data.Map as M
import Data.Accessor
import Data.Accessor.Container
-- | A direct accessor for maps. Will fail at runtime if the element is not
-- present.
mapDirect :: Ord k => k -> Accessor (M.Map k v) v
mapDirect k = accessor (M.! k) (\v -> M.insert k v)
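-- Usage sketch (assumes 'getVal'/'setVal' from Data.Accessor):
-- >>> getVal (mapDirect 'a') (M.fromList [('a', 1), ('b', 2)])
-- 1
-- >>> setVal (mapDirect 'b') 9 (M.fromList [('a', 1), ('b', 2)])
-- fromList [('a',1),('b',9)]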
|
meiersi/scyther-proof
|
src/Extension/Data/Accessor.hs
|
gpl-3.0
| 349 | 0 | 10 | 67 | 99 | 57 | 42 | 7 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.FusionTables.Template.Update
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates an existing template
--
-- /See:/ <https://developers.google.com/fusiontables Fusion Tables API Reference> for @fusiontables.template.update@.
module Network.Google.Resource.FusionTables.Template.Update
(
-- * REST Resource
TemplateUpdateResource
-- * Creating a Request
, templateUpdate
, TemplateUpdate
-- * Request Lenses
, tuTemplateId
, tuPayload
, tuTableId
) where
import Network.Google.FusionTables.Types
import Network.Google.Prelude
-- | A resource alias for @fusiontables.template.update@ method which the
-- 'TemplateUpdate' request conforms to.
type TemplateUpdateResource =
"fusiontables" :>
"v2" :>
"tables" :>
Capture "tableId" Text :>
"templates" :>
Capture "templateId" (Textual Int32) :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] Template :> Put '[JSON] Template
-- | Updates an existing template
--
-- /See:/ 'templateUpdate' smart constructor.
data TemplateUpdate = TemplateUpdate'
{ _tuTemplateId :: !(Textual Int32)
, _tuPayload :: !Template
, _tuTableId :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'TemplateUpdate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tuTemplateId'
--
-- * 'tuPayload'
--
-- * 'tuTableId'
templateUpdate
:: Int32 -- ^ 'tuTemplateId'
-> Template -- ^ 'tuPayload'
-> Text -- ^ 'tuTableId'
-> TemplateUpdate
templateUpdate pTuTemplateId_ pTuPayload_ pTuTableId_ =
TemplateUpdate'
{ _tuTemplateId = _Coerce # pTuTemplateId_
, _tuPayload = pTuPayload_
, _tuTableId = pTuTableId_
}
-- | Identifier for the template that is being updated
tuTemplateId :: Lens' TemplateUpdate Int32
tuTemplateId
= lens _tuTemplateId (\ s a -> s{_tuTemplateId = a})
. _Coerce
-- | Multipart request metadata.
tuPayload :: Lens' TemplateUpdate Template
tuPayload
= lens _tuPayload (\ s a -> s{_tuPayload = a})
-- | Table to which the updated template belongs
tuTableId :: Lens' TemplateUpdate Text
tuTableId
= lens _tuTableId (\ s a -> s{_tuTableId = a})
instance GoogleRequest TemplateUpdate where
type Rs TemplateUpdate = Template
type Scopes TemplateUpdate =
'["https://www.googleapis.com/auth/fusiontables"]
requestClient TemplateUpdate'{..}
= go _tuTableId _tuTemplateId (Just AltJSON)
_tuPayload
fusionTablesService
where go
= buildClient (Proxy :: Proxy TemplateUpdateResource)
mempty
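-- Usage sketch (identifiers are illustrative; assumes the usual lens
-- operators from Control.Lens are in scope, and sending the request needs a
-- configured Google environment, which is not shown):
-- let upd = templateUpdate 7 tpl "myTable"
-- upd ^. tuTableId        -- "myTable"
-- upd & tuPayload .~ tpl2 -- override a field via its lens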
|
rueshyna/gogol
|
gogol-fusiontables/gen/Network/Google/Resource/FusionTables/Template/Update.hs
|
mpl-2.0
| 3,489 | 0 | 15 | 822 | 481 | 285 | 196 | 73 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Spanner.Scans.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Return available scans given a Database-specific resource name.
--
-- /See:/ <https://cloud.google.com/spanner/ Cloud Spanner API Reference> for @spanner.scans.list@.
module Network.Google.Resource.Spanner.Scans.List
(
-- * REST Resource
ScansListResource
-- * Creating a Request
, scansList
, ScansList
-- * Request Lenses
, slParent
, slXgafv
, slUploadProtocol
, slAccessToken
, slUploadType
, slView
, slFilter
, slPageToken
, slPageSize
, slCallback
) where
import Network.Google.Prelude
import Network.Google.Spanner.Types
-- | A resource alias for @spanner.scans.list@ method which the
-- 'ScansList' request conforms to.
type ScansListResource =
"v1" :>
Capture "parent" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "view" ScansListView :>
QueryParam "filter" Text :>
QueryParam "pageToken" Text :>
QueryParam "pageSize" (Textual Int32) :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListScansResponse
-- | Return available scans given a Database-specific resource name.
--
-- /See:/ 'scansList' smart constructor.
data ScansList =
ScansList'
{ _slParent :: !Text
, _slXgafv :: !(Maybe Xgafv)
, _slUploadProtocol :: !(Maybe Text)
, _slAccessToken :: !(Maybe Text)
, _slUploadType :: !(Maybe Text)
, _slView :: !(Maybe ScansListView)
, _slFilter :: !(Maybe Text)
, _slPageToken :: !(Maybe Text)
, _slPageSize :: !(Maybe (Textual Int32))
, _slCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ScansList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'slParent'
--
-- * 'slXgafv'
--
-- * 'slUploadProtocol'
--
-- * 'slAccessToken'
--
-- * 'slUploadType'
--
-- * 'slView'
--
-- * 'slFilter'
--
-- * 'slPageToken'
--
-- * 'slPageSize'
--
-- * 'slCallback'
scansList
:: Text -- ^ 'slParent'
-> ScansList
scansList pSlParent_ =
ScansList'
{ _slParent = pSlParent_
, _slXgafv = Nothing
, _slUploadProtocol = Nothing
, _slAccessToken = Nothing
, _slUploadType = Nothing
, _slView = Nothing
, _slFilter = Nothing
, _slPageToken = Nothing
, _slPageSize = Nothing
, _slCallback = Nothing
}
-- | Required. The unique name of the parent resource, specific to the
-- Database service implementing this interface.
slParent :: Lens' ScansList Text
slParent = lens _slParent (\ s a -> s{_slParent = a})
-- | V1 error format.
slXgafv :: Lens' ScansList (Maybe Xgafv)
slXgafv = lens _slXgafv (\ s a -> s{_slXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
slUploadProtocol :: Lens' ScansList (Maybe Text)
slUploadProtocol
= lens _slUploadProtocol
(\ s a -> s{_slUploadProtocol = a})
-- | OAuth access token.
slAccessToken :: Lens' ScansList (Maybe Text)
slAccessToken
= lens _slAccessToken
(\ s a -> s{_slAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
slUploadType :: Lens' ScansList (Maybe Text)
slUploadType
= lens _slUploadType (\ s a -> s{_slUploadType = a})
-- | Specifies which parts of the Scan should be returned in the response.
-- Note, only the SUMMARY view (the default) is currently supported for
-- ListScans.
slView :: Lens' ScansList (Maybe ScansListView)
slView = lens _slView (\ s a -> s{_slView = a})
-- | A filter expression to restrict the results based on information present
-- in the available Scan collection. The filter applies to all fields
-- within the Scan message except for \`data\`.
slFilter :: Lens' ScansList (Maybe Text)
slFilter = lens _slFilter (\ s a -> s{_slFilter = a})
-- | The next_page_token value returned from a previous List request, if any.
slPageToken :: Lens' ScansList (Maybe Text)
slPageToken
= lens _slPageToken (\ s a -> s{_slPageToken = a})
-- | The maximum number of items to return.
slPageSize :: Lens' ScansList (Maybe Int32)
slPageSize
= lens _slPageSize (\ s a -> s{_slPageSize = a}) .
mapping _Coerce
-- | JSONP
slCallback :: Lens' ScansList (Maybe Text)
slCallback
= lens _slCallback (\ s a -> s{_slCallback = a})
instance GoogleRequest ScansList where
type Rs ScansList = ListScansResponse
type Scopes ScansList =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/spanner.data"]
requestClient ScansList'{..}
= go _slParent _slXgafv _slUploadProtocol
_slAccessToken
_slUploadType
_slView
_slFilter
_slPageToken
_slPageSize
_slCallback
(Just AltJSON)
spannerService
where go
= buildClient (Proxy :: Proxy ScansListResource)
mempty
|
brendanhay/gogol
|
gogol-spanner/gen/Network/Google/Resource/Spanner/Scans/List.hs
|
mpl-2.0
| 6,001 | 0 | 19 | 1,506 | 1,041 | 601 | 440 | 139 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
-- |
-- Module : Network.Google.BigtableAdmin
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Administer your Cloud Bigtable tables and instances.
--
-- /See:/ <https://cloud.google.com/bigtable/ Cloud Bigtable Admin API Reference>
module Network.Google.BigtableAdmin
(
-- * Service Configuration
bigtableAdminService
-- * OAuth Scopes
, bigtableAdminClusterScope
, cloudBigtableAdminTableScope
, cloudPlatformReadOnlyScope
, bigtableAdminScope
, cloudPlatformScope
, cloudBigtableAdminScope
, cloudBigtableAdminClusterScope
, bigtableAdminTableScope
, bigtableAdminInstanceScope
-- * API Declaration
, BigtableAdminAPI
-- * Resources
-- ** bigtableadmin.operations.cancel
, module Network.Google.Resource.BigtableAdmin.Operations.Cancel
-- ** bigtableadmin.operations.delete
, module Network.Google.Resource.BigtableAdmin.Operations.Delete
-- ** bigtableadmin.operations.get
, module Network.Google.Resource.BigtableAdmin.Operations.Get
-- ** bigtableadmin.operations.projects.operations.list
, module Network.Google.Resource.BigtableAdmin.Operations.Projects.Operations.List
-- ** bigtableadmin.projects.instances.appProfiles.create
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.AppProFiles.Create
-- ** bigtableadmin.projects.instances.appProfiles.delete
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.AppProFiles.Delete
-- ** bigtableadmin.projects.instances.appProfiles.get
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.AppProFiles.Get
-- ** bigtableadmin.projects.instances.appProfiles.list
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.AppProFiles.List
-- ** bigtableadmin.projects.instances.appProfiles.patch
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.AppProFiles.Patch
-- ** bigtableadmin.projects.instances.clusters.backups.create
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.Backups.Create
-- ** bigtableadmin.projects.instances.clusters.backups.delete
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.Backups.Delete
-- ** bigtableadmin.projects.instances.clusters.backups.get
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.Backups.Get
-- ** bigtableadmin.projects.instances.clusters.backups.getIamPolicy
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.Backups.GetIAMPolicy
-- ** bigtableadmin.projects.instances.clusters.backups.list
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.Backups.List
-- ** bigtableadmin.projects.instances.clusters.backups.patch
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.Backups.Patch
-- ** bigtableadmin.projects.instances.clusters.backups.setIamPolicy
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.Backups.SetIAMPolicy
-- ** bigtableadmin.projects.instances.clusters.backups.testIamPermissions
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.Backups.TestIAMPermissions
-- ** bigtableadmin.projects.instances.clusters.create
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.Create
-- ** bigtableadmin.projects.instances.clusters.delete
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.Delete
-- ** bigtableadmin.projects.instances.clusters.get
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.Get
-- ** bigtableadmin.projects.instances.clusters.list
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.List
-- ** bigtableadmin.projects.instances.clusters.partialUpdateCluster
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.PartialUpdateCluster
-- ** bigtableadmin.projects.instances.clusters.update
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.Update
-- ** bigtableadmin.projects.instances.create
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Create
-- ** bigtableadmin.projects.instances.delete
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Delete
-- ** bigtableadmin.projects.instances.get
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Get
-- ** bigtableadmin.projects.instances.getIamPolicy
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.GetIAMPolicy
-- ** bigtableadmin.projects.instances.list
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.List
-- ** bigtableadmin.projects.instances.partialUpdateInstance
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.PartialUpdateInstance
-- ** bigtableadmin.projects.instances.setIamPolicy
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.SetIAMPolicy
-- ** bigtableadmin.projects.instances.tables.checkConsistency
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Tables.CheckConsistency
-- ** bigtableadmin.projects.instances.tables.create
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Tables.Create
-- ** bigtableadmin.projects.instances.tables.delete
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Tables.Delete
-- ** bigtableadmin.projects.instances.tables.dropRowRange
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Tables.DropRowRange
-- ** bigtableadmin.projects.instances.tables.generateConsistencyToken
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Tables.GenerateConsistencyToken
-- ** bigtableadmin.projects.instances.tables.get
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Tables.Get
-- ** bigtableadmin.projects.instances.tables.getIamPolicy
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Tables.GetIAMPolicy
-- ** bigtableadmin.projects.instances.tables.list
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Tables.List
-- ** bigtableadmin.projects.instances.tables.modifyColumnFamilies
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Tables.ModifyColumnFamilies
-- ** bigtableadmin.projects.instances.tables.restore
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Tables.Restore
-- ** bigtableadmin.projects.instances.tables.setIamPolicy
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Tables.SetIAMPolicy
-- ** bigtableadmin.projects.instances.tables.testIamPermissions
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Tables.TestIAMPermissions
-- ** bigtableadmin.projects.instances.testIamPermissions
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.TestIAMPermissions
-- ** bigtableadmin.projects.instances.update
, module Network.Google.Resource.BigtableAdmin.Projects.Instances.Update
-- ** bigtableadmin.projects.locations.get
, module Network.Google.Resource.BigtableAdmin.Projects.Locations.Get
-- ** bigtableadmin.projects.locations.list
, module Network.Google.Resource.BigtableAdmin.Projects.Locations.List
-- * Types
-- ** SingleClusterRouting
, SingleClusterRouting
, singleClusterRouting
, scrAllowTransactionalWrites
, scrClusterId
-- ** InstanceLabels
, InstanceLabels
, instanceLabels
, ilAddtional
-- ** ListBackupsResponse
, ListBackupsResponse
, listBackupsResponse
, lbrNextPageToken
, lbrBackups
-- ** Status
, Status
, status
, sDetails
, sCode
, sMessage
-- ** CreateInstanceRequest
, CreateInstanceRequest
, createInstanceRequest
, cirParent
, cirInstanceId
, cirClusters
, cirInstance
-- ** AuditConfig
, AuditConfig
, auditConfig
, acService
, acAuditLogConfigs
-- ** GenerateConsistencyTokenRequest
, GenerateConsistencyTokenRequest
, generateConsistencyTokenRequest
-- ** ModifyColumnFamiliesRequest
, ModifyColumnFamiliesRequest
, modifyColumnFamiliesRequest
, mcfrModifications
-- ** Expr
, Expr
, expr
, eLocation
, eExpression
, eTitle
, eDescription
-- ** ListLocationsResponse
, ListLocationsResponse
, listLocationsResponse
, llrNextPageToken
, llrLocations
-- ** ListOperationsResponse
, ListOperationsResponse
, listOperationsResponse
, lorNextPageToken
, lorOperations
-- ** CreateClusterRequest
, CreateClusterRequest
, createClusterRequest
, ccrParent
, ccrCluster
, ccrClusterId
-- ** GetIAMPolicyRequest
, GetIAMPolicyRequest
, getIAMPolicyRequest
, giprOptions
-- ** Cluster
, Cluster
, cluster
, cState
, cDefaultStorageType
, cLocation
, cServeNodes
, cName
, cEncryptionConfig
-- ** Split
, Split
, split
, sKey
-- ** MultiClusterRoutingUseAny
, MultiClusterRoutingUseAny
, multiClusterRoutingUseAny
-- ** ClusterState
, ClusterState
, clusterState
, csReplicationState
, csEncryptionInfo
-- ** Location
, Location
, location
, lName
, lMetadata
, lDisplayName
, lLabels
, lLocationId
-- ** Operation
, Operation
, operation
, oDone
, oError
, oResponse
, oName
, oMetadata
-- ** Empty
, Empty
, empty
-- ** ClusterDefaultStorageType
, ClusterDefaultStorageType (..)
-- ** ListAppProFilesResponse
, ListAppProFilesResponse
, listAppProFilesResponse
, lapfrNextPageToken
, lapfrFailedLocations
, lapfrAppProFiles
-- ** OperationProgress
, OperationProgress
, operationProgress
, opStartTime
, opProgressPercent
, opEndTime
-- ** TableClusterStates
, TableClusterStates
, tableClusterStates
, tcsAddtional
-- ** TableColumnFamilies
, TableColumnFamilies
, tableColumnFamilies
, tcfAddtional
-- ** CreateTableRequest
, CreateTableRequest
, createTableRequest
, ctrInitialSplits
, ctrTableId
, ctrTable
-- ** RestoreInfoSourceType
, RestoreInfoSourceType (..)
-- ** CreateClusterMetadata
, CreateClusterMetadata
, createClusterMetadata
, ccmRequestTime
, ccmTables
, ccmOriginalRequest
, ccmFinishTime
-- ** TableProgress
, TableProgress
, tableProgress
, tpState
, tpEstimatedSizeBytes
, tpEstimatedCopiedBytes
-- ** Union
, Union
, union
, uRules
-- ** StatusDetailsItem
, StatusDetailsItem
, statusDetailsItem
, sdiAddtional
-- ** CreateClusterMetadataTables
, CreateClusterMetadataTables
, createClusterMetadataTables
, ccmtAddtional
-- ** ProjectsInstancesTablesListView
, ProjectsInstancesTablesListView (..)
-- ** EncryptionInfoEncryptionType
, EncryptionInfoEncryptionType (..)
-- ** UpdateAppProFileMetadata
, UpdateAppProFileMetadata
, updateAppProFileMetadata
-- ** RestoreTableMetadataSourceType
, RestoreTableMetadataSourceType (..)
-- ** GetPolicyOptions
, GetPolicyOptions
, getPolicyOptions
, gpoRequestedPolicyVersion
-- ** Backup
, Backup
, backup
, bSizeBytes
, bState
, bStartTime
, bSourceTable
, bName
, bEndTime
, bExpireTime
, bEncryptionInfo
-- ** UpdateClusterMetadata
, UpdateClusterMetadata
, updateClusterMetadata
, ucmRequestTime
, ucmOriginalRequest
, ucmFinishTime
-- ** ClusterStateReplicationState
, ClusterStateReplicationState (..)
-- ** SetIAMPolicyRequest
, SetIAMPolicyRequest
, setIAMPolicyRequest
, siprUpdateMask
, siprPolicy
-- ** InstanceType
, InstanceType (..)
-- ** FailureTrace
, FailureTrace
, failureTrace
, ftFrames
-- ** CheckConsistencyRequest
, CheckConsistencyRequest
, checkConsistencyRequest
, ccrConsistencyToken
-- ** ListTablesResponse
, ListTablesResponse
, listTablesResponse
, ltrNextPageToken
, ltrTables
-- ** TableProgressState
, TableProgressState (..)
-- ** RestoreTableRequest
, RestoreTableRequest
, restoreTableRequest
, rtrBackup
, rtrTableId
-- ** CreateBackupMetadata
, CreateBackupMetadata
, createBackupMetadata
, cbmStartTime
, cbmSourceTable
, cbmName
, cbmEndTime
-- ** AuditLogConfigLogType
, AuditLogConfigLogType (..)
-- ** PartialUpdateInstanceRequest
, PartialUpdateInstanceRequest
, partialUpdateInstanceRequest
, puirUpdateMask
, puirInstance
-- ** Xgafv
, Xgafv (..)
-- ** TableGranularity
, TableGranularity (..)
-- ** GcRule
, GcRule
, gcRule
, grMaxAge
, grUnion
, grIntersection
, grMaxNumVersions
-- ** TestIAMPermissionsRequest
, TestIAMPermissionsRequest
, testIAMPermissionsRequest
, tiprPermissions
-- ** ClusterType
, ClusterType (..)
-- ** ProjectsInstancesTablesGetView
, ProjectsInstancesTablesGetView (..)
-- ** AppProFile
, AppProFile
, appProFile
, apfSingleClusterRouting
, apfEtag
, apfMultiClusterRoutingUseAny
, apfName
, apfDescription
-- ** Frame
, Frame
, frame
, fWorkflowGuid
, fZoneId
, fTargetName
-- ** CreateInstanceRequestClusters
, CreateInstanceRequestClusters
, createInstanceRequestClusters
, circAddtional
-- ** GenerateConsistencyTokenResponse
, GenerateConsistencyTokenResponse
, generateConsistencyTokenResponse
, gctrConsistencyToken
-- ** EncryptionConfig
, EncryptionConfig
, encryptionConfig
, ecKmsKeyName
-- ** DropRowRangeRequest
, DropRowRangeRequest
, dropRowRangeRequest
, drrrRowKeyPrefix
, drrrDeleteAllDataFromTable
-- ** UpdateInstanceMetadata
, UpdateInstanceMetadata
, updateInstanceMetadata
, uimRequestTime
, uimOriginalRequest
, uimFinishTime
-- ** Intersection
, Intersection
, intersection
, iRules
-- ** ColumnFamily
, ColumnFamily
, columnFamily
, cfGcRule
-- ** TestIAMPermissionsResponse
, TestIAMPermissionsResponse
, testIAMPermissionsResponse
, tiamprPermissions
-- ** ListClustersResponse
, ListClustersResponse
, listClustersResponse
, lcrNextPageToken
, lcrFailedLocations
, lcrClusters
-- ** BackupInfo
, BackupInfo
, backupInfo
, biStartTime
, biSourceTable
, biBackup
, biEndTime
-- ** Policy
, Policy
, policy
, pAuditConfigs
, pEtag
, pVersion
, pBindings
-- ** LocationLabels
, LocationLabels
, locationLabels
, llAddtional
-- ** CreateInstanceMetadata
, CreateInstanceMetadata
, createInstanceMetadata
, cimRequestTime
, cimOriginalRequest
, cimFinishTime
-- ** LocationMetadata
, LocationMetadata
, locationMetadata
, lmAddtional
-- ** OperationMetadata
, OperationMetadata
, operationMetadata
, omAddtional
-- ** AuditLogConfig
, AuditLogConfig
, auditLogConfig
, alcLogType
, alcExemptedMembers
-- ** ListInstancesResponse
, ListInstancesResponse
, listInstancesResponse
, lirNextPageToken
, lirFailedLocations
, lirInstances
-- ** RestoreTableMetadata
, RestoreTableMetadata
, restoreTableMetadata
, rtmOptimizeTableOperationName
, rtmSourceType
, rtmProgress
, rtmName
, rtmBackupInfo
-- ** CheckConsistencyResponse
, CheckConsistencyResponse
, checkConsistencyResponse
, ccrConsistent
-- ** InstanceState
, InstanceState (..)
-- ** Modification
, Modification
, modification
, mDrop
, mCreate
, mId
, mUpdate
-- ** Table
, Table
, table
, tGranularity
, tName
, tRestoreInfo
, tClusterStates
, tColumnFamilies
-- ** OptimizeRestoredTableMetadata
, OptimizeRestoredTableMetadata
, optimizeRestoredTableMetadata
, ortmProgress
, ortmName
-- ** RestoreInfo
, RestoreInfo
, restoreInfo
, riSourceType
, riBackupInfo
-- ** OperationResponse
, OperationResponse
, operationResponse
, orAddtional
-- ** BackupState
, BackupState (..)
-- ** EncryptionInfo
, EncryptionInfo
, encryptionInfo
, eiEncryptionType
, eiKmsKeyVersion
, eiEncryptionStatus
-- ** Binding
, Binding
, binding
, bMembers
, bRole
, bCondition
-- ** Instance
, Instance
, instance'
, iState
, iName
, iDisplayName
, iLabels
, iType
) where
import Network.Google.Prelude
import Network.Google.BigtableAdmin.Types
import Network.Google.Resource.BigtableAdmin.Operations.Cancel
import Network.Google.Resource.BigtableAdmin.Operations.Delete
import Network.Google.Resource.BigtableAdmin.Operations.Get
import Network.Google.Resource.BigtableAdmin.Operations.Projects.Operations.List
import Network.Google.Resource.BigtableAdmin.Projects.Instances.AppProFiles.Create
import Network.Google.Resource.BigtableAdmin.Projects.Instances.AppProFiles.Delete
import Network.Google.Resource.BigtableAdmin.Projects.Instances.AppProFiles.Get
import Network.Google.Resource.BigtableAdmin.Projects.Instances.AppProFiles.List
import Network.Google.Resource.BigtableAdmin.Projects.Instances.AppProFiles.Patch
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.Backups.Create
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.Backups.Delete
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.Backups.Get
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.Backups.GetIAMPolicy
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.Backups.List
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.Backups.Patch
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.Backups.SetIAMPolicy
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.Backups.TestIAMPermissions
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.Create
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.Delete
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.Get
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.List
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.PartialUpdateCluster
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Clusters.Update
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Create
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Delete
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Get
import Network.Google.Resource.BigtableAdmin.Projects.Instances.GetIAMPolicy
import Network.Google.Resource.BigtableAdmin.Projects.Instances.List
import Network.Google.Resource.BigtableAdmin.Projects.Instances.PartialUpdateInstance
import Network.Google.Resource.BigtableAdmin.Projects.Instances.SetIAMPolicy
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Tables.CheckConsistency
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Tables.Create
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Tables.Delete
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Tables.DropRowRange
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Tables.GenerateConsistencyToken
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Tables.Get
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Tables.GetIAMPolicy
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Tables.List
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Tables.ModifyColumnFamilies
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Tables.Restore
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Tables.SetIAMPolicy
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Tables.TestIAMPermissions
import Network.Google.Resource.BigtableAdmin.Projects.Instances.TestIAMPermissions
import Network.Google.Resource.BigtableAdmin.Projects.Instances.Update
import Network.Google.Resource.BigtableAdmin.Projects.Locations.Get
import Network.Google.Resource.BigtableAdmin.Projects.Locations.List
{- $resources
TODO
-}
-- | Represents the entirety of the methods and resources available for the Cloud Bigtable Admin API service.
type BigtableAdminAPI =
OperationsProjectsOperationsListResource :<|>
OperationsGetResource
:<|> OperationsCancelResource
:<|> OperationsDeleteResource
:<|> ProjectsInstancesAppProFilesListResource
:<|> ProjectsInstancesAppProFilesPatchResource
:<|> ProjectsInstancesAppProFilesGetResource
:<|> ProjectsInstancesAppProFilesCreateResource
:<|> ProjectsInstancesAppProFilesDeleteResource
:<|> ProjectsInstancesTablesDropRowRangeResource
:<|> ProjectsInstancesTablesListResource
:<|> ProjectsInstancesTablesRestoreResource
:<|> ProjectsInstancesTablesGetIAMPolicyResource
:<|>
ProjectsInstancesTablesGenerateConsistencyTokenResource
:<|> ProjectsInstancesTablesGetResource
:<|>
ProjectsInstancesTablesModifyColumnFamiliesResource
:<|> ProjectsInstancesTablesCreateResource
:<|> ProjectsInstancesTablesSetIAMPolicyResource
:<|> ProjectsInstancesTablesCheckConsistencyResource
:<|>
ProjectsInstancesTablesTestIAMPermissionsResource
:<|> ProjectsInstancesTablesDeleteResource
:<|> ProjectsInstancesClustersBackupsListResource
:<|>
ProjectsInstancesClustersBackupsGetIAMPolicyResource
:<|> ProjectsInstancesClustersBackupsPatchResource
:<|> ProjectsInstancesClustersBackupsGetResource
:<|> ProjectsInstancesClustersBackupsCreateResource
:<|>
ProjectsInstancesClustersBackupsSetIAMPolicyResource
:<|>
ProjectsInstancesClustersBackupsTestIAMPermissionsResource
:<|> ProjectsInstancesClustersBackupsDeleteResource
:<|> ProjectsInstancesClustersListResource
:<|>
ProjectsInstancesClustersPartialUpdateClusterResource
:<|> ProjectsInstancesClustersGetResource
:<|> ProjectsInstancesClustersCreateResource
:<|> ProjectsInstancesClustersDeleteResource
:<|> ProjectsInstancesClustersUpdateResource
:<|> ProjectsInstancesListResource
:<|> ProjectsInstancesGetIAMPolicyResource
:<|> ProjectsInstancesGetResource
:<|> ProjectsInstancesCreateResource
:<|> ProjectsInstancesSetIAMPolicyResource
:<|> ProjectsInstancesPartialUpdateInstanceResource
:<|> ProjectsInstancesTestIAMPermissionsResource
:<|> ProjectsInstancesDeleteResource
:<|> ProjectsInstancesUpdateResource
:<|> ProjectsLocationsListResource
:<|> ProjectsLocationsGetResource
|
brendanhay/gogol
|
gogol-bigtableadmin/gen/Network/Google/BigtableAdmin.hs
|
mpl-2.0
| 23,906 | 0 | 49 | 4,187 | 2,484 | 1,846 | 638 | 495 | 0 |
{-# LANGUAGE OverloadedStrings, RecordWildCards, DataKinds #-}
module Model.Transcode
( module Model.Transcode.Types
, defaultTranscodeOptions
, transcodeAuth
, lookupTranscode
, lookupActiveTranscodes
, addTranscode
, updateTranscode
, findTranscode
, findMatchingTranscode
, checkAlreadyTranscoded
) where
-- import Database.PostgreSQL.Typed (pgSQL)
--import Database.PostgreSQL.Typed.Query
import Database.PostgreSQL.Typed.Types
-- import qualified Data.ByteString
-- import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Builder as BSB
import qualified Data.ByteString.Lazy as BSL
import Data.Maybe (fromMaybe)
import Data.Monoid ((<>))
import qualified Data.String
-- import Database.PostgreSQL.Typed.Query (pgSQL)
import Has
import Service.DB
import Service.Types
import Service.Crypto
import Store.Types
import Store.AV
import Store.Probe
-- import Model.SQL
import Model.Audit
import Model.Id
import Model.Party
import Model.Party.SQL
import Model.Permission.Types
import Model.Segment
import Model.Volume.Types
import Model.Format
import Model.Asset
import Model.Asset.SQL
import Model.AssetRevision.Types
import Model.Transcode.Types
import Model.Volume.SQL
defaultTranscodeOptions :: TranscodeArgs
defaultTranscodeOptions = ["-vf", "pad=iw+mod(iw\\,2):ih+mod(ih\\,2)"]
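-- Note (added, descriptive): the ffmpeg "-vf pad=..." filter above rounds the
-- output width and height up to the next even number, which many video
-- encoders require.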
transcodeAuth :: Transcode -> Secret -> BS.ByteString
transcodeAuth t = signature $ BSL.toStrict $ BSB.toLazyByteString
$ maybe id ((<>) . BSB.byteString) (assetSHA1 $ assetRow $ transcodeOrig t)
$ BSB.int32LE (unId $ transcodeId t)
lookupTranscode :: MonadDB c m => Id Transcode -> m (Maybe Transcode)
lookupTranscode a = do
-- dbQuery1 $(selectQuery selectTranscode "WHERE transcode.asset = ${a}")
let _tenv_a9v0o = unknownPGTypeEnv
mRow <-
dbQuery1
(mapQuery2
((\ _p_a9v0p ->
(BS.concat
[Data.String.fromString
"SELECT transcode.segment,transcode.options,transcode.start,transcode.process,transcode.log,party.id,party.name,party.prename,party.orcid,party.affiliation,party.url,account.email,account.password,authorize_view.site,authorize_view.member,asset.id,asset.format,asset.release,asset.duration,asset.name,asset.sha1,asset.size,orig.id,orig.format,orig.release,orig.duration,orig.name,orig.sha1,orig.size,volume.id,volume.name,volume.body,volume.alias,volume.doi,volume_creation(volume.id) FROM transcode JOIN party JOIN account USING (id) LEFT JOIN authorize_view ON account.id = authorize_view.child AND authorize_view.parent = 0 ON transcode.owner = party.id JOIN asset ON transcode.asset = asset.id JOIN asset AS orig ON transcode.orig = orig.id JOIN volume ON asset.volume = volume.id AND orig.volume = volume.id WHERE transcode.asset = ",
Database.PostgreSQL.Typed.Types.pgEscapeParameter
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_p_a9v0p]))
a)
(\
[_csegment_a9v0q,
_coptions_a9v0r,
_cstart_a9v0s,
_cprocess_a9v0t,
_clog_a9v0u,
_cid_a9v0v,
_cname_a9v0w,
_cprename_a9v0x,
_corcid_a9v0y,
_caffiliation_a9v0z,
_curl_a9v0A,
_cemail_a9v0B,
_cpassword_a9v0C,
_csite_a9v0D,
_cmember_a9v0E,
_cid_a9v0F,
_cformat_a9v0G,
_crelease_a9v0H,
_cduration_a9v0I,
_cname_a9v0J,
_csha1_a9v0K,
_csize_a9v0L,
_cid_a9v0M,
_cformat_a9v0N,
_crelease_a9v0O,
_cduration_a9v0P,
_cname_a9v0Q,
_csha1_a9v0R,
_csize_a9v0S,
_cid_a9v0T,
_cname_a9v0U,
_cbody_a9v0V,
_calias_a9v0W,
_cdoi_a9v0X,
_cvolume_creation_a9v0Y]
-> (Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "segment")
_csegment_a9v0q,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text[]")
_coptions_a9v0r,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "timestamp with time zone")
_cstart_a9v0s,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_cprocess_a9v0t,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_clog_a9v0u,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_cid_a9v0v,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_cname_a9v0w,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_cprename_a9v0x,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "bpchar")
_corcid_a9v0y,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_caffiliation_a9v0z,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_curl_a9v0A,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "character varying")
_cemail_a9v0B,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "character varying")
_cpassword_a9v0C,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "permission")
_csite_a9v0D,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "permission")
_cmember_a9v0E,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_cid_a9v0F,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "smallint")
_cformat_a9v0G,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "release")
_crelease_a9v0H,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "interval")
_cduration_a9v0I,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_cname_a9v0J,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "bytea")
_csha1_a9v0K,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "bigint")
_csize_a9v0L,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_cid_a9v0M,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "smallint")
_cformat_a9v0N,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "release")
_crelease_a9v0O,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "interval")
_cduration_a9v0P,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_cname_a9v0Q,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "bytea")
_csha1_a9v0R,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "bigint")
_csize_a9v0S,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_cid_a9v0T,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_cname_a9v0U,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_cbody_a9v0V,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "character varying")
_calias_a9v0W,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "character varying")
_cdoi_a9v0X,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v0o
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "timestamp with time zone")
_cvolume_creation_a9v0Y)))
pure
(fmap
(\ (vsegment_a9uZh, voptions_a9uZi, vstart_a9uZj, vprocess_a9uZk,
vlog_a9uZl, vid_a9uZm, vname_a9uZn, vprename_a9uZo, vorcid_a9uZp,
vaffiliation_a9uZq, vurl_a9uZr, vemail_a9uZs, vpassword_a9uZt,
vsite_a9uZu, vmember_a9uZv, vid_a9uZw, vformat_a9uZx,
vrelease_a9uZy, vduration_a9uZz, vname_a9uZA, vc_a9uZB,
vsize_a9uZC, vid_a9uZD, vformat_a9uZE, vrelease_a9uZF,
vduration_a9uZG, vname_a9uZH, vc_a9uZI, vsize_a9uZJ, vid_a9uZK,
vname_a9uZL, vbody_a9uZM, valias_a9uZN, vdoi_a9uZO, vc_a9uZP)
-> makeTranscode
(makeOrigTranscode
(($)
(makeTranscodeRow
vsegment_a9uZh
voptions_a9uZi
vstart_a9uZj
vprocess_a9uZk
vlog_a9uZl)
(Model.Party.SQL.makeSiteAuth
(Model.Party.SQL.makeUserAccount
(Model.Party.SQL.makeAccount
(PartyRow
vid_a9uZm
vname_a9uZn
vprename_a9uZo
vorcid_a9uZp
vaffiliation_a9uZq
vurl_a9uZr)
(Account vemail_a9uZs)))
vpassword_a9uZt
(do { cm_a9uZQ <- vsite_a9uZu;
cm_a9uZR <- vmember_a9uZv;
Just
(Model.Permission.Types.Access cm_a9uZQ cm_a9uZR) })))
(Model.Asset.SQL.makeAssetRow
vid_a9uZw
vformat_a9uZx
vrelease_a9uZy
vduration_a9uZz
vname_a9uZA
vc_a9uZB
vsize_a9uZC))
(Model.Asset.SQL.makeAssetRow
vid_a9uZD
vformat_a9uZE
vrelease_a9uZF
vduration_a9uZG
vname_a9uZH
vc_a9uZI
vsize_a9uZJ)
(Model.Volume.SQL.setCreation
(VolumeRow
vid_a9uZK vname_a9uZL vbody_a9uZM valias_a9uZN vdoi_a9uZO)
vc_a9uZP
[]))
mRow)
lookupActiveTranscodes :: MonadDB c m => m [Transcode]
lookupActiveTranscodes = do
-- dbQuery $(selectQuery selectTranscode "WHERE asset.size IS NULL ORDER BY transcode.asset")
let _tenv_a9v38 = unknownPGTypeEnv
rows <-
dbQuery
(mapQuery2
(BS.concat
[Data.String.fromString
"SELECT transcode.segment,transcode.options,transcode.start,transcode.process,transcode.log,party.id,party.name,party.prename,party.orcid,party.affiliation,party.url,account.email,account.password,authorize_view.site,authorize_view.member,asset.id,asset.format,asset.release,asset.duration,asset.name,asset.sha1,asset.size,orig.id,orig.format,orig.release,orig.duration,orig.name,orig.sha1,orig.size,volume.id,volume.name,volume.body,volume.alias,volume.doi,volume_creation(volume.id) FROM transcode JOIN party JOIN account USING (id) LEFT JOIN authorize_view ON account.id = authorize_view.child AND authorize_view.parent = 0 ON transcode.owner = party.id JOIN asset ON transcode.asset = asset.id JOIN asset AS orig ON transcode.orig = orig.id JOIN volume ON asset.volume = volume.id AND orig.volume = volume.id WHERE asset.size IS NULL ORDER BY transcode.asset"])
(\
[_csegment_a9v39,
_coptions_a9v3a,
_cstart_a9v3b,
_cprocess_a9v3c,
_clog_a9v3d,
_cid_a9v3e,
_cname_a9v3f,
_cprename_a9v3g,
_corcid_a9v3h,
_caffiliation_a9v3i,
_curl_a9v3j,
_cemail_a9v3k,
_cpassword_a9v3l,
_csite_a9v3m,
_cmember_a9v3n,
_cid_a9v3o,
_cformat_a9v3p,
_crelease_a9v3q,
_cduration_a9v3r,
_cname_a9v3s,
_csha1_a9v3t,
_csize_a9v3u,
_cid_a9v3v,
_cformat_a9v3w,
_crelease_a9v3x,
_cduration_a9v3y,
_cname_a9v3z,
_csha1_a9v3A,
_csize_a9v3B,
_cid_a9v3C,
_cname_a9v3D,
_cbody_a9v3E,
_calias_a9v3F,
_cdoi_a9v3G,
_cvolume_creation_a9v3H]
-> (Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "segment")
_csegment_a9v39,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text[]")
_coptions_a9v3a,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "timestamp with time zone")
_cstart_a9v3b,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_cprocess_a9v3c,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_clog_a9v3d,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_cid_a9v3e,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_cname_a9v3f,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_cprename_a9v3g,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "bpchar")
_corcid_a9v3h,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_caffiliation_a9v3i,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_curl_a9v3j,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "character varying")
_cemail_a9v3k,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "character varying")
_cpassword_a9v3l,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "permission")
_csite_a9v3m,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "permission")
_cmember_a9v3n,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_cid_a9v3o,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "smallint")
_cformat_a9v3p,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "release")
_crelease_a9v3q,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "interval")
_cduration_a9v3r,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_cname_a9v3s,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "bytea")
_csha1_a9v3t,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "bigint")
_csize_a9v3u,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_cid_a9v3v,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "smallint")
_cformat_a9v3w,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "release")
_crelease_a9v3x,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "interval")
_cduration_a9v3y,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_cname_a9v3z,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "bytea")
_csha1_a9v3A,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "bigint")
_csize_a9v3B,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_cid_a9v3C,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_cname_a9v3D,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_cbody_a9v3E,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "character varying")
_calias_a9v3F,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "character varying")
_cdoi_a9v3G,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v38
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "timestamp with time zone")
_cvolume_creation_a9v3H)))
pure
(fmap
(\ (vsegment_a9v1L, voptions_a9v1M, vstart_a9v1N, vprocess_a9v1O,
vlog_a9v1P, vid_a9v1Q, vname_a9v1R, vprename_a9v1S, vorcid_a9v1T,
vaffiliation_a9v1U, vurl_a9v1V, vemail_a9v1W, vpassword_a9v1X,
vsite_a9v1Y, vmember_a9v1Z, vid_a9v20, vformat_a9v21,
vrelease_a9v22, vduration_a9v23, vname_a9v24, vc_a9v25,
vsize_a9v26, vid_a9v27, vformat_a9v28, vrelease_a9v29,
vduration_a9v2a, vname_a9v2b, vc_a9v2c, vsize_a9v2d, vid_a9v2e,
vname_a9v2f, vbody_a9v2g, valias_a9v2h, vdoi_a9v2i, vc_a9v2j)
-> makeTranscode
(makeOrigTranscode
(($)
(makeTranscodeRow
vsegment_a9v1L
voptions_a9v1M
vstart_a9v1N
vprocess_a9v1O
vlog_a9v1P)
(Model.Party.SQL.makeSiteAuth
(Model.Party.SQL.makeUserAccount
(Model.Party.SQL.makeAccount
(PartyRow
vid_a9v1Q
vname_a9v1R
vprename_a9v1S
vorcid_a9v1T
vaffiliation_a9v1U
vurl_a9v1V)
(Account vemail_a9v1W)))
vpassword_a9v1X
(do { cm_a9v2k <- vsite_a9v1Y;
cm_a9v2l <- vmember_a9v1Z;
Just
(Model.Permission.Types.Access cm_a9v2k cm_a9v2l) })))
(Model.Asset.SQL.makeAssetRow
vid_a9v20
vformat_a9v21
vrelease_a9v22
vduration_a9v23
vname_a9v24
vc_a9v25
vsize_a9v26))
(Model.Asset.SQL.makeAssetRow
vid_a9v27
vformat_a9v28
vrelease_a9v29
vduration_a9v2a
vname_a9v2b
vc_a9v2c
vsize_a9v2d)
(Model.Volume.SQL.setCreation
(VolumeRow
vid_a9v2e vname_a9v2f vbody_a9v2g valias_a9v2h vdoi_a9v2i)
vc_a9v2j
[]))
rows)
minAppend :: Ord a => Maybe a -> Maybe a -> Maybe a
minAppend (Just x) (Just y) = Just $ min x y
minAppend Nothing x = x
minAppend x Nothing = x
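-- Illustration (added, not part of the original module): minAppend keeps the
-- smaller of two optional values, falling back to whichever one is present:
--   minAppend (Just 3) (Just 5) == Just 3
--   minAppend Nothing  (Just 5) == Just 5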
addTranscode :: (MonadHas SiteAuth c m, MonadAudit c m, MonadStorage c m) => Asset -> Segment -> TranscodeArgs -> Probe -> m Transcode
addTranscode orig seg@(Segment rng) opts (ProbeAV _ fmt av) = do
own <- peek
a <- addAsset orig
{ assetRow = (assetRow orig)
{assetFormat = fmt
, assetDuration = dur
, assetSHA1 = Nothing
, assetSize = Nothing
}
} Nothing
let _tenv_a9v5h = unknownPGTypeEnv
dbExecute1' -- [pgSQL|INSERT INTO transcode (asset, owner, orig, segment, options) VALUES (${assetId $ assetRow a}, ${partyId $ partyRow $ accountParty $ siteAccount own}, ${assetId $ assetRow orig}, ${seg}, ${map Just opts})|]
(mapQuery2
((\ _p_a9v5i _p_a9v5j _p_a9v5k _p_a9v5l _p_a9v5m ->
(BS.concat
[Data.String.fromString
"INSERT INTO transcode (asset, owner, orig, segment, options) VALUES (",
Database.PostgreSQL.Typed.Types.pgEscapeParameter
_tenv_a9v5h
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_p_a9v5i,
Data.String.fromString ", ",
Database.PostgreSQL.Typed.Types.pgEscapeParameter
_tenv_a9v5h
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_p_a9v5j,
Data.String.fromString ", ",
Database.PostgreSQL.Typed.Types.pgEscapeParameter
_tenv_a9v5h
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_p_a9v5k,
Data.String.fromString ", ",
Database.PostgreSQL.Typed.Types.pgEscapeParameter
_tenv_a9v5h
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "segment")
_p_a9v5l,
Data.String.fromString ", ",
Database.PostgreSQL.Typed.Types.pgEscapeParameter
_tenv_a9v5h
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text[]")
_p_a9v5m,
Data.String.fromString ")"]))
(assetId $ assetRow a)
(partyId $ partyRow $ accountParty $ siteAccount own)
(assetId $ assetRow orig)
seg
(map Just opts))
(\[] -> ()))
let _tenv_a9v7a = unknownPGTypeEnv
_ <- dbExecute1 -- [pgSQL|UPDATE slot_asset SET asset = ${assetId $ assetRow a}, segment = segment(lower(segment) + ${fromMaybe 0 $ lowerBound rng}, COALESCE(lower(segment) + ${upperBound rng}, upper(segment))) WHERE asset = ${assetId $ assetRow orig}|]
(mapQuery2
((\ _p_a9v7b _p_a9v7c _p_a9v7d _p_a9v7e ->
(BS.concat
[Data.String.fromString "UPDATE slot_asset SET asset = ",
Database.PostgreSQL.Typed.Types.pgEscapeParameter
_tenv_a9v7a
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_p_a9v7b,
Data.String.fromString ", segment = segment(lower(segment) + ",
Database.PostgreSQL.Typed.Types.pgEscapeParameter
_tenv_a9v7a
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "interval")
_p_a9v7c,
Data.String.fromString ", COALESCE(lower(segment) + ",
Database.PostgreSQL.Typed.Types.pgEscapeParameter
_tenv_a9v7a
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "interval")
_p_a9v7d,
Data.String.fromString ", upper(segment))) WHERE asset = ",
Database.PostgreSQL.Typed.Types.pgEscapeParameter
_tenv_a9v7a
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_p_a9v7e]))
(assetId $ assetRow a)
(fromMaybe 0 $ lowerBound rng)
(upperBound rng)
(assetId $ assetRow orig))
(\[] -> ()))
return Transcode
{ transcodeRevision = AssetRevision
{ revisionAsset = a
, revisionOrig = orig
}
, transcodeOwner = own
, transcodeSegment = seg
, transcodeOptions = opts
, transcodeStart = Nothing
, transcodeProcess = Nothing
, transcodeLog = Nothing
}
where
dur = maybe id (flip (-) . max 0) (lowerBound rng) <$>
minAppend (avProbeLength av) (upperBound rng)
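    -- i.e. the transcode duration is min(probe length, segment upper bound)
    -- minus the segment's (non-negative) lower bound.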
addTranscode _ _ _ _ = fail "addTranscode: invalid probe type"
updateTranscode :: MonadDB c m => Transcode -> Maybe TranscodePID -> Maybe String -> m Transcode
updateTranscode tc pid logs = do
let _tenv_a9v7W = unknownPGTypeEnv
r <- dbQuery1 -- [pgSQL|UPDATE transcode SET process = ${pid}, log = COALESCE(COALESCE(log || E'\n', '') || ${logs}, log) WHERE asset = ${assetId $ assetRow $ transcodeAsset tc} AND COALESCE(process, 0) = ${fromMaybe 0 $ transcodeProcess tc} RETURNING log|]
(mapQuery2
((\ _p_a9v7X _p_a9v7Y _p_a9v7Z _p_a9v80 ->
(BS.concat
[Data.String.fromString "UPDATE transcode SET process = ",
Database.PostgreSQL.Typed.Types.pgEscapeParameter
_tenv_a9v7W
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_p_a9v7X,
Data.String.fromString
", log = COALESCE(COALESCE(log || E'\\n', '') || ",
Database.PostgreSQL.Typed.Types.pgEscapeParameter
_tenv_a9v7W
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_p_a9v7Y,
Data.String.fromString ", log) WHERE asset = ",
Database.PostgreSQL.Typed.Types.pgEscapeParameter
_tenv_a9v7W
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_p_a9v7Z,
Data.String.fromString " AND COALESCE(process, 0) = ",
Database.PostgreSQL.Typed.Types.pgEscapeParameter
_tenv_a9v7W
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_p_a9v80,
Data.String.fromString " RETURNING log"]))
pid
logs
(assetId $ assetRow $ transcodeAsset tc)
(fromMaybe 0 $ transcodeProcess tc))
(\[_clog_a9v81]
-> (Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v7W
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_clog_a9v81)))
return $ maybe tc (\l -> tc
{ transcodeProcess = pid
, transcodeLog = l
}) r
findTranscode :: MonadDB c m => Asset -> Segment -> TranscodeArgs -> m (Maybe Transcode)
findTranscode orig seg opts = do
-- dbQuery1 $ ($ orig) <$> $(selectQuery selectOrigTranscode "WHERE transcode.orig = ${assetId $ assetRow orig} AND transcode.segment = ${seg} AND transcode.options = ${map Just opts} AND asset.volume = ${volumeId $ volumeRow $ assetVolume orig} LIMIT 1")
let _tenv_a9v93 = unknownPGTypeEnv
mRow <- dbQuery1
(mapQuery2
((\ _p_a9v94 _p_a9v97 _p_a9v99 _p_a9v9a ->
(BS.concat
[Data.String.fromString
"SELECT transcode.segment,transcode.options,transcode.start,transcode.process,transcode.log,party.id,party.name,party.prename,party.orcid,party.affiliation,party.url,account.email,account.password,authorize_view.site,authorize_view.member,asset.id,asset.format,asset.release,asset.duration,asset.name,asset.sha1,asset.size FROM transcode JOIN party JOIN account USING (id) LEFT JOIN authorize_view ON account.id = authorize_view.child AND authorize_view.parent = 0 ON transcode.owner = party.id JOIN asset ON transcode.asset = asset.id WHERE transcode.orig = ",
Database.PostgreSQL.Typed.Types.pgEscapeParameter
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_p_a9v94,
Data.String.fromString " AND transcode.segment = ",
Database.PostgreSQL.Typed.Types.pgEscapeParameter
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "segment")
_p_a9v97,
Data.String.fromString " AND transcode.options = ",
Database.PostgreSQL.Typed.Types.pgEscapeParameter
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text[]")
_p_a9v99,
Data.String.fromString " AND asset.volume = ",
Database.PostgreSQL.Typed.Types.pgEscapeParameter
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_p_a9v9a,
Data.String.fromString " LIMIT 1"]))
(assetId $ assetRow orig)
seg
(map Just opts)
(volumeId $ volumeRow $ assetVolume orig))
(\
[_csegment_a9v9b,
_coptions_a9v9c,
_cstart_a9v9d,
_cprocess_a9v9e,
_clog_a9v9f,
_cid_a9v9g,
_cname_a9v9h,
_cprename_a9v9i,
_corcid_a9v9j,
_caffiliation_a9v9k,
_curl_a9v9l,
_cemail_a9v9m,
_cpassword_a9v9n,
_csite_a9v9o,
_cmember_a9v9p,
_cid_a9v9q,
_cformat_a9v9r,
_crelease_a9v9s,
_cduration_a9v9t,
_cname_a9v9u,
_csha1_a9v9v,
_csize_a9v9w]
-> (Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "segment")
_csegment_a9v9b,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text[]")
_coptions_a9v9c,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "timestamp with time zone")
_cstart_a9v9d,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_cprocess_a9v9e,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_clog_a9v9f,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_cid_a9v9g,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_cname_a9v9h,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_cprename_a9v9i,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "bpchar")
_corcid_a9v9j,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_caffiliation_a9v9k,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_curl_a9v9l,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "character varying")
_cemail_a9v9m,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "character varying")
_cpassword_a9v9n,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "permission")
_csite_a9v9o,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "permission")
_cmember_a9v9p,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_cid_a9v9q,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "smallint")
_cformat_a9v9r,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "release")
_crelease_a9v9s,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "interval")
_cduration_a9v9t,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_cname_a9v9u,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "bytea")
_csha1_a9v9v,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9v93
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "bigint")
_csize_a9v9w)))
pure
(fmap (\mk -> mk orig)
(fmap
(\ (vsegment_a9v8b, voptions_a9v8c, vstart_a9v8d, vprocess_a9v8e,
vlog_a9v8f, vid_a9v8g, vname_a9v8h, vprename_a9v8i, vorcid_a9v8j,
vaffiliation_a9v8k, vurl_a9v8l, vemail_a9v8m, vpassword_a9v8n,
vsite_a9v8o, vmember_a9v8p, vid_a9v8q, vformat_a9v8r,
vrelease_a9v8s, vduration_a9v8t, vname_a9v8u, vc_a9v8v,
vsize_a9v8w)
-> makeOrigTranscode
(($)
(makeTranscodeRow
vsegment_a9v8b
voptions_a9v8c
vstart_a9v8d
vprocess_a9v8e
vlog_a9v8f)
(Model.Party.SQL.makeSiteAuth
(Model.Party.SQL.makeUserAccount
(Model.Party.SQL.makeAccount
(PartyRow
vid_a9v8g
vname_a9v8h
vprename_a9v8i
vorcid_a9v8j
vaffiliation_a9v8k
vurl_a9v8l)
(Account vemail_a9v8m)))
vpassword_a9v8n
(do { cm_a9v8A <- vsite_a9v8o;
cm_a9v8B <- vmember_a9v8p;
Just
(Model.Permission.Types.Access cm_a9v8A cm_a9v8B) })))
(Model.Asset.SQL.makeAssetRow
vid_a9v8q
vformat_a9v8r
vrelease_a9v8s
vduration_a9v8t
vname_a9v8u
vc_a9v8v
vsize_a9v8w))
mRow))
findMatchingTranscode :: MonadDB c m => Transcode -> m (Maybe Transcode)
findMatchingTranscode t@Transcode{..} = do
let _tenv_a9vgl = unknownPGTypeEnv
  mRow <- dbQuery1 -- $(selectQuery selectTranscode "WHERE orig.sha1 = ${assetSHA1 $ assetRow $ transcodeOrig t} AND transcode.segment = ${transcodeSegment} AND transcode.options = ${map Just transcodeOptions} AND asset.id < ${assetId $ assetRow $ transcodeAsset t} ORDER BY asset.id LIMIT 1")
(mapQuery2
((\ _p_a9vgm _p_a9vgn _p_a9vgo _p_a9vgp ->
(BS.concat
[Data.String.fromString
"SELECT transcode.segment,transcode.options,transcode.start,transcode.process,transcode.log,party.id,party.name,party.prename,party.orcid,party.affiliation,party.url,account.email,account.password,authorize_view.site,authorize_view.member,asset.id,asset.format,asset.release,asset.duration,asset.name,asset.sha1,asset.size,orig.id,orig.format,orig.release,orig.duration,orig.name,orig.sha1,orig.size,volume.id,volume.name,volume.body,volume.alias,volume.doi,volume_creation(volume.id) FROM transcode JOIN party JOIN account USING (id) LEFT JOIN authorize_view ON account.id = authorize_view.child AND authorize_view.parent = 0 ON transcode.owner = party.id JOIN asset ON transcode.asset = asset.id JOIN asset AS orig ON transcode.orig = orig.id JOIN volume ON asset.volume = volume.id AND orig.volume = volume.id WHERE orig.sha1 = ",
Database.PostgreSQL.Typed.Types.pgEscapeParameter
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "bytea")
_p_a9vgm,
Data.String.fromString " AND transcode.segment = ",
Database.PostgreSQL.Typed.Types.pgEscapeParameter
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "segment")
_p_a9vgn,
Data.String.fromString " AND transcode.options = ",
Database.PostgreSQL.Typed.Types.pgEscapeParameter
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text[]")
_p_a9vgo,
Data.String.fromString " AND asset.id < ",
Database.PostgreSQL.Typed.Types.pgEscapeParameter
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_p_a9vgp,
Data.String.fromString " ORDER BY asset.id LIMIT 1"]))
(assetSHA1 $ assetRow $ transcodeOrig t)
transcodeSegment
(map Just transcodeOptions)
(assetId $ assetRow $ transcodeAsset t))
(\
[_csegment_a9vgq,
_coptions_a9vgr,
_cstart_a9vgs,
_cprocess_a9vgt,
_clog_a9vgu,
_cid_a9vgv,
_cname_a9vgw,
_cprename_a9vgx,
_corcid_a9vgy,
_caffiliation_a9vgz,
_curl_a9vgA,
_cemail_a9vgB,
_cpassword_a9vgC,
_csite_a9vgD,
_cmember_a9vgE,
_cid_a9vgF,
_cformat_a9vgG,
_crelease_a9vgH,
_cduration_a9vgI,
_cname_a9vgJ,
_csha1_a9vgK,
_csize_a9vgL,
_cid_a9vgM,
_cformat_a9vgN,
_crelease_a9vgO,
_cduration_a9vgP,
_cname_a9vgQ,
_csha1_a9vgR,
_csize_a9vgS,
_cid_a9vgT,
_cname_a9vgU,
_cbody_a9vgV,
_calias_a9vgW,
_cdoi_a9vgX,
_cvolume_creation_a9vgY]
-> (Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "segment")
_csegment_a9vgq,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text[]")
_coptions_a9vgr,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "timestamp with time zone")
_cstart_a9vgs,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_cprocess_a9vgt,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_clog_a9vgu,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_cid_a9vgv,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_cname_a9vgw,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_cprename_a9vgx,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "bpchar")
_corcid_a9vgy,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_caffiliation_a9vgz,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_curl_a9vgA,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "character varying")
_cemail_a9vgB,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "character varying")
_cpassword_a9vgC,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "permission")
_csite_a9vgD,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "permission")
_cmember_a9vgE,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_cid_a9vgF,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "smallint")
_cformat_a9vgG,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "release")
_crelease_a9vgH,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "interval")
_cduration_a9vgI,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_cname_a9vgJ,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "bytea")
_csha1_a9vgK,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "bigint")
_csize_a9vgL,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_cid_a9vgM,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "smallint")
_cformat_a9vgN,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "release")
_crelease_a9vgO,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "interval")
_cduration_a9vgP,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_cname_a9vgQ,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "bytea")
_csha1_a9vgR,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "bigint")
_csize_a9vgS,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_cid_a9vgT,
Database.PostgreSQL.Typed.Types.pgDecodeColumnNotNull
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_cname_a9vgU,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "text")
_cbody_a9vgV,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "character varying")
_calias_a9vgW,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "character varying")
_cdoi_a9vgX,
Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9vgl
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "timestamp with time zone")
_cvolume_creation_a9vgY)))
pure
(fmap
(\ (vsegment_a9vaq, voptions_a9var, vstart_a9vas, vprocess_a9vat,
vlog_a9vau, vid_a9vav, vname_a9vaw, vprename_a9vax, vorcid_a9vay,
vaffiliation_a9vaz, vurl_a9vaA, vemail_a9vaB, vpassword_a9vaC,
vsite_a9vaD, vmember_a9vaE, vid_a9vaF, vformat_a9vaG,
vrelease_a9vaH, vduration_a9vaI, vname_a9vaJ, vc_a9vaK,
vsize_a9vaL, vid_a9vaM, vformat_a9vaN, vrelease_a9vaO,
vduration_a9vaP, vname_a9vaQ, vc_a9vaR, vsize_a9vaS, vid_a9vaT,
vname_a9vaU, vbody_a9vaV, valias_a9vaW, vdoi_a9vaX, vc_a9vaY)
-> makeTranscode
(makeOrigTranscode
(($)
(makeTranscodeRow
vsegment_a9vaq
voptions_a9var
vstart_a9vas
vprocess_a9vat
vlog_a9vau)
(Model.Party.SQL.makeSiteAuth
(Model.Party.SQL.makeUserAccount
(Model.Party.SQL.makeAccount
(PartyRow
vid_a9vav
vname_a9vaw
vprename_a9vax
vorcid_a9vay
vaffiliation_a9vaz
vurl_a9vaA)
(Account vemail_a9vaB)))
vpassword_a9vaC
(do { cm_a9vaZ <- vsite_a9vaD;
cm_a9vb0 <- vmember_a9vaE;
Just
(Model.Permission.Types.Access cm_a9vaZ cm_a9vb0) })))
(Model.Asset.SQL.makeAssetRow
vid_a9vaF
vformat_a9vaG
vrelease_a9vaH
vduration_a9vaI
vname_a9vaJ
vc_a9vaK
vsize_a9vaL))
(Model.Asset.SQL.makeAssetRow
vid_a9vaM
vformat_a9vaN
vrelease_a9vaO
vduration_a9vaP
vname_a9vaQ
vc_a9vaR
vsize_a9vaS)
(Model.Volume.SQL.setCreation
(VolumeRow
vid_a9vaT vname_a9vaU vbody_a9vaV valias_a9vaW vdoi_a9vaX)
vc_a9vaY
[]))
mRow)
checkAlreadyTranscoded :: MonadDB c m => Asset -> Probe -> m Bool
checkAlreadyTranscoded Asset{ assetRow = AssetRow { assetFormat = fmt, assetSHA1 = Just sha1 } } ProbeAV{ probeTranscode = tfmt, probeAV = av }
| fmt == tfmt && avProbeCheckFormat fmt av = do
let _tenv_a9vmk = unknownPGTypeEnv
(Just (Just (1 :: Int32)) ==) <$> dbQuery1 -- [pgSQL|SELECT 1 FROM asset WHERE asset.sha1 = ${sha1} AND asset.format = ${formatId fmt} AND asset.duration IS NOT NULL LIMIT 1|]
(mapQuery2
((\ _p_a9vml _p_a9vmm ->
(BS.concat
[Data.String.fromString "SELECT 1 FROM asset WHERE asset.sha1 = ",
Database.PostgreSQL.Typed.Types.pgEscapeParameter
_tenv_a9vmk
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "bytea")
_p_a9vml,
Data.String.fromString " AND asset.format = ",
Database.PostgreSQL.Typed.Types.pgEscapeParameter
_tenv_a9vmk
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "smallint")
_p_a9vmm,
Data.String.fromString " AND asset.duration IS NOT NULL LIMIT 1"]))
sha1 (formatId fmt))
(\[_ccolumn_a9vmn]
-> (Database.PostgreSQL.Typed.Types.pgDecodeColumn
_tenv_a9vmk
(Database.PostgreSQL.Typed.Types.PGTypeProxy ::
Database.PostgreSQL.Typed.Types.PGTypeName "integer")
_ccolumn_a9vmn)))
checkAlreadyTranscoded _ _ = return False
|
databrary/databrary
|
src/Model/Transcode.hs
|
agpl-3.0
| 72,130 | 0 | 25 | 29,753 | 8,881 | 5,436 | 3,445 | -1 | -1 |
import Test.Hspec
import Data.List
import Digits
factorial :: Integer -> Integer
factorial 0 = 1
factorial n = n * factorial (n - 1)
process n = sum (digits_list (factorial n))
-- Tests + result print
main = hspec $ do
describe "Dummy" $ do
it "dummy test" $ do
True `shouldBe` True
describe "Euler test" $ do
it "Test digits sum for 10!" $ do
(process 10) `shouldBe` 27
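      -- Sanity check (illustration): 10! = 3628800 and 3+6+2+8+8+0+0 = 27.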
describe "Euler actual problem" $ do
it "Test digits sum for 100!" $ do
putStrLn ("res = " ++ show (process 100))
|
orbitgray/ProjectEuler
|
haskell/020.hs
|
lgpl-3.0
| 556 | 0 | 19 | 163 | 192 | 92 | 100 | 17 | 1 |
module Utils where
import qualified Data.ByteString as B
import Data.List
import Data.List.Split
import Data.Word
import Data.Bits
import Data.Maybe
import Data.Char
digits = "0123456789ABCDEF"
-- Utility code
bs2hex :: B.ByteString -> String
bs2hex = concatMap toHex . map fromIntegral . B.unpack
toHex n = (digits !! highByte) : (digits !! lowByte) : [] where
highByte = (n `div` 16)
lowByte = (n `mod` 16)
hex2bs :: String -> B.ByteString
hex2bs = B.pack . map toByte . chunksOf 2 . map hexDigit . removeSpaces where
toByte = int2word . packByte
removeSpaces = filter (not . isSpace)
packByte (h:l:[]) = (h `shiftL` 4) .|. l
packByte as = error $ "Expected two element list, got " ++ show as
hexDigit c = let c' = toUpper c in
maybe (error $ "No parse of " ++ [c] ++ " in toNum")
id
(elemIndex c' digits)
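-- Round-trip illustration (values are hypothetical, not from the original module):
--   hex2bs "DE AD"           packs to the two bytes 0xDE and 0xAD
--   bs2hex (hex2bs "DE AD")  == "DEAD"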
int2word :: Int -> Word8
int2word = toEnum . fromEnum
char2word :: Char -> Word8
char2word = toEnum . fromEnum
word2char :: Word8 -> Char
word2char = toEnum . fromEnum
singleton a = [a]
a # f = f a
runEach = foldl (.) id
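-- Illustrations (added, not part of the original module):
--   5 # (+1)               == 6   -- reverse application
--   runEach [(+1), (*2)] 3 == 7   -- composes right-to-left: (+1) . (*2)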
|
nsmryan/BIC
|
Utils.hs
|
unlicense
| 1,068 | 0 | 11 | 226 | 411 | 223 | 188 | 33 | 1 |
dupli :: [a] -> [a]
dupli [] = []
dupli (x:xs) = x:x:(dupli xs)
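-- Illustration (added): dupli [1,2,3] == [1,1,2,2,3,3]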
|
alephnil/h99
|
14.hs
|
apache-2.0
| 63 | 0 | 7 | 13 | 57 | 30 | 27 | 3 | 1 |
-- -*- coding: utf-8 -*-
-- Copyright (c) 2010-2014, MIT Probabilistic Computing Project
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
{-# LANGUAGE TupleSections #-}
module Haxcat where
import Control.Monad.State.Lazy
import qualified Data.Map as M
import Data.Random.RVar
import Types
import RowSweep
import ColumnSweep
modifyT :: (Monad m) => (a -> m a) -> StateT a m ()
modifyT f = StateT $ liftM ((),) . f
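-- Note (added): under the monad laws this is equivalent to the more explicit
--   modifyT f = get >>= lift . f >>= put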
infer :: M.Map ColID (ColumnData a) -> StateT (Crosscat a) RVar ()
infer ds = do
modifyT (col_sweep ds)
modifyT (row_sweep2 ds)
train :: M.Map ColID (ColumnData Double) -> Int -> RVar (Crosscat Double)
train ds k = cc_initialize ds >>= execStateT (replicateM k (infer ds))
|
probcomp/haxcat
|
Haxcat.hs
|
apache-2.0
| 1,234 | 0 | 10 | 232 | 250 | 137 | 113 | 17 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Codec.Sarsi.SBT where
import Codec.Sarsi (Message (..))
import Codec.Sarsi.Scala
import Data.Attoparsec.Text
import qualified Data.Attoparsec.Text as AttoText
import Data.Text (Text)
import qualified Data.Text as Text
data SBTEvent = CompileStart Text | TaskFinish Bool Text | Throw Message
deriving (Show)
cleaningCursesSBT :: Parser Text
cleaningCursesSBT = choice [silent, empty, keep]
where
silent = " | =>" >> untilLineBreak >> "\n" >> return Text.empty
empty = do
_ <- AttoText.takeWhile1 $ \w -> w == '\n'
return Text.empty
keep = do
content <- (AttoText.takeWhile1 $ \w -> w /= '\n') <* end
return $ content `Text.snoc` '\n'
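    -- Descriptive note (added): lines beginning with " | =>" (sbt's watch
    -- prompt) and runs of blank lines are collapsed to empty text; any other
    -- line is kept verbatim, with its trailing newline re-appended.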
eventParser :: FilePath -> Parser SBTEvent
eventParser root = choice [compile, finish, Throw <$> messageParser root]
where
compile = do
txt <- string "[info] " *> choice ["Build triggered", "Compiling", "compiling"] *> untilLineBreak <* end
return $ CompileStart txt
finish = do
res <- status <* space
txt <- string "Total time: " *> untilLineBreak <* end
_ <- end
return $ TaskFinish res txt
where
status = choice [string "[success]" *> return True, string "[error]" *> return False]
|
aloiscochard/sarsi
|
src/Codec/Sarsi/SBT.hs
|
apache-2.0
| 1,266 | 0 | 14 | 283 | 401 | 213 | 188 | 30 | 1 |
module FractalFlame.Histogram
( render )
where
import Control.Monad
import Control.Monad.ST
import Data.Monoid
import Data.STRef
import qualified Data.Vector as V
import qualified Data.Vector.Storable as SV
import qualified Data.Vector.Storable.Mutable as MV
import FractalFlame.Camera
import FractalFlame.Color
import FractalFlame.Palette.Types.Palette
import FractalFlame.Point.Types.CartesianPoint
import FractalFlame.Point.Types.Point
import FractalFlame.Types.Base
import FractalFlame.Types.PixelFlame
import FractalFlame.Types.Plottable
import FractalFlame.Types.Size
-- | Plot a list of points on a 2D histogram with color indices for each point,
-- | then apply logarithmic scaling and gamma correction.
render :: Camera -- ^ 'camera'. Specifies the viewport onto which the CartesianPoints are projected.
-> Palette -- ^ 'palette'. Maps floating-point color indices [0,1] to RGBA Color values.
-> FloatChannel -- ^ 'vibrancy'. A floating-point value [0,1] that determines how independently the channels in a given pixel are gamma corrected.
-> FloatChannel -> [Plottable] -> PixelFlame
render camera@(Camera {size = (Size {width, height})}) palette vibrancy gamma plottables =
let mapping = pointToIndex camera
plot = plotWithPalette palette
gammaCorrect = gammaColor vibrancy gamma
maxIx = width * height * nFloatChannels
in
let (amax :: FloatChannel, colors :: SV.Vector FloatChannel) = runST $ do
-- accumulate color values at points that map to each pixel
          colors <- MV.replicate maxIx 0 :: ST s (MV.MVector s FloatChannel)
--colors <- newArray (0, maxIx) 0 :: ST s (STUArray s Int FloatChannel)
-- need to find the maximum alpha to use for scaling all other alpha values
amax <- newSTRef floatChannelMin
forM_ plottables (\(Plottable point colorIx) -> do
let ix = mapping point
color <- readColor colors ix
let ncolor@(Color r g b a) = plot colorIx color
acc <- readSTRef amax
when (a > acc) $ writeSTRef amax a
writeColor colors ix ncolor)
amax' <- readSTRef amax
-- logarithmic scaling and gamma correction pass on each pixel
-- TODO: I think it should be scaleColor, then gammaCorrect. Refactor those functions to not compute brightness twice (or dereference the same thunk twice or whatever) and verify that result is [0,1]
colorMap width height (gammaCorrect . scaleColor amax') colors
colors' <- SV.unsafeFreeze colors
return (amax', colors')
-- switch to Repa here?, seems like the U (unboxed vectors) representation is appropriate
-- scale logarithmically, apply gamma correction, and write out a pixel for each summed color in the histogram
-- switch to Repa to get automatic parallelization here
--pixels = fmap (scaleColor amax . gammaCorrect amax) colors
pixels = colors
in PixelFlame (Size width height) pixels
-- helpers
plotWithPalette :: Palette -> Coord -> Color -> Color
plotWithPalette palette colorIx color =
mappend color $ palette colorIx
pointToIndex :: Camera -> CartesianPoint -> Int
pointToIndex camera@(Camera {size = (Size {width, height})}) point =
let (Point px py) = project camera point
in
nFloatChannels * (px + py * width)
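-- Illustration (assumes nFloatChannels is 4, one slot per RGBA channel): the
-- pixel at (px, py) then starts at flat index 4 * (px + py * width).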
-- | Read a FractalFlame.Types.Color from the specified vector at index ix
readColor vec ix = do
r <- MV.unsafeRead vec ix
g <- MV.unsafeRead vec (ix + 1)
b <- MV.unsafeRead vec (ix + 2)
a <- MV.unsafeRead vec (ix + 3)
return $ Color r g b a
-- | Write a FractalFlame.Types.Color to the specified vector at index ix
writeColor vec ix (Color r g b a) = do
mapM_ (uncurry $ MV.unsafeWrite vec)
[(ix , r)
,(ix + 1, g)
,(ix + 2, b)
,(ix + 3, a)
]
return ()
-- | Iterate over vec as if it were a vector of FractalFlame.Types.Color
colorVecEach width height f vec = do
let g = f vec
  forM_ [0,nFloatChannels..nFloatChannels*width*height-1] g
-- | Map f over vec as if it were a vector of FractalFlame.Types.Color
colorMap width height f vec =
let f' vec ix = do
color <- readColor vec ix
writeColor vec ix (f color)
in
colorVecEach width height f' vec
|
anthezium/fractal_flame_renderer_haskell
|
FractalFlame/Histogram.hs
|
bsd-2-clause
| 4,686 | 0 | 24 | 1,369 | 991 | 516 | 475 | -1 | -1 |
module Main
( main
) where
import Spec.StripExtensions
import Parse.Spec
import Write.Spec
import System.IO(hPutStr, stderr)
import System.Exit
main :: IO ()
main = do specString <- getContents
specMay <- parseSpec specString
case specMay of
Nothing -> do hPutStr stderr "Failed to parse spec"
exitFailure
Just spec -> let strippedSpec = stripWSIExtensions spec
in writeSpecModules "out" strippedSpec
|
oldmanmike/vulkan
|
generate/src/Main.hs
|
bsd-3-clause
| 505 | 0 | 14 | 160 | 126 | 63 | 63 | 15 | 2 |
import Data.List
import Data.List.Split
import Data.Char
import Debug.Trace
triangles = [(n / 2) * (n + 1) | n <- [1..]]
codedTriangleWords ws = length [w | w <- ws, elem (fromIntegral (score w)) (take 18 triangles)]
where score w = sum $ map (subtract 64 . ord) w
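-- Worked example: score "SKY" = 19 + 11 + 25 = 55, and 55 = (10 / 2) * 11 is a
-- triangle number, so "SKY" counts as a triangle word.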
main = do
f <- readFile "042.txt"
print . codedTriangleWords $ map (filter isLetter) (splitOn "," f)
|
JacksonGariety/euler.hs
|
042.hs
|
bsd-3-clause
| 375 | 0 | 12 | 75 | 184 | 94 | 90 | 10 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Test where
import Language.Haskell.TH.SCCs
data E a = Nil | ECons a (Odd a)
data O a = OCons a (E a)
type Odd = O
data X = X (E Int)
printQ (Just "E: ") (binding_group ''E)
printQ (Just "O: ") (binding_group ''O)
printQ (Just "O!: ") (scc ''O)
printQ (Just "Odd: ") (binding_group ''Odd)
printQ (Just "X: ") (binding_group ''X)
printQ (Just "E/Xs: ") (binding_groups [''E, ''X])
printQ (Just "E/X: ") (sccs [''E, ''X])
printQ (Just "String: ") (binding_groups [''String])
printQ (Just "String!: ") (scc ''String)
printQ (Just "String!s: ") (sccs [''String])
printQ (Just "String/Char: ") (binding_groups [''String, ''Char])
printQ (Just "String/Char!s: ") (sccs [''String, ''Char])
|
nfrisby/th-sccs
|
Test.hs
|
bsd-3-clause
| 734 | 0 | 9 | 125 | 374 | 187 | 187 | 19 | 0 |
module Main where
import Control.Monad.Reader (runReaderT)
import Data.IORef (newIORef)
import Data.Types (bufLen)
import qualified Data.Vector.Storable.Mutable as VM (replicate)
import Engine
import Linear.V3
import Render
import Render.Camera
import Render.Engine
import Render.Mesh
main :: IO ()
main = do
camera <- Camera <$> newIORef (V3 0 0 10)
<*> newIORef (V3 0 0 0)
mesh <- Mesh <$> return "Cube"
<*> newIORef [ V3 (-1) 1 1
, V3 1 1 1
, V3 (-1) (-1) 1
, V3 1 (-1) 1
, V3 (-1) 1 (-1)
, V3 1 1 (-1)
, V3 1 (-1) (-1)
, V3 (-1) (-1) (-1)
]
<*> newIORef [ V3 0 1 2
, V3 1 2 3
, V3 1 3 6
, V3 1 5 6
, V3 0 1 4
, V3 1 4 5
, V3 2 3 7
, V3 3 6 7
, V3 0 2 7
, V3 0 4 7
, V3 4 5 6
, V3 4 6 7
]
<*> newIORef (V3 0 0 0)
<*> newIORef (V3 0 0 0)
buf <- VM.replicate bufLen 0
initState <- EngineState <$> newIORef camera
<*> newIORef [mesh]
<*> return buf
runReaderT (start engine) initState
|
Lucsanszky/soft-engine
|
app/Main.hs
|
bsd-3-clause
| 1,621 | 0 | 16 | 933 | 513 | 266 | 247 | 43 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ViewPatterns #-}
-- | Preprocessing pass that collects names, data types, newtypes, imports, and exports

-- for all modules recursively.
module Fay.Compiler.InitialPass
(initialPass
) where
import Fay.Compiler.Prelude
import Fay.Compiler.Desugar
import Fay.Compiler.GADT
import Fay.Compiler.Import
import Fay.Compiler.Misc
import Fay.Compiler.Parse
import qualified Fay.Exts as F
import Fay.Exts.NoAnnotation (unAnn)
import Fay.Types
import Control.Monad.Except
import Control.Monad.RWS
import qualified Data.Map as M
import Language.Haskell.Exts.Annotated hiding (name, var)
import qualified Language.Haskell.Names as HN (getInterfaces)
-- | Preprocess and collect all information needed during code generation.
initialPass :: FilePath -> Compile ()
initialPass = startCompile preprocessFileWithSource
-- | Preprocess a module given its filepath and content.
preprocessFileWithSource :: FilePath -> String -> Compile ()
preprocessFileWithSource filepath contents = do
(_,st,_) <- compileWith filepath preprocessAST preprocessFileWithSource desugar contents
-- This is the state we want to keep
modify $ \s -> s { stateRecords = stateRecords st
, stateRecordTypes = stateRecordTypes st
, stateImported = stateImported st
, stateNewtypes = stateNewtypes st
, stateInterfaces = stateInterfaces st
, stateTypeSigs = stateTypeSigs st
-- TODO This needs to be added otherwise the
-- "executable" generation in Fay.hs gets the
-- wrong name. Not sure why it works to do it
-- here!
, stateModuleName = stateModuleName st
}
-- | Preprocess from an AST
preprocessAST :: () -> F.Module -> Compile ()
preprocessAST () mod@(Module _ _ _ _ decls) = do
-- This can only return one element since we only compile one module.
([exports],_) <- HN.getInterfaces Haskell2010 defaultExtensions [mod]
modify $ \s -> s { stateInterfaces = M.insert (stateModuleName s) exports $ stateInterfaces s }
forM_ decls scanTypeSigs
forM_ decls scanRecordDecls
ifOptimizeNewtypes
(forM_ decls scanNewtypeDecls)
(return ())
preprocessAST () mod = throwError $ UnsupportedModuleSyntax "preprocessAST" mod
--------------------------------------------------------------------------------
-- | Preprocessing
-- | Find newtype declarations
scanNewtypeDecls :: F.Decl -> Compile ()
scanNewtypeDecls (DataDecl _ NewType{} _ _ constructors _) = compileNewtypeDecl constructors
scanNewtypeDecls _ = return ()
-- | Add new types to the state
compileNewtypeDecl :: [F.QualConDecl] -> Compile ()
compileNewtypeDecl [QualConDecl _ _ _ condecl] = case condecl of
-- newtype declaration without destructor
ConDecl _ name [ty] -> addNewtype name Nothing ty
RecDecl _ cname [FieldDecl _ [dname] ty] -> addNewtype cname (Just dname) ty
x -> error $ "compileNewtypeDecl case: Should be impossible (this is a bug). Got: " ++ show x
where
addNewtype cname dname ty = do
qcname <- qualify cname
qdname <- case dname of
Nothing -> return Nothing
Just n -> Just <$> qualify n
modify (\cs@CompileState{stateNewtypes=nts} ->
cs{stateNewtypes=(qcname,qdname,unAnn ty):nts})
compileNewtypeDecl q = error $ "compileNewtypeDecl: Should be impossible (this is a bug). Got: " ++ show q
-- | Add record declarations to the state
scanRecordDecls :: F.Decl -> Compile ()
scanRecordDecls decl = do
case decl of
DataDecl _loc ty _ctx (F.declHeadName -> name) qualcondecls _deriv -> do
let addIt = let ns = for qualcondecls (\(QualConDecl _loc' _tyvarbinds _ctx' condecl) -> conDeclName condecl)
in addRecordTypeState name ns
case ty of
DataType{} -> addIt
NewType{} -> ifOptimizeNewtypes
(return ())
addIt
_ -> return ()
case decl of
DataDecl _ ty _ _ constructors _ ->
case ty of
DataType{} -> dataDecl constructors
NewType{} -> ifOptimizeNewtypes
(return ())
(dataDecl constructors)
GDataDecl _ ty _ _ _ decls _ ->
case ty of
DataType{} -> dataDecl (map convertGADT decls)
NewType{} -> ifOptimizeNewtypes
(return ())
(dataDecl (map convertGADT decls))
_ -> return ()
where
addRecordTypeState (unAnn -> name') (map unAnn -> cons') = do
name <- qualify name'
cons <- mapM qualify cons'
modify $ \s -> s { stateRecordTypes = (name, cons) : stateRecordTypes s }
conDeclName (ConDecl _ n _) = n
conDeclName (InfixConDecl _ _ n _) = n
conDeclName (RecDecl _ n _) = n
-- | Collect record definitions and store record name and field names.
-- A ConDecl will have fields named slot1..slotN
dataDecl :: [F.QualConDecl] -> Compile ()
dataDecl constructors =
forM_ constructors $ \(QualConDecl _ _ _ condecl) ->
case condecl of
ConDecl _ name types -> do
let fields = map (Ident () . ("slot"++) . show . fst) . zip [1 :: Integer ..] $ types
addRecordState name fields
InfixConDecl _ _t1 name _t2 ->
addRecordState name [F.mkIdent "slot1", F.mkIdent "slot2"]
RecDecl _ name fields' -> do
let fields = concatMap F.fieldDeclNames fields'
addRecordState name fields
where
addRecordState :: Name a -> [Name b] -> Compile ()
addRecordState name' fields = do
name <- qualify name'
modify $ \s -> s
{ stateRecords = (name,map unAnn fields) : stateRecords s }
scanTypeSigs :: F.Decl -> Compile ()
scanTypeSigs decl = case decl of
TypeSig _ names typ -> mapM_ (`addTypeSig` typ) names
_ -> return ()
where
addTypeSig :: F.Name -> F.Type -> Compile ()
addTypeSig (unAnn -> n') (unAnn -> t) = do
n <- qualify n'
modify $ \s -> s { stateTypeSigs = M.insert n t (stateTypeSigs s) }
|
beni55/fay
|
src/Fay/Compiler/InitialPass.hs
|
bsd-3-clause
| 6,424 | 21 | 35 | 1,870 | 1,715 | 874 | 841 | 115 | 11 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeSynonymInstances #-}
module PixeloSolver.AI.OCR(
PixeloBoard(..)
, pixeloBoardGetTileWidth
, pixeloBoardGetWidth
, pixeloBoardGetHeight
, TileDistanceTolerance
, MinimalTileLength
, FindBoardConstants
, EmptyStripeSpaceTolerance
, NumberTolerances(..)
, OCRConstants(..)
, findPixeloBoard
, screenshotToPixeloGame
, screenshotToPixeloGameAuto
) where
import Control.Applicative
import Control.Monad.Reader
import Data.List
import Data.Maybe
import Text.Parsec() -- in 3.1.3 instance Stream [tok] m tok is here
import Text.Parsec.Prim hiding (many, (<|>))
import Text.Parsec.Pos
import Text.Parsec.Combinator
import PixeloSolver.Data.Graphics
import PixeloSolver.Game.Nonogram
import System.IO (hFlush, stdout)
safeHead :: [a] -> Maybe a
safeHead [] = Nothing
safeHead (a:_) = Just a
-- | Distance tolerance between the board's white tiles. Two white groups of
-- pixels that lie within that distance are considered to potentially form the
-- game's board.
type TileDistanceTolerance = Int
-- | Minimal length at which a group of white pixels is considered to be a
-- potential tile.
type MinimalTileLength = Int
type FindBoardConstants = (TileDistanceTolerance, MinimalTileLength)
-- | Number of horizontal or vertical rows without a black pixel in hint strip
-- after which the strip is considered to end. Should be large enough to cover
-- the gap between the first hint and the board.
type EmptyStripeSpaceTolerance = Int
-- | Tolerances used for determining whether two digits form the same number.
data NumberTolerances = NT {
ntGetMaxNumberWidth :: Int -- ^ Maximum width of a number
, ntGetMaxInterDigitSpace :: Int -- ^ Maximum distance between two digits in
-- the same number
}
data OCRConstants = OCRConstants {
ocrGetEmptyStripeSpaceTolerance :: EmptyStripeSpaceTolerance
, ocrNumberTolerances :: NumberTolerances
}
data PixeloBoard = PixeloBoard {
pixeloBoardGetRows :: [(Int, Int)]
, pixeloBoardGetColumns :: [(Int, Int)]
} deriving (Eq, Show)
pixeloBoardGetWidth :: PixeloBoard -> Int
pixeloBoardGetWidth = length . pixeloBoardGetColumns
pixeloBoardGetHeight :: PixeloBoard -> Int
pixeloBoardGetHeight = length . pixeloBoardGetRows
pixeloBoardGetTileWidth :: PixeloBoard -> Int
pixeloBoardGetTileWidth pixeloBoard = rE - rB
where (rB, rE) = head . pixeloBoardGetRows $ pixeloBoard
pixeloBoardGetFirstRowY :: PixeloBoard -> Int
pixeloBoardGetFirstRowY = fst . head . pixeloBoardGetRows
pixeloBoardGetFirstColX :: PixeloBoard -> Int
pixeloBoardGetFirstColX = fst . head . pixeloBoardGetColumns
-- | Extracts hint information from the screenshot and uses OCR to transform it
-- into a PixeloGame object. Uses human suggestion in case a number can't be
-- recognized.
screenshotToPixeloGame :: (Map m r RGB)
=> OCRConstants
-> m RGB
-> PixeloBoard
-> IO PixeloGame
screenshotToPixeloGame = screenshotToPixeloGame' specPicsToHintManual
-- | Extracts hint information from the screenshot and uses OCR to transform it
-- into a PixeloGame object. In case of recognition failure it returns Nothing.
screenshotToPixeloGameAuto :: (Map m r RGB)
=> OCRConstants
-> m RGB
-> PixeloBoard
-> Maybe PixeloGame
screenshotToPixeloGameAuto = screenshotToPixeloGame' specPicsToHintAuto
screenshotToPixeloGame' :: (Map m r RGB, Monad m')
=> (forall e . (BlackCheckable e, Map m r e) => [[m e]] -> m' [Int])
-> OCRConstants
-> m RGB
-> PixeloBoard
-> m' PixeloGame
screenshotToPixeloGame'
specPicsToHint
(OCRConstants emptyStripeSpaceTolerance numberTolerances)
colorMap
pixeloBoard =
do
rowHints <- sequence (map specPicsToHint rowHintPics)
colHints <- sequence (map specPicsToHint colHintPics)
let
rowHints' = map (\l -> case l of { [] -> [0]; _ -> l; }) rowHints
colHints' = map (\l -> case l of { [] -> [0]; _ -> l; }) colHints
return $ PixeloGame (emptyGameBoard height width) rowHints' colHints'
where
columnHintsStrips = getColumnHints emptyStripeSpaceTolerance colorMap
pixeloBoard -- :: [[m RGB]]
colHintPics = map
(map (map (trimNonblack . snd) . splitBlackPatchesByColumn))
columnHintsStrips
rowHintPics = map (mergeHints numberTolerances)
$ getRowHints emptyStripeSpaceTolerance colorMap pixeloBoard
(height, width) = (length rowHintPics, length colHintPics)
groupNeighbours :: (a -> a -> Bool) -> [a] -> [[a]]
groupNeighbours check as = groupNeighbours' [] check as
where
groupNeighbours' :: [a] -> (a -> a -> Bool) -> [a] -> [[a]]
groupNeighbours' [] _ [] = []
groupNeighbours' acc _ [] = [reverse acc]
groupNeighbours' [] checkFun (x : xs) = groupNeighbours' [x] checkFun xs
groupNeighbours' (accHead : acc) checkFun (x : xs) =
if checkFun accHead x
then groupNeighbours' (x : (accHead : acc)) checkFun xs
else reverse (accHead : acc) : (groupNeighbours' [] checkFun (x : xs))
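-- A hedged usage sketch of groupNeighbours (the values are arbitrary): grouping
-- consecutive integers that differ by exactly one.
-- >>> groupNeighbours (\x y -> y - x == 1) [1, 2, 3, 5, 6, 9]
-- [[1,2,3],[5,6],[9]]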
neighbourGroupToRanges :: [[(Int, a)]] -> [(Int, Int)]
neighbourGroupToRanges [] = []
neighbourGroupToRanges (ns : nss) = (fst . head $ ns, fst . last $ ns) :
(neighbourGroupToRanges nss)
groupSimilarRanges :: Int -> [(Int, Int)] -> [[(Int, Int)]]
groupSimilarRanges tolerance = groupNeighbours
(\(beg0, end0) (beg1, end1) ->
(abs ((end1 - beg1) - (end0 - beg0)) <= tolerance) &&
(beg1 - end0 - 1 <= tolerance))
findWhitePatches :: [(Int, RGB)] -> [(Int, Int)]
findWhitePatches row =
let
whites = filter (\(_, c) -> isWhite c) row
neighbourRanges = neighbourGroupToRanges $ groupNeighbours
(\(i0, _) (i1, _) -> i1 - i0 == 1)
whites
in
neighbourRanges
getPixeloBoardRow :: FindBoardConstants -> [(Int, RGB)] -> Maybe [(Int, Int)]
getPixeloBoardRow (distanceTolerance, minimalTileLength) row =
let
whitePatches = findWhitePatches row
whitePatchesOfSimilarLength = groupSimilarRanges
distanceTolerance
(filter ((>= minimalTileLength) . (\(a, b) -> b - a + 1)) whitePatches)
potentialRows = filter ((>= 5) . length) whitePatchesOfSimilarLength
in
safeHead potentialRows
findPixeloBoardRow :: FindBoardConstants -> [[(Int, RGB)]] -> Maybe [(Int, Int)]
findPixeloBoardRow constants = safeHead
. catMaybes
. map (getPixeloBoardRow constants)
-- | Find white tiles representing pixelo board on screen image.
findPixeloBoard :: Map m r RGB
=> FindBoardConstants
-> m RGB
-> Maybe PixeloBoard
findPixeloBoard constants colorMap = do
let (height, width) = mapGetSize colorMap
let rows = map rowAssocs $ map (\i -> mapGetRow i colorMap) (reverse [0..height])
let columns = map rowAssocs $ map (\i -> mapGetColumn i colorMap) (reverse [0..width])
boardColumn <- findPixeloBoardRow constants rows
boardRow <- findPixeloBoardRow constants columns
return $ PixeloBoard boardRow boardColumn
splitBlackPatchesByColumn :: (BlackCheckable e, Map m r e)
=> m e
-> [((Int, Int), m e)]
splitBlackPatchesByColumn = splitBlackPatchesByColumn' 0
splitBlackPatchesByColumn' :: (BlackCheckable e, Map m r e)
=> Int
-> m e
-> [((Int, Int), m e)]
splitBlackPatchesByColumn' shift strip =
case maybeBegOfFirstPatch of
Just (begColumn, _) -> ((shift + begColumn, shift + endColumn),
submap ((0, begColumn), (height, endColumn)) strip)
: splitBlackPatchesByColumn'
(shift + endColumn + 1)
(submap ((0, endColumn + 1), (height, width)) strip)
Nothing -> []
where
(height, width) = mapGetSize strip
idxColumns = zip [0..width]
(map (\i -> mapGetColumn i strip) [0..width])
(maybeBegOfFirstPatch, columnsTail) =
findFirstWithTail (any isBlack . rowElems . snd) idxColumns
maybeEndOfFirstPatch = findFirst (not . any isBlack . rowElems . snd)
columnsTail
endColumn = fromMaybe width . fmap fst $ maybeEndOfFirstPatch
splitBlackPatchesByRow :: (BlackCheckable e, Map m r e)
=> m e
-> [m e]
splitBlackPatchesByRow strip =
case maybeBegOfFirstPatch of
Just (begRow, _) -> submap ((begRow, 0), (endRow, width)) strip
: splitBlackPatchesByRow
(submap ((endRow + 1, 0), (height, width)) strip)
Nothing -> []
where
(height, width) = mapGetSize strip
idxRows = zip [0..height]
(map (\i -> mapGetRow i strip) [0..height])
(maybeBegOfFirstPatch, rowsTail) =
findFirstWithTail (any isBlack . rowElems . snd) idxRows
maybeEndOfFirstPatch = findFirst (not . any isBlack . rowElems . snd)
rowsTail
endRow = fromMaybe height . fmap fst $ maybeEndOfFirstPatch
findFirst :: (a -> Bool) -> [a] -> Maybe a
findFirst predicate as = safeHead . filter predicate $ as
findFirstWithTail :: (a -> Bool) -> [a] -> (Maybe a, [a])
findFirstWithTail _ [] = (Nothing, [])
findFirstWithTail predicate (a : as) =
if predicate a
then (Just a, as)
else findFirstWithTail predicate as
findLast :: (a -> Bool) -> [a] -> Maybe a
findLast predicate as =
case first of
Nothing -> Nothing
_ ->
case second of
Nothing -> first
Just a -> findLast predicate (a : foundTail')
where
(first, foundTail) = findFirstWithTail predicate as
(second, foundTail') = findFirstWithTail predicate foundTail
-- | trims all edge rows that do not contain a black pixel
trimNonblack :: (BlackCheckable e, Map m r e) => m e -> m e
trimNonblack bwMap =
case maybeTrimmed of
Just finalBWMap -> finalBWMap
Nothing -> submap ((h + 1, w + 1), (h, w)) bwMap
where
(h, w) = mapGetSize bwMap
rowsWithIdx = map (\i -> (i, mapGetRow i bwMap)) [0..h]
colsWithIdx = map (\i -> (i, mapGetColumn i bwMap)) [0..w]
firstRow = fmap fst
. findFirst (\(_, c) -> any isBlack (rowElems c))
$ rowsWithIdx
lastRow = fmap fst
. findLast (\(_, c) -> any isBlack (rowElems c))
$ rowsWithIdx
firstCol = fmap fst
. findFirst (\(_, c) -> any isBlack (rowElems c))
$ colsWithIdx
lastCol = fmap fst
. findLast (\(_, c) -> any isBlack (rowElems c))
$ colsWithIdx
maybeTrimmed = do
fR <- firstRow
lR <- lastRow
fC <- firstCol
lC <- lastCol
return $ submap ((fR, fC), (lR, lC)) bwMap
getColumnHints :: (BlackCheckable e, Map m r e)
=> EmptyStripeSpaceTolerance
-> m e
-> PixeloBoard
-> [[m e]]
getColumnHints tolerance screenshot board =
map (map trimNonblack . splitBlackPatchesByRow)
. map (trimColumnHintsStrip tolerance)
$ cutColumnHintsStrips screenshot board
getRowHints :: (BlackCheckable e, Map m r e)
=> EmptyStripeSpaceTolerance
-> m e
-> PixeloBoard
-> [[((Int, Int), m e)]]
getRowHints tolerance screenshot board =
map (map (fmap trimNonblack) . splitBlackPatchesByColumn)
. map (trimRowHintsStrip tolerance)
$ cutRowHintsStrips screenshot board
-- | Given dimensions and images of hints, merges hints that might represent
-- the same number, based on NumberTolerances
mergeHints :: NumberTolerances -> [((Int, Int), a)] -> [[a]]
mergeHints _ [] = []
mergeHints _ (x : []) = [[snd x]]
mergeHints t (x : y : xs) =
if joinedWidth <= ntGetMaxNumberWidth t
&& midDistances <= ntGetMaxInterDigitSpace t
then [snd x, snd y] : mergeHints t xs
else [snd x] : mergeHints t (y : xs)
where
(yBegFst, yEndFst) = fst x
(yBegSnd, yEndSnd) = fst y
joinedWidth = yEndSnd - yBegFst
midDistances = (-)
((yBegSnd + yEndSnd) `div` 2)
((yBegFst + yEndFst) `div` 2)
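-- A hedged illustration of 'mergeHints'; the tolerance values below are made
-- up for the example and are not the real configuration. Digit images whose
-- horizontal ranges are close enough are grouped into one number, distant ones
-- stay separate.
-- >>> let t = NT { ntGetMaxNumberWidth = 20, ntGetMaxInterDigitSpace = 8 }
-- >>> mergeHints t [((0, 6), 'a'), ((8, 14), 'b'), ((40, 46), 'c')]
-- ["ab","c"]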
hintPicsToIntManual :: (BlackCheckable e, Map m r e) => [m e] -> IO Int
hintPicsToIntManual digitImages =
case recognizeNumber digitImages of
Just num -> return num
_ -> do
digits <- mapM (\image -> do
putStr . prettyPrintBWMap . mapToBWMap $ image
putStr "Write number represented by the above bitmap: "
hFlush stdout
num <- getLine
return $ read num) digitImages
return $ foldl (\a b -> 10 * a + b) 0 digits
specPicsToHintAuto :: (BlackCheckable e, Map m r e) => [[m e]] -> Maybe [Int]
specPicsToHintAuto [] = return []
specPicsToHintAuto (p : ps) = do
spec <- recognizeNumber p
restOfHint <- specPicsToHintAuto ps
return (spec : restOfHint)
specPicsToHintManual :: (BlackCheckable e, Map m r e) => [[m e]] -> IO [Int]
specPicsToHintManual [] = return []
specPicsToHintManual (p : ps) = do
spec <- hintPicsToIntManual p
restOfHint <- specPicsToHintManual ps
return (spec : restOfHint)
-- TODO Blog: Above we have an IO dependence that pipes/conduit solves
-- Strip extraction functions
-- | Cut submaps of a screenshot which are aligned with the board's columns and
-- start at the vertical beginning of the screenshot and end at the first row.
cutColumnHintsStrips :: (Map m r e) => m e -> PixeloBoard -> [m e]
cutColumnHintsStrips m b = map (cutColumnHintsStrip m)
. pixeloBoardGetColumns $ b
where
yEnd = pixeloBoardGetFirstRowY b
cutColumnHintsStrip :: (Map m r e) => m e -> (Int, Int) -> m e
cutColumnHintsStrip strip (xBeg, xEnd) = submap
((0, xBeg), (yEnd, xEnd))
strip
-- | Cut submaps of a screenshot which are aligned with the board's rows and
-- start at the horizontal beginning of the screenshot and end at the first column.
cutRowHintsStrips :: (Map m r e) => m e -> PixeloBoard -> [m e]
cutRowHintsStrips m b = map (cutRowHintsStrip m)
. pixeloBoardGetRows $ b
where
xEnd = pixeloBoardGetFirstColX b
cutRowHintsStrip :: (Map m r e) => m e -> (Int, Int) -> m e
cutRowHintsStrip strip (yBeg, yEnd) = submap
((yBeg, 0), (yEnd, xEnd))
strip
-- | Trims a column hints strip. Starting from the bottom of the strip it searches
-- for N consecutive rows that do not have black pixels in them. Then it cuts the
-- top of the strip at that point. N is also called the empty space tolerance.
trimColumnHintsStrip ::
(BlackCheckable e, Map m r e)
=> Int -- ^ empty space tolerance.
-> m e -- ^ hint strip
-> m e
trimColumnHintsStrip t m = submap ((lastHintRow, 0), (height, width)) m
where
(height, width) = mapGetSize $ m
lastHintRow = (height + 1 -)
$ findLastHintDim mapGetRow t m (reverse [0..height])
-- | Row version of trimColumnHintsStrip
trimRowHintsStrip ::
(BlackCheckable e, Map m r e)
=> Int -- ^ empty space tolerance.
-> m e
-> m e
trimRowHintsStrip t m = submap ((0, lastHintColumn), (height, width)) m
where
(height, width) = mapGetSize $ m
lastHintColumn = (width + 1 -)
$ findLastHintDim mapGetColumn t m (reverse [0..width])
hasRowBlack :: (BlackCheckable e, Row r e) => r e -> Bool
hasRowBlack = any (isBlack) . rowElems
-- Finds the last row in a hint strip such that it contains a black pixel and up
-- to (tolerance + 1) of the next rows do not. Row here means either a horizontal
-- or a vertical row.
findLastHintDim ::
(BlackCheckable e, Map m r e)
=> ((BlackCheckable e, Map m r e) => Int -> m e -> r e) -- ^ get row function
-> Int -- ^ tolerance parameter
-> m e -- ^ hint strip
-> [Int] -- ^ row indexes to search over
-> Int
findLastHintDim getDim tolerance m searchOrder =
length
. concat
. takeWhile (\bs -> head bs == True || length bs <= tolerance)
. group
. map (\i -> hasRowBlack . getDim i $ m)
$ searchOrder
-- Number OCR
type BlackGroups = [(Int, Int)]
type RowWidth = Int
type DigitRecognizer = ParsecT [BlackGroups] BlackGroups (Reader RowWidth)
blackGroupToken :: (BlackGroups -> Bool)
-> DigitRecognizer ()
blackGroupToken predicate = tokenPrim show (\s _ _ -> incSourceColumn s 1)
(\t -> if predicate t then return t else fail "")
>>= putState
ellipse :: DigitRecognizer [()]
ellipse = many1 ellipseBeg >> many1 ellipseMid >> many1 ellipseEnd
ellipseBeg :: DigitRecognizer ()
ellipseBeg = do
s <- getState
width <- ask
case s of
[] -> blackGroupToken (coveringMiddle width)
[(xBeg, xEnd)] -> blackGroupToken
((&&) <$> coveringMiddle width <*> predicate)
where
predicate [(xBeg', xEnd')] = xBeg' <= xBeg
&& xEnd' >= xEnd
predicate _ = False
_ -> fail ""
where
coveringMiddle width [(xBeg, xEnd)] = xEnd > (width `div` 2)
&& xBeg < (width `div` 2)
coveringMiddle _ _ = False
ellipseMid :: DigitRecognizer ()
ellipseMid = do
s <- getState
case s of
[(xBeg, xEnd)] -> blackGroupToken predicate
where
predicate [(x0Beg, _), (_, x1End)] = x0Beg <= xBeg
&& x1End >= xEnd
predicate _ = False
[_, _] -> blackGroupToken ((== 2) . length)
_ -> fail ""
ellipseEnd :: DigitRecognizer ()
ellipseEnd = do
s <- getState
width <- ask
case s of
[(x0Beg, _), (_, x1End)] -> blackGroupToken
((&&) <$> coveringMiddle width <*> predicate)
where
predicate [(xBeg, xEnd)] = xBeg >= x0Beg && xEnd <= x1End
predicate _ = False
[(xBeg, xEnd)] -> blackGroupToken
((&&) <$> coveringMiddle width <*> predicate)
where
predicate [(xBeg', xEnd')] = xBeg' >= xBeg
&& xEnd' <= xEnd
predicate _ = False
_ -> fail ""
where
coveringMiddle width [(xBeg, xEnd)] = xEnd > (width `div` 2)
&& xBeg < (width `div` 2)
coveringMiddle _ _ = False
leftEdge :: DigitRecognizer ()
leftEdge = blackGroupToken predicate
where
predicate [(xBeg, _)] = xBeg == 0
predicate _ = False
rightEdge :: DigitRecognizer ()
rightEdge = do
width <- ask
blackGroupToken (predicate width)
where
predicate width [(_, xEnd)] = xEnd == width
predicate _ _ = False
coveringBar :: DigitRecognizer ()
coveringBar = do
width <- ask
blackGroupToken (predicate width)
where
predicate width [(xBeg, xEnd)] = xBeg == 0 && xEnd == width
predicate _ _ = False
zero :: DigitRecognizer Int
zero = ellipse >> eof >> return 0
one :: DigitRecognizer Int
one = anyToken >> many1 coveringBar >> eof >> return 1
two :: DigitRecognizer Int
two = anyToken
>> many1 ellipseBeg
>> rightEdge
>> twoMiddle
>> many1 coveringBar
>> eof
>> return 2
twoMiddle :: DigitRecognizer [()]
twoMiddle = many1 $ do
s <- getState
case s of
[(xBeg, xEnd)] -> blackGroupToken predicate
where
predicate [(xBeg', xEnd')] = xBeg' <= xBeg && xEnd' <= xEnd
predicate _ = False
_ -> fail ""
three :: DigitRecognizer Int
three = threeTwoForks >> threeThreeForks >> threeEnd >> eof >> return 3
threeTwoForks :: DigitRecognizer [()]
threeTwoForks = many $ blackGroupToken ((== 2) . length)
threeThreeForks :: DigitRecognizer [()]
threeThreeForks = many1 $ blackGroupToken ((== 3) . length)
threeEnd :: DigitRecognizer [()]
threeEnd = many1 $ blackGroupToken ((== 1) . length)
four :: DigitRecognizer Int
four = fourTwoForks >> many1 coveringBar >> fourBottom >> eof >> return 4
fourTwoForks :: DigitRecognizer [()]
fourTwoForks = many1 $ blackGroupToken predicate
where
predicate [(xBeg, _), _] = xBeg == 0
predicate _ = False
fourBottom :: DigitRecognizer [()]
fourBottom = many1 $ do
width <- ask
blackGroupToken (predicate width)
where
predicate width [(xBeg, _)] = xBeg >= (width `div` 2)
predicate _ _ = False
-- recognizeFive
five :: DigitRecognizer Int
five = anyToken -- Sometimes the first row has spurious pixels
>> many1 coveringBar
>> leftEdge
>> fiveMiddle
>> fiveBottom
>> eof
>> return 5
fiveMiddle :: DigitRecognizer [()]
fiveMiddle = many1 $ do
s <- getState
case s of
[(xBeg, xEnd)] -> blackGroupToken predicate
where
predicate [(xBeg', xEnd')] = xBeg' >= xBeg && xEnd' >= xEnd
predicate _ = False
_ -> fail ""
fiveBottom :: DigitRecognizer [()]
fiveBottom = many1 $ do
s <- getState
case s of
[(_, xEnd)] -> blackGroupToken predicate
where
predicate [(xBeg', xEnd')] = xBeg' == 0 && xEnd' <= xEnd
predicate _ = False
_ -> fail ""
six :: DigitRecognizer Int
six = many ellipseBeg >> many1 leftEdge >> many1 ellipseMid
>> many1 ellipseEnd
>> eof
>> return 6
nine :: DigitRecognizer Int
nine = ellipse >> many1 rightEdge >> many ellipseEnd >> eof >> return 9
seven :: DigitRecognizer Int
seven = many1 sevenTop >> many1 sevenBottom >> eof >> return 7
sevenTop :: DigitRecognizer ()
sevenTop = do
s <- getState
case s of
[] -> blackGroupToken ((== 1) . length)
[(xBeg, _)] -> blackGroupToken predicate
where
predicate [(xBeg', _)] = xBeg' <= xBeg
predicate _ = False
_ -> fail ""
sevenBottom :: DigitRecognizer ()
sevenBottom = do
s <- getState
case s of
[(xBeg, xEnd)] -> blackGroupToken predicate
where
predicate [(xBeg', xEnd')] = xBeg' >= xBeg && xEnd' == xEnd
predicate _ = False
_ -> fail ""
eight :: DigitRecognizer Int
eight = ellipse
>> try (many ellipseBeg >> many1 ellipseMid) <|> many1 ellipseMid
>> many1 ellipseEnd
>> eof
>> return 8
-- | Perform an OCR on a list of digits
recognizeNumber :: (BlackCheckable e, Map m r e) => [m e] -> Maybe Int
recognizeNumber images =
fmap digitsToNumber . sequence
. map ((<|>) <$> recognizeDigit <*> splitAndRecognizeDigits)
$ images
where
digitsToNumber :: [Int] -> Int
digitsToNumber = foldl (\a b -> 10 * a + b) 0
-- | Given the image of a potential digit, perform an OCR to recognize which
-- digit it is.
recognizeDigit :: (BlackCheckable e, Map m r e) => m e -> Maybe Int
recognizeDigit bwMap = (run
width
(choice . map try $ [zero, one, two, four, five, six, seven, eight, nine])
rows)
<|> (run height three columns)
where
(height, width) = mapGetSize bwMap
rows = map (\i -> getBlackGroups . mapGetRow i $ bwMap) [0..height]
columns = map (\i -> getBlackGroups . mapGetColumn i $ bwMap) [0..width]
run size parser = fromRight . ($ size) . runReader . runPT parser [] "Digit"
fromRight (Right t) = return t
fromRight (Left _) = fail ""
-- | Sometimes a double-digit number has joined digits and is represented by
-- one picture. This method splits the picture and checks whether its
-- constituents are digits.
splitAndRecognizeDigits :: (BlackCheckable e, Map m r e) => m e -> Maybe Int
splitAndRecognizeDigits image = do
m <- recognizeDigit left
n <- recognizeDigit right
return $ m * 10 + n
where
(height, width) = mapGetSize image
tolerance = 2 -- We need to cut some middle to be sure that the digit does
-- not contain fragments from the other
left = submap ((0, 0), (height, width `div` 2 - tolerance)) image
right = submap ((0, width `div` 2 + tolerance), (height, width)) image
getBlackGroups :: (BlackCheckable e, Row r e) => r e -> [(Int, Int)]
getBlackGroups = getBlackGroups' . rowAssocs
getBlackGroups' :: (BlackCheckable e) => [(Int, e)] -> [(Int, Int)]
getBlackGroups' bws =
map (\bs -> (fst . head $ bs, fst . last $ bs))
. filter (isBlack . snd . head)
. groupBy (\(_, c0) (_, c1) -> isBlack c0 == isBlack c1)
$ bws
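-- A hedged sketch of what getBlackGroups' computes, assuming an element type
-- whose isBlack test is True exactly at the marked indices (the concrete
-- instance lives in PixeloSolver.Data.Graphics and is not shown here):
-- index: 0 1 2 3 4 5 6 7
-- black: . X X . . X X X ==> [(1, 2), (5, 7)]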
|
gregorias/pixelosolver
|
src/PixeloSolver/AI/OCR.hs
|
bsd-3-clause
| 22,926 | 0 | 18 | 5,092 | 7,649 | 4,074 | 3,575 | 539 | 6 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Main ( main ) where
import Distribution.Nixpkgs.Haskell.FromCabal ( fromGenericPackageDescription )
import Distribution.Nixpkgs.Haskell.FromCabal.Flags
import Distribution.Nixpkgs.Haskell.HackageGit ( readHackage, Hackage )
import Distribution.Nixpkgs.Haskell.Constraint
import Distribution.Nixpkgs.Haskell.FromCabal.Configuration.GHC7102
import Distribution.Nixpkgs.Haskell.PackageSourceSpec
import Control.Monad
import Control.Monad.Par.Combinator
import Control.Monad.Par.IO
import Control.Monad.Trans ( liftIO )
import Data.Function
import Data.List
import Data.Map.Strict ( Map )
import qualified Data.Map.Strict as Map
import Data.Maybe
import Data.Monoid
import Data.Set ( Set )
import qualified Data.Set as Set
import Distribution.Nixpkgs.Fetch
import Distribution.Nixpkgs.Haskell
import Distribution.Nixpkgs.Meta
import Distribution.Nixpkgs.PackageMap
import Distribution.PackageDescription hiding ( options, buildDepends, extraLibs, buildTools )
import Distribution.Text
import Distribution.Package
import Distribution.Version
import Control.Lens
import Text.PrettyPrint.HughesPJClass hiding ( (<>) )
import Paths_hackage2nix
import Language.Nix
import Options.Applicative
import System.FilePath
defaultConfiguration :: Configuration
defaultConfiguration = ghc7102
type Nixpkgs = PackageMap -- Map String (Set [String])
type PackageSet = Map String Version
type PackageMultiSet = Map String (Set Version)
resolveConstraint :: Constraint -> Hackage -> Version
resolveConstraint c = fromMaybe (error msg) . resolveConstraint' c
where msg = "constraint " ++ display c ++ " cannot be resolved in Hackage"
resolveConstraint' :: Constraint -> Hackage -> Maybe Version
resolveConstraint' (Dependency (PackageName name) vrange) hackage =
Set.findMax . Set.filter (`withinRange` vrange) . Map.keysSet <$> Map.lookup name hackage
data Options = Options
{ hackageRepository :: FilePath
, preferredVersionsFile :: Maybe FilePath
, nixpkgsRepository :: FilePath
}
deriving (Show)
options :: Parser Options
options = Options
<$> strOption (long "hackage" <> help "path to Hackage git repository" <> value "hackage" <> showDefault <> metavar "PATH")
<*> optional (strOption (long "preferred-versions" <> help "path to Hackage preferred-versions file" <> value "hackage/preferred-versions" <> showDefault <> metavar "PATH"))
<*> strOption (long "nixpkgs" <> help "path to Nixpkgs repository" <> value "<nixpkgs>" <> showDefault <> metavar "PATH")
pinfo :: ParserInfo Options
pinfo = info
( helper
<*> infoOption ("hackage2nix " ++ display version) (long "version" <> help "Show version number")
<*> options
)
( fullDesc
<> header "hackage2nix converts a Hackage database into a haskell-packages.nix file."
)
main :: IO ()
main = do
Options {..} <- execParser pinfo
hackage <- readHackage hackageRepository
nixpkgs <- readNixpkgPackageMap nixpkgsRepository Nothing
preferredVersions <- readPreferredVersions (fromMaybe (hackageRepository </> "preferred-versions") preferredVersionsFile)
let fixup = Map.delete "acme-everything" -- TODO: https://github.com/NixOS/cabal2nix/issues/164
. Map.delete "som" -- TODO: https://github.com/NixOS/cabal2nix/issues/164
. Map.delete "type" -- TODO: https://github.com/NixOS/cabal2nix/issues/163
. Map.delete "dictionary-sharing" -- TODO: https://github.com/NixOS/cabal2nix/issues/175
. Map.filter (/= Map.empty)
. Map.mapWithKey (enforcePreferredVersions preferredVersions)
runParIO $ generatePackageSet defaultConfiguration (fixup hackage) nixpkgs
enforcePreferredVersions :: [Constraint] -> String -> Map Version GenericPackageDescription
-> Map Version GenericPackageDescription
enforcePreferredVersions cs pkg = Map.filterWithKey (\v _ -> PackageIdentifier (PackageName pkg) v `satisfiesConstraints` cs)
generatePackageSet :: Configuration -> Hackage -> Nixpkgs -> ParIO ()
generatePackageSet config hackage nixpkgs = do
let
corePackageSet :: PackageSet
corePackageSet = Map.fromList [ (name, v) | PackageIdentifier (PackageName name) v <- corePackages config ++ hardCorePackages config ]
latestVersionSet :: PackageSet
latestVersionSet = Map.map (Set.findMax . Map.keysSet) hackage
defaultPackageOverridesSet :: PackageSet
defaultPackageOverridesSet = Map.fromList [ (name, resolveConstraint c hackage) | c@(Dependency (PackageName name) _) <- defaultPackageOverrides config ]
generatedDefaultPackageSet :: PackageSet
generatedDefaultPackageSet = (defaultPackageOverridesSet `Map.union` latestVersionSet) `Map.difference` corePackageSet
latestCorePackageSet :: PackageSet
latestCorePackageSet = latestVersionSet `Map.intersection` corePackageSet
latestOverridePackageSet :: PackageSet
latestOverridePackageSet = latestVersionSet `Map.intersection` defaultPackageOverridesSet
extraPackageSet :: PackageMultiSet
extraPackageSet = Map.unionsWith Set.union
[ Map.singleton name (Set.singleton (resolveConstraint c hackage)) | c@(Dependency (PackageName name) _) <- extraPackages config ]
db :: PackageMultiSet
db = Map.unionsWith Set.union [ Map.map Set.singleton generatedDefaultPackageSet
, Map.map Set.singleton latestCorePackageSet
, Map.map Set.singleton latestOverridePackageSet
, extraPackageSet
]
haskellResolver :: Dependency -> Bool
haskellResolver (Dependency (PackageName name) vrange)
| Just v <- Map.lookup name corePackageSet = v `withinRange` vrange
| Just v <- Map.lookup name generatedDefaultPackageSet = v `withinRange` vrange
| otherwise = False
nixpkgsResolver :: Identifier -> Maybe Binding
nixpkgsResolver = resolve (Map.map (Set.map (over path ("pkgs":))) nixpkgs)
liftIO $ do putStrLn "/* hackage-packages.nix is an auto-generated file -- DO NOT EDIT! */"
putStrLn ""
putStrLn "{ pkgs, stdenv, callPackage }:"
putStrLn ""
putStrLn "self: {"
putStrLn ""
pkgs <- flip parMapM (Map.toAscList db) $ \(name, vs) -> do
defs <- forM (Set.toAscList vs) $ \pkgversion -> do
let -- TODO: Include list of broken dependencies in the generated output.
descr = hackage Map.! name Map.! pkgversion
flagAssignment :: FlagAssignment
flagAssignment = configureCabalFlags (packageId descr)
drv' :: Derivation
drv' = fromGenericPackageDescription haskellResolver nixpkgsResolver (platform ghc7102) (compilerInfo ghc7102) flagAssignment [] descr
attr :: String
attr | Just v <- Map.lookup name generatedDefaultPackageSet, v == pkgversion = name
| otherwise = name ++ '_' : [ if c == '.' then '_' else c | c <- display pkgversion ]
sha256 :: String
sha256 | Just x <- lookup "X-Package-SHA256" (customFieldsPD (packageDescription descr)) = x
| otherwise = error $ display (packageId descr) ++ " has no hash"
srcSpec <- liftIO $ sourceFromHackage (Certain sha256) (name ++ "-" ++ display pkgversion)
let drv = drv' & src .~ srcSpec
& metaSection.hydraPlatforms %~ (`Set.difference` fromMaybe Set.empty (Map.lookup (PackageName name) (dontDistributePackages ghc7102)))
& metaSection.maintainers .~ fromMaybe Set.empty (Map.lookup (PackageName name) (packageMaintainers config))
isFromHackage :: Binding -> Bool
isFromHackage b = case view (reference . path) b of
["self",_] -> True
_ -> False
overrides :: Doc
overrides = fcat $ punctuate space
[ pPrint b | b <- Set.toList (view (dependencies . each) drv), not (isFromHackage b) ]
return $ nest 2 $ hang (doubleQuotes (text attr) <+> equals <+> text "callPackage") 2 (parens (pPrint drv)) <+> (braces overrides <> semi)
return (intercalate "\n\n" (map render defs))
liftIO $ mapM_ (\pkg -> putStrLn pkg >> putStrLn "") pkgs
liftIO $ putStrLn "}"
readPreferredVersions :: FilePath -> IO [Constraint]
readPreferredVersions p = mapMaybe parsePreferredVersionsLine . lines <$> readFile p
parsePreferredVersionsLine :: String -> Maybe Constraint
parsePreferredVersionsLine [] = Nothing
parsePreferredVersionsLine ('-':'-':_) = Nothing
parsePreferredVersionsLine l = simpleParse l `mplus` error ("invalid preferred-versions line: " ++ show l)
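-- A hedged sketch of the accepted line formats (the sample lines are made up):
-- parsePreferredVersionsLine "" ~> Nothing (blank line)
-- parsePreferredVersionsLine "-- a comment" ~> Nothing (comment line)
-- parsePreferredVersionsLine "aeson <0.10" ~> Just the parsed constraint
-- Any other line that simpleParse cannot handle aborts with the
-- "invalid preferred-versions line" error above.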
|
gridaphobe/cabal2nix
|
hackage2nix/src/Main.hs
|
bsd-3-clause
| 8,899 | 0 | 29 | 1,945 | 2,254 | 1,166 | 1,088 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module PeerReview.Web.Server
( runServer
, service -- Exposed for testing.
) where
import Network.Wai.Middleware.Cors
import Network.Wai.Middleware.RequestLogger
import Web.Spock.Safe
import PeerReview.Types
import PeerReview.Web.Routes as Routes
import PeerReview.Web.Types
-- Run the Spock app using the given configuration.
runServer :: AppConfig -> Env -> IO ()
runServer conf env = do
let port = acPort conf
state = AppState env
spockCfg = defaultSpockCfg Nothing PCNoDatabase state
runSpock port $ spock spockCfg service
-- Middlewares for the application.
appMiddleware :: WebApp ()
appMiddleware = do
middleware logStdout
middleware simpleCors
-- Combine different routes for the api.
apiRoutes :: WebApp ()
apiRoutes = do
get root $ text "doc or client"
subcomponent "api" Routes.peerReview
-- Join middlewares and API to spock app.
service :: WebApp ()
service = appMiddleware >> apiRoutes
|
keveri/peer-review-service
|
src/PeerReview/Web/Server.hs
|
bsd-3-clause
| 1,062 | 0 | 10 | 256 | 219 | 117 | 102 | 26 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Common where
import Data.Version (Version)
import Distribution.Package (Dependency)
import Distribution.ModuleName (ModuleName)
import Distribution.Text (display,simpleParse)
import qualified Language.Haskell.Exts.Annotated as HSE (Module,SrcSpanInfo)
import Language.Haskell.Exts.Annotated (parseModuleWithMode,ParseResult(ParseOk,ParseFailed))
import Language.Haskell.Exts.Annotated.Fixity (baseFixities)
import Language.Haskell.Exts.Parser (ParseMode(fixities),defaultParseMode)
import Language.Haskell.Exts.Pretty (prettyPrint)
import Language.Haskell.Names (Symbols,Error)
import Language.Haskell.Names.Interfaces ()
import Data.Aeson (
ToJSON(toJSON),object,(.=),
decode,FromJSON(parseJSON),Value(Object),(.:))
import Data.Aeson.Types (Parser)
import qualified Data.ByteString.Lazy as ByteString (readFile)
import Data.Maybe (catMaybes)
import Data.Map (Map,traverseWithKey)
import Data.Set (Set)
import qualified Data.Set as Set (map)
import Control.Monad (mzero,mplus,msum)
type Repository a = Map PackageName (Map VersionNumber a)
type SourceRepository = Repository FilePath
type ParsedRepository = Repository FilePath
type PackageName = String
type VersionNumber = Version
type ModuleAST = HSE.Module HSE.SrcSpanInfo
traverseRepository :: (PackageName -> VersionNumber -> a -> IO b) -> Repository a -> IO (Repository b)
traverseRepository f =
traverseWithKey (\packagename ->
traverseWithKey (\versionnumber a ->
f packagename versionnumber a))
data PackageError =
PackageReadingError String |
PackageFinalizationError [Dependency] |
PackageNoLibrary deriving (Eq,Show,Read)
data ModuleError =
ModuleFilteringError String |
ModuleFileNotFound |
MultipleModuleFilesFound |
PreprocessorError String |
ParserError String |
ModuleInformationFileError |
DeclarationsFileError deriving (Eq,Show,Read)
data PackageInformation =
PackageError PackageError |
PackageInformation [ModuleName] [Dependency] deriving (Eq,Show)
instance ToJSON PackageInformation where
toJSON (PackageError packageerror) = object ["packageerror" .= show packageerror]
toJSON (PackageInformation modulenames dependencies) = object [
"modulenames" .= map display modulenames,
"dependencies" .= map display dependencies]
instance FromJSON PackageInformation where
parseJSON value = parsePackageError value `mplus` parsePackageInformation value
parsePackageError :: Value -> Parser PackageInformation
parsePackageError (Object o) = do
packageerrorvalue <- o .: "packageerror"
return (PackageError (read packageerrorvalue))
parsePackageError _ = mzero
parsePackageInformation :: Value -> Parser PackageInformation
parsePackageInformation (Object o) = do
modulenamevalues <- o .: "modulenames"
dependencyvalues <- o .: "dependencies"
modulenames <- maybe mzero return (mapM simpleParse modulenamevalues)
dependencies <- maybe mzero return (mapM simpleParse dependencyvalues)
return (PackageInformation modulenames dependencies)
parsePackageInformation _ = mzero
data ModuleInformation =
ModuleError ModuleError |
ModuleInformation ModuleAST deriving (Eq,Show)
instance ToJSON ModuleInformation where
toJSON (ModuleError moduleerror) = object ["moduleerror" .= show moduleerror]
toJSON (ModuleInformation moduleast) = object ["moduleast" .= prettyPrint moduleast]
instance FromJSON ModuleInformation where
parseJSON value = parseModuleError value `mplus` parseModuleInformation value
parseModuleError :: Value -> Parser ModuleInformation
parseModuleError (Object o) = do
moduleerrorvalue <- o .: "moduleerror"
return (ModuleError (read moduleerrorvalue))
parseModuleError _ = mzero
parseModuleInformation :: Value -> Parser ModuleInformation
parseModuleInformation (Object o) = do
moduleastvalue <- o .: "moduleast"
let mode = defaultParseMode {fixities = Just baseFixities}
case parseModuleWithMode mode moduleastvalue of
ParseOk moduleast -> return (ModuleInformation moduleast)
ParseFailed _ _ -> mzero
parseModuleInformation _ = mzero
type PackagePath = FilePath
loadPackage :: PackagePath -> IO (Maybe PackageInformation)
loadPackage packagepath = ByteString.readFile (packagepath ++ "info.json") >>= return . decode
recoverModules :: PackagePath -> [ModuleName] -> IO [ModuleAST]
recoverModules packagepath modulenames = mapM (recoverModule packagepath) modulenames >>= return . catMaybes
recoverModule :: PackagePath -> ModuleName -> IO (Maybe ModuleAST)
recoverModule packagepath modulename = do
maybemoduleinformation <- loadModuleInformation packagepath modulename
case maybemoduleinformation of
Nothing -> return Nothing
Just (ModuleError _) -> return Nothing
Just (ModuleInformation moduleast) -> return (Just moduleast)
moduleastpath :: PackagePath -> ModuleName -> FilePath
moduleastpath packagepath modulename = concat [
packagepath,
display modulename,
"/",
"ast.json"]
modulenamespath :: PackagePath -> ModuleName -> FilePath
modulenamespath packagepath modulename = concat [
packagepath,
display modulename,
"/",
"names.json"]
loadModuleInformation :: PackagePath -> ModuleName -> IO (Maybe ModuleInformation)
loadModuleInformation packagepath modulename = ByteString.readFile (moduleastpath packagepath modulename) >>= return . decode
data Declaration = Declaration Genre DeclarationAST DeclaredSymbols UsedSymbols deriving (Show,Eq)
data Genre = Value | TypeSignature | Type | TypeClass | ClassInstance | Other deriving (Show,Eq,Read)
type DeclarationAST = String
type DeclaredSymbols = Symbols
type UsedSymbols = Symbols
instance ToJSON Declaration where
toJSON (Declaration genre declarationast declaredsymbols usedsymbols) = object [
"genre" .= show genre,
"declarationast" .= declarationast,
"declaredsymbols" .= declaredsymbols,
"usedsymbols" .= usedsymbols]
instance FromJSON Declaration where
parseJSON (Object o) = do
genre <- o .: "genre" >>= return . read
declarationast <- o .: "declarationast"
declaredsymbols <- o .: "declaredsymbols"
usedsymbols <- o .: "usedsymbols"
return (Declaration genre declarationast declaredsymbols usedsymbols)
parseJSON _ = mzero
declarationsFilePath :: PackagePath -> ModuleName -> FilePath
declarationsFilePath packagepath modulename = concat [
packagepath,
display modulename,
"/",
"declarations.json"]
loadDeclarations :: PackagePath -> ModuleName -> IO (Maybe [Declaration])
loadDeclarations packagepath modulename = ByteString.readFile (declarationsFilePath packagepath modulename) >>= return . decode
data NameErrors =
ResolvingNames |
NameErrors [String]
instance ToJSON NameErrors where
toJSON ResolvingNames = object ["resolvingnames" .= True]
toJSON (NameErrors nameerrors) = object ["nameerrors" .= nameerrors]
instance FromJSON NameErrors where
parseJSON (Object o) = msum [
o .: "resolvingnames" >>= (\True -> return ResolvingNames),
o .: "nameerrors" >>= return . NameErrors]
parseJSON _ = mzero
nameerrorspath :: FilePath -> FilePath
nameerrorspath packagepath = packagepath ++ "nameerrors.json"
loadNameErrors :: PackagePath -> IO (Maybe NameErrors)
loadNameErrors packagepath = ByteString.readFile (nameerrorspath packagepath) >>= return . decode
|
phischu/hackage-analysis
|
src/Common.hs
|
bsd-3-clause
| 7,480 | 0 | 12 | 1,212 | 1,993 | 1,057 | 936 | 158 | 3 |
-- | Module of utility functions
module Utils.Utiles where
{-
(
-- * Functions for tuples
fst3
, snd3
, thd3
, fst4
, snd4
, thd4
, fth4
-- ** aplicar funciones a tuplas
, toTupleFloat
, mapTuple
, funTuple
-- * Funciones para numeros
, toInt
, toFloat
) where
-}
import Data.Char
-- | Compare 2 strings (the second one is lowercased before comparison)
compareStrings str1 str2
= str1 == (map toLower str2)
compareStrings' str1 str2
= str1' == str2'
where str1' = map toLower str1
str2' = map toLower str2
-- | Functions for 3-tuples
fst3 (v,_,_) = v
snd3 (_,v,_) = v
thd3 (_,_,v) = v
-- | Functions for 4-tuples
fst4 (v,_,_,_) = v
snd4 (_,v,_,_) = v
thd4 (_,_,v,_) = v
fth4 (_,_,_,v) = v
-- | convert a float number into int
toInt :: Float -> Int
toInt = read . show . truncate
-- | convert an int number into float
toFloat :: Int -> Float
toFloat = read . show
-- | convert an Int tuple into a Float tuple
toTupleFloat :: (Int, Int) -> (Float, Float)
toTupleFloat (x,y) = (toFloat x, toFloat y)
-- | map for tuples
mapTuple f (a,b) = (f a, f b)
-- | apply a function to a tuple
funTuple f (a,b) = f a b
-- | My own version of the head function; returns [] for an empty list
head' [] = []
head' (x:xs) = x
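-- A few hedged usage examples for the helpers above (GHCi style, arbitrary values):
-- >>> mapTuple toFloat (3, 4)
-- (3.0,4.0)
-- >>> funTuple (+) (3, 4)
-- 7
-- >>> fst3 (1, 'a', True)
-- 1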
|
carliros/Simple-San-Simon-Functional-Web-Browser
|
src/Utils/Utiles.hs
|
bsd-3-clause
| 1,170 | 0 | 7 | 275 | 374 | 214 | 160 | 25 | 1 |
module ExampleArmoredConfig where
import ProtoTypes
-- the request to send
request = ReqLS [RequestItem OS Name,RequestItem VC Name]
-- the conditions that the results must meet
conditions = Or (OneOf OS Name [ValString "Windows", ValString "Linux"])
(And (Equals VC Name (ValString "UniVaccine"))
(GTETV VC Version (ValDouble 5.1))
)
-- my privacy policy
policy =[
Reveal [(OS,[Name,Version])]
(Or (OneOf VC Name [ValString "Avast", ValString "Norton",
ValString "Hearsa", ValString "Who"])
(Equals ID Name (ValString "Appraiser")))
]
config = ArmoredConfig request conditions policy
|
armoredsoftware/protocol
|
tpm/mainline/shared/protocolprotocol/ExampleArmoredConfig.hs
|
bsd-3-clause
| 745 | 0 | 12 | 240 | 204 | 108 | 96 | 12 | 1 |
module Sodium.Chloride.JoinMultiIf (joinMultiIf) where
import Control.Lens
import Sodium.Chloride.Program.Scalar
import Sodium.Chloride.Recmap.Scalar
joinMultiIf :: Program -> Program
joinMultiIf = recmapProgram' (recmapper' joinMultiIfStatement)
joinMultiIfStatement
= _MultiIfStatement %~ tryApply joinMultiIfBranch
where joinMultiIfBranch multiIfBranch
= multiIfBranch ^? multiIfElse . bodySingleton . _MultiIfStatement
<&> over multiIfLeafs (view multiIfLeafs multiIfBranch ++)
tryApply :: (a -> Maybe a) -> (a -> a)
tryApply f a = maybe a id (f a)
|
kirbyfan64/sodium
|
src/Sodium/Chloride/JoinMultiIf.hs
|
bsd-3-clause
| 565 | 6 | 10 | 75 | 160 | 86 | 74 | -1 | -1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
-----------------------------------------------------------------
-- Auto-generated by regenClassifiers
--
-- DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-- @generated
-----------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Ranking.Classifiers.ID (classifiers) where
import Prelude
import Duckling.Ranking.Types
import qualified Data.HashMap.Strict as HashMap
import Data.String
classifiers :: Classifiers
classifiers = HashMap.fromList []
|
rfranek/duckling
|
Duckling/Ranking/Classifiers/ID.hs
|
bsd-3-clause
| 825 | 0 | 6 | 105 | 66 | 47 | 19 | 8 | 1 |
{-
- Intel Concurrent Collections for Haskell
- Copyright (c) 2010, Intel Corporation.
-
- This program is free software; you can redistribute it and/or modify it
- under the terms and conditions of the GNU Lesser General Public License,
- version 2.1, as published by the Free Software Foundation.
-
- This program is distributed in the hope it will be useful, but WITHOUT
- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
- more details.
-
- You should have received a copy of the GNU Lesser General Public License along with
- this program; if not, write to the Free Software Foundation, Inc.,
- 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
-
-}
-- Author: Ryan Newton
-- sched_tree.hs
-- A simple scheduler test that creates a tree of exponentially
-- expanding numbers of step executions (as though it were a binary
-- tree).
import System.Environment
#define MEMOIZE
#define REPEAT_PUT_ALLOWED
#include <haskell_cnc.h>
-- We use lists of booleans as "tree indices":
type Tag = [Bool]
run limit = putStrLn (show v)
where
v = runGraph $
do tags :: TagCol Tag <- newTagCol
items :: ItemCol Tag Int <- newItemCol
prescribe tags
(\ls -> do -- bin tree path as input
if length ls == limit
-- Trivial output: count the "right" steps in the tree path:
then put items ls (length $ Prelude.filter id ls)
else do putt tags (True:ls)
putt tags (False:ls)
)
initialize $
do putt tags []
-- Grab all the leaves of the binary tree:
let grabloop ls =
if length ls == limit
then get items ls
else do x <- grabloop (True:ls)
y <- grabloop (False:ls)
return (x+y)
finalize $ grabloop []
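-- A hedged sanity check (not in the original code): each of the 2^limit leaves
-- contributes the number of "right" (True) steps on its path, so the printed
-- result is limit * 2^(limit-1); for the default limit of 10 that is 5120.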
main = do args <- getArgs
case args of
[] -> run 10
[s] -> run (read s)
|
rrnewton/Haskell-CnC
|
examples/sched_tree.hs
|
bsd-3-clause
| 2,013 | 56 | 11 | 570 | 337 | 184 | 153 | -1 | -1 |
module Monitor where
import Job.Scheduler
data Monitor = StatusCodeMonitor
{ mJobId :: String
, mJobName :: String
, mJobStatus :: JobStatus
, mOutput :: String
}
| StdoutMonitor
{ mJobId :: String
, mJobName :: String
, mOutput :: String
}
deriving (Show, Eq)
updateMonitors :: JobStatusUpdate -> [Monitor] -> [Monitor]
updateMonitors newStatus = map updateMonitor
where
updateMonitor monitor@(StatusCodeMonitor {}) =
if sId newStatus == mJobId monitor
then monitor { mJobStatus = sStatus newStatus
, mOutput = sOutput newStatus
}
else monitor
updateMonitor monitor@(StdoutMonitor {}) =
if sId newStatus == mJobId monitor
then monitor { mOutput = sOutput newStatus }
else monitor
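-- A hedged illustration of updateMonitors (JobStatusUpdate's fields come from
-- Job.Scheduler; the values are hypothetical): for an update with sId "job-1",
-- only monitors whose mJobId is "job-1" change. A StatusCodeMonitor takes over
-- both the new status and output, a StdoutMonitor only the new output, and all
-- other monitors are returned unchanged.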
|
rickardlindberg/codemonitor
|
src/Monitor.hs
|
bsd-3-clause
| 1,076 | 0 | 10 | 497 | 211 | 120 | 91 | 23 | 4 |
module WASH.Mail.Message where
import WASH.Mail.HeaderField
data Message =
Singlepart
{ getHeaders :: [Header]
, getLines :: [String]
, getDecoded :: [Char]
, getContentType :: ContentType
, getContentTransferEncoding :: ContentTransferEncoding
, getContentDisposition :: ContentDisposition
}
| Multipart
{ getHeaders :: [Header]
, getLines :: [String]
, getParts :: [Message]
, getContentType :: ContentType
, getContentTransferEncoding :: ContentTransferEncoding
, getContentDisposition :: ContentDisposition
}
deriving Show
isSinglePart (Singlepart {}) = True
isSinglePart _ = False
isMultiPart (Multipart {}) = True
isMultiPart _ = False
showHeader (Header (n, v)) = n ++ ": " ++ v
showParameters c_parameters =
foldr (\(n,v) f -> showString " ;" .
showString n .
showString "=\"" .
showString v .
showChar '\"' . f) id c_parameters
data ContentType =
ContentType String -- type
String -- subtype
[(String, String)] -- parameters
instance Show ContentType where
showsPrec i (ContentType c_type c_subtype c_parameters) =
showString "Content-Type: " .
showString c_type .
showChar '/' .
showString c_subtype .
showParameters c_parameters
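-- A hedged example of the rendering above (the field values are made up):
-- show (ContentType "text" "plain" [("charset", "utf-8")])
--   == "Content-Type: text/plain ;charset=\"utf-8\""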
data ContentTransferEncoding =
ContentTransferEncoding String
instance Show ContentTransferEncoding where
showsPrec i (ContentTransferEncoding cte) =
showString "Content-Transfer-Encoding: " .
showString cte
data ContentDisposition =
ContentDisposition String [(String, String)]
instance Show ContentDisposition where
showsPrec i (ContentDisposition cdn c_parameters) =
showString "Content-Disposition: " .
showString cdn .
showParameters c_parameters
data ContentID =
ContentID String
instance Show ContentID where
showsPrec i (ContentID cid) =
showString "Content-ID: " .
showString cid
data ContentDescription =
ContentDescription String
instance Show ContentDescription where
showsPrec i (ContentDescription txt) =
showString "Content-Description: " .
showString txt
|
nh2/WashNGo
|
WASH/Mail/Message.hs
|
bsd-3-clause
| 2,098 | 46 | 13 | 431 | 582 | 311 | 271 | 65 | 1 |
-- | A series with a start value and consecutive next values.
module Data.MediaBus.Basics.Series
( Series (..),
_Next,
_Start,
type Series',
AsSeries (..),
AsSeriesStart (..),
AsSeriesNext (..),
)
where
import Control.DeepSeq
import Control.Lens
import Data.Bifunctor
import GHC.Generics (Generic)
import Test.QuickCheck
-- | A value of a series is either the 'Start' of that series or the 'Next'
-- value in a started series.
data Series a b
= Next {_seriesValue :: !b}
| Start {_seriesStartValue :: !a}
deriving (Eq, Generic)
makePrisms ''Series
-- | A simple version of a series, where the 'Start' value has the same type as
-- the 'Next' value.
type Series' a = Series a a
instance
(NFData a, NFData b) =>
NFData (Series a b)
instance
(Show a, Show b) =>
Show (Series a b)
where
showsPrec d (Start !x) =
showParen (d > 10) $ showString "start: " . showsPrec 11 x
showsPrec d (Next !x) =
showParen (d > 10) $ showString "next: " . showsPrec 11 x
instance
(Ord a, Ord b) =>
Ord (Series a b)
where
compare (Next !l) (Next !r) = compare l r
compare _ _ = EQ
instance
(Arbitrary a, Arbitrary b) =>
Arbitrary (Series a b)
where
arbitrary = do
isNext <- choose (0.0, 1.0)
if isNext < (0.95 :: Double)
then Next <$> arbitrary
else Start <$> arbitrary
instance Functor (Series a) where
fmap = over _Next
instance Bifunctor Series where
first = over _Start
second = over _Next
-- | A class of types with any kind of /start/ and /next/ semantics, not
-- necessarily provided by 'Series'.
class AsSeries s a b | s -> a, s -> b where
  -- | A simple 'Prism' to extract a /start/ value
seriesStart' :: Prism' s a
  -- | A simple 'Prism' to extract a /next/ value
seriesNext' :: Prism' s b
instance AsSeries (Either a b) a b where
seriesStart' = _Left
seriesNext' = _Right
instance AsSeries (Series a b) a b where
seriesNext' = _Next
seriesStart' = _Start
-- | A type class for types that might have a /start/ value.
class
(SetSeriesStart s (GetSeriesStart s) ~ s) =>
AsSeriesStart s where
type GetSeriesStart s
type SetSeriesStart s t
-- | A 'Prism' for /start/ values
seriesStart :: Prism s (SetSeriesStart s n) (GetSeriesStart s) n
instance AsSeriesStart (Either a b) where
type GetSeriesStart (Either a b) = a
type SetSeriesStart (Either a b) n = (Either n b)
seriesStart = _Left
instance AsSeriesStart (Series a b) where
type GetSeriesStart (Series a b) = a
type SetSeriesStart (Series a b) n = (Series n b)
seriesStart = _Start
-- | A type class for types that might have a /next/ value.
class
(SetSeriesNext s (GetSeriesNext s) ~ s) =>
AsSeriesNext s where
type GetSeriesNext s
type SetSeriesNext s t
-- | A 'Prism' for the /next/ values
seriesNext :: Prism s (SetSeriesNext s n) (GetSeriesNext s) n
instance AsSeriesNext (Either a b) where
type GetSeriesNext (Either a b) = b
type SetSeriesNext (Either a b) n = (Either a n)
seriesNext = _Right
instance AsSeriesNext (Series a b) where
type GetSeriesNext (Series a b) = b
type SetSeriesNext (Series a b) n = (Series a n)
seriesNext = _Next
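-- A small, hedged usage sketch of the 'Series' prisms (the values are arbitrary):
-- >>> preview _Start (Start 0 :: Series' Int)
-- Just 0
-- >>> preview _Start (Next 1 :: Series' Int)
-- Nothing
-- >>> over _Next (+ 1) (Next 41 :: Series' Int)
-- next: 42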
|
lindenbaum/mediabus
|
src/Data/MediaBus/Basics/Series.hs
|
bsd-3-clause
| 3,164 | 0 | 10 | 721 | 998 | 535 | 463 | -1 | -1 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
module Test.ZM.ADT.Content.K1ba230d92eb8 (Content(..)) where
import qualified Prelude(Eq,Ord,Show)
import qualified GHC.Generics
import qualified Flat
import qualified Data.Model
import qualified Test.ZM.ADT.List.Kb8cd13187198
import qualified Test.ZM.ADT.Char.K066db52af145
data Content = TextMsg (Test.ZM.ADT.List.Kb8cd13187198.List Test.ZM.ADT.Char.K066db52af145.Char)
| Join
deriving (Prelude.Eq, Prelude.Ord, Prelude.Show, GHC.Generics.Generic, Flat.Flat)
instance Data.Model.Model Content
|
tittoassini/typed
|
test/Test/ZM/ADT/Content/K1ba230d92eb8.hs
|
bsd-3-clause
| 579 | 0 | 9 | 68 | 143 | 93 | 50 | 13 | 0 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RecursiveDo #-}
module Zero.GDAX.Widget
(
TickerWidget(..)
, tickerWidget
) where
import Data.Proxy (Proxy(..))
import qualified Data.Text as T
import Data.Text (Text)
import Reflex.Dom.Core
import Servant.API
import Servant.Reflex
import Zero.GDAX.API
import Zero.GDAX.Internal
import Zero.Settings.Client
import Zero.Widget (diff24hDyn)
------------------------------------------------------------------------------
data TickerWidget t = TickerWidget {
tw_marketName :: Dynamic t Text
, tw_last :: Dynamic t Double
, tw_24hDiff :: Dynamic t Double
}
tickerWidget :: forall t m. (
SupportsServantReflex t m
, MonadWidget t m
) => Dynamic t (Either Text Text) -> Dynamic t Double -> Event t () -> m (TickerWidget t)
tickerWidget product prevDay evRefresh = do
let (productTicker)
= client gdaxAPI (Proxy :: Proxy m) (Proxy :: Proxy ()) baseUrl
let userAgent = constDyn $ Right "Custom User Agent"
evProductTicker <- productTicker userAgent product evRefresh
let productTickerSuccess = fmapMaybe reqSuccess evProductTicker
productTickerFailure = fmapMaybe reqFailure evProductTicker
productTicker <- holdDyn defaultProductTicker productTickerSuccess
let last = read . T.unpack <$> pt_price <$> productTicker
return TickerWidget {
tw_marketName = constDyn "btcusd"
, tw_last = last
, tw_24hDiff = diff24hDyn prevDay last
}
|
et4te/zero
|
src/Zero/GDAX/Widget.hs
|
bsd-3-clause
| 1,620 | 0 | 13 | 373 | 396 | 214 | 182 | 39 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{- |
Module : $Header$
Description : sublogic analysis for CASL_DL
Copyright : (c) Dominik Luecke 2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : experimental
Portability : portable
Sublogic analysis for CASL_DL
This module provides the sublogic functions (as required by Logic.hs)
for CASL_DL. The functions allow one to compute the minimal sublogics needed
by a given element, to check whether an item is part of a given
sublogic, and to project an element into a given sublogic.
-}
module CASL_DL.Sublogics where
import Data.Data
data CASL_DL_SL = SROIQ deriving (Eq, Ord, Typeable, Data)
instance Show CASL_DL_SL where
show SROIQ = "SROIQ"
|
keithodulaigh/Hets
|
CASL_DL/Sublogics.hs
|
gpl-2.0
| 771 | 0 | 6 | 147 | 55 | 31 | 24 | 6 | 0 |
{-# LANGUAGE RecordWildCards, DeriveDataTypeable, FlexibleInstances, TupleSections, OverloadedStrings #-}
{-|
Postings report, used by the register command.
-}
module Hledger.Reports.PostingsReport (
PostingsReport,
PostingsReportItem,
postingsReport,
mkpostingsReportItem,
-- * Tests
tests_Hledger_Reports_PostingsReport
)
where
import Data.List
import Data.Maybe
import Data.Ord (comparing)
-- import Data.Text (Text)
import qualified Data.Text as T
import Data.Time.Calendar
import Safe (headMay, lastMay)
import Test.HUnit
import Hledger.Data
import Hledger.Query
import Hledger.Utils
import Hledger.Reports.ReportOptions
-- | A postings report is a list of postings with a running total, a label
-- for the total field, and a little extra transaction info to help with rendering.
-- This is used eg for the register command.
type PostingsReport = (String -- label for the running balance column XXX remove
,[PostingsReportItem] -- line items, one per posting
)
type PostingsReportItem = (Maybe Day -- The posting date, if this is the first posting in a
-- transaction or if it's different from the previous
-- posting's date. Or if this a summary posting, the
-- report interval's start date if this is the first
-- summary posting in the interval.
,Maybe Day -- If this is a summary posting, the report interval's
-- end date if this is the first summary posting in
-- the interval.
,Maybe String -- The posting's transaction's description, if this is the first posting in the transaction.
,Posting -- The posting, possibly with the account name depth-clipped.
,MixedAmount -- The running total after this posting, or with --average,
-- the running average posting amount. With --historical,
-- postings before the report start date are included in
-- the running total/average.
)
-- | Select postings from the journal and add running balance and other
-- information to make a postings report. Used by eg hledger's register command.
postingsReport :: ReportOpts -> Query -> Journal -> PostingsReport
postingsReport opts q j = (totallabel, items)
where
reportspan = adjustReportDates opts q j
whichdate = whichDateFromOpts opts
depth = queryDepth q
-- postings to be included in the report, and similarly-matched postings before the report start date
(precedingps, reportps) = matchedPostingsBeforeAndDuring opts q j reportspan
-- postings or pseudo postings to be displayed
displayps | interval == NoInterval = map (,Nothing) reportps
| otherwise = summarisePostingsByInterval interval whichdate depth showempty reportspan reportps
where
interval = interval_ opts -- XXX
showempty = empty_ opts || average_ opts
-- posting report items ready for display
items = dbg1 "postingsReport items" $ postingsReportItems displayps (nullposting,Nothing) whichdate depth startbal runningcalc startnum
where
historical = balancetype_ opts == HistoricalBalance
precedingsum = sumPostings precedingps
precedingavg | null precedingps = 0
| otherwise = precedingsum `divideMixedAmount` (fromIntegral $ length precedingps)
startbal | average_ opts = if historical then precedingavg else 0
| otherwise = if historical then precedingsum else 0
startnum = if historical then length precedingps + 1 else 1
runningcalc | average_ opts = \i avg amt -> avg + (amt - avg) `divideMixedAmount` (fromIntegral i) -- running average
| otherwise = \_ bal amt -> bal + amt -- running total
totallabel = "Total"
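-- A minimal usage sketch (added for illustration, not part of the original
-- module): report every posting of a journal using the default report
-- options and an all-matching query. The helper name is invented.
allPostingsReport :: Journal -> PostingsReport
allPostingsReport = postingsReport defreportopts Any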
-- | Adjust report start/end dates to more useful ones based on
-- journal data and report intervals. Ie:
-- 1. If the start date is unspecified, use the earliest date in the journal (if any)
-- 2. If the end date is unspecified, use the latest date in the journal (if any)
-- 3. If a report interval is specified, enlarge the dates to enclose whole intervals
adjustReportDates :: ReportOpts -> Query -> Journal -> DateSpan
adjustReportDates opts q j = reportspan
where
-- see also multiBalanceReport
requestedspan = dbg1 "requestedspan" $ queryDateSpan' q -- span specified by -b/-e/-p options and query args
journalspan = dbg1 "journalspan" $ dates `spanUnion` date2s -- earliest and latest dates (or date2s) in the journal
where
dates = journalDateSpan False j
date2s = journalDateSpan True j
requestedspanclosed = dbg1 "requestedspanclosed" $ requestedspan `spanDefaultsFrom` journalspan -- if open-ended, close it using the journal's dates (if any)
intervalspans = dbg1 "intervalspans" $ splitSpan (interval_ opts) requestedspanclosed -- get the whole intervals enclosing that
mreportstart = dbg1 "reportstart" $ maybe Nothing spanStart $ headMay intervalspans -- start of the first interval, or open ended
mreportend = dbg1 "reportend" $ maybe Nothing spanEnd $ lastMay intervalspans -- end of the last interval, or open ended
reportspan = dbg1 "reportspan" $ DateSpan mreportstart mreportend -- the requested span enlarged to whole intervals if possible
-- | Find postings matching a given query, within a given date span,
-- and also any similarly-matched postings before that date span.
-- Date restrictions and depth restrictions in the query are ignored.
-- A helper for the postings report.
matchedPostingsBeforeAndDuring :: ReportOpts -> Query -> Journal -> DateSpan -> ([Posting],[Posting])
matchedPostingsBeforeAndDuring opts q j (DateSpan mstart mend) =
dbg1 "beforeps, duringps" $ span (beforestartq `matchesPosting`) beforeandduringps
where
beforestartq = dbg1 "beforestartq" $ dateqtype $ DateSpan Nothing mstart
beforeandduringps =
dbg1 "ps4" $ sortBy (comparing sortdate) $ -- sort postings by date or date2
dbg1 "ps3" $ map (filterPostingAmount symq) $ -- remove amount parts which the query's cur: terms would exclude
dbg1 "ps2" $ (if related_ opts then concatMap relatedPostings else id) $ -- with -r, replace each with its sibling postings
dbg1 "ps1" $ filter (beforeandduringq `matchesPosting`) $ -- filter postings by the query, with no start date or depth limit
journalPostings $ journalSelectingAmountFromOpts opts j
where
beforeandduringq = dbg1 "beforeandduringq" $ And [depthless $ dateless q, beforeendq]
where
depthless = filterQuery (not . queryIsDepth)
dateless = filterQuery (not . queryIsDateOrDate2)
beforeendq = dateqtype $ DateSpan Nothing mend
sortdate = if date2_ opts then postingDate2 else postingDate
symq = dbg1 "symq" $ filterQuery queryIsSym q
dateqtype
| queryIsDate2 dateq || (queryIsDate dateq && date2_ opts) = Date2
| otherwise = Date
where
dateq = dbg1 "dateq" $ filterQuery queryIsDateOrDate2 $ dbg1 "q" q -- XXX confused by multiple date:/date2: ?
-- | Generate postings report line items from a list of postings or (with
-- non-Nothing dates attached) summary postings.
postingsReportItems :: [(Posting,Maybe Day)] -> (Posting,Maybe Day) -> WhichDate -> Int -> MixedAmount -> (Int -> MixedAmount -> MixedAmount -> MixedAmount) -> Int -> [PostingsReportItem]
postingsReportItems [] _ _ _ _ _ _ = []
postingsReportItems ((p,menddate):ps) (pprev,menddateprev) wd d b runningcalcfn itemnum = i:(postingsReportItems ps (p,menddate) wd d b' runningcalcfn (itemnum+1))
where
i = mkpostingsReportItem showdate showdesc wd menddate p' b'
(showdate, showdesc) | isJust menddate = (menddate /= menddateprev, False)
| otherwise = (isfirstintxn || isdifferentdate, isfirstintxn)
isfirstintxn = ptransaction p /= ptransaction pprev
isdifferentdate = case wd of PrimaryDate -> postingDate p /= postingDate pprev
SecondaryDate -> postingDate2 p /= postingDate2 pprev
p' = p{paccount= clipOrEllipsifyAccountName d $ paccount p}
b' = runningcalcfn itemnum b (pamount p)
-- | Generate one postings report line item, containing the posting,
-- the current running balance, and optionally the posting date and/or
-- the transaction description.
mkpostingsReportItem :: Bool -> Bool -> WhichDate -> Maybe Day -> Posting -> MixedAmount -> PostingsReportItem
mkpostingsReportItem showdate showdesc wd menddate p b =
(if showdate then Just date else Nothing
,menddate
,if showdesc then Just desc else Nothing
,p
,b
)
where
date = case wd of PrimaryDate -> postingDate p
SecondaryDate -> postingDate2 p
desc = T.unpack $ maybe "" tdescription $ ptransaction p
-- | Convert a list of postings into summary postings, one per interval,
-- aggregated to the specified depth if any.
summarisePostingsByInterval :: Interval -> WhichDate -> Int -> Bool -> DateSpan -> [Posting] -> [SummaryPosting]
summarisePostingsByInterval interval wd depth showempty reportspan ps = concatMap summarisespan $ splitSpan interval reportspan
where
summarisespan s = summarisePostingsInDateSpan s wd depth showempty (postingsinspan s)
postingsinspan s = filter (isPostingInDateSpan' wd s) ps
tests_summarisePostingsByInterval = [
"summarisePostingsByInterval" ~: do
summarisePostingsByInterval (Quarters 1) PrimaryDate 99999 False (DateSpan Nothing Nothing) [] ~?= []
]
-- | A summary posting summarises the activity in one account within a report
-- interval. It is currently kludgily represented by a regular Posting with no
-- description, the interval's start date stored as the posting date, and the
-- interval's end date attached with a tuple.
type SummaryPosting = (Posting, Maybe Day)
-- | Given a date span (representing a report interval) and a list of
-- postings within it, aggregate the postings into one summary posting per
-- account.
--
-- When a depth argument is present, postings to accounts of greater
-- depth are also aggregated where possible. If the depth is 0, all
-- postings in the span are aggregated into a single posting with
-- account name "...".
--
-- The showempty flag includes spans with no postings and also postings
-- with 0 amount.
--
summarisePostingsInDateSpan :: DateSpan -> WhichDate -> Int -> Bool -> [Posting] -> [SummaryPosting]
summarisePostingsInDateSpan (DateSpan b e) wd depth showempty ps
| null ps && (isNothing b || isNothing e) = []
| null ps && showempty = [(summaryp, Just e')]
| otherwise = summarypes
where
postingdate = if wd == PrimaryDate then postingDate else postingDate2
b' = fromMaybe (maybe nulldate postingdate $ headMay ps) b
e' = fromMaybe (maybe (addDays 1 nulldate) postingdate $ lastMay ps) e
summaryp = nullposting{pdate=Just b'}
clippedanames | depth > 0 = nub $ map (clipAccountName depth) anames
| otherwise = ["..."]
summaryps | depth > 0 = [summaryp{paccount=a,pamount=balance a} | a <- clippedanames]
| otherwise = [summaryp{paccount="...",pamount=sum $ map pamount ps}]
summarypes = map (, Just e') $ (if showempty then id else filter (not . isZeroMixedAmount . pamount)) summaryps
anames = sort $ nub $ map paccount ps
-- aggregate balances by account, like ledgerFromJournal, then do depth-clipping
accts = accountsFromPostings ps
balance a = maybe nullmixedamt bal $ lookupAccount a accts
where
bal = if isclipped a then aibalance else aebalance
isclipped a = accountNameLevel a >= depth
-- tests_summarisePostingsInDateSpan = [
-- "summarisePostingsInDateSpan" ~: do
-- let gives (b,e,depth,showempty,ps) =
-- (summarisePostingsInDateSpan (mkdatespan b e) depth showempty ps `is`)
-- let ps =
-- [
-- nullposting{lpdescription="desc",lpaccount="expenses:food:groceries",lpamount=Mixed [usd 1]}
-- ,nullposting{lpdescription="desc",lpaccount="expenses:food:dining", lpamount=Mixed [usd 2]}
-- ,nullposting{lpdescription="desc",lpaccount="expenses:food", lpamount=Mixed [usd 4]}
-- ,nullposting{lpdescription="desc",lpaccount="expenses:food:dining", lpamount=Mixed [usd 8]}
-- ]
-- ("2008/01/01","2009/01/01",0,9999,False,[]) `gives`
-- []
-- ("2008/01/01","2009/01/01",0,9999,True,[]) `gives`
-- [
-- nullposting{lpdate=parsedate "2008/01/01",lpdescription="- 2008/12/31"}
-- ]
-- ("2008/01/01","2009/01/01",0,9999,False,ts) `gives`
-- [
-- nullposting{lpdate=parsedate "2008/01/01",lpdescription="- 2008/12/31",lpaccount="expenses:food", lpamount=Mixed [usd 4]}
-- ,nullposting{lpdate=parsedate "2008/01/01",lpdescription="- 2008/12/31",lpaccount="expenses:food:dining", lpamount=Mixed [usd 10]}
-- ,nullposting{lpdate=parsedate "2008/01/01",lpdescription="- 2008/12/31",lpaccount="expenses:food:groceries",lpamount=Mixed [usd 1]}
-- ]
-- ("2008/01/01","2009/01/01",0,2,False,ts) `gives`
-- [
-- nullposting{lpdate=parsedate "2008/01/01",lpdescription="- 2008/12/31",lpaccount="expenses:food",lpamount=Mixed [usd 15]}
-- ]
-- ("2008/01/01","2009/01/01",0,1,False,ts) `gives`
-- [
-- nullposting{lpdate=parsedate "2008/01/01",lpdescription="- 2008/12/31",lpaccount="expenses",lpamount=Mixed [usd 15]}
-- ]
-- ("2008/01/01","2009/01/01",0,0,False,ts) `gives`
-- [
-- nullposting{lpdate=parsedate "2008/01/01",lpdescription="- 2008/12/31",lpaccount="",lpamount=Mixed [usd 15]}
-- ]
tests_postingsReport = [
"postingsReport" ~: do
-- with the query specified explicitly
let (query, journal) `gives` n = (length $ snd $ postingsReport defreportopts query journal) `is` n
(Any, nulljournal) `gives` 0
(Any, samplejournal) `gives` 13
-- register --depth just clips account names
(Depth 2, samplejournal) `gives` 13
(And [Depth 1, StatusQ Cleared, Acct "expenses"], samplejournal) `gives` 2
(And [And [Depth 1, StatusQ Cleared], Acct "expenses"], samplejournal) `gives` 2
-- with query and/or command-line options
assertEqual "" 13 (length $ snd $ postingsReport defreportopts Any samplejournal)
assertEqual "" 11 (length $ snd $ postingsReport defreportopts{interval_=Months 1} Any samplejournal)
assertEqual "" 20 (length $ snd $ postingsReport defreportopts{interval_=Months 1, empty_=True} Any samplejournal)
assertEqual "" 5 (length $ snd $ postingsReport defreportopts (Acct "assets:bank:checking") samplejournal)
-- (defreportopts, And [Acct "a a", Acct "'b"], samplejournal2) `gives` 0
-- [(Just (parsedate "2008-01-01","income"),assets:bank:checking $1,$1)
-- ,(Nothing,income:salary $-1,0)
-- ,(Just (2008-06-01,"gift"),assets:bank:checking $1,$1)
-- ,(Nothing,income:gifts $-1,0)
-- ,(Just (2008-06-02,"save"),assets:bank:saving $1,$1)
-- ,(Nothing,assets:bank:checking $-1,0)
-- ,(Just (2008-06-03,"eat & shop"),expenses:food $1,$1)
-- ,(Nothing,expenses:supplies $1,$2)
-- ,(Nothing,assets:cash $-2,0)
-- ,(Just (2008-12-31,"pay off"),liabilities:debts $1,$1)
-- ,(Nothing,assets:bank:checking $-1,0)
-- ]
{-
let opts = defreportopts
(postingsReportAsText opts $ postingsReport opts (queryFromOpts date1 opts) j) `is` unlines
["2008/01/01 income assets:bank:checking $1 $1"
," income:salary $-1 0"
,"2008/06/01 gift assets:bank:checking $1 $1"
," income:gifts $-1 0"
,"2008/06/02 save assets:bank:saving $1 $1"
," assets:bank:checking $-1 0"
,"2008/06/03 eat & shop expenses:food $1 $1"
," expenses:supplies $1 $2"
," assets:cash $-2 0"
,"2008/12/31 pay off liabilities:debts $1 $1"
," assets:bank:checking $-1 0"
]
,"postings report with cleared option" ~:
do
let opts = defreportopts{cleared_=True}
j <- readJournal' sample_journal_str
(postingsReportAsText opts $ postingsReport opts (queryFromOpts date1 opts) j) `is` unlines
["2008/06/03 eat & shop expenses:food $1 $1"
," expenses:supplies $1 $2"
," assets:cash $-2 0"
,"2008/12/31 pay off liabilities:debts $1 $1"
," assets:bank:checking $-1 0"
]
,"postings report with uncleared option" ~:
do
let opts = defreportopts{uncleared_=True}
j <- readJournal' sample_journal_str
(postingsReportAsText opts $ postingsReport opts (queryFromOpts date1 opts) j) `is` unlines
["2008/01/01 income assets:bank:checking $1 $1"
," income:salary $-1 0"
,"2008/06/01 gift assets:bank:checking $1 $1"
," income:gifts $-1 0"
,"2008/06/02 save assets:bank:saving $1 $1"
," assets:bank:checking $-1 0"
]
,"postings report sorts by date" ~:
do
j <- readJournal' $ unlines
["2008/02/02 a"
," b 1"
," c"
,""
,"2008/01/01 d"
," e 1"
," f"
]
let opts = defreportopts
registerdates (postingsReportAsText opts $ postingsReport opts (queryFromOpts date1 opts) j) `is` ["2008/01/01","2008/02/02"]
,"postings report with account pattern" ~:
do
j <- samplejournal
let opts = defreportopts{patterns_=["cash"]}
(postingsReportAsText opts $ postingsReport opts (queryFromOpts date1 opts) j) `is` unlines
["2008/06/03 eat & shop assets:cash $-2 $-2"
]
,"postings report with account pattern, case insensitive" ~:
do
j <- samplejournal
let opts = defreportopts{patterns_=["cAsH"]}
(postingsReportAsText opts $ postingsReport opts (queryFromOpts date1 opts) j) `is` unlines
["2008/06/03 eat & shop assets:cash $-2 $-2"
]
,"postings report with display expression" ~:
do
j <- samplejournal
let gives displayexpr =
(registerdates (postingsReportAsText opts $ postingsReport opts (queryFromOpts date1 opts) j) `is`)
where opts = defreportopts{display_=Just displayexpr}
"d<[2008/6/2]" `gives` ["2008/01/01","2008/06/01"]
"d<=[2008/6/2]" `gives` ["2008/01/01","2008/06/01","2008/06/02"]
"d=[2008/6/2]" `gives` ["2008/06/02"]
"d>=[2008/6/2]" `gives` ["2008/06/02","2008/06/03","2008/12/31"]
"d>[2008/6/2]" `gives` ["2008/06/03","2008/12/31"]
,"postings report with period expression" ~:
do
j <- samplejournal
let periodexpr `gives` dates = do
j' <- samplejournal
registerdates (postingsReportAsText opts $ postingsReport opts (queryFromOpts date1 opts) j') `is` dates
where opts = defreportopts{period_=maybePeriod date1 periodexpr}
"" `gives` ["2008/01/01","2008/06/01","2008/06/02","2008/06/03","2008/12/31"]
"2008" `gives` ["2008/01/01","2008/06/01","2008/06/02","2008/06/03","2008/12/31"]
"2007" `gives` []
"june" `gives` ["2008/06/01","2008/06/02","2008/06/03"]
"monthly" `gives` ["2008/01/01","2008/06/01","2008/12/01"]
"quarterly" `gives` ["2008/01/01","2008/04/01","2008/10/01"]
let opts = defreportopts{period_=maybePeriod date1 "yearly"}
(postingsReportAsText opts $ postingsReport opts (queryFromOpts date1 opts) j) `is` unlines
["2008/01/01 - 2008/12/31 assets:bank:saving $1 $1"
," assets:cash $-2 $-1"
," expenses:food $1 0"
," expenses:supplies $1 $1"
," income:gifts $-1 0"
," income:salary $-1 $-1"
," liabilities:debts $1 0"
]
let opts = defreportopts{period_=maybePeriod date1 "quarterly"}
registerdates (postingsReportAsText opts $ postingsReport opts (queryFromOpts date1 opts) j) `is` ["2008/01/01","2008/04/01","2008/10/01"]
let opts = defreportopts{period_=maybePeriod date1 "quarterly",empty_=True}
registerdates (postingsReportAsText opts $ postingsReport opts (queryFromOpts date1 opts) j) `is` ["2008/01/01","2008/04/01","2008/07/01","2008/10/01"]
]
, "postings report with depth arg" ~:
do
j <- samplejournal
let opts = defreportopts{depth_=Just 2}
(postingsReportAsText opts $ postingsReport opts (queryFromOpts date1 opts) j) `is` unlines
["2008/01/01 income assets:bank $1 $1"
," income:salary $-1 0"
,"2008/06/01 gift assets:bank $1 $1"
," income:gifts $-1 0"
,"2008/06/02 save assets:bank $1 $1"
," assets:bank $-1 0"
,"2008/06/03 eat & shop expenses:food $1 $1"
," expenses:supplies $1 $2"
," assets:cash $-2 0"
,"2008/12/31 pay off liabilities:debts $1 $1"
," assets:bank $-1 0"
]
-}
]
tests_Hledger_Reports_PostingsReport :: Test
tests_Hledger_Reports_PostingsReport = TestList $
tests_summarisePostingsByInterval
++ tests_postingsReport
|
mstksg/hledger
|
hledger-lib/Hledger/Reports/PostingsReport.hs
|
gpl-3.0
| 23,824 | 0 | 18 | 7,438 | 2,681 | 1,447 | 1,234 | 141 | 4 |
module Util
( loadGrammar
, loadSndOrderGrammar
) where
import Control.Monad
import qualified System.FilePath as FilePath
import System.Exit (exitFailure)
import Category.TypedGraph (TypedGraphMorphism)
import Category.TypedGraphRule (RuleMorphism)
import GlobalOptions
import Abstract.Rewriting.DPO
import qualified XML.GGXReader as XML
import qualified XML.GPRReader.GXLReader as GPR
loadGrammar :: GlobalOptions -> IO (Grammar (TypedGraphMorphism a b), String, [(String, String)])
loadGrammar globalOpts = do
let file = inputFile globalOpts
case FilePath.takeExtension file of
".ggx" -> do
(fstOrderGG, _, _) <- XML.readGrammar file (useConstraints globalOpts) (morphismsConf globalOpts)
ggName <- XML.readGGName (inputFile globalOpts)
names <- XML.readNames (inputFile globalOpts)
return (fstOrderGG, ggName, names)
".gps" -> do
(fstOrderGG, names) <- GPR.readGrammar file
let ggName = GPR.readGGName file
return (fstOrderGG, ggName, names)
_ -> do
putStrLn ("Input file has unsupported type: " ++ file)
putStrLn "Only .ggx and .gps are supported."
exitFailure
loadSndOrderGrammar :: GlobalOptions -> Bool
-> IO (Grammar (TypedGraphMorphism a b), Grammar (RuleMorphism a b), String, [(String, String)])
loadSndOrderGrammar globalOpts shouldPrintSafetyNacs = do
(fstOrderGG, sndOrderGG, printNewNacs) <- XML.readGrammar (inputFile globalOpts) (useConstraints globalOpts) (morphismsConf globalOpts)
ggName <- XML.readGGName (inputFile globalOpts)
names <- XML.readNames (inputFile globalOpts)
when shouldPrintSafetyNacs $ do
putStrLn "Adding minimal safety NACs to second-order rules..."
mapM_ putStrLn (XML.showMinimalSafetyNacsLog printNewNacs)
putStrLn "Added all minimal safety NACs!"
putStrLn ""
return (fstOrderGG, sndOrderGG, ggName, names)
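-- A hypothetical usage sketch (not part of the original module): load a
-- first-order grammar, relying on the file-extension dispatch above, and
-- print its name. 'opts' is assumed to come from the command-line parser.
printGrammarName :: GlobalOptions -> IO ()
printGrammarName opts = do
  (_, ggName, _) <- loadGrammar opts
  putStrLn ("Loaded grammar: " ++ ggName)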
|
rodrigo-machado/verigraph
|
src/CLI/Util.hs
|
gpl-3.0
| 1,970 | 0 | 16 | 420 | 539 | 277 | 262 | 41 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.CancelReservedInstancesListing
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Cancels the specified Reserved Instance listing in the Reserved Instance
-- Marketplace.
--
-- For more information, see <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ri-market-general.html Reserved Instance Marketplace> in the /Amazon Elastic Compute Cloud User Guide for Linux/.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-CancelReservedInstancesListing.html>
module Network.AWS.EC2.CancelReservedInstancesListing
(
-- * Request
CancelReservedInstancesListing
-- ** Request constructor
, cancelReservedInstancesListing
-- ** Request lenses
, crilReservedInstancesListingId
-- * Response
, CancelReservedInstancesListingResponse
-- ** Response constructor
, cancelReservedInstancesListingResponse
-- ** Response lenses
, crilrReservedInstancesListings
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
newtype CancelReservedInstancesListing = CancelReservedInstancesListing
{ _crilReservedInstancesListingId :: Text
} deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'CancelReservedInstancesListing' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'crilReservedInstancesListingId' @::@ 'Text'
--
cancelReservedInstancesListing :: Text -- ^ 'crilReservedInstancesListingId'
-> CancelReservedInstancesListing
cancelReservedInstancesListing p1 = CancelReservedInstancesListing
{ _crilReservedInstancesListingId = p1
}
-- | The ID of the Reserved Instance listing.
crilReservedInstancesListingId :: Lens' CancelReservedInstancesListing Text
crilReservedInstancesListingId =
lens _crilReservedInstancesListingId
(\s a -> s { _crilReservedInstancesListingId = a })
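-- A hypothetical usage sketch (not part of the generated module): build a
-- cancellation request for a specific listing. The listing ID below is
-- invented purely for illustration.
exampleCancelRequest :: CancelReservedInstancesListing
exampleCancelRequest =
    cancelReservedInstancesListing "253dfbf9-c335-4808-b956-d942cexample"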
newtype CancelReservedInstancesListingResponse = CancelReservedInstancesListingResponse
{ _crilrReservedInstancesListings :: List "item" ReservedInstancesListing
} deriving (Eq, Read, Show, Monoid, Semigroup)
-- | 'CancelReservedInstancesListingResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'crilrReservedInstancesListings' @::@ ['ReservedInstancesListing']
--
cancelReservedInstancesListingResponse :: CancelReservedInstancesListingResponse
cancelReservedInstancesListingResponse = CancelReservedInstancesListingResponse
{ _crilrReservedInstancesListings = mempty
}
-- | The Reserved Instance listing.
crilrReservedInstancesListings :: Lens' CancelReservedInstancesListingResponse [ReservedInstancesListing]
crilrReservedInstancesListings =
lens _crilrReservedInstancesListings
(\s a -> s { _crilrReservedInstancesListings = a })
. _List
instance ToPath CancelReservedInstancesListing where
toPath = const "/"
instance ToQuery CancelReservedInstancesListing where
toQuery CancelReservedInstancesListing{..} = mconcat
[ "ReservedInstancesListingId" =? _crilReservedInstancesListingId
]
instance ToHeaders CancelReservedInstancesListing
instance AWSRequest CancelReservedInstancesListing where
type Sv CancelReservedInstancesListing = EC2
type Rs CancelReservedInstancesListing = CancelReservedInstancesListingResponse
request = post "CancelReservedInstancesListing"
response = xmlResponse
instance FromXML CancelReservedInstancesListingResponse where
parseXML x = CancelReservedInstancesListingResponse
<$> x .@? "reservedInstancesListingsSet" .!@ mempty
|
kim/amazonka
|
amazonka-ec2/gen/Network/AWS/EC2/CancelReservedInstancesListing.hs
|
mpl-2.0
| 4,592 | 0 | 10 | 806 | 431 | 264 | 167 | 58 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Ambiata.Cli (
uploadCommand
, downloadCommand
, doUpload
, doDownload
) where
import Ambiata.Cli.Api
import Ambiata.Cli.Data
import Ambiata.Cli.Downloads
import Ambiata.Cli.Incoming
import Ambiata.Cli.Processing
import Ambiata.Cli.Rest
import qualified Data.Text as T
import Data.Time.Clock
import Control.Exception
import Control.Monad.IO.Class (liftIO)
import P
import System.Exit
import System.FileLock
import System.FilePath
import System.IO
import System.Log.Logger
import Twine.Snooze (minutes, snooze)
import X.Control.Monad.Trans.Either
orErrorAndDie :: Text -> (e -> Text) -> EitherT e IO a -> IO a
orErrorAndDie ctx render act =
runEitherT act >>= either errorAndDie pure
where errorAndDie err = do
errorM (T.unpack ctx) (T.unpack $ render err)
exitWith (ExitFailure 1)
uploadCommand :: UploadEnv -> IO ()
uploadCommand e@(UploadEnv d _ c) = runAction verb "upload" mode (unDir d </> ".ambiata-upload.lock") upload'
where upload' = doUpload e
>>= liftIO . infoM (T.unpack logCtx) . T.unpack . renderUploadResult
mode = runMode c
verb = verbosity c
logCtx = "Ambiata.uploadCommand"
downloadCommand :: DownloadEnv -> IO ()
downloadCommand e@(DownloadEnv d _ _ c) = runAction verb "download" mode (unDownloadDir d </> ".ambiata-download.lock") download'
where download' = doDownload e
>>= liftIO . infoM (T.unpack logCtx) . T.unpack . renderDownloadResult
mode = runMode c
verb = verbosity c
logCtx = "Ambiata.downloadCommand"
runAction :: Verbosity -> Text -> RunMode -> FilePath -> EitherT AmbiataError IO a -> IO a
runAction verb name m lockf act = do
updateGlobalLogger rootLoggerName (setLevel verbLevel)
debugM (T.unpack logCtx) $ "Starting " <> (T.unpack name)
bracket (getLock lockf) unlockFile (go m)
where go OneShot _ = act'
go Daemon _ = forever $ act' >> snooze (minutes 1)
getLock lp = orErrorAndDie logCtx id $
eitherTFromMaybe ("Unable to lock " <> T.pack lp <> " - is another process using it?")
$ tryLockFile lp Exclusive
verbLevel
| verb == Verbose = DEBUG
| otherwise = INFO
logCtx = "Ambiata.runAction"
act' = orErrorAndDie logCtx renderClientError act
doDownload :: DownloadEnv -> EitherT AmbiataError IO DownloadResult
doDownload (DownloadEnv dir o e c) = do
creds <- bimapEitherT AmbiataApiError id
. apiCall (envCredential c) (apiEndpoint c)
$ obtainCredentialsForDownload o e
downloadReady dir (awsRegion c) creds
doUpload :: UploadEnv -> EitherT AmbiataError IO UploadResult
doUpload (UploadEnv dir retention c) = do
-- Connect to the API before doing anything else, so we fail fast in
-- the case of a configuration error.
creds <- bimapEitherT AmbiataApiError id
. apiCall (envCredential c) (apiEndpoint c)
$ obtainCredentialsForUpload
prepareDir dir
now <- liftIO $ getCurrentTime
(incoming, processing, bads) <- processDir dir (NoChangeAfter $ twoMinutesBefore now)
liftIO $ mapM_ warnBadFile bads
uploaded <- fmap concat $ mapM (const $ uploadReady dir (awsRegion c) creds) processing
liftIO $ debugM logCtx "Cleaning up archived files..."
cleaned <- cleanUpArchived dir retention now
liftIO . infoM logCtx $ "Cleaned up archived files: " <> (renderArchived cleaned)
pure $ UploadResult incoming processing uploaded
where
warnBadFile st = warningM logCtx . T.unpack . T.concat $
[ "Not processing "
, renderBadFileState st
]
logCtx = "Ambiata.doUpload"
renderArchived = T.unpack . renderFileList . fmap (unArchivedFile)
twoMinutesBefore :: UTCTime -> UTCTime
twoMinutesBefore now = addUTCTime (-120) now
|
ambiata/tatooine-cli
|
src/Ambiata/Cli.hs
|
apache-2.0
| 4,300 | 0 | 14 | 1,248 | 1,145 | 577 | 568 | 88 | 2 |
{-# LANGUAGE TemplateHaskellQuotes #-}
module TH_StringLift where
import Language.Haskell.TH.Syntax
foo :: Quote m => String -> m (TExp String)
foo x = [|| x ||]
foo2 :: Quote m => String -> m Exp
foo2 x = [| x |]
|
sdiehl/ghc
|
testsuite/tests/th/TH_StringLift.hs
|
bsd-3-clause
| 217 | 0 | 9 | 43 | 48 | 28 | 20 | -1 | -1 |
-- |
-- Module : $Header$
-- Copyright : (c) 2013-2014 Galois, Inc.
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- This module defines the scoping rules for value- and type-level
-- names in Cryptol.
module Cryptol.Parser.Names where
import Cryptol.Parser.AST
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Foldable (fold)
modExports :: Module -> ExportSpec
modExports m = fold (concat [ exportedNames d | d <- mDecls m ])
where
names by td = [ td { tlValue = thing n } | n <- fst (by (tlValue td)) ]
exportedNames (Decl td) = map exportBind (names namesD td)
++ map exportType (names tnamesD td)
exportedNames (TDNewtype nt) = map exportType (names tnamesNT nt)
exportedNames (Include {}) = []
-- | The names defined by a newtype.
tnamesNT :: Newtype -> ([Located QName], ())
tnamesNT x = ([ nName x ], ())
-- | The names defined and used by a group of mutually recursive declarations.
namesDs :: [Decl] -> ([Located QName], Set QName)
namesDs ds = (defs, boundNames defs (Set.unions frees))
where
defs = concat defss
(defss,frees) = unzip (map namesD ds)
-- | The names defined and used by a single declaration.
namesD :: Decl -> ([Located QName], Set QName)
namesD decl =
case decl of
DBind b -> namesB b
DPatBind p e -> (namesP p, namesE e)
DSignature {} -> ([],Set.empty)
DPragma {} -> ([],Set.empty)
DType {} -> ([],Set.empty)
DLocated d _ -> namesD d
-- | The names defined and used by a single declaration in such a way
-- that they cannot be duplicated in a file. For example, it is fine
-- to use @x@ on the RHS of two bindings, but not on the LHS of two
-- type signatures.
allNamesD :: Decl -> [Located QName]
allNamesD decl =
case decl of
DBind b -> fst (namesB b)
DPatBind p _ -> namesP p
DSignature ns _ -> ns
DPragma ns _ -> ns
DType ts -> [tsName ts]
DLocated d _ -> allNamesD d
tsName :: TySyn -> Located QName
tsName (TySyn lqn _ _) = lqn
-- | The names defined and used by a single binding.
namesB :: Bind -> ([Located QName], Set QName)
namesB b = ([bName b], boundNames (namesPs (bParams b)) (namesE (bDef b)))
-- | The names used by an expression.
namesE :: Expr -> Set QName
namesE expr =
case expr of
EVar x -> Set.singleton x
ECon _ -> Set.empty
ELit _ -> Set.empty
ETuple es -> Set.unions (map namesE es)
ERecord fs -> Set.unions (map (namesE . value) fs)
ESel e _ -> namesE e
EList es -> Set.unions (map namesE es)
EFromTo _ _ _ -> Set.empty
EComp e arms -> let (dss,uss) = unzip (map namesArm arms)
in Set.union (boundNames (concat dss) (namesE e))
(Set.unions uss)
EApp e1 e2 -> Set.union (namesE e1) (namesE e2)
EAppT e _ -> namesE e
EIf e1 e2 e3 -> Set.union (namesE e1) (Set.union (namesE e2) (namesE e3))
EWhere e ds -> let (bs,xs) = namesDs ds
in Set.union (boundNames bs (namesE e)) xs
ETyped e _ -> namesE e
ETypeVal _ -> Set.empty
EFun ps e -> boundNames (namesPs ps) (namesE e)
ELocated e _ -> namesE e
-- | The names defined by a group of patterns.
namesPs :: [Pattern] -> [Located QName]
namesPs = concatMap namesP
-- | The names defined by a pattern. These will always be unqualified names.
namesP :: Pattern -> [Located QName]
namesP pat =
case pat of
PVar x -> [fmap mkUnqual x]
PWild -> []
PTuple ps -> namesPs ps
PRecord fs -> namesPs (map value fs)
PList ps -> namesPs ps
PTyped p _ -> namesP p
PSplit p1 p2 -> namesPs [p1,p2]
PLocated p _ -> namesP p
-- | The names defined and used by a match.
namesM :: Match -> ([Located QName], Set QName)
namesM (Match p e) = (namesP p, namesE e)
namesM (MatchLet b) = namesB b
-- | The names defined and used by an arm of a list comprehension.
namesArm :: [Match] -> ([Located QName], Set QName)
namesArm = foldr combine ([],Set.empty) . map namesM
where combine (ds1,fs1) (ds2,fs2) =
( filter ((`notElem` map thing ds2) . thing) ds1 ++ ds2
, Set.union fs1 (boundNames ds1 fs2)
)
-- | Remove some defined variables from a set of free variables.
boundNames :: [Located QName] -> Set QName -> Set QName
boundNames bs xs = Set.difference xs (Set.fromList (map thing bs))
-- | Given the set of type variables that are in scope,
-- compute the type synonyms used by a type.
namesT :: Set QName -> Type -> Set QName
namesT vs = go
where
go ty =
case ty of
TWild -> Set.empty
TFun t1 t2 -> Set.union (go t1) (go t2)
TSeq t1 t2 -> Set.union (go t1) (go t2)
TBit -> Set.empty
TNum _ -> Set.empty
TChar _ -> Set.empty
TInf -> Set.empty
TApp _ ts -> Set.unions (map go ts)
TTuple ts -> Set.unions (map go ts)
TRecord fs -> Set.unions (map (go . value) fs)
TLocated t _ -> go t
TUser x [] | x `Set.member` vs
-> Set.empty
TUser x ts -> Set.insert x (Set.unions (map go ts))
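-- A tiny illustrative check (added for clarity, not part of the original
-- module): built-in types mention no type synonyms, so 'namesT' returns
-- the empty set for them.
namesTExample :: Bool
namesTExample = namesT Set.empty (TFun TBit TBit) == Set.empty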
-- | The type names defined and used by a group of mutually recursive declarations.
tnamesDs :: [Decl] -> ([Located QName], Set QName)
tnamesDs ds = (defs, boundNames defs (Set.unions frees))
where
defs = concat defss
(defss,frees) = unzip (map tnamesD ds)
-- | The type names defined and used by a single declaration.
tnamesD :: Decl -> ([Located QName], Set QName)
tnamesD decl =
case decl of
DSignature _ s -> ([], tnamesS s)
DPragma {} -> ([], Set.empty)
DBind b -> ([], tnamesB b)
DPatBind _ e -> ([], tnamesE e)
DLocated d _ -> tnamesD d
DType (TySyn n ps t) -> ([n], Set.difference (tnamesT t) (Set.fromList (map tpQName ps)))
-- | The type names used by a single binding.
tnamesB :: Bind -> Set QName
tnamesB b = Set.unions [setS, setP, setE]
where
setS = maybe Set.empty tnamesS (bSignature b)
setP = Set.unions (map tnamesP (bParams b))
setE = tnamesE (bDef b)
-- | The type names used by an expression.
tnamesE :: Expr -> Set QName
tnamesE expr =
case expr of
EVar _ -> Set.empty
ECon _ -> Set.empty
ELit _ -> Set.empty
ETuple es -> Set.unions (map tnamesE es)
ERecord fs -> Set.unions (map (tnamesE . value) fs)
ESel e _ -> tnamesE e
EList es -> Set.unions (map tnamesE es)
EFromTo a b c -> Set.union (tnamesT a)
(Set.union (maybe Set.empty tnamesT b) (maybe Set.empty tnamesT c))
EComp e mss -> Set.union (tnamesE e) (Set.unions (map tnamesM (concat mss)))
EApp e1 e2 -> Set.union (tnamesE e1) (tnamesE e2)
EAppT e fs -> Set.union (tnamesE e) (Set.unions (map tnamesTI fs))
EIf e1 e2 e3 -> Set.union (tnamesE e1) (Set.union (tnamesE e2) (tnamesE e3))
EWhere e ds -> let (bs,xs) = tnamesDs ds
in Set.union (boundNames bs (tnamesE e)) xs
ETyped e t -> Set.union (tnamesE e) (tnamesT t)
ETypeVal t -> tnamesT t
EFun ps e -> Set.union (Set.unions (map tnamesP ps)) (tnamesE e)
ELocated e _ -> tnamesE e
tnamesTI :: TypeInst -> Set QName
tnamesTI (NamedInst f) = tnamesT (value f)
tnamesTI (PosInst t) = tnamesT t
-- | The type names used by a pattern.
tnamesP :: Pattern -> Set QName
tnamesP pat =
case pat of
PVar _ -> Set.empty
PWild -> Set.empty
PTuple ps -> Set.unions (map tnamesP ps)
PRecord fs -> Set.unions (map (tnamesP . value) fs)
PList ps -> Set.unions (map tnamesP ps)
PTyped p t -> Set.union (tnamesP p) (tnamesT t)
PSplit p1 p2 -> Set.union (tnamesP p1) (tnamesP p2)
PLocated p _ -> tnamesP p
-- | The type names used by a match.
tnamesM :: Match -> Set QName
tnamesM (Match p e) = Set.union (tnamesP p) (tnamesE e)
tnamesM (MatchLet b) = tnamesB b
-- | The type names used by a type schema.
tnamesS :: Schema -> Set QName
tnamesS (Forall params props ty _) =
Set.difference (Set.union (Set.unions (map tnamesC props)) (tnamesT ty))
(Set.fromList (map tpQName params))
-- | The type names used by a prop.
tnamesC :: Prop -> Set QName
tnamesC prop =
case prop of
CFin t -> tnamesT t
CEqual t1 t2 -> Set.union (tnamesT t1) (tnamesT t2)
CGeq t1 t2 -> Set.union (tnamesT t1) (tnamesT t2)
CArith t -> tnamesT t
CCmp t -> tnamesT t
CLocated p _ -> tnamesC p
-- | Compute the type synonyms/type variables used by a type.
tnamesT :: Type -> Set QName
tnamesT ty =
case ty of
TWild -> Set.empty
TFun t1 t2 -> Set.union (tnamesT t1) (tnamesT t2)
TSeq t1 t2 -> Set.union (tnamesT t1) (tnamesT t2)
TBit -> Set.empty
TNum _ -> Set.empty
TChar __ -> Set.empty
TInf -> Set.empty
TApp _ ts -> Set.unions (map tnamesT ts)
TTuple ts -> Set.unions (map tnamesT ts)
TRecord fs -> Set.unions (map (tnamesT . value) fs)
TLocated t _ -> tnamesT t
TUser x ts -> Set.insert x (Set.unions (map tnamesT ts))
|
TomMD/cryptol
|
src/Cryptol/Parser/Names.hs
|
bsd-3-clause
| 9,337 | 0 | 14 | 2,719 | 3,586 | 1,765 | 1,821 | 189 | 17 |
{- |
Module : $Header$
Description : folding functions for VSE programs
Copyright : (c) Christian Maeder, DFKI Bremen 2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
folding functions for VSE programs
-}
module VSE.Fold where
import qualified Data.Set as Set
import CASL.AS_Basic_CASL
import VSE.As
-- | fold record
data FoldRec a = FoldRec
{ foldAbort :: Program -> a
, foldSkip :: Program -> a
, foldAssign :: Program -> VAR -> TERM () -> a
, foldCall :: Program -> FORMULA () -> a
, foldReturn :: Program -> TERM () -> a
, foldBlock :: Program -> [VAR_DECL] -> a -> a
, foldSeq :: Program -> a -> a -> a
, foldIf :: Program -> FORMULA () -> a -> a -> a
, foldWhile :: Program -> FORMULA () -> a -> a }
-- | fold function
foldProg :: FoldRec a -> Program -> a
foldProg r p = case unRanged p of
Abort -> foldAbort r p
Skip -> foldSkip r p
Assign v t -> foldAssign r p v t
Call f -> foldCall r p f
Return t -> foldReturn r p t
Block vs q -> foldBlock r p vs $ foldProg r q
Seq p1 p2 -> foldSeq r p (foldProg r p1) $ foldProg r p2
If f p1 p2 -> foldIf r p f (foldProg r p1) $ foldProg r p2
While f q -> foldWhile r p f $ foldProg r q
mapRec :: FoldRec Program
mapRec = FoldRec
{ foldAbort = id
, foldSkip = id
, foldAssign = \ (Ranged _ r) v t -> Ranged (Assign v t) r
, foldCall = \ (Ranged _ r) f -> Ranged (Call f) r
, foldReturn = \ (Ranged _ r) t -> Ranged (Return t) r
, foldBlock = \ (Ranged _ r) vs p -> Ranged (Block vs p) r
, foldSeq = \ (Ranged _ r) p1 p2 -> Ranged (Seq p1 p2) r
, foldIf = \ (Ranged _ r) c p1 p2 -> Ranged (If c p1 p2) r
, foldWhile = \ (Ranged _ r) c p -> Ranged (While c p) r }
mapProg :: (TERM () -> TERM ()) -> (FORMULA () -> FORMULA ())
-> FoldRec Program
mapProg mt mf = mapRec
{ foldAssign = \ (Ranged _ r) v t -> Ranged (Assign v $ mt t) r
, foldCall = \ (Ranged _ r) f -> Ranged (Call $ mf f) r
, foldReturn = \ (Ranged _ r) t -> Ranged (Return $ mt t) r
, foldIf = \ (Ranged _ r) c p1 p2 -> Ranged (If (mf c) p1 p2) r
, foldWhile = \ (Ranged _ r) c p -> Ranged (While (mf c) p) r }
-- | a fold record that merely collects sub-results, e.g. variables to be universally bound on the top level
constProg :: (TERM () -> a) -> (FORMULA () -> a) -> ([a] -> a) -> a -> FoldRec a
constProg ft ff join c = FoldRec
{ foldAbort = const c
, foldSkip = const c
, foldAssign = \ _ _ t -> ft t
, foldCall = \ _ f -> ff f
, foldReturn = \ _ t -> ft t
, foldBlock = \ _ _ p -> p
, foldSeq = \ _ p1 p2 -> join [p1, p2]
, foldIf = \ _ f p1 p2 -> join [ff f, p1, p2]
, foldWhile = \ _ f p -> join [ff f, p] }
progToSetRec :: Ord a => (TERM () -> Set.Set a) -> (FORMULA () -> Set.Set a)
-> FoldRec (Set.Set a)
progToSetRec ft ff = constProg ft ff Set.unions Set.empty
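-- A small usage sketch (added for illustration, not part of the original
-- module): instantiating 'constProg' gives a fold that only collects
-- sub-results. Here it gathers every formula occurring as a call or as an
-- if/while condition. The helper name is invented.
progFormulas :: Program -> [FORMULA ()]
progFormulas = foldProg (constProg (const []) (: []) concat [])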
|
keithodulaigh/Hets
|
VSE/Fold.hs
|
gpl-2.0
| 2,862 | 0 | 13 | 766 | 1,309 | 681 | 628 | 58 | 9 |
-- (C) Copyright Chris Banks 2011
-- This file is part of The Continuous Pi-calculus Workbench (CPiWB).
-- CPiWB is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
-- CPiWB is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-- You should have received a copy of the GNU General Public License
-- along with CPiWB. If not, see <http://www.gnu.org/licenses/>.
{-# OPTIONS_GHC -XDeriveDataTypeable -XTypeSynonymInstances -XFlexibleInstances #-}
module CPi.Lib
(CpiException(..),
Species(..),
Process(..),
Prefix(..),
PrefixSpecies,
AffNet(..),
Aff(..),
Definition(..),
Conc,
Env,
Name,
OutNames,
InNames,
Rate,
Pretty,
Nf,
pretty,
prettys,
prettyNames,
prettyList,
aff,
tri1,
tri2,
tri3,
s2d,
d2s,
lookupDef,
revLookupDef,
lookupProcName,
lookupSpecName,
speciesInProc,
compose,
card,
nf,
sites,
sub,
remove,
replace,
ifnotnil,
netUnion,
fn, bn,
rename,
vecRename,
netRename,
specName,
infty,
(/\),(\/),(#),(##),(\\)
) where
import qualified Data.List as L
--import qualified Data.Map as Map
--import Data.Map (Map)
import qualified Control.Exception as X
import qualified Data.Typeable as T
data CpiException = CpiException String
deriving (Show,T.Typeable)
instance X.Exception CpiException
----------------------
--Data structures:
-----------------------
type Name = String
type OutNames = [Name]
type InNames = [Name]
type Rate = Double
type Conc = Double
type PrefixSpecies = (Prefix,Species)
infty = 1/0
data Prefix = Comm Name OutNames InNames
| Tau Rate
deriving (Eq, Ord, Show)
data Aff = Aff ((Name,Name),Rate)
deriving(Eq, Ord, Show)
data AffNet = AffNet [Aff]
deriving(Eq, Ord, Show)
data Species = Nil
| Def String [Name]
| Sum [PrefixSpecies]
| Par [Species]
| New AffNet Species
deriving (Ord, Show)
data Process = Process [(Species,Conc)] AffNet
deriving (Eq, Ord, Show)
data Definition = SpeciesDef Name [Name] Species
| ProcessDef Name Process
deriving (Eq,Show)
type Env = [Definition] -- Env(ironment) is a list of definitions
-- Referential equality of Species (e.g. for lookup).
instance Eq Species where
Nil == Nil = True
(Def s ns) == (Def s' ns') = (s==s')&&(ns==ns')
(Sum ss) == (Sum ss') = ss==ss'
(Par ss) == (Par ss') = ss==ss'
(New n s) == (New n' s') = (n==n')&&(s==s')
_ == _ = False
----------------------
-- Pretty printing:
----------------------
-- Instances of Pretty class are pretty-printable
-- CPi components.
class (Show a) => Pretty a where
pretty :: a -> String
pretty x = show x
instance Pretty Process where
pretty (Process x@((s,c):scs) n)
| null x = ""
| length x == 1
= "["++(show c)++"] "++(pretty s)
| otherwise
= (pretty (Process [(s,c)] n))++" || "++(pretty (Process scs n))
pretty (Process [] _) = "<Empty process>"
instance Pretty Species where
pretty Nil = "0"
pretty (Def i ns) = i ++"("++(prettyNames ns)++")"
-- NOTE: temp removed def params!
pretty x'@(Sum x@((p,s):pss))
| (length x == 1)
= (pretty p)++(prettyPs s x')
| otherwise
= (pretty $ Sum [(p,s)])++" + "++(pretty $ Sum pss)
pretty (Sum []) = "<Empty Sum>"
pretty x'@(Par x@(s:ss))
| (null x)
= ""
| (length x == 1)
= pretty s
| otherwise
= (prettyPs s x')++" | "++(prettyPs (Par ss) x')
pretty (Par []) = "<Empty Par>"
pretty (New n s) = (pretty n)++" "++(pretty s)
instance Pretty Prefix where
pretty (Tau r) = "tau<"++(show r)++">."
pretty (Comm n [] []) = n++"."
pretty (Comm n [] is) = n++"("++(prettyNames is)++")."
pretty (Comm n os []) = n++"<"++(prettyNames os)++">."
pretty (Comm n os is) = n++"("++(prettyNames os)++";"
++(prettyNames is)++")."
instance Pretty Aff where
pretty (Aff ((n1,n2),r)) = n1++"-"++n2++"@"++(show r)
prettyAffs affs = concat(L.intersperse ", " (map pretty affs))
instance Pretty AffNet where
pretty (AffNet affs) = "{"++(prettyAffs affs)++"}"
instance Pretty Definition where
pretty (SpeciesDef n fns s)
= n++"("++(prettyNames fns)++") = "++(pretty s)
pretty (ProcessDef n p)
= n++" = "++(pretty p)
-- Ordering for Definitions:
instance Ord Definition where
-- Any SpeciesDef < any ProcessDef
compare (SpeciesDef _ _ _) (ProcessDef _ _) = LT
compare (ProcessDef _ _) (SpeciesDef _ _ _) = GT
-- Both ProcessDef and SpeciesDef sort by name.
compare (SpeciesDef n _ _) (SpeciesDef n' _ _) = compare n n'
compare (ProcessDef n _) (ProcessDef n' _) = compare n n'
-- |Pretty print a list of Names.
prettyNames :: [Name] -> String
prettyNames ns = concat(L.intersperse "," ns)
-- Parenthesisation
prettyPs :: Species -> Species -> String
prettyPs x x'
| ((prio x)<=(prio x'))
= (pretty x)
| otherwise
= "("++(pretty x)++")"
where prio Nil = 10
prio (Def _ _) = 10
prio (Sum ss)
| (length ss == 1) = 10
| otherwise = 20
prio (Par _) = 30
prio (New _ _) = 40
----------------------
--Functions:
----------------------
--AffNet funs:
sites :: AffNet -> [Name]
sites net = sites' net []
where
sites' :: AffNet -> [Name] -> [Name]
sites' (AffNet ((Aff ((s1,s2),_)):affs)) r
= sites' (AffNet affs) (s1:s2:r)
sites' (AffNet []) r = L.nub r
aff :: AffNet -> (Name,Name) -> Maybe Rate
aff (AffNet affs) (n,n')
| (a /= Nothing) = a
| otherwise = (aff' (n',n) affs)
where a = (aff' (n,n') affs)
aff' _ [] = Nothing
aff' k ((Aff ((x,y),r)):affs')
| k == (x,y) = Just r
| otherwise = aff' k affs'
netUnion :: AffNet -> AffNet -> AffNet
netUnion (AffNet n) (AffNet n') = AffNet (n \/ n')
--Free/Bound names:
fn :: Species -> [Name]
fn Nil = []
fn (Def l ns) = ns
fn (Par []) = []
fn (Par (x:xs)) = (fn x) \/ (fn (Par xs))
fn (New n s) = (fn s) \\ (sites n)
fn (Sum []) = []
fn (Sum (((Tau r),s):xs)) = (fn s) \/ (fn (Sum xs))
fn (Sum (((Comm n o i),s):xs)) = [n] \/ o \/ ((fn s) \\ i) \/ (fn (Sum xs))
bn :: Species -> [Name]
bn Nil = []
bn (Def _ _) = []
bn (Par []) = []
bn (Par (x:xs)) = (bn x) \/ (bn (Par xs))
bn (New n s) = (bn s) \/ ((fn s) /\ (sites n))
bn (Sum []) = []
bn (Sum (((Tau r),s):xs)) = (bn s) \/ (bn (Sum xs))
bn (Sum (((Comm n o i),s):xs)) = (bn s) \/ ((fn s) /\ i) \/ (bn (Sum xs))
-- rename the given name in a species:
rename :: (Name,Name) -> Species -> Species
rename _ Nil = Nil
rename r (Def l ns) = Def l (vecRename r ns)
rename r (Par ss) = Par (map (rename r) ss)
rename r (New net s) = New (netRename r net) (rename r s)
rename r (Sum pfxs) = Sum (pfxRename r pfxs)
-- Renaming on name vectors
vecRename :: (Name,Name) -> [Name] -> [Name]
vecRename _ [] = []
vecRename r@(old,new) (n:ns)
| n == old = new : vecRename r ns
| otherwise = n : vecRename r ns
-- Renaming on affinity networks:
netRename :: (Name,Name) -> AffNet -> AffNet
netRename r (AffNet affs) = AffNet (netRename' r affs)
where
netRename' _ [] = []
netRename' r@(old,new) ((Aff ((n,n'),p)):affs)
| n == old = (Aff ((new,n'),p)):(netRename' r affs)
| n' == old = (Aff ((n,new),p)):(netRename' r affs)
| otherwise = (Aff ((n,n'),p)):(netRename' r affs)
-- Renaming on PrefixSpecies
pfxRename :: (Name,Name) -> [PrefixSpecies] -> [PrefixSpecies]
pfxRename _ [] = []
pfxRename r ((pfx,s):pfxs)
= ((pfxRename' r pfx),(rename r s)):(pfxRename r pfxs)
where
pfxRename' _ (Tau rate) = Tau rate
pfxRename' r@(old,new) (Comm n ons ins)
| n == old = Comm new (vecRename r ons) (vecRename r ins)
| otherwise = Comm n (vecRename r ons) (vecRename r ins)
-- Substitution of free names in a Species:
-- sub [(n,n')] s = find free Names n in Species s
-- and replace with New Names n'
-- Substitution is capture avoiding (will alpha-convert to avoid name capture)
sub :: [(Name,Name)] -> Species -> Species
sub [] s = s
sub ((old,new):rs) s
-- name to be replaced is free and replacement is not bound
-- then go ahead and substitute
| (old `elem` (fn s)) && (not(new `elem` (bn s)))
= sub rs (rename (old,new) s)
-- name to be replaced is free, but replacement is bound
-- then alpha-convert the bound name before substituting
| (old `elem` (fn s)) && (new `elem` (bn s))
= sub ((old,new):rs) (aconv (new,(renaming new s)) s)
-- name to be replaced is not in the term, ignore
| (not(old `elem` (fn s))) && (not(old `elem` (bn s)))
= sub rs s
| otherwise
-- name to be replaced is not free -- error!
= X.throw $ CpiException
"CPi.Lib.sub: Tried to substitute a non-free name."
-- alpha-conversion of species
aconv :: (Name,Name) -> Species -> Species
aconv (old,new) s
| (not(old `elem` (fn s))) && (not(new `elem` (fn s)))
= rename (old,new) s
| (new `elem` (fn s))
= X.throw $ CpiException
"CPi.Lib.aconv: Tried to alpha-convert to an existing free name."
| otherwise
= X.throw $ CpiException
"CPi.Lib.aconv: Tried to alpha-convert a non-bound name."
-- a fresh renaming of a name in s
renaming :: Name -> Species -> Name
renaming n s = renaming' (renames n) s
where
renaming' (n:ns) s
| not(n `elem` (fn s)) = n
| otherwise = renaming' ns s
renaming' [] _
= X.throw $ CpiException
"CPi.Lib.renaming: Renaming stream has been exhausted."
-- a stream of possible renamings for a given name
renames x = [x++p | p <- iterate (++"'") "'"]
-- Fresh-for tests for restrictions
(#) :: AffNet -> Species -> Bool
net#s = ((sites net)/\(fn s)) == []
(##) :: AffNet -> AffNet -> Bool
net##net' = ((sites net)/\(sites net')) == []
-- Definition lookup:
lookupDef :: Env -> Species -> Maybe Species
lookupDef [] (Def _ _) = Nothing
lookupDef ((SpeciesDef i ps s):env) def@(Def x ns)
| i == x = Just (sub (zip ps ns) s)
| otherwise = lookupDef env def
lookupDef ((ProcessDef _ _):env) def = lookupDef env def
lookupDef _ _ = X.throw $ CpiException
"Unexpected pattern: CPi.Lib.lookupDef expects a Def!"
-- Reverse definition lookup:
revLookupDef :: Env -> Species -> Maybe Species
revLookupDef [] _ = Nothing
revLookupDef ((SpeciesDef i ps s):env) spec
| nf spec == nf s = Just (Def i ps)
| otherwise = revLookupDef env spec
revLookupDef ((ProcessDef _ _):env) spec = revLookupDef env spec
-- Process lookup by name:
lookupProcName :: Env -> String -> Maybe Process
lookupProcName [] _ = Nothing
lookupProcName ((SpeciesDef _ _ _):env) str = lookupProcName env str
lookupProcName ((ProcessDef name proc):env) str
| (str == name) = Just proc
| (otherwise) = lookupProcName env str
-- Species lookup by name:
lookupSpecName :: Env -> String -> Maybe Species
lookupSpecName [] _ = Nothing
lookupSpecName ((ProcessDef _ _):env) str = lookupSpecName env str
lookupSpecName ((SpeciesDef name ns _):env) str
| (str == name) = Just (Def name ns)
| (otherwise) = lookupSpecName env str
-- Process composition (p1,p2) -> p1||p2:
compose :: Process -> Process -> Process
compose (Process p1 a1) (Process p2 a2)
= Process (compSpec p1 p2) (netUnion a1 a2)
where
compSpec s' ((s,c):ss)
| lookup s s' == Nothing
= compSpec ((s,c):s') ss
| otherwise
= compSpec (incSpec s c s') ss
compSpec s' [] = s'
incSpec s' c' ((s,c):ss)
| s == s'
= (s,(c + c')) : ss
| otherwise
= (s,c) : incSpec s' c' ss
incSpec s' c' [] = [(s',c')]
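-- A small illustrative sketch (added for clarity, not part of the original
-- library): composing two processes that contain the same species sums
-- their concentrations.
composeExample :: Bool
composeExample =
  compose (Process [(Def "A" [], 1.0)] (AffNet []))
          (Process [(Def "A" [], 2.0)] (AffNet []))
    == Process [(Def "A" [], 3.0)] (AffNet [])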
speciesInProc :: Process -> [Species]
-- List of species in a process
speciesInProc (Process scs _) = [s | (s,c)<-scs]
-- A simplified string representation of a species
specName :: Species -> String
specName Nil = "0"
specName (Def n _) = n
specName (Par ss) = concat(L.intersperse "|" (map specName ss))
specName (Sum ps) = concat(L.intersperse "+" (map (specName . snd) ps))
specName (New _ s) = "(new)" ++ specName s
------------------
-- Normal form
------------------
-- The "normal form" here is reduction by structural congruence
-- (not including alpha equiv.) and alphanumeric ordering of term lists
-- This allows us to define a smaller equivalence based on the referential
-- equality (see above).
class Nf a where
nf :: a -> a
-- normal form for species
instance Nf Species where
nf s
| result==s = result
| otherwise = nf result
where
result = nf' s
nf' Nil = Nil
nf' (Def s ns) = Def s ns
nf' (Sum []) = Nil
-- commutativity and associativity of Sum
nf' (Sum pfs) = Sum (L.sort (map nf pfs))
nf' (Par []) = Nil
nf' (Par [s]) = nf s
-- commutativity and associativity of Par
-- and 0|A = A
nf' (Par ss) = Par (L.sort (dropNils (flatten (map nf ss))))
where
dropNils = filter (\x->x/=Nil)
flatten [] = []
flatten (x:xs) = (f x)++(flatten xs)
where
f (Par ss) = ss
f s = [s]
-- (new M)(new N)A = (new MUN)A when M#N and ¬(M#A or N#A)
nf' (New net@(AffNet ns) (New net'@(AffNet ns') s))
| (net##net') && not(net#s || net'#s)
= nf (New (net `netUnion` net') s)
| net#s
= nf (New net' s)
| net'#s
= nf (New net s)
| otherwise
= (New (AffNet (L.sort ns)) (New (AffNet (L.sort ns')) (nf s)))
-- (new M)(A|B) = A|(new M)B when M#A
nf' (New net (Par ss)) = liftfps net ss [] []
where
liftfps :: AffNet -> [Species] -> [Species] -> [Species] -> Species
liftfps net [] [] ins
= New net (nf (Par ins))
liftfps net [] outs []
= nf (Par outs)
liftfps net [] outs ins
= Par ((New net (nf (Par ins))):(map nf outs))
liftfps net (s:ss) outs ins
| net#s = liftfps net ss (s:outs) ins
| otherwise = liftfps net ss outs (s:ins)
-- (new M)A = A when M#A
nf' (New net@(AffNet ns) s)
| net#s = nf s
| otherwise = New (AffNet (L.sort ns)) (nf s)
instance Nf PrefixSpecies where
nf (p,s) = (p,(nf s))
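-- A small illustrative sketch (added for clarity, not part of the original
-- library): normalisation drops Nil components of a parallel composition,
-- so both species below reduce to the same normal form.
nfExample :: Bool
nfExample = nf (Par [Nil, Def "A" []]) == nf (Def "A" [])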
---------------------
-- Utility functions:
---------------------
--Set operations:
-- | Set union.
(\/) :: (Eq a) => [a] -> [a] -> [a]
(\/) = L.union
-- | Set intersection.
(/\) :: (Eq a) => [a] -> [a] -> [a]
(/\) = L.intersect
-- | Set difference.
(\\) :: (Eq a) => [a] -> [a] -> [a]
(\\) = (L.\\)
--Real<->String
d2s :: Double -> String
d2s x = show x
s2d :: String -> Double
s2d x = read x :: Double
-- | If List is not nil then apply Function else return Default.
ifnotnil :: [a] -- ^ List
-> ([a] -> b) -- ^ Function
-> b -- ^ Default
-> b
ifnotnil [] f b = b
ifnotnil xs f b = f xs
-- | Pretty print a list of pretty printable expressions.
prettys :: (Pretty a) => [a] -> String
prettys x = concat $ map (\z->(pretty z)++"\n") x
-- | Pretty print a list
prettyList x = L.concat $ L.intersperse "\n" x
-- | Replace first matched element of a list with something else.
replace :: (Eq a) => a -> a -> [a] -> [a]
replace _ _ [] = []
replace src dst (x:xs)
| x==src = dst:xs
| otherwise = x:(replace src dst xs)
-- | Remove first matched element of a list.
remove _ [] = []
remove m (x:xs)
| x==m = xs
| otherwise = x:(remove m xs)
-- | Count of an element in a list.
card :: (Eq a) => a -> [a] -> Integer
card e l = toInteger $ length $ filter (\a->a==e) l
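-- Assumed behaviour of the list helpers above (illustrative only):
--   replace 2 9 [1,2,2,3]  ==  [1,9,2,3]   -- only the first match is replaced
--   remove  2   [1,2,2,3]  ==  [1,2,3]     -- only the first match is removed
--   card    2   [1,2,2,3]  ==  2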
-- | First element of a triple.
tri1 (x,_,_) = x
-- | Second element of a triple.
tri2 (_,x,_) = x
-- | Third element of a triple.
tri3 (_,_,x) = x
|
chrisbanks/cpiwb
|
CPi/Lib.hs
|
gpl-3.0
| 17,088 | 1 | 18 | 5,146 | 6,805 | 3,591 | 3,214 | 376 | 5 |
{-
This module handles generation of position independent code and
dynamic-linking related issues for the native code generator.
This depends both the architecture and OS, so we define it here
instead of in one of the architecture specific modules.
Things outside this module which are related to this:
+ module CLabel
- PIC base label (pretty printed as local label 1)
- DynamicLinkerLabels - several kinds:
CodeStub, SymbolPtr, GotSymbolPtr, GotSymbolOffset
- labelDynamic predicate
+ module Cmm
- The GlobalReg datatype has a PicBaseReg constructor
- The CmmLit datatype has a CmmLabelDiffOff constructor
+ codeGen & RTS
- When tablesNextToCode, no absolute addresses are stored in info tables
any more. Instead, offsets from the info label are used.
- For Win32 only, SRTs might contain addresses of __imp_ symbol pointers
because Win32 doesn't support external references in data sections.
TODO: make sure this still works, it might be bitrotted
+ NCG
- The cmmToCmm pass in AsmCodeGen calls cmmMakeDynamicReference for all
labels.
- nativeCodeGen calls pprImportedSymbol and pprGotDeclaration to output
all the necessary stuff for imported symbols.
- The NCG monad keeps track of a list of imported symbols.
- MachCodeGen invokes initializePicBase to generate code to initialize
the PIC base register when needed.
- MachCodeGen calls cmmMakeDynamicReference whenever it uses a CLabel
that wasn't in the original Cmm code (e.g. floating point literals).
-}
module PIC (
cmmMakeDynamicReference,
CmmMakeDynamicReferenceM(..),
ReferenceKind(..),
needImportedSymbols,
pprImportedSymbol,
pprGotDeclaration,
initializePicBase_ppc,
initializePicBase_x86
)
where
import qualified PPC.Instr as PPC
import qualified PPC.Regs as PPC
import qualified X86.Instr as X86
import Platform
import Instruction
import Reg
import NCGMonad
import Hoopl
import Cmm
import CLabel ( CLabel, ForeignLabelSource(..), pprCLabel,
mkDynamicLinkerLabel, DynamicLinkerLabelInfo(..),
dynamicLinkerLabelInfo, mkPicBaseLabel,
labelDynamic, externallyVisibleCLabel )
import CLabel ( mkForeignLabel )
import BasicTypes
import Module
import Outputable
import DynFlags
import FastString
--------------------------------------------------------------------------------
-- cmmMakeDynamicReference gets called by the cmmToCmm pass for every
-- CmmLabel in the Cmm code. It does The Right Thing(tm) to convert the
-- CmmLabel into a position-independent, dynamic-linking-aware reference
-- to the thing in question.
-- Note that this also has to be called from MachCodeGen in order to
-- access static data like floating point literals (labels that were
-- created after the cmmToCmm pass).
-- The function must run in a monad that can keep track of imported symbols
-- A function for recording an imported symbol must be passed in:
-- - addImportCmmOpt for the CmmOptM monad
-- - addImportNat for the NatM monad.
data ReferenceKind
= DataReference
| CallReference
| JumpReference
deriving(Eq)
class Monad m => CmmMakeDynamicReferenceM m where
addImport :: CLabel -> m ()
getThisModule :: m Module
instance CmmMakeDynamicReferenceM NatM where
addImport = addImportNat
getThisModule = getThisModuleNat
cmmMakeDynamicReference
:: CmmMakeDynamicReferenceM m
=> DynFlags
-> ReferenceKind -- whether this is the target of a jump
-> CLabel -- the label
-> m CmmExpr
cmmMakeDynamicReference dflags referenceKind lbl
| Just _ <- dynamicLinkerLabelInfo lbl
= return $ CmmLit $ CmmLabel lbl -- already processed it, pass through
| otherwise
= do this_mod <- getThisModule
case howToAccessLabel
dflags
(platformArch $ targetPlatform dflags)
(platformOS $ targetPlatform dflags)
this_mod
referenceKind lbl of
AccessViaStub -> do
let stub = mkDynamicLinkerLabel CodeStub lbl
addImport stub
return $ CmmLit $ CmmLabel stub
AccessViaSymbolPtr -> do
let symbolPtr = mkDynamicLinkerLabel SymbolPtr lbl
addImport symbolPtr
return $ CmmLoad (cmmMakePicReference dflags symbolPtr) (bWord dflags)
AccessDirectly -> case referenceKind of
-- for data, we might have to make some calculations:
DataReference -> return $ cmmMakePicReference dflags lbl
-- all currently supported processors support
-- PC-relative branch and call instructions,
-- so just jump there if it's a call or a jump
_ -> return $ CmmLit $ CmmLabel lbl
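-- A rough illustration of the outcomes (hypothetical label "foo", not from
-- the original source): on an ELF platform built with -dynamic, a
-- DataReference to a label living in another package is classified as
-- AccessViaSymbolPtr, so the expression produced is essentially
--   CmmLoad (PicBaseReg + offset to foo's symbol pointer) wordSize
-- whereas a label defined locally is emitted directly as
--   CmmLit (CmmLabel foo)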
-- -----------------------------------------------------------------------------
-- Create a position independent reference to a label.
-- (but do not bother with dynamic linking).
-- We calculate the label's address by adding some (platform-dependent)
-- offset to our base register; this offset is calculated by
-- the function picRelative in the platform-dependent part below.
cmmMakePicReference :: DynFlags -> CLabel -> CmmExpr
cmmMakePicReference dflags lbl
-- Windows doesn't need PIC,
-- everything gets relocated at runtime
| OSMinGW32 <- platformOS $ targetPlatform dflags
= CmmLit $ CmmLabel lbl
-- both ABI versions default to medium code model
| ArchPPC_64 _ <- platformArch $ targetPlatform dflags
= CmmMachOp (MO_Add W32) -- code model medium
[ CmmReg (CmmGlobal PicBaseReg)
, CmmLit $ picRelative
(platformArch $ targetPlatform dflags)
(platformOS $ targetPlatform dflags)
lbl ]
| (gopt Opt_PIC dflags || WayDyn `elem` ways dflags) && absoluteLabel lbl
= CmmMachOp (MO_Add (wordWidth dflags))
[ CmmReg (CmmGlobal PicBaseReg)
, CmmLit $ picRelative
(platformArch $ targetPlatform dflags)
(platformOS $ targetPlatform dflags)
lbl ]
| otherwise
= CmmLit $ CmmLabel lbl
absoluteLabel :: CLabel -> Bool
absoluteLabel lbl
= case dynamicLinkerLabelInfo lbl of
Just (GotSymbolPtr, _) -> False
Just (GotSymbolOffset, _) -> False
_ -> True
--------------------------------------------------------------------------------
-- Knowledge about how special dynamic linker labels like symbol
-- pointers, code stubs and GOT offsets look like is located in the
-- module CLabel.
-- We have to decide which labels need to be accessed
-- indirectly or via a piece of stub code.
data LabelAccessStyle
= AccessViaStub
| AccessViaSymbolPtr
| AccessDirectly
howToAccessLabel
:: DynFlags -> Arch -> OS -> Module -> ReferenceKind -> CLabel -> LabelAccessStyle
-- Windows
-- In Windows speak, a "module" is a set of objects linked into the
-- same Portable Executable (PE) file. (both .exe and .dll files are PEs).
--
-- If we're compiling a multi-module program then symbols from other modules
-- are accessed by a symbol pointer named __imp_SYMBOL. At runtime we have the
-- following.
--
-- (in the local module)
-- __imp_SYMBOL: addr of SYMBOL
--
-- (in the other module)
-- SYMBOL: the real function / data.
--
-- To access the function at SYMBOL from our local module, we just need to
-- dereference the local __imp_SYMBOL.
--
-- If not compiling with -dynamic we assume that all our code will be linked
-- into the same .exe file. In this case we always access symbols directly,
-- and never use __imp_SYMBOL.
--
howToAccessLabel dflags _ OSMinGW32 this_mod _ lbl
-- Assume all symbols will be in the same PE, so just access them directly.
| WayDyn `notElem` ways dflags
= AccessDirectly
-- If the target symbol is in another PE we need to access it via the
-- appropriate __imp_SYMBOL pointer.
| labelDynamic dflags (thisPackage dflags) this_mod lbl
= AccessViaSymbolPtr
-- Target symbol is in the same PE as the caller, so just access it directly.
| otherwise
= AccessDirectly
-- Mach-O (Darwin, Mac OS X)
--
-- Indirect access is required in the following cases:
-- * things imported from a dynamic library
-- * (not on x86_64) data from a different module, if we're generating PIC code
-- It is always possible to access something indirectly,
-- even when it's not necessary.
--
howToAccessLabel dflags arch OSDarwin this_mod DataReference lbl
-- data access to a dynamic library goes via a symbol pointer
| labelDynamic dflags (thisPackage dflags) this_mod lbl
= AccessViaSymbolPtr
-- when generating PIC code, all cross-module data references
-- must go via a symbol pointer, too, because the assembler
-- cannot generate code for a label difference where one
-- label is undefined. Doesn't apply to x86_64.
-- Unfortunately, we don't know whether it's cross-module,
-- so we do it for all externally visible labels.
-- This is a slight waste of time and space, but otherwise
-- we'd need to pass the current Module all the way in to
-- this function.
| arch /= ArchX86_64
, gopt Opt_PIC dflags && externallyVisibleCLabel lbl
= AccessViaSymbolPtr
| otherwise
= AccessDirectly
howToAccessLabel dflags arch OSDarwin this_mod JumpReference lbl
-- dyld code stubs don't work for tailcalls because the
-- stack alignment is only right for regular calls.
-- Therefore, we have to go via a symbol pointer:
| arch == ArchX86 || arch == ArchX86_64
, labelDynamic dflags (thisPackage dflags) this_mod lbl
= AccessViaSymbolPtr
howToAccessLabel dflags arch OSDarwin this_mod _ lbl
-- Code stubs are the usual method of choice for imported code;
-- not needed on x86_64 because Apple's new linker, ld64, generates
-- them automatically.
| arch /= ArchX86_64
, labelDynamic dflags (thisPackage dflags) this_mod lbl
= AccessViaStub
| otherwise
= AccessDirectly
-- ELF (Linux)
--
-- ELF tries to pretend to the main application code that dynamic linking does
-- not exist. While this may sound convenient, it tends to mess things up in
-- very bad ways, so we have to be careful when we generate code for the main
-- program (-dynamic but no -fPIC).
--
-- Indirect access is required for references to imported symbols
-- from position independent code. It is also required from the main program
-- when dynamic libraries containing Haskell code are used.
howToAccessLabel _ (ArchPPC_64 _) os _ kind _
| osElfTarget os
= case kind of
-- ELF PPC64 (powerpc64-linux), AIX, MacOS 9, BeOS/PPC
DataReference -> AccessViaSymbolPtr
-- RTLD does not generate stubs for function descriptors
-- in tail calls. Create a symbol pointer and generate
-- the code to load the function descriptor at the call site.
JumpReference -> AccessViaSymbolPtr
-- regular calls are handled by the runtime linker
_ -> AccessDirectly
howToAccessLabel dflags _ os _ _ _
-- no PIC -> the dynamic linker does everything for us;
-- if we don't dynamically link to Haskell code,
-- it actually manages to do so without messing things up.
| osElfTarget os
, not (gopt Opt_PIC dflags) && WayDyn `notElem` ways dflags
= AccessDirectly
howToAccessLabel dflags arch os this_mod DataReference lbl
| osElfTarget os
= case () of
-- A dynamic label needs to be accessed via a symbol pointer.
_ | labelDynamic dflags (thisPackage dflags) this_mod lbl
-> AccessViaSymbolPtr
-- For PowerPC32 -fPIC, we have to access even static data
-- via a symbol pointer (see below for an explanation why
-- PowerPC32 Linux is especially broken).
| arch == ArchPPC
, gopt Opt_PIC dflags
-> AccessViaSymbolPtr
| otherwise
-> AccessDirectly
-- In most cases, we have to avoid symbol stubs on ELF, for the following reasons:
-- on i386, the position-independent symbol stubs in the Procedure Linkage Table
-- require the address of the GOT to be loaded into register %ebx on entry.
-- The linker will take any reference to the symbol stub as a hint that
-- the label in question is a code label. When linking executables, this
-- will cause the linker to replace even data references to the label with
-- references to the symbol stub.
-- This leaves calling a (foreign) function from non-PIC code
-- (AccessDirectly, because we get an implicit symbol stub)
-- and calling functions from PIC code on non-i386 platforms (via a symbol stub)
howToAccessLabel dflags arch os this_mod CallReference lbl
| osElfTarget os
, labelDynamic dflags (thisPackage dflags) this_mod lbl && not (gopt Opt_PIC dflags)
= AccessDirectly
| osElfTarget os
, arch /= ArchX86
, labelDynamic dflags (thisPackage dflags) this_mod lbl && gopt Opt_PIC dflags
= AccessViaStub
howToAccessLabel dflags _ os this_mod _ lbl
| osElfTarget os
= if labelDynamic dflags (thisPackage dflags) this_mod lbl
then AccessViaSymbolPtr
else AccessDirectly
-- all other platforms
howToAccessLabel dflags _ _ _ _ _
| not (gopt Opt_PIC dflags)
= AccessDirectly
| otherwise
= panic "howToAccessLabel: PIC not defined for this platform"
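-- To summarise a few of the cases above (hypothetical labels):
--   * Windows, -dynamic, label in another PE    -> AccessViaSymbolPtr
--   * ELF, neither -fPIC nor -dynamic           -> AccessDirectly
--   * ELF, data reference to a dynamic label    -> AccessViaSymbolPtr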
-- -------------------------------------------------------------------
-- | Says what we have to add to our 'PIC base register' in order to
-- get the address of a label.
picRelative :: Arch -> OS -> CLabel -> CmmLit
-- Darwin, but not x86_64:
-- The PIC base register points to the PIC base label at the beginning
-- of the current CmmDecl. We just have to use a label difference to
-- get the offset.
-- We have already made sure that all labels that are not from the current
-- module are accessed indirectly ('as' can't calculate differences between
-- undefined labels).
picRelative arch OSDarwin lbl
| arch /= ArchX86_64
= CmmLabelDiffOff lbl mkPicBaseLabel 0
-- PowerPC Linux:
-- The PIC base register points to our fake GOT. Use a label difference
-- to get the offset.
-- We have made sure that *everything* is accessed indirectly, so this
-- is only used for offsets from the GOT to symbol pointers inside the
-- GOT.
picRelative ArchPPC os lbl
| osElfTarget os
= CmmLabelDiffOff lbl gotLabel 0
-- Most Linux versions:
-- The PIC base register points to the GOT. Use foo@got for symbol
-- pointers, and foo@gotoff for everything else.
-- Linux and Darwin on x86_64:
-- The PIC base register is %rip, we use foo@gotpcrel for symbol pointers,
-- and a GotSymbolOffset label for other things.
-- For reasons of tradition, the symbol offset label is written as a plain label.
picRelative arch os lbl
| osElfTarget os || (os == OSDarwin && arch == ArchX86_64)
= let result
| Just (SymbolPtr, lbl') <- dynamicLinkerLabelInfo lbl
= CmmLabel $ mkDynamicLinkerLabel GotSymbolPtr lbl'
| otherwise
= CmmLabel $ mkDynamicLinkerLabel GotSymbolOffset lbl
in result
picRelative _ _ _
= panic "PositionIndependentCode.picRelative undefined for this platform"
--------------------------------------------------------------------------------
needImportedSymbols :: DynFlags -> Arch -> OS -> Bool
needImportedSymbols dflags arch os
| os == OSDarwin
, arch /= ArchX86_64
= True
-- PowerPC Linux: -fPIC or -dynamic
| osElfTarget os
, arch == ArchPPC
= gopt Opt_PIC dflags || WayDyn `elem` ways dflags
-- PowerPC 64 Linux: always
| osElfTarget os
, arch == ArchPPC_64 ELF_V1 || arch == ArchPPC_64 ELF_V2
= True
-- i386 (and others?): -dynamic but not -fPIC
| osElfTarget os
, arch /= ArchPPC_64 ELF_V1 && arch /= ArchPPC_64 ELF_V2
= WayDyn `elem` ways dflags && not (gopt Opt_PIC dflags)
| otherwise
= False
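-- For instance (assumed flag combinations): on i386/Linux this is True only
-- for a -dynamic build without -fPIC, while on PowerPC/Linux either -fPIC or
-- -dynamic is enough, and on 64-bit PowerPC/Linux it is always True.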
-- gotLabel
-- The label used to refer to our "fake GOT" from
-- position-independent code.
gotLabel :: CLabel
gotLabel
-- HACK: this label isn't really foreign
= mkForeignLabel
(fsLit ".LCTOC1")
Nothing ForeignLabelInThisPackage IsData
--------------------------------------------------------------------------------
-- We don't need to declare any offset tables.
-- However, for PIC on x86, we need a small helper function.
pprGotDeclaration :: DynFlags -> Arch -> OS -> SDoc
pprGotDeclaration dflags ArchX86 OSDarwin
| gopt Opt_PIC dflags
= vcat [
text ".section __TEXT,__textcoal_nt,coalesced,no_toc",
text ".weak_definition ___i686.get_pc_thunk.ax",
text ".private_extern ___i686.get_pc_thunk.ax",
text "___i686.get_pc_thunk.ax:",
text "\tmovl (%esp), %eax",
text "\tret" ]
pprGotDeclaration _ _ OSDarwin
= empty
-- PPC 64 ELF v1 needs a Table Of Contents (TOC) on Linux
pprGotDeclaration _ (ArchPPC_64 ELF_V1) OSLinux
= text ".section \".toc\",\"aw\""
-- In ELF v2 we also need to tell the assembler that we want ABI
-- version 2. This would normally be done at the top of the file
-- right after a file directive, but I could not figure out how
-- to do that.
pprGotDeclaration _ (ArchPPC_64 ELF_V2) OSLinux
= vcat [ text ".abiversion 2",
text ".section \".toc\",\"aw\""
]
pprGotDeclaration _ (ArchPPC_64 _) _
= panic "pprGotDeclaration: ArchPPC_64 only Linux supported"
-- Emit GOT declaration
-- Output whatever needs to be output once per .s file.
pprGotDeclaration dflags arch os
| osElfTarget os
, arch /= ArchPPC_64 ELF_V1 && arch /= ArchPPC_64 ELF_V2
, not (gopt Opt_PIC dflags)
= empty
| osElfTarget os
, arch /= ArchPPC_64 ELF_V1 && arch /= ArchPPC_64 ELF_V2
= vcat [
-- See Note [.LCTOC1 in PPC PIC code]
text ".section \".got2\",\"aw\"",
text ".LCTOC1 = .+32768" ]
pprGotDeclaration _ _ _
= panic "pprGotDeclaration: no match"
--------------------------------------------------------------------------------
-- On Darwin, we have to generate our own stub code for lazy binding.
-- For each processor architecture, there are two versions, one for PIC
-- and one for non-PIC.
--
-- Whenever you change something in this assembler output, make sure
-- the splitter in driver/split/ghc-split.lprl recognizes the new output
pprImportedSymbol :: DynFlags -> Platform -> CLabel -> SDoc
pprImportedSymbol dflags platform@(Platform { platformArch = ArchPPC, platformOS = OSDarwin }) importedLbl
| Just (CodeStub, lbl) <- dynamicLinkerLabelInfo importedLbl
= case gopt Opt_PIC dflags of
False ->
vcat [
text ".symbol_stub",
text "L" <> pprCLabel platform lbl <> ptext (sLit "$stub:"),
text "\t.indirect_symbol" <+> pprCLabel platform lbl,
text "\tlis r11,ha16(L" <> pprCLabel platform lbl
<> text "$lazy_ptr)",
text "\tlwz r12,lo16(L" <> pprCLabel platform lbl
<> text "$lazy_ptr)(r11)",
text "\tmtctr r12",
text "\taddi r11,r11,lo16(L" <> pprCLabel platform lbl
<> text "$lazy_ptr)",
text "\tbctr"
]
True ->
vcat [
text ".section __TEXT,__picsymbolstub1,"
<> text "symbol_stubs,pure_instructions,32",
text "\t.align 2",
text "L" <> pprCLabel platform lbl <> ptext (sLit "$stub:"),
text "\t.indirect_symbol" <+> pprCLabel platform lbl,
text "\tmflr r0",
text "\tbcl 20,31,L0$" <> pprCLabel platform lbl,
text "L0$" <> pprCLabel platform lbl <> char ':',
text "\tmflr r11",
text "\taddis r11,r11,ha16(L" <> pprCLabel platform lbl
<> text "$lazy_ptr-L0$" <> pprCLabel platform lbl <> char ')',
text "\tmtlr r0",
text "\tlwzu r12,lo16(L" <> pprCLabel platform lbl
<> text "$lazy_ptr-L0$" <> pprCLabel platform lbl
<> text ")(r11)",
text "\tmtctr r12",
text "\tbctr"
]
$+$ vcat [
text ".lazy_symbol_pointer",
text "L" <> pprCLabel platform lbl <> ptext (sLit "$lazy_ptr:"),
text "\t.indirect_symbol" <+> pprCLabel platform lbl,
text "\t.long dyld_stub_binding_helper"]
| Just (SymbolPtr, lbl) <- dynamicLinkerLabelInfo importedLbl
= vcat [
text ".non_lazy_symbol_pointer",
char 'L' <> pprCLabel platform lbl <> text "$non_lazy_ptr:",
text "\t.indirect_symbol" <+> pprCLabel platform lbl,
text "\t.long\t0"]
| otherwise
= empty
pprImportedSymbol dflags platform@(Platform { platformArch = ArchX86, platformOS = OSDarwin }) importedLbl
| Just (CodeStub, lbl) <- dynamicLinkerLabelInfo importedLbl
= case gopt Opt_PIC dflags of
False ->
vcat [
text ".symbol_stub",
text "L" <> pprCLabel platform lbl <> ptext (sLit "$stub:"),
text "\t.indirect_symbol" <+> pprCLabel platform lbl,
text "\tjmp *L" <> pprCLabel platform lbl
<> text "$lazy_ptr",
text "L" <> pprCLabel platform lbl
<> text "$stub_binder:",
text "\tpushl $L" <> pprCLabel platform lbl
<> text "$lazy_ptr",
text "\tjmp dyld_stub_binding_helper"
]
True ->
vcat [
text ".section __TEXT,__picsymbolstub2,"
<> text "symbol_stubs,pure_instructions,25",
text "L" <> pprCLabel platform lbl <> ptext (sLit "$stub:"),
text "\t.indirect_symbol" <+> pprCLabel platform lbl,
text "\tcall ___i686.get_pc_thunk.ax",
text "1:",
text "\tmovl L" <> pprCLabel platform lbl
<> text "$lazy_ptr-1b(%eax),%edx",
text "\tjmp *%edx",
text "L" <> pprCLabel platform lbl
<> text "$stub_binder:",
text "\tlea L" <> pprCLabel platform lbl
<> text "$lazy_ptr-1b(%eax),%eax",
text "\tpushl %eax",
text "\tjmp dyld_stub_binding_helper"
]
$+$ vcat [ text ".section __DATA, __la_sym_ptr"
<> (if gopt Opt_PIC dflags then int 2 else int 3)
<> text ",lazy_symbol_pointers",
text "L" <> pprCLabel platform lbl <> ptext (sLit "$lazy_ptr:"),
text "\t.indirect_symbol" <+> pprCLabel platform lbl,
text "\t.long L" <> pprCLabel platform lbl
<> text "$stub_binder"]
| Just (SymbolPtr, lbl) <- dynamicLinkerLabelInfo importedLbl
= vcat [
text ".non_lazy_symbol_pointer",
char 'L' <> pprCLabel platform lbl <> text "$non_lazy_ptr:",
text "\t.indirect_symbol" <+> pprCLabel platform lbl,
text "\t.long\t0"]
| otherwise
= empty
pprImportedSymbol _ (Platform { platformOS = OSDarwin }) _
= empty
-- ELF / Linux
--
-- In theory, we don't need to generate any stubs or symbol pointers
-- by hand for Linux.
--
-- Reality differs from this in two areas.
--
-- 1) If we just use a dynamically imported symbol directly in a read-only
-- section of the main executable (as GCC does), ld generates R_*_COPY
-- relocations, which are fundamentally incompatible with reversed info
-- tables. Therefore, we need a table of imported addresses in a writable
-- section.
-- The "official" GOT mechanism (label@got) isn't intended to be used
-- in position dependent code, so we have to create our own "fake GOT"
-- when not Opt_PIC && WayDyn `elem` ways dflags.
--
-- 2) PowerPC Linux is just plain broken.
-- While it's theoretically possible to use GOT offsets larger
-- than 16 bit, the standard crt*.o files don't, which leads to
-- linker errors as soon as the GOT size exceeds 16 bit.
-- Also, the assembler doesn't support @gotoff labels.
-- In order to be able to use a larger GOT, we have to circumvent the
-- entire GOT mechanism and do it ourselves (this is also what GCC does).
-- When needImportedSymbols is defined,
-- the NCG will keep track of all DynamicLinkerLabels it uses
-- and output each of them using pprImportedSymbol.
pprImportedSymbol _ platform@(Platform { platformArch = ArchPPC_64 _ })
importedLbl
| osElfTarget (platformOS platform)
= case dynamicLinkerLabelInfo importedLbl of
Just (SymbolPtr, lbl)
-> vcat [
text ".section \".toc\", \"aw\"",
text ".LC_" <> pprCLabel platform lbl <> char ':',
text "\t.quad" <+> pprCLabel platform lbl ]
_ -> empty
pprImportedSymbol dflags platform importedLbl
| osElfTarget (platformOS platform)
= case dynamicLinkerLabelInfo importedLbl of
Just (SymbolPtr, lbl)
-> let symbolSize = case wordWidth dflags of
W32 -> sLit "\t.long"
W64 -> sLit "\t.quad"
_ -> panic "Unknown wordRep in pprImportedSymbol"
in vcat [
text ".section \".got2\", \"aw\"",
text ".LC_" <> pprCLabel platform lbl <> char ':',
ptext symbolSize <+> pprCLabel platform lbl ]
-- PLT code stubs are generated automatically by the dynamic linker.
_ -> empty
pprImportedSymbol _ _ _
= panic "PIC.pprImportedSymbol: no match"
--------------------------------------------------------------------------------
-- Generate code to calculate the address that should be put in the
-- PIC base register.
-- This is called by MachCodeGen for every CmmProc that accessed the
-- PIC base register. It adds the appropriate instructions to the
-- top of the CmmProc.
-- It is assumed that the first NatCmmDecl in the input list is a Proc
-- and the rest are CmmDatas.
-- Darwin is simple: just fetch the address of a local label.
-- The FETCHPC pseudo-instruction is expanded to multiple instructions
-- during pretty-printing so that we don't have to deal with the
-- local label:
-- PowerPC version:
-- bcl 20,31,1f.
-- 1: mflr picReg
-- i386 version:
-- call 1f
-- 1: popl %picReg
-- Get a pointer to our own fake GOT, which is defined on a per-module basis.
-- This is exactly how GCC does it on Linux.
initializePicBase_ppc
:: Arch -> OS -> Reg
-> [NatCmmDecl CmmStatics PPC.Instr]
-> NatM [NatCmmDecl CmmStatics PPC.Instr]
initializePicBase_ppc ArchPPC os picReg
(CmmProc info lab live (ListGraph blocks) : statics)
| osElfTarget os
= do
let
gotOffset = PPC.ImmConstantDiff
(PPC.ImmCLbl gotLabel)
(PPC.ImmCLbl mkPicBaseLabel)
blocks' = case blocks of
[] -> []
(b:bs) -> fetchPC b : map maybeFetchPC bs
maybeFetchPC b@(BasicBlock bID _)
| bID `mapMember` info = fetchPC b
| otherwise = b
-- GCC does PIC prologs thusly:
-- bcl 20,31,.L1
-- .L1:
-- mflr 30
-- addis 30,30,.LCTOC1-.L1@ha
-- addi 30,30,.LCTOC1-.L1@l
-- TODO: below we use it over temporary register,
-- it can and should be optimised by picking
-- correct PIC reg.
fetchPC (BasicBlock bID insns) =
BasicBlock bID (PPC.FETCHPC picReg
: PPC.ADDIS picReg picReg (PPC.HA gotOffset)
: PPC.ADDI picReg picReg (PPC.LO gotOffset)
: PPC.MR PPC.r30 picReg
: insns)
return (CmmProc info lab live (ListGraph blocks') : statics)
initializePicBase_ppc ArchPPC OSDarwin picReg
(CmmProc info lab live (ListGraph (entry:blocks)) : statics) -- just one entry because of splitting
= return (CmmProc info lab live (ListGraph (b':blocks)) : statics)
where BasicBlock bID insns = entry
b' = BasicBlock bID (PPC.FETCHPC picReg : insns)
-------------------------------------------------------------------------
-- Load TOC into register 2
-- PowerPC 64-bit ELF ABI 2.0 requires the address of the callee
-- in register 12.
-- We pass the label to FETCHTOC and create a .localentry too.
-- TODO: Explain this better and refer to ABI spec!
{-
We would like to do approximately this, but spill slot allocation
might be added before the first BasicBlock. That violates the ABI.
For now we will emit the prologue code in the pretty printer,
which is also what we do for ELF v1.
initializePicBase_ppc (ArchPPC_64 ELF_V2) OSLinux picReg
(CmmProc info lab live (ListGraph (entry:blocks)) : statics)
= do
bID <-getUniqueM
return (CmmProc info lab live (ListGraph (b':entry:blocks))
: statics)
where BasicBlock entryID _ = entry
b' = BasicBlock bID [PPC.FETCHTOC picReg lab,
PPC.BCC PPC.ALWAYS entryID]
-}
initializePicBase_ppc _ _ _ _
= panic "initializePicBase_ppc: not needed"
-- We cheat a bit here by defining a pseudo-instruction named FETCHGOT
-- which pretty-prints as:
-- call 1f
-- 1: popl %picReg
-- addl __GLOBAL_OFFSET_TABLE__+.-1b, %picReg
-- (See PprMach.hs)
initializePicBase_x86
:: Arch -> OS -> Reg
-> [NatCmmDecl (Alignment, CmmStatics) X86.Instr]
-> NatM [NatCmmDecl (Alignment, CmmStatics) X86.Instr]
initializePicBase_x86 ArchX86 os picReg
(CmmProc info lab live (ListGraph blocks) : statics)
| osElfTarget os
= return (CmmProc info lab live (ListGraph blocks') : statics)
where blocks' = case blocks of
[] -> []
(b:bs) -> fetchGOT b : map maybeFetchGOT bs
-- we want to add a FETCHGOT instruction to the beginning of
-- every block that is an entry point, which corresponds to
-- the blocks that have entries in the info-table mapping.
maybeFetchGOT b@(BasicBlock bID _)
| bID `mapMember` info = fetchGOT b
| otherwise = b
fetchGOT (BasicBlock bID insns) =
BasicBlock bID (X86.FETCHGOT picReg : insns)
initializePicBase_x86 ArchX86 OSDarwin picReg
(CmmProc info lab live (ListGraph (entry:blocks)) : statics)
= return (CmmProc info lab live (ListGraph (block':blocks)) : statics)
where BasicBlock bID insns = entry
block' = BasicBlock bID (X86.FETCHPC picReg : insns)
initializePicBase_x86 _ _ _ _
= panic "initializePicBase_x86: not needed"
|
mcschroeder/ghc
|
compiler/nativeGen/PIC.hs
|
bsd-3-clause
| 32,682 | 0 | 20 | 9,917 | 4,670 | 2,374 | 2,296 | 409 | 8 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ru-RU">
<title>GraalVM JavaScript</title>
<maps>
<homeID>graaljs</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Содержание</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Индекс</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Поиск</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Избранное</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/graaljs/src/main/javahelp/help_ru_RU/helpset_ru_RU.hs
|
apache-2.0
| 999 | 77 | 66 | 156 | 463 | 232 | 231 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="hi-IN">
<title>Sequence Scanner | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
kingthorin/zap-extensions
|
addOns/sequence/src/main/javahelp/org/zaproxy/zap/extension/sequence/resources/help_hi_IN/helpset_hi_IN.hs
|
apache-2.0
| 977 | 78 | 66 | 159 | 413 | 209 | 204 | -1 | -1 |
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Foo where
import Data.Kind
import Data.Proxy
type family T1 (x :: f (a :: Type))
class C (a :: Type) where
type T2 (x :: f a)
class C2 (a :: Type) (b :: Proxy a) (c :: Proxy b) where
type T3 (x :: Proxy '(a, (c :: Proxy b)))
-- NB: we have to put (c :: Proxy b) so that 'b' is Specified
-- in the kind of T3; else 'b' is Inferred and comes
-- first, which is ill-scoped
-- no CUSK
class C3 (a :: Type) (b :: Proxy a) (c :: Proxy b) d where
type T4 (x :: Proxy '(a, (c :: Proxy b)))
-- Ditto to T3
class C4 (a :: Type) b where
type T5 (x :: f a)
class C5 a where
type T6 (x :: f a)
|
sdiehl/ghc
|
testsuite/tests/ghci/scripts/T15591.hs
|
bsd-3-clause
| 756 | 0 | 13 | 200 | 260 | 153 | 107 | -1 | -1 |
{-@ LIQUID "--short-names" @-}
{-@ LIQUID "--no-warnings" @-}
{-@ LIQUID "--no-termination" @-}
{-@ LIQUID "--smtsolver=cvc4" @-}
module Refinements where
import Prelude hiding (abs)
divide :: Int -> Int -> Int
-----------------------------------------------------------------------
-- | Simple Refinement Types
-----------------------------------------------------------------------
{-@ six :: {v:Int | v = 6} @-}
six = 6 :: Int
-----------------------------------------------------------------------
-- | Type Aliases are nice, we're gonna be liberal in our use of them
-----------------------------------------------------------------------
{-@ type Nat = {v:Int | v >= 0} @-}
{-@ type Pos = {v:Int | v > 0} @-}
{-@ type NonZero = {v:Int | v /= 0} @-}
-----------------------------------------------------------------------
-- | Subtyping via Implication
-----------------------------------------------------------------------
{-@ six' :: NonZero @-}
six' = six
-- {v:Int | v = 6} <: {v:Int | v /= 0}
-- ==>
-- v = 6 => v /= 0
-----------------------------------------------------------------------
-- | Function Contracts: Preconditions & Dead Code
-----------------------------------------------------------------------
{-@ die :: {v:_ | false} -> a @-}
die msg = error msg
-- Precondition says, there are **NO** valid inputs for @die@.
-- If program type-checks, means @die@ is **NEVER** called at run-time.
-----------------------------------------------------------------------
-- | Function Contracts: Safe Division
-----------------------------------------------------------------------
divide x 0 = die "divide-by-zero"
divide x n = x `div` n
-- | What's the problem above? Nothing to *prevent*
-- us from calling `divide` with 0. Oops.
-- How shall we fix it?
avg2 x y = divide (x + y) 2
avg3 x y z = divide (x + y + z) 3
-----------------------------------------------------------------------
-- | But what's the problem here?
-----------------------------------------------------------------------
avg xs = divide total n
where
total = sum xs
n = length xs
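-- A possible fix (a sketch, not part of the original slides): rule out the
-- empty list so that `length xs` is provably non-zero, e.g.
--
-- {-@ avg :: {v:[Int] | len v > 0} -> Int @-}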
--------------------------------------------------------------
-- | CHEAT AREA ----------------------------------------------
--------------------------------------------------------------
-- # START Errors 1 (divide)
-- # END Errors 1 (avg)
{- divide :: Int -> NonZero -> Int @-}
|
ssaavedra/liquidhaskell
|
docs/slides/Galois2014/000_Refinements.hs
|
bsd-3-clause
| 2,490 | 0 | 8 | 436 | 202 | 126 | 76 | 13 | 1 |
import Control.Monad
import Foreign
import Foreign.Ptr
type CInt = Int32
type CSize = Word32
foreign import ccall "wrapper"
mkComparator :: (Ptr Int -> Ptr Int -> IO CInt)
-> IO (Ptr (Ptr Int -> Ptr Int -> IO CInt))
foreign import ccall
qsort :: Ptr Int -> CSize -> CSize -> Ptr (Ptr Int -> Ptr Int -> IO CInt)
-> IO ()
compareInts :: Ptr Int -> Ptr Int -> IO CInt
compareInts a1 a2 = do
i1 <- peek a1
i2 <- peek a2
return (fromIntegral (i1 - i2 :: Int))
main :: IO ()
main = do
let values = [ 12, 56, 90, 34, 78 ] :: [Int]
n = length values
buf <- mallocArray n
zipWithM_ (pokeElemOff buf) [ 0 .. ] values
c <- mkComparator compareInts
qsort buf (fromIntegral n) (fromIntegral (sizeOf (head values))) c
mapM (peekElemOff buf) [ 0 .. n-1 ] >>= (print :: [Int] -> IO ())
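-- Since compareInts orders ascending, the expected output of this test is
--   [12,34,56,78,90]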
|
urbanslug/ghc
|
testsuite/tests/ffi/should_run/fed001.hs
|
bsd-3-clause
| 827 | 21 | 11 | 213 | 381 | 199 | 182 | 25 | 1 |