code (string, lengths 5 to 1.03M) | repo_name (string, lengths 5 to 90) | path (string, lengths 4 to 158) | license (string, 15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-
Defines the basic data types such as sorts, terms and types.
See Chapters 2 and 4.
Includes functions needed to make instances of Show, as well as
common definitions used to construct and manipulate formulas.
-}
module HOCHC.DataTypes where
import Data.Maybe(fromJust,fromMaybe)
import Data.List
import Data.Char(toLower)
import Control.Applicative(liftA2)
--Datatype definitions
---------------
data Sort = Arrow Sort Sort
| Int | Bool
deriving (Eq)
instance Show Sort where
show = prns
type Variable = String
type Constant = String
data Term = Variable Variable
| Constant Constant
| Apply Term Term
| If Term Term Term -- condition, then, else
--those below are not currently supported as input
| Lambda Variable Sort Term
| ExistsT Variable MonoType Term --Type guards (Section 4.3)
deriving (Eq)
instance Show Term where
show = prnt
type Definition = (Variable, [Variable], Term)
--Note that a monotype is simply called a type in the report
data MonoType = ArrowT Variable MonoType MonoType
| IntT | BoolT Term
deriving (Eq)
instance Show MonoType where
show = prnty
type DeltaEnv = [(Variable,Sort)] --sort environment
type Gamma = [(Variable,MonoType)] --type environment
--functions for working with DataTypes
---------------
--Gives the sort corresponding to a monotype
flat :: MonoType -> Sort
flat (ArrowT _ a b) = Arrow (flat a) (flat b)
flat IntT = Int
flat (BoolT _) = Bool
-- replaces all unbound occurrences of a variable in a MonoType
-- this affects instances of Bool<t>
replaceInMT :: [(Variable,Term)] -> MonoType -> MonoType
replaceInMT rs IntT = IntT
replaceInMT rs (BoolT t) = BoolT (replaceInTerm rs t)
replaceInMT rs (ArrowT x t1 t2) = --may cause problems if a variable in rs becomes bound
ArrowT x (replaceInMT rs t1) (replaceInMT rs_ t2)
where rs_ = filter (\(a,b) -> a/=x) rs
replaceInTerm :: [(Variable,Term)] -> Term -> Term
replaceInTerm rs (Apply t1 t2) = Apply (replaceInTerm rs t1) (replaceInTerm rs t2)
replaceInTerm rs (Lambda x s t) = --may cause problems if a variable in rs becomes bound
Lambda x s (replaceInTerm (filter (\(a,b) -> a/=x) rs) t)
replaceInTerm rs (Variable v) = fromMaybe (Variable v) (lookup v rs)
replaceInTerm rs (Constant c) = (Constant c)
-- separate leading quantifiers from a term
getQuants :: Term -> ([(Variable,Sort)],Term)
getQuants (Apply (Constant "∀") (Lambda x s t)) = ((x,s):vss, t1) where (vss,t1) = getQuants t
getQuants x = ([],x)
--List the free variables in a term
freeVars :: Term -> [Variable]
freeVars (Variable x) = [x]
freeVars (Constant _) = []
freeVars (Apply t1 t2) = union (freeVars t1) (freeVars t2)
freeVars (Lambda x s t) = filter (/=x) $ freeVars t
freeVarsOfTy :: MonoType -> [Variable]
freeVarsOfTy = freeVarsOfTy' []
freeVarsOfTy' :: [Variable] -> MonoType -> [Variable]
freeVarsOfTy' xs IntT = []
freeVarsOfTy' xs (BoolT t) = freeVars t \\ xs
freeVarsOfTy' xs (ArrowT x t1 t2) = union x1s x2s
where x1s = freeVarsOfTy' xs t1
x2s = freeVarsOfTy' (x:xs) t2
--Symbols
---------------
logicalConstants = ["true","false"]
--assignmentOp = ":="
logicalUnary = ["¬"]
logicalBinary = ["⇒","∨","∧","⇔"]
quantifiers = ["λ"]
logicalQuantifiers = ["∃","∀"]
logicalSymbols = logicalUnary ++ logicalBinary ++ logicalConstants ++ logicalQuantifiers
constants = "assert":logicalConstants
ilaOps = ["+","-"]
ilaRels = [">=","<=",">","<", "=", "≠"]
binaryOps = ilaOps++ilaRels++logicalBinary
isIlaSymbol :: String -> Bool
isIlaSymbol = liftA2 (||) (`elem` (ilaOps++ilaRels++logicalConstants)) isIntegerConstant
isIntegerConstant :: String -> Bool
isIntegerConstant = all (`elem` ['0'..'9'])
-- is the symbol a non-relational ILA symbol?
isIlaFn :: String -> Bool
isIlaFn = liftA2 (||) (`elem` (ilaOps)) isIntegerConstant
baseEnv = zip logicalBinary (repeat (Arrow Bool . Arrow Bool $ Bool)) ++
zip logicalUnary (repeat (Arrow Bool Bool)) ++
zip logicalConstants (repeat Bool)
ilaEnv :: [(Constant,Sort)] --DeltaEnv
ilaEnv = zip ilaOps (repeat (Arrow Int . Arrow Int $ Int)) ++
zip ilaRels (repeat (Arrow Int . Arrow Int $ Bool)) ++
baseEnv
mainEnv = [("assert", Arrow Bool Bool)] ++ ilaEnv
--printing functions
---------------
--prints in detail
printt :: Term -> String
printt (Variable v) = v
printt (Constant c) = c
printt (Apply t1 t2) = '(' : printt t1 ++" "++ printt t2 ++ ")"
printt (Lambda v s t) = 'λ' : v++":"++prints s++"."++ printt t
prints :: Sort -> String
prints (Arrow a b) = '(' : prints a++ "->" ++ prints b ++ ")"
prints x = prns x
--somewhat inefficient
prns = map toLower . prnS
--prints nicely
prnS :: Sort -> String
prnS = prnS' False
prnS' :: Bool -> Sort -> String
prnS' _ Int = "Int"
prnS' _ Bool = "Bool"
prnS' True x = parise $ prnS' False x
prnS' False (Arrow a b) = prnS' True a++"->"++prnS' False b
prnt :: Term -> String
prnt t = prnt' 0 0 t
parise :: String->String
parise s = "("++s++")"
prnt' :: Int -> Int -> Term -> String
prnt' lp rp (Apply (Apply (Constant c) t1) t2)
| c `elem` binaryOps = (\ (f,l,r) -> f $ prnt' l p t1++" "++c++" "++prnt' p r t2)
(if p<=lp || p<rp then (parise,0,0) else (id,lp,rp))
where p = fromJust $ lookup c getprec
prnt' lp rp (Apply (Constant c) t)
| c `elem` logicalUnary = (let p=fromJust $ lookup c getprec in
if p<rp then parise (c++prnt' p 0 t)
else c++prnt' p rp t)
| c `elem` logicalQuantifiers = case t of
(Lambda a s body) -> (if rp==0 then id else parise) $
(c++a++":"++prns s++". "++prnt' 0 0 body)
_ -> error "bad quantifier"
prnt' lp rp (Lambda a s body) = (if rp==0 then id else parise) $
("λ"++a++":"++prns s++"."++prnt' 0 0 body)
prnt' lp rp (Variable v) = v
prnt' lp rp (Constant c) = c
prnt' lp rp (Apply a b) = if maxPrec<=lp
then parise (prnt' 0 maxPrec a ++ " " ++prnt' maxPrec 0 b)
else prnt' lp maxPrec a ++ " " ++prnt' maxPrec rp b
prnt' lp rp (If cond thn els) = (if rp==0 then id else parise) $
"if "++prnt' 0 0 cond
++" then "++prnt' 0 0 thn
++" else "++prnt' 0 0 els
prnty :: MonoType -> String
prnty = prnty' False
prnty' _ IntT = "Int"
prnty' _ (BoolT s) = "Bool["++prnt s++"]"
prnty' b (ArrowT "_" t1 t2) =
(if b then parise else id) (prnty' True t1 ++ "->" ++ prnty' False t2)
prnty' b (ArrowT x t1 t2) =
(if b then parise else id) (x++":"++prnty' True t1 ++ "->" ++ prnty' False t2)
opsByPrec = map return logicalBinary ++ [logicalUnary,ilaRels,ilaOps]
getprec = getprec' 1 opsByPrec
getprec' n [] = []
getprec' n (ops:rest) = map (flip (,) n) ops ++ getprec' (n+1) rest
getprec2 = foldl (++) [] (map (uncurry $ map. flip (,)) (zip [1..] opsByPrec))
maxPrec = length opsByPrec + 1
--apply a (typically recursive) function uniformly across unchecked cases
appdown :: (Term -> Term) -> Term -> Term
appdown f (Lambda v s t) = (Lambda v s (f t))
appdown f (Apply a b) = Apply (f a) (f b)
appdown f (ExistsT v ty t) = (ExistsT v ty (f t))
appdown f (If c t1 t2) = If (f c) (f t1) (f t2)
appdown f t = t --Variable or Constant
-- Helpful constructors (apply 'and', apply 'or', etc)
aand t1 t2 = (Apply (Apply (Constant "∧") t1) t2)
aor t1 t2 = (Apply (Apply (Constant "∨") t1) t2)
aforall x s t = (Apply (Constant "∀") (Lambda x s t))
aimplies t1 t2 = (Apply (Apply (Constant "⇒") t1) t2)
aequals t1 t2 = (Apply (Apply (Constant "=") t1) t2)
aexists x s t = (Apply (Constant "∃") (Lambda x s t))
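-- Illustrative example (an editorial addition, not part of the original file):
-- building the formula ∀x:int. x = x with the helper constructors above and
-- rendering it with the printer defined in this module.
exampleFormula :: Term
exampleFormula = aforall "x" Int (aequals (Variable "x") (Variable "x"))
-- prnt exampleFormula == "∀x:int. x = x"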
| penteract/HigherOrderHornRefinement | HOCHC/DataTypes.hs | bsd-3-clause | 7,789 | 0 | 15 | 1,846 | 3,110 | 1,644 | 1,466 | 155 | 8 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
module Instrument.Utils
( formatDecimal,
formatInt,
showT,
showBS,
collect,
noDots,
encodeCompress,
decodeCompress,
indefinitely,
seconds,
milliseconds,
for,
)
where
-------------------------------------------------------------------------------
import Codec.Compression.GZip
import Control.Applicative ((<|>))
import Control.Concurrent (threadDelay)
import Control.Exception (SomeException)
import Control.Monad
import Control.Monad.Catch (Handler (..))
import Control.Retry
import qualified Data.ByteString.Char8 as B
import Data.ByteString.Lazy (fromStrict, toStrict)
import qualified Data.Map as M
import qualified Data.Map.Strict as MS
import qualified Data.SafeCopy as SC
import Data.Serialize
import Data.Text (Text)
import qualified Data.Text as T
import Numeric
import System.IO
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
collect ::
(Ord b) =>
[a] ->
(a -> b) ->
(a -> c) ->
M.Map b [c]
collect as mkKey mkVal = foldr step M.empty as
where
step x acc = MS.insertWith (++) (mkKey x) ([mkVal x]) acc
-------------------------------------------------------------------------------
noDots :: Text -> Text
noDots = T.intercalate "_" . T.splitOn "."
-------------------------------------------------------------------------------
showT :: Show a => a -> Text
showT = T.pack . show
showBS :: Show a => a -> B.ByteString
showBS = B.pack . show
-------------------------------------------------------------------------------
formatInt :: RealFrac a => a -> Text
formatInt i = showT ((floor i) :: Int)
-------------------------------------------------------------------------------
formatDecimal ::
RealFloat a =>
-- | Digits after the point
Int ->
-- | Add thousands sep?
Bool ->
-- | Number
a ->
Text
formatDecimal n th i =
let res = T.pack . showFFloat (Just n) i $ ""
in if th then addThousands res else res
-------------------------------------------------------------------------------
addThousands :: Text -> Text
addThousands t = T.concat [n', dec]
where
(n, dec) = T.span (/= '.') t
n' = T.reverse . T.intercalate "," . T.chunksOf 3 . T.reverse $ n
-------------------------------------------------------------------------------
-- | Serialize and compress with GZip in that order. This is the only
-- function we use for serializing to Redis.
encodeCompress :: SC.SafeCopy a => a -> B.ByteString
encodeCompress = toStrict . compress . runPutLazy . SC.safePut
-------------------------------------------------------------------------------
-- | Decompress from GZip and deserialize, in that order. Tries to
-- decode with SafeCopy first and falls back to Serialize if that fails,
-- to account for old data. Note that encodeCompress only serializes to
-- SafeCopy, so writes will be updated.
decodeCompress :: (SC.SafeCopy a, Serialize a) => B.ByteString -> Either String a
decodeCompress = decodeWithFallback . decompress . fromStrict
where
decodeWithFallback lbs = runGetLazy SC.safeGet lbs <|> decodeLazy lbs
-------------------------------------------------------------------------------
-- | Run an IO action repeatedly with the given delay in microseconds. If
-- the inner action throws, the exception is logged to stderr, prefixed
-- with the given string context, and the action is retried with an
-- exponential backoff capped at 60 seconds between attempts.
indefinitely :: String -> Int -> IO () -> IO ()
indefinitely ctx n = forever . delayed . logAndBackoff ctx
where
delayed = (>> threadDelay n)
-------------------------------------------------------------------------------
logAndBackoff :: String -> IO () -> IO ()
logAndBackoff ctx = recovering policy [h] . const
where
policy = capDelay (seconds 60) (exponentialBackoff (milliseconds 50))
h _ = Handler (\e -> logError e >> return True)
logError :: SomeException -> IO ()
logError e = hPutStrLn stderr msg
where
msg = "Caught exception in " ++ ctx ++ ": " ++ show e ++ ". Retrying..."
-------------------------------------------------------------------------------
-- | Convert seconds to microseconds
seconds :: Int -> Int
seconds = (* milliseconds 1000)
-------------------------------------------------------------------------------
-- | Convert milliseconds to microseconds
milliseconds :: Int -> Int
milliseconds = (* 1000)
-------------------------------------------------------------------------------
for :: (Functor f) => f a -> (a -> b) -> f b
for = flip fmap
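-- Illustrative examples (an editorial addition, not part of the original file):
--
-- >>> collect ([("a",1),("a",2),("b",3)] :: [(String,Int)]) fst snd
-- fromList [("a",[1,2]),("b",[3])]
--
-- >>> formatDecimal 2 True (1234567.891 :: Double)
-- "1,234,567.89"
--
-- A poller that logs and retries on failure, where pollOnce :: IO () is a
-- placeholder action:
--
-- > indefinitely "poller" (seconds 5) pollOnce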
| Soostone/instrument | instrument/src/Instrument/Utils.hs | bsd-3-clause | 4,669 | 0 | 13 | 732 | 1,031 | 572 | 459 | 82 | 2 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
module Scheduler.Lib
( someFunc
, jobTable
, createJob
, header
, runJobsButton
, scheduleInput
, pureButton
) where
import Lucid
import ClassyPrelude hiding (for_)
import Control.Arrow
import Scheduler.Types
import Control.Lens
someFunc :: IO ()
someFunc = putStrLn "someFunc"
jobTable :: JobQueue a -> Html ()
jobTable jobs = do
table_ [class_ "pure-table"] $ do
thead_ $ tr_ $ do
th_ "#"
th_ "Job Name"
th_ "Status"
th_ mempty
tbody_ $ mapM_ (tr_ . dispJob) $ zip [1..] (jobs ^. qJobs)
addJobButton
where
dispJob (n, job) = td_ (toHtml $ tshow n) <> td_ (toHtml $ job ^. jobName) <> (td_ $ toHtml $ tshow $ job ^. jobStatus) <> (td_ $ modLinks n)
modLinks :: Int -> Html ()
modLinks n = do
a_ [href_ ("/remJob?idx=" <> tshow (n-1))] $ img_ [src_ "icon/remove"]
createJob :: Html ()
createJob = form_ [class_ "pure-form pure-form-stacked", action_ "/addJob", method_ "post", enctype_ "application/json"] $ fieldset_ $ do
legend_ "Add a job to the queue"
label_ [for_ "job-val"] "Config File Path"
input_ [name_ "job-val", type_ "text", class_ "pure-u-23-24"]
button_ [type_ "submit", class_ "pure-button pure-button-primary"] "Add"
header :: Html ()
header = head_ $ link_ [rel_ "stylesheet", href_ "http://yui.yahooapis.com/pure/0.6.0/pure-min.css"]
addJobButton :: Html ()
addJobButton = a_ [class_ "pure-button pure-button-primary", href_ "/new"] "Add Job"
runJobsButton :: Html ()
runJobsButton = a_ [class_ "pure-button", href_ "/runJobs"] "Run Jobs Now"
dryRunButton :: Html ()
dryRunButton = a_ [class_ "pure-button", href_ "/dryRun"] "Dry Run"
scheduleInput :: Html ()
scheduleInput = do
form_ [class_ "pure-form pure-form-stacked", action_ "/schedule", method_ "post", enctype_ "application/json"] $
fieldset_ $ do
label_ [for_ "when"] "Start jobs at:"
input_ [id_ "when", name_ "date", type_ "date"]
input_ [id_ "when", name_ "time", type_ "time"]
button_ [type_ "submit", class_ "pure-button pure-button-primary"] "Schedule"
pureButton :: Term [Attribute] result => Text -> result
pureButton ref = a_ [class_ "pure-button pure-button-error", href_ ref]
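-- Illustrative example (an editorial addition, not part of the original file):
-- the fragments above render to markup with Lucid's renderText; output shown
-- approximately:
--
-- >>> renderText (pureButton "/stop" "Stop")
-- "<a class=\"pure-button pure-button-error\" href=\"/stop\">Stop</a>"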
| limaner2002/EPC-tools | scheduler-ui/src/Scheduler/Lib.hs | bsd-3-clause | 2,303 | 0 | 17 | 445 | 763 | 376 | 387 | 56 | 1 |
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE Strict #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Graphics.Vulkan.DeviceInitialization where
import Graphics.Vulkan.Device( VkPhysicalDeviceFeatures(..)
, VkPhysicalDevice(..)
, VkDevice(..)
)
import Data.Word( Word8
, Word64
, Word32
)
import Foreign.Ptr( Ptr
, FunPtr
, plusPtr
)
import Data.Int( Int32
)
import Data.Vector.Fixed.Cont( ToPeano
)
import Data.Bits( Bits
, FiniteBits
)
import Foreign.Storable( Storable(..)
)
import Graphics.Vulkan.Constants( VK_MAX_PHYSICAL_DEVICE_NAME_SIZE
, VK_MAX_MEMORY_HEAPS
, VK_UUID_SIZE
, VK_MAX_MEMORY_TYPES
)
import Data.Void( Void
)
import Graphics.Vulkan.Memory( VkInternalAllocationType(..)
, PFN_vkAllocationFunction
, PFN_vkReallocationFunction
, PFN_vkInternalAllocationNotification
, VkAllocationCallbacks(..)
, VkSystemAllocationScope(..)
, PFN_vkFreeFunction
, PFN_vkInternalFreeNotification
)
import Graphics.Vulkan.Sampler( VkSampleCountFlagBits(..)
, VkSampleCountFlags(..)
)
import Graphics.Vulkan.Image( VkImageUsageFlags(..)
, VkImageType(..)
, VkImageUsageFlagBits(..)
, VkImageCreateFlags(..)
, VkImageTiling(..)
, VkImageCreateFlagBits(..)
)
import Data.Vector.Fixed.Storable( Vec
)
import Graphics.Vulkan.Core( VkExtent3D(..)
, VkResult(..)
, VkDeviceSize(..)
, VkBool32(..)
, VkFlags(..)
, VkFormat(..)
, VkStructureType(..)
)
import Foreign.C.Types( CSize
, CFloat
, CFloat(..)
, CChar
, CSize(..)
)
-- ** VkPhysicalDeviceType
newtype VkPhysicalDeviceType = VkPhysicalDeviceType Int32
deriving (Eq, Storable)
pattern VK_PHYSICAL_DEVICE_TYPE_OTHER = VkPhysicalDeviceType 0
pattern VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU = VkPhysicalDeviceType 1
pattern VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU = VkPhysicalDeviceType 2
pattern VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU = VkPhysicalDeviceType 3
pattern VK_PHYSICAL_DEVICE_TYPE_CPU = VkPhysicalDeviceType 4
data VkInstanceCreateInfo =
VkInstanceCreateInfo{ vkSType :: VkStructureType
, vkPNext :: Ptr Void
, vkFlags :: VkInstanceCreateFlags
, vkPApplicationInfo :: Ptr VkApplicationInfo
, vkEnabledLayerCount :: Word32
, vkPpEnabledLayerNames :: Ptr (Ptr CChar)
, vkEnabledExtensionCount :: Word32
, vkPpEnabledExtensionNames :: Ptr (Ptr CChar)
}
deriving (Eq)
instance Storable VkInstanceCreateInfo where
sizeOf ~_ = 64
alignment ~_ = 8
peek ptr = VkInstanceCreateInfo <$> peek (ptr `plusPtr` 0)
<*> peek (ptr `plusPtr` 8)
<*> peek (ptr `plusPtr` 16)
<*> peek (ptr `plusPtr` 24)
<*> peek (ptr `plusPtr` 32)
<*> peek (ptr `plusPtr` 40)
<*> peek (ptr `plusPtr` 48)
<*> peek (ptr `plusPtr` 56)
poke ptr poked = poke (ptr `plusPtr` 0) (vkSType (poked :: VkInstanceCreateInfo))
*> poke (ptr `plusPtr` 8) (vkPNext (poked :: VkInstanceCreateInfo))
*> poke (ptr `plusPtr` 16) (vkFlags (poked :: VkInstanceCreateInfo))
*> poke (ptr `plusPtr` 24) (vkPApplicationInfo (poked :: VkInstanceCreateInfo))
*> poke (ptr `plusPtr` 32) (vkEnabledLayerCount (poked :: VkInstanceCreateInfo))
*> poke (ptr `plusPtr` 40) (vkPpEnabledLayerNames (poked :: VkInstanceCreateInfo))
*> poke (ptr `plusPtr` 48) (vkEnabledExtensionCount (poked :: VkInstanceCreateInfo))
*> poke (ptr `plusPtr` 56) (vkPpEnabledExtensionNames (poked :: VkInstanceCreateInfo))
-- ** vkGetPhysicalDeviceImageFormatProperties
foreign import ccall "vkGetPhysicalDeviceImageFormatProperties" vkGetPhysicalDeviceImageFormatProperties ::
VkPhysicalDevice ->
VkFormat ->
VkImageType ->
VkImageTiling ->
VkImageUsageFlags ->
VkImageCreateFlags -> Ptr VkImageFormatProperties -> IO VkResult
type PFN_vkVoidFunction = FunPtr (IO ())
data VkApplicationInfo =
VkApplicationInfo{ vkSType :: VkStructureType
, vkPNext :: Ptr Void
, vkPApplicationName :: Ptr CChar
, vkApplicationVersion :: Word32
, vkPEngineName :: Ptr CChar
, vkEngineVersion :: Word32
, vkApiVersion :: Word32
}
deriving (Eq)
instance Storable VkApplicationInfo where
sizeOf ~_ = 48
alignment ~_ = 8
peek ptr = VkApplicationInfo <$> peek (ptr `plusPtr` 0)
<*> peek (ptr `plusPtr` 8)
<*> peek (ptr `plusPtr` 16)
<*> peek (ptr `plusPtr` 24)
<*> peek (ptr `plusPtr` 32)
<*> peek (ptr `plusPtr` 40)
<*> peek (ptr `plusPtr` 44)
poke ptr poked = poke (ptr `plusPtr` 0) (vkSType (poked :: VkApplicationInfo))
*> poke (ptr `plusPtr` 8) (vkPNext (poked :: VkApplicationInfo))
*> poke (ptr `plusPtr` 16) (vkPApplicationName (poked :: VkApplicationInfo))
*> poke (ptr `plusPtr` 24) (vkApplicationVersion (poked :: VkApplicationInfo))
*> poke (ptr `plusPtr` 32) (vkPEngineName (poked :: VkApplicationInfo))
*> poke (ptr `plusPtr` 40) (vkEngineVersion (poked :: VkApplicationInfo))
*> poke (ptr `plusPtr` 44) (vkApiVersion (poked :: VkApplicationInfo))
data VkPhysicalDeviceLimits =
VkPhysicalDeviceLimits{ vkMaxImageDimension1D :: Word32
, vkMaxImageDimension2D :: Word32
, vkMaxImageDimension3D :: Word32
, vkMaxImageDimensionCube :: Word32
, vkMaxImageArrayLayers :: Word32
, vkMaxTexelBufferElements :: Word32
, vkMaxUniformBufferRange :: Word32
, vkMaxStorageBufferRange :: Word32
, vkMaxPushConstantsSize :: Word32
, vkMaxMemoryAllocationCount :: Word32
, vkMaxSamplerAllocationCount :: Word32
, vkBufferImageGranularity :: VkDeviceSize
, vkSparseAddressSpaceSize :: VkDeviceSize
, vkMaxBoundDescriptorSets :: Word32
, vkMaxPerStageDescriptorSamplers :: Word32
, vkMaxPerStageDescriptorUniformBuffers :: Word32
, vkMaxPerStageDescriptorStorageBuffers :: Word32
, vkMaxPerStageDescriptorSampledImages :: Word32
, vkMaxPerStageDescriptorStorageImages :: Word32
, vkMaxPerStageDescriptorInputAttachments :: Word32
, vkMaxPerStageResources :: Word32
, vkMaxDescriptorSetSamplers :: Word32
, vkMaxDescriptorSetUniformBuffers :: Word32
, vkMaxDescriptorSetUniformBuffersDynamic :: Word32
, vkMaxDescriptorSetStorageBuffers :: Word32
, vkMaxDescriptorSetStorageBuffersDynamic :: Word32
, vkMaxDescriptorSetSampledImages :: Word32
, vkMaxDescriptorSetStorageImages :: Word32
, vkMaxDescriptorSetInputAttachments :: Word32
, vkMaxVertexInputAttributes :: Word32
, vkMaxVertexInputBindings :: Word32
, vkMaxVertexInputAttributeOffset :: Word32
, vkMaxVertexInputBindingStride :: Word32
, vkMaxVertexOutputComponents :: Word32
, vkMaxTessellationGenerationLevel :: Word32
, vkMaxTessellationPatchSize :: Word32
, vkMaxTessellationControlPerVertexInputComponents :: Word32
, vkMaxTessellationControlPerVertexOutputComponents :: Word32
, vkMaxTessellationControlPerPatchOutputComponents :: Word32
, vkMaxTessellationControlTotalOutputComponents :: Word32
, vkMaxTessellationEvaluationInputComponents :: Word32
, vkMaxTessellationEvaluationOutputComponents :: Word32
, vkMaxGeometryShaderInvocations :: Word32
, vkMaxGeometryInputComponents :: Word32
, vkMaxGeometryOutputComponents :: Word32
, vkMaxGeometryOutputVertices :: Word32
, vkMaxGeometryTotalOutputComponents :: Word32
, vkMaxFragmentInputComponents :: Word32
, vkMaxFragmentOutputAttachments :: Word32
, vkMaxFragmentDualSrcAttachments :: Word32
, vkMaxFragmentCombinedOutputResources :: Word32
, vkMaxComputeSharedMemorySize :: Word32
, vkMaxComputeWorkGroupCount :: Vec (ToPeano 3) Word32
, vkMaxComputeWorkGroupInvocations :: Word32
, vkMaxComputeWorkGroupSize :: Vec (ToPeano 3) Word32
, vkSubPixelPrecisionBits :: Word32
, vkSubTexelPrecisionBits :: Word32
, vkMipmapPrecisionBits :: Word32
, vkMaxDrawIndexedIndexValue :: Word32
, vkMaxDrawIndirectCount :: Word32
, vkMaxSamplerLodBias :: CFloat
, vkMaxSamplerAnisotropy :: CFloat
, vkMaxViewports :: Word32
, vkMaxViewportDimensions :: Vec (ToPeano 2) Word32
, vkViewportBoundsRange :: Vec (ToPeano 2) CFloat
, vkViewportSubPixelBits :: Word32
, vkMinMemoryMapAlignment :: CSize
, vkMinTexelBufferOffsetAlignment :: VkDeviceSize
, vkMinUniformBufferOffsetAlignment :: VkDeviceSize
, vkMinStorageBufferOffsetAlignment :: VkDeviceSize
, vkMinTexelOffset :: Int32
, vkMaxTexelOffset :: Word32
, vkMinTexelGatherOffset :: Int32
, vkMaxTexelGatherOffset :: Word32
, vkMinInterpolationOffset :: CFloat
, vkMaxInterpolationOffset :: CFloat
, vkSubPixelInterpolationOffsetBits :: Word32
, vkMaxFramebufferWidth :: Word32
, vkMaxFramebufferHeight :: Word32
, vkMaxFramebufferLayers :: Word32
, vkFramebufferColorSampleCounts :: VkSampleCountFlags
, vkFramebufferDepthSampleCounts :: VkSampleCountFlags
, vkFramebufferStencilSampleCounts :: VkSampleCountFlags
, vkFramebufferNoAttachmentsSampleCounts :: VkSampleCountFlags
, vkMaxColorAttachments :: Word32
, vkSampledImageColorSampleCounts :: VkSampleCountFlags
, vkSampledImageIntegerSampleCounts :: VkSampleCountFlags
, vkSampledImageDepthSampleCounts :: VkSampleCountFlags
, vkSampledImageStencilSampleCounts :: VkSampleCountFlags
, vkStorageImageSampleCounts :: VkSampleCountFlags
, vkMaxSampleMaskWords :: Word32
, vkTimestampComputeAndGraphics :: VkBool32
, vkTimestampPeriod :: CFloat
, vkMaxClipDistances :: Word32
, vkMaxCullDistances :: Word32
, vkMaxCombinedClipAndCullDistances :: Word32
, vkDiscreteQueuePriorities :: Word32
, vkPointSizeRange :: Vec (ToPeano 2) CFloat
, vkLineWidthRange :: Vec (ToPeano 2) CFloat
, vkPointSizeGranularity :: CFloat
, vkLineWidthGranularity :: CFloat
, vkStrictLines :: VkBool32
, vkStandardSampleLocations :: VkBool32
, vkOptimalBufferCopyOffsetAlignment :: VkDeviceSize
, vkOptimalBufferCopyRowPitchAlignment :: VkDeviceSize
, vkNonCoherentAtomSize :: VkDeviceSize
}
deriving (Eq)
instance Storable VkPhysicalDeviceLimits where
sizeOf ~_ = 504
alignment ~_ = 8
peek ptr = VkPhysicalDeviceLimits <$> peek (ptr `plusPtr` 0)
<*> peek (ptr `plusPtr` 4)
<*> peek (ptr `plusPtr` 8)
<*> peek (ptr `plusPtr` 12)
<*> peek (ptr `plusPtr` 16)
<*> peek (ptr `plusPtr` 20)
<*> peek (ptr `plusPtr` 24)
<*> peek (ptr `plusPtr` 28)
<*> peek (ptr `plusPtr` 32)
<*> peek (ptr `plusPtr` 36)
<*> peek (ptr `plusPtr` 40)
<*> peek (ptr `plusPtr` 48)
<*> peek (ptr `plusPtr` 56)
<*> peek (ptr `plusPtr` 64)
<*> peek (ptr `plusPtr` 68)
<*> peek (ptr `plusPtr` 72)
<*> peek (ptr `plusPtr` 76)
<*> peek (ptr `plusPtr` 80)
<*> peek (ptr `plusPtr` 84)
<*> peek (ptr `plusPtr` 88)
<*> peek (ptr `plusPtr` 92)
<*> peek (ptr `plusPtr` 96)
<*> peek (ptr `plusPtr` 100)
<*> peek (ptr `plusPtr` 104)
<*> peek (ptr `plusPtr` 108)
<*> peek (ptr `plusPtr` 112)
<*> peek (ptr `plusPtr` 116)
<*> peek (ptr `plusPtr` 120)
<*> peek (ptr `plusPtr` 124)
<*> peek (ptr `plusPtr` 128)
<*> peek (ptr `plusPtr` 132)
<*> peek (ptr `plusPtr` 136)
<*> peek (ptr `plusPtr` 140)
<*> peek (ptr `plusPtr` 144)
<*> peek (ptr `plusPtr` 148)
<*> peek (ptr `plusPtr` 152)
<*> peek (ptr `plusPtr` 156)
<*> peek (ptr `plusPtr` 160)
<*> peek (ptr `plusPtr` 164)
<*> peek (ptr `plusPtr` 168)
<*> peek (ptr `plusPtr` 172)
<*> peek (ptr `plusPtr` 176)
<*> peek (ptr `plusPtr` 180)
<*> peek (ptr `plusPtr` 184)
<*> peek (ptr `plusPtr` 188)
<*> peek (ptr `plusPtr` 192)
<*> peek (ptr `plusPtr` 196)
<*> peek (ptr `plusPtr` 200)
<*> peek (ptr `plusPtr` 204)
<*> peek (ptr `plusPtr` 208)
<*> peek (ptr `plusPtr` 212)
<*> peek (ptr `plusPtr` 216)
<*> peek (ptr `plusPtr` 220)
<*> peek (ptr `plusPtr` 232)
<*> peek (ptr `plusPtr` 236)
<*> peek (ptr `plusPtr` 248)
<*> peek (ptr `plusPtr` 252)
<*> peek (ptr `plusPtr` 256)
<*> peek (ptr `plusPtr` 260)
<*> peek (ptr `plusPtr` 264)
<*> peek (ptr `plusPtr` 268)
<*> peek (ptr `plusPtr` 272)
<*> peek (ptr `plusPtr` 276)
<*> peek (ptr `plusPtr` 280)
<*> peek (ptr `plusPtr` 288)
<*> peek (ptr `plusPtr` 296)
<*> peek (ptr `plusPtr` 304)
<*> peek (ptr `plusPtr` 312)
<*> peek (ptr `plusPtr` 320)
<*> peek (ptr `plusPtr` 328)
<*> peek (ptr `plusPtr` 336)
<*> peek (ptr `plusPtr` 340)
<*> peek (ptr `plusPtr` 344)
<*> peek (ptr `plusPtr` 348)
<*> peek (ptr `plusPtr` 352)
<*> peek (ptr `plusPtr` 356)
<*> peek (ptr `plusPtr` 360)
<*> peek (ptr `plusPtr` 364)
<*> peek (ptr `plusPtr` 368)
<*> peek (ptr `plusPtr` 372)
<*> peek (ptr `plusPtr` 376)
<*> peek (ptr `plusPtr` 380)
<*> peek (ptr `plusPtr` 384)
<*> peek (ptr `plusPtr` 388)
<*> peek (ptr `plusPtr` 392)
<*> peek (ptr `plusPtr` 396)
<*> peek (ptr `plusPtr` 400)
<*> peek (ptr `plusPtr` 404)
<*> peek (ptr `plusPtr` 408)
<*> peek (ptr `plusPtr` 412)
<*> peek (ptr `plusPtr` 416)
<*> peek (ptr `plusPtr` 420)
<*> peek (ptr `plusPtr` 424)
<*> peek (ptr `plusPtr` 428)
<*> peek (ptr `plusPtr` 432)
<*> peek (ptr `plusPtr` 436)
<*> peek (ptr `plusPtr` 440)
<*> peek (ptr `plusPtr` 444)
<*> peek (ptr `plusPtr` 452)
<*> peek (ptr `plusPtr` 460)
<*> peek (ptr `plusPtr` 464)
<*> peek (ptr `plusPtr` 468)
<*> peek (ptr `plusPtr` 472)
<*> peek (ptr `plusPtr` 480)
<*> peek (ptr `plusPtr` 488)
<*> peek (ptr `plusPtr` 496)
poke ptr poked = poke (ptr `plusPtr` 0) (vkMaxImageDimension1D (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 4) (vkMaxImageDimension2D (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 8) (vkMaxImageDimension3D (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 12) (vkMaxImageDimensionCube (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 16) (vkMaxImageArrayLayers (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 20) (vkMaxTexelBufferElements (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 24) (vkMaxUniformBufferRange (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 28) (vkMaxStorageBufferRange (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 32) (vkMaxPushConstantsSize (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 36) (vkMaxMemoryAllocationCount (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 40) (vkMaxSamplerAllocationCount (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 48) (vkBufferImageGranularity (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 56) (vkSparseAddressSpaceSize (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 64) (vkMaxBoundDescriptorSets (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 68) (vkMaxPerStageDescriptorSamplers (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 72) (vkMaxPerStageDescriptorUniformBuffers (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 76) (vkMaxPerStageDescriptorStorageBuffers (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 80) (vkMaxPerStageDescriptorSampledImages (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 84) (vkMaxPerStageDescriptorStorageImages (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 88) (vkMaxPerStageDescriptorInputAttachments (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 92) (vkMaxPerStageResources (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 96) (vkMaxDescriptorSetSamplers (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 100) (vkMaxDescriptorSetUniformBuffers (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 104) (vkMaxDescriptorSetUniformBuffersDynamic (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 108) (vkMaxDescriptorSetStorageBuffers (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 112) (vkMaxDescriptorSetStorageBuffersDynamic (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 116) (vkMaxDescriptorSetSampledImages (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 120) (vkMaxDescriptorSetStorageImages (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 124) (vkMaxDescriptorSetInputAttachments (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 128) (vkMaxVertexInputAttributes (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 132) (vkMaxVertexInputBindings (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 136) (vkMaxVertexInputAttributeOffset (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 140) (vkMaxVertexInputBindingStride (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 144) (vkMaxVertexOutputComponents (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 148) (vkMaxTessellationGenerationLevel (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 152) (vkMaxTessellationPatchSize (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 156) (vkMaxTessellationControlPerVertexInputComponents (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 160) (vkMaxTessellationControlPerVertexOutputComponents (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 164) (vkMaxTessellationControlPerPatchOutputComponents (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 168) (vkMaxTessellationControlTotalOutputComponents (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 172) (vkMaxTessellationEvaluationInputComponents (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 176) (vkMaxTessellationEvaluationOutputComponents (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 180) (vkMaxGeometryShaderInvocations (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 184) (vkMaxGeometryInputComponents (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 188) (vkMaxGeometryOutputComponents (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 192) (vkMaxGeometryOutputVertices (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 196) (vkMaxGeometryTotalOutputComponents (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 200) (vkMaxFragmentInputComponents (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 204) (vkMaxFragmentOutputAttachments (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 208) (vkMaxFragmentDualSrcAttachments (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 212) (vkMaxFragmentCombinedOutputResources (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 216) (vkMaxComputeSharedMemorySize (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 220) (vkMaxComputeWorkGroupCount (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 232) (vkMaxComputeWorkGroupInvocations (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 236) (vkMaxComputeWorkGroupSize (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 248) (vkSubPixelPrecisionBits (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 252) (vkSubTexelPrecisionBits (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 256) (vkMipmapPrecisionBits (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 260) (vkMaxDrawIndexedIndexValue (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 264) (vkMaxDrawIndirectCount (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 268) (vkMaxSamplerLodBias (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 272) (vkMaxSamplerAnisotropy (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 276) (vkMaxViewports (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 280) (vkMaxViewportDimensions (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 288) (vkViewportBoundsRange (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 296) (vkViewportSubPixelBits (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 304) (vkMinMemoryMapAlignment (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 312) (vkMinTexelBufferOffsetAlignment (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 320) (vkMinUniformBufferOffsetAlignment (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 328) (vkMinStorageBufferOffsetAlignment (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 336) (vkMinTexelOffset (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 340) (vkMaxTexelOffset (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 344) (vkMinTexelGatherOffset (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 348) (vkMaxTexelGatherOffset (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 352) (vkMinInterpolationOffset (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 356) (vkMaxInterpolationOffset (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 360) (vkSubPixelInterpolationOffsetBits (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 364) (vkMaxFramebufferWidth (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 368) (vkMaxFramebufferHeight (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 372) (vkMaxFramebufferLayers (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 376) (vkFramebufferColorSampleCounts (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 380) (vkFramebufferDepthSampleCounts (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 384) (vkFramebufferStencilSampleCounts (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 388) (vkFramebufferNoAttachmentsSampleCounts (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 392) (vkMaxColorAttachments (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 396) (vkSampledImageColorSampleCounts (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 400) (vkSampledImageIntegerSampleCounts (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 404) (vkSampledImageDepthSampleCounts (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 408) (vkSampledImageStencilSampleCounts (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 412) (vkStorageImageSampleCounts (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 416) (vkMaxSampleMaskWords (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 420) (vkTimestampComputeAndGraphics (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 424) (vkTimestampPeriod (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 428) (vkMaxClipDistances (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 432) (vkMaxCullDistances (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 436) (vkMaxCombinedClipAndCullDistances (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 440) (vkDiscreteQueuePriorities (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 444) (vkPointSizeRange (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 452) (vkLineWidthRange (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 460) (vkPointSizeGranularity (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 464) (vkLineWidthGranularity (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 468) (vkStrictLines (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 472) (vkStandardSampleLocations (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 480) (vkOptimalBufferCopyOffsetAlignment (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 488) (vkOptimalBufferCopyRowPitchAlignment (poked :: VkPhysicalDeviceLimits))
*> poke (ptr `plusPtr` 496) (vkNonCoherentAtomSize (poked :: VkPhysicalDeviceLimits))
data VkMemoryHeap =
VkMemoryHeap{ vkSize :: VkDeviceSize
, vkFlags :: VkMemoryHeapFlags
}
deriving (Eq)
instance Storable VkMemoryHeap where
sizeOf ~_ = 16
alignment ~_ = 8
peek ptr = VkMemoryHeap <$> peek (ptr `plusPtr` 0)
<*> peek (ptr `plusPtr` 8)
poke ptr poked = poke (ptr `plusPtr` 0) (vkSize (poked :: VkMemoryHeap))
*> poke (ptr `plusPtr` 8) (vkFlags (poked :: VkMemoryHeap))
-- ** vkEnumeratePhysicalDevices
foreign import ccall "vkEnumeratePhysicalDevices" vkEnumeratePhysicalDevices ::
VkInstance -> Ptr Word32 -> Ptr VkPhysicalDevice -> IO VkResult
-- ** vkGetDeviceProcAddr
foreign import ccall "vkGetDeviceProcAddr" vkGetDeviceProcAddr ::
VkDevice -> Ptr CChar -> IO PFN_vkVoidFunction
-- ** vkCreateInstance
foreign import ccall "vkCreateInstance" vkCreateInstance ::
Ptr VkInstanceCreateInfo ->
Ptr VkAllocationCallbacks -> Ptr VkInstance -> IO VkResult
-- ** VkFormatFeatureFlags
newtype VkFormatFeatureFlagBits = VkFormatFeatureFlagBits VkFlags
deriving (Eq, Storable, Bits, FiniteBits)
-- | Alias for VkFormatFeatureFlagBits
type VkFormatFeatureFlags = VkFormatFeatureFlagBits
-- | Format can be used for sampled images (SAMPLED_IMAGE and COMBINED_IMAGE_SAMPLER descriptor types)
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT = VkFormatFeatureFlagBits 0x1
-- | Format can be used for storage images (STORAGE_IMAGE descriptor type)
pattern VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT = VkFormatFeatureFlagBits 0x2
-- | Format supports atomic operations in case it's used for storage images
pattern VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT = VkFormatFeatureFlagBits 0x4
-- | Format can be used for uniform texel buffers (TBOs)
pattern VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT = VkFormatFeatureFlagBits 0x8
-- | Format can be used for storage texel buffers (IBOs)
pattern VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT = VkFormatFeatureFlagBits 0x10
-- | Format supports atomic operations in case it's used for storage texel buffers
pattern VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = VkFormatFeatureFlagBits 0x20
-- | Format can be used for vertex buffers (VBOs)
pattern VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT = VkFormatFeatureFlagBits 0x40
-- | Format can be used for color attachment images
pattern VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT = VkFormatFeatureFlagBits 0x80
-- | Format supports blending in case it's used for color attachment images
pattern VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT = VkFormatFeatureFlagBits 0x100
-- | Format can be used for depth/stencil attachment images
pattern VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT = VkFormatFeatureFlagBits 0x200
-- | Format can be used as the source image of blits with vkCmdBlitImage
pattern VK_FORMAT_FEATURE_BLIT_SRC_BIT = VkFormatFeatureFlagBits 0x400
-- | Format can be used as the destination image of blits with vkCmdBlitImage
pattern VK_FORMAT_FEATURE_BLIT_DST_BIT = VkFormatFeatureFlagBits 0x800
-- | Format can be filtered with VK_FILTER_LINEAR when being sampled
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT = VkFormatFeatureFlagBits 0x1000
data VkPhysicalDeviceMemoryProperties =
VkPhysicalDeviceMemoryProperties{ vkMemoryTypeCount :: Word32
, vkMemoryTypes :: Vec (ToPeano VK_MAX_MEMORY_TYPES) VkMemoryType
, vkMemoryHeapCount :: Word32
, vkMemoryHeaps :: Vec (ToPeano VK_MAX_MEMORY_HEAPS) VkMemoryHeap
}
deriving (Eq)
instance Storable VkPhysicalDeviceMemoryProperties where
sizeOf ~_ = 520
alignment ~_ = 8
peek ptr = VkPhysicalDeviceMemoryProperties <$> peek (ptr `plusPtr` 0)
<*> peek (ptr `plusPtr` 4)
<*> peek (ptr `plusPtr` 260)
<*> peek (ptr `plusPtr` 264)
poke ptr poked = poke (ptr `plusPtr` 0) (vkMemoryTypeCount (poked :: VkPhysicalDeviceMemoryProperties))
*> poke (ptr `plusPtr` 4) (vkMemoryTypes (poked :: VkPhysicalDeviceMemoryProperties))
*> poke (ptr `plusPtr` 260) (vkMemoryHeapCount (poked :: VkPhysicalDeviceMemoryProperties))
*> poke (ptr `plusPtr` 264) (vkMemoryHeaps (poked :: VkPhysicalDeviceMemoryProperties))
data VkInstance_T
type VkInstance = Ptr VkInstance_T
-- ** VkMemoryHeapFlags
newtype VkMemoryHeapFlagBits = VkMemoryHeapFlagBits VkFlags
deriving (Eq, Storable, Bits, FiniteBits)
-- | Alias for VkMemoryHeapFlagBits
type VkMemoryHeapFlags = VkMemoryHeapFlagBits
-- | If set, heap represents device memory
pattern VK_MEMORY_HEAP_DEVICE_LOCAL_BIT = VkMemoryHeapFlagBits 0x1
data VkQueueFamilyProperties =
VkQueueFamilyProperties{ vkQueueFlags :: VkQueueFlags
, vkQueueCount :: Word32
, vkTimestampValidBits :: Word32
, vkMinImageTransferGranularity :: VkExtent3D
}
deriving (Eq)
instance Storable VkQueueFamilyProperties where
sizeOf ~_ = 24
alignment ~_ = 4
peek ptr = VkQueueFamilyProperties <$> peek (ptr `plusPtr` 0)
<*> peek (ptr `plusPtr` 4)
<*> peek (ptr `plusPtr` 8)
<*> peek (ptr `plusPtr` 12)
poke ptr poked = poke (ptr `plusPtr` 0) (vkQueueFlags (poked :: VkQueueFamilyProperties))
*> poke (ptr `plusPtr` 4) (vkQueueCount (poked :: VkQueueFamilyProperties))
*> poke (ptr `plusPtr` 8) (vkTimestampValidBits (poked :: VkQueueFamilyProperties))
*> poke (ptr `plusPtr` 12) (vkMinImageTransferGranularity (poked :: VkQueueFamilyProperties))
data VkImageFormatProperties =
VkImageFormatProperties{ vkMaxExtent :: VkExtent3D
, vkMaxMipLevels :: Word32
, vkMaxArrayLayers :: Word32
, vkSampleCounts :: VkSampleCountFlags
, vkMaxResourceSize :: VkDeviceSize
}
deriving (Eq)
instance Storable VkImageFormatProperties where
sizeOf ~_ = 32
alignment ~_ = 8
peek ptr = VkImageFormatProperties <$> peek (ptr `plusPtr` 0)
<*> peek (ptr `plusPtr` 12)
<*> peek (ptr `plusPtr` 16)
<*> peek (ptr `plusPtr` 20)
<*> peek (ptr `plusPtr` 24)
poke ptr poked = poke (ptr `plusPtr` 0) (vkMaxExtent (poked :: VkImageFormatProperties))
*> poke (ptr `plusPtr` 12) (vkMaxMipLevels (poked :: VkImageFormatProperties))
*> poke (ptr `plusPtr` 16) (vkMaxArrayLayers (poked :: VkImageFormatProperties))
*> poke (ptr `plusPtr` 20) (vkSampleCounts (poked :: VkImageFormatProperties))
*> poke (ptr `plusPtr` 24) (vkMaxResourceSize (poked :: VkImageFormatProperties))
data VkPhysicalDeviceSparseProperties =
VkPhysicalDeviceSparseProperties{ vkResidencyStandard2DBlockShape :: VkBool32
, vkResidencyStandard2DMultisampleBlockShape :: VkBool32
, vkResidencyStandard3DBlockShape :: VkBool32
, vkResidencyAlignedMipSize :: VkBool32
, vkResidencyNonResidentStrict :: VkBool32
}
deriving (Eq)
instance Storable VkPhysicalDeviceSparseProperties where
sizeOf ~_ = 20
alignment ~_ = 4
peek ptr = VkPhysicalDeviceSparseProperties <$> peek (ptr `plusPtr` 0)
<*> peek (ptr `plusPtr` 4)
<*> peek (ptr `plusPtr` 8)
<*> peek (ptr `plusPtr` 12)
<*> peek (ptr `plusPtr` 16)
poke ptr poked = poke (ptr `plusPtr` 0) (vkResidencyStandard2DBlockShape (poked :: VkPhysicalDeviceSparseProperties))
*> poke (ptr `plusPtr` 4) (vkResidencyStandard2DMultisampleBlockShape (poked :: VkPhysicalDeviceSparseProperties))
*> poke (ptr `plusPtr` 8) (vkResidencyStandard3DBlockShape (poked :: VkPhysicalDeviceSparseProperties))
*> poke (ptr `plusPtr` 12) (vkResidencyAlignedMipSize (poked :: VkPhysicalDeviceSparseProperties))
*> poke (ptr `plusPtr` 16) (vkResidencyNonResidentStrict (poked :: VkPhysicalDeviceSparseProperties))
-- ** vkGetPhysicalDeviceFeatures
foreign import ccall "vkGetPhysicalDeviceFeatures" vkGetPhysicalDeviceFeatures ::
VkPhysicalDevice -> Ptr VkPhysicalDeviceFeatures -> IO ()
-- ** vkGetPhysicalDeviceMemoryProperties
foreign import ccall "vkGetPhysicalDeviceMemoryProperties" vkGetPhysicalDeviceMemoryProperties ::
VkPhysicalDevice -> Ptr VkPhysicalDeviceMemoryProperties -> IO ()
data VkPhysicalDeviceProperties =
VkPhysicalDeviceProperties{ vkApiVersion :: Word32
, vkDriverVersion :: Word32
, vkVendorID :: Word32
, vkDeviceID :: Word32
, vkDeviceType :: VkPhysicalDeviceType
, vkDeviceName :: Vec (ToPeano VK_MAX_PHYSICAL_DEVICE_NAME_SIZE) CChar
, vkPipelineCacheUUID :: Vec (ToPeano VK_UUID_SIZE) Word8
, vkLimits :: VkPhysicalDeviceLimits
, vkSparseProperties :: VkPhysicalDeviceSparseProperties
}
deriving (Eq)
instance Storable VkPhysicalDeviceProperties where
sizeOf ~_ = 824
alignment ~_ = 8
peek ptr = VkPhysicalDeviceProperties <$> peek (ptr `plusPtr` 0)
<*> peek (ptr `plusPtr` 4)
<*> peek (ptr `plusPtr` 8)
<*> peek (ptr `plusPtr` 12)
<*> peek (ptr `plusPtr` 16)
<*> peek (ptr `plusPtr` 20)
<*> peek (ptr `plusPtr` 276)
<*> peek (ptr `plusPtr` 296)
<*> peek (ptr `plusPtr` 800)
poke ptr poked = poke (ptr `plusPtr` 0) (vkApiVersion (poked :: VkPhysicalDeviceProperties))
*> poke (ptr `plusPtr` 4) (vkDriverVersion (poked :: VkPhysicalDeviceProperties))
*> poke (ptr `plusPtr` 8) (vkVendorID (poked :: VkPhysicalDeviceProperties))
*> poke (ptr `plusPtr` 12) (vkDeviceID (poked :: VkPhysicalDeviceProperties))
*> poke (ptr `plusPtr` 16) (vkDeviceType (poked :: VkPhysicalDeviceProperties))
*> poke (ptr `plusPtr` 20) (vkDeviceName (poked :: VkPhysicalDeviceProperties))
*> poke (ptr `plusPtr` 276) (vkPipelineCacheUUID (poked :: VkPhysicalDeviceProperties))
*> poke (ptr `plusPtr` 296) (vkLimits (poked :: VkPhysicalDeviceProperties))
*> poke (ptr `plusPtr` 800) (vkSparseProperties (poked :: VkPhysicalDeviceProperties))
-- ** vkGetPhysicalDeviceQueueFamilyProperties
foreign import ccall "vkGetPhysicalDeviceQueueFamilyProperties" vkGetPhysicalDeviceQueueFamilyProperties ::
VkPhysicalDevice ->
Ptr Word32 -> Ptr VkQueueFamilyProperties -> IO ()
data VkMemoryType =
VkMemoryType{ vkPropertyFlags :: VkMemoryPropertyFlags
, vkHeapIndex :: Word32
}
deriving (Eq)
instance Storable VkMemoryType where
sizeOf ~_ = 8
alignment ~_ = 4
peek ptr = VkMemoryType <$> peek (ptr `plusPtr` 0)
<*> peek (ptr `plusPtr` 4)
poke ptr poked = poke (ptr `plusPtr` 0) (vkPropertyFlags (poked :: VkMemoryType))
*> poke (ptr `plusPtr` 4) (vkHeapIndex (poked :: VkMemoryType))
-- ** vkGetInstanceProcAddr
foreign import ccall "vkGetInstanceProcAddr" vkGetInstanceProcAddr ::
VkInstance -> Ptr CChar -> IO PFN_vkVoidFunction
-- ** VkMemoryPropertyFlags
newtype VkMemoryPropertyFlagBits = VkMemoryPropertyFlagBits VkFlags
deriving (Eq, Storable, Bits, FiniteBits)
-- | Alias for VkMemoryPropertyFlagBits
type VkMemoryPropertyFlags = VkMemoryPropertyFlagBits
-- | If otherwise stated, then allocate memory on device
pattern VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT = VkMemoryPropertyFlagBits 0x1
-- | Memory is mappable by host
pattern VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT = VkMemoryPropertyFlagBits 0x2
-- | Memory will have i/o coherency. If not set, application may need to use vkFlushMappedMemoryRanges and vkInvalidateMappedMemoryRanges to flush/invalidate host cache
pattern VK_MEMORY_PROPERTY_HOST_COHERENT_BIT = VkMemoryPropertyFlagBits 0x4
-- | Memory will be cached by the host
pattern VK_MEMORY_PROPERTY_HOST_CACHED_BIT = VkMemoryPropertyFlagBits 0x8
-- | Memory may be allocated by the driver when it is required
pattern VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT = VkMemoryPropertyFlagBits 0x10
-- ** vkDestroyInstance
foreign import ccall "vkDestroyInstance" vkDestroyInstance ::
VkInstance -> Ptr VkAllocationCallbacks -> IO ()
-- ** VkQueueFlags
newtype VkQueueFlagBits = VkQueueFlagBits VkFlags
deriving (Eq, Storable, Bits, FiniteBits)
-- | Alias for VkQueueFlagBits
type VkQueueFlags = VkQueueFlagBits
-- | Queue supports graphics operations
pattern VK_QUEUE_GRAPHICS_BIT = VkQueueFlagBits 0x1
-- | Queue supports compute operations
pattern VK_QUEUE_COMPUTE_BIT = VkQueueFlagBits 0x2
-- | Queue supports transfer operations
pattern VK_QUEUE_TRANSFER_BIT = VkQueueFlagBits 0x4
-- | Queue supports sparse resource memory management operations
pattern VK_QUEUE_SPARSE_BINDING_BIT = VkQueueFlagBits 0x8
-- ** vkGetPhysicalDeviceProperties
foreign import ccall "vkGetPhysicalDeviceProperties" vkGetPhysicalDeviceProperties ::
VkPhysicalDevice -> Ptr VkPhysicalDeviceProperties -> IO ()
-- ** VkInstanceCreateFlags
-- | Opaque flag
newtype VkInstanceCreateFlags = VkInstanceCreateFlags VkFlags
deriving (Eq, Storable)
-- ** vkGetPhysicalDeviceFormatProperties
foreign import ccall "vkGetPhysicalDeviceFormatProperties" vkGetPhysicalDeviceFormatProperties ::
VkPhysicalDevice -> VkFormat -> Ptr VkFormatProperties -> IO ()
data VkFormatProperties =
VkFormatProperties{ vkLinearTilingFeatures :: VkFormatFeatureFlags
, vkOptimalTilingFeatures :: VkFormatFeatureFlags
, vkBufferFeatures :: VkFormatFeatureFlags
}
deriving (Eq)
instance Storable VkFormatProperties where
sizeOf ~_ = 12
alignment ~_ = 4
peek ptr = VkFormatProperties <$> peek (ptr `plusPtr` 0)
<*> peek (ptr `plusPtr` 4)
<*> peek (ptr `plusPtr` 8)
poke ptr poked = poke (ptr `plusPtr` 0) (vkLinearTilingFeatures (poked :: VkFormatProperties))
*> poke (ptr `plusPtr` 4) (vkOptimalTilingFeatures (poked :: VkFormatProperties))
*> poke (ptr `plusPtr` 8) (vkBufferFeatures (poked :: VkFormatProperties))
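-- Illustrative sketch (an editorial addition, not part of the original
-- binding): the usual Vulkan two-call pattern for vkEnumeratePhysicalDevices,
-- given an already created VkInstance. The name enumerateDevices is
-- hypothetical, and the sketch additionally needs nullPtr from Foreign.Ptr
-- plus alloca/allocaArray/peekArray from Foreign.Marshal.
--
-- enumerateDevices :: VkInstance -> IO [VkPhysicalDevice]
-- enumerateDevices inst =
--   alloca $ \countPtr -> do
--     _ <- vkEnumeratePhysicalDevices inst countPtr nullPtr
--     count <- fromIntegral <$> peek countPtr
--     allocaArray count $ \devPtr -> do
--       _ <- vkEnumeratePhysicalDevices inst countPtr devPtr
--       peekArray count devPtr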
| oldmanmike/vulkan | src/Graphics/Vulkan/DeviceInitialization.hs | bsd-3-clause | 47,766 | 0 | 114 | 16,915 | 10,085 | 5,740 | 4,345 | -1 | -1 |
module ParserSpec (
parserTests
) where
import Test.Tasty
import Test.Tasty.QuickCheck as QC
import Test.Tasty.HUnit
import Text.Parsec
import Text.Parsec.String (Parser)
import Text.ParserCombinators.Parsec.Error(ParseError, Message, errorMessages, messageEq)
import Data.Char
import Data.Either (isLeft)
import Numeric
import Data.Hednist.Types
import Data.Hednist.Parser
parserTests = testGroup "Parser Tests"
[ nilTests
, characterTests
, symbolTests
, keywordTests
, intTests
, floatTests
, listTests
, vectorTests
, stringTests
, mapTests ]
-- Nil block
nilTests = testGroup "Nil Tests"
[ testCase "nil is recognized" testNilValid
, testCase "non-nils are not recognized" testNilInvalid ]
testNilValid = parse nil "" "nil" @?= Right EDNNil
testNilInvalid = isLeft (parse nil "" "null") @?= True
-- Character block
characterTests = testGroup "Character Tests"
[ testProperty "basic characters are recognized" checkCharRecognized
, testCase "newline is recognized" testNewlineRecognized
, testCase "return is recognized" testReturnRecognized
, testCase "space is recognized" testSpaceRecognized
, testCase "tab is recognized" testTabRecognized
, testCase "unicode space is recognized" testUnicodeRecognized
, testCase "unicode space is recognized by character" testUnicodeCharacterRecognized ]
checkCharRecognized :: Char -> Bool
checkCharRecognized char = parse character "" ("\\" ++ [char]) == Right (EDNChar char)
testNewlineRecognized = parse character "" "\\newline" @?= Right (EDNChar '\n')
testReturnRecognized = parse character "" "\\return" @?= Right (EDNChar '\r')
testSpaceRecognized = parse character "" "\\space" @?= Right (EDNChar ' ')
testTabRecognized = parse character "" "\\tab" @?= Right (EDNChar '\t')
testUnicodeRecognized = parse unicode "" "u0020" @?= Right ' '
testUnicodeCharacterRecognized = parse character "" "\\u0020" @?= Right (EDNChar ' ')
symbolTests = testGroup "Symbol Tests"
[ testCase "simple symbols are recognized" testNakedSymbolRecognized
, testCase "symbols with odd characters are recognized" testComplexSymbolRecognized
, testCase "symbols with namespaces are recognized" testNamespaceSymbolRecognized ]
testNakedSymbolRecognized = parse symbol "" "aqrz.119" @?= Right (EDNSymbol Nothing "aqrz.119")
testComplexSymbolRecognized = parse symbol "" "+a564#" @?= Right (EDNSymbol Nothing "+a564#")
testNamespaceSymbolRecognized = parse symbol "" "+a5/abc" @?= Right (EDNSymbol (Just "+a5") "abc")
keywordTests = testGroup "Keyword Tests"
[ testCase "simple keywords are recognized" testSimpleKeyword
, testCase "namespaced keywords are recognized" testNamespacedKeyword
, testCase "funky keywords are recognized" testFunkyKeyword ]
testSimpleKeyword = parse keyword "" ":a" @?= Right (EDNKeyword Nothing "a")
testNamespacedKeyword = parse keyword "" ":a.d/c" @?= Right (EDNKeyword (Just "a.d") "c")
testFunkyKeyword = parse keyword "" ":+a1d.f*/_q" @?= Right (EDNKeyword (Just "+a1d.f*") "_q")
intTests = testGroup "Int Tests"
[ testProperty "ints are recognized" checkRecognizesIntegerSmall
, testProperty "integers are recognized" checkRecognizesIntegerBig ]
checkRecognizesIntegerSmall :: Int -> Bool
checkRecognizesIntegerSmall i = parse integer "" (show i) == Right (EDNInt i)
checkRecognizesIntegerBig :: Integer -> Bool
checkRecognizesIntegerBig i = parse integer "" (show i ++ "N") == Right (EDNInteger i)
floatTests = testGroup "Float Tests"
[ testProperty "coerced floats are recognized" checkRecognizesCast
, testProperty "floats are recognized" checkRecognizesFloat
, testProperty "exponents are recognized" checkRecognizesExponent ]
checkRecognizesCast :: Int -> Bool
checkRecognizesCast i = parse float "" (show i ++ "M") == Right (EDNFloat $ realToFrac i)
checkRecognizesFloat :: Double -> Bool
checkRecognizesFloat f = parse float "" (show f) == Right (EDNFloat f)
checkRecognizesExponent :: Int -> NonNegative Int -> Int -> Bool
checkRecognizesExponent f (NonNegative d) e =
parse float "" str == Right (EDNFloat $ head * (10 ** realToFrac e))
where head = read (show f ++ "." ++ show d)
str = show head ++ "E" ++ show e
listTests = testGroup "List Tests"
[ testCase "empty lists are recognized" testEmptyList
, testCase "lists with elements are recognized" testFilledList ]
testEmptyList = parse list "" "()" @?= Right (EDNList [])
testFilledList = parse list "" "(1 2)" @?= Right (EDNList [EDNInt 1, EDNInt 2])
vectorTests = testGroup "Vector Tests"
[ testCase "empty vectors are recognized" testEmptyVector
, testCase "vectors with elements are recognized" testFilledVector ]
testEmptyVector = parse Data.Hednist.Parser.vector "" "[]" @?= Right (EDNVector [])
testFilledVector = parse Data.Hednist.Parser.vector "" "[1 2]" @?= Right (EDNVector [EDNInt 1, EDNInt 2])
stringTests = testGroup "String Tests"
[ testCase "simple strings are recognized" testSimpleString ]
testSimpleString = parse str "" "\"hi\"" @?= Right (EDNString "hi")
-- TODO: Generate strings based on the specification and ensure they are parsed
mapTests = testGroup "Map Tests"
[ testCase "simple maps are recognized" testSimpleMap ]
testSimpleMap = parse dict "" "{:a 1, :b 2}" @?= Right (EDNMap [(EDNKeyword Nothing "a", EDNInt 1), (EDNKeyword Nothing "b", EDNInt 2)])
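-- Illustrative note (an editorial addition, not part of the original file):
-- these groups are typically run from a tasty driver, e.g.
--
-- main :: IO ()
-- main = defaultMain parserTests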
| arzig/hednist | test/ParserSpec.hs | bsd-3-clause | 5,336 | 0 | 11 | 823 | 1,381 | 702 | 679 | 95 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Distance.HR.Corpus
( corpus ) where
import Prelude
import Data.String
import Duckling.Distance.Types
import Duckling.Lang
import Duckling.Resolve
import Duckling.Testing.Types
corpus :: Corpus
corpus = (testContext {lang = HR}, allExamples)
allExamples :: [Example]
allExamples = concat
[ examples (DistanceValue Kilometre 3)
[ "3 kilometra"
, "3 km"
, "3km"
, "3k"
]
, examples (DistanceValue Kilometre 3.0)
[ "3,0 km"
]
, examples (DistanceValue Mile 8)
[ "8 milja"
]
, examples (DistanceValue M 9)
[ "9m"
]
, examples (DistanceValue Centimetre 2)
[ "2cm"
, "2 centimetra"
]
]
| rfranek/duckling | Duckling/Distance/HR/Corpus.hs | bsd-3-clause | 1,133 | 0 | 9 | 347 | 197 | 117 | 80 | 27 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE DefaultSignatures #-}
-------------------------------------------------------------------------------
-- |
-- Module : Control.Lens.Empty
-- Copyright : (C) 2012-14 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : provisional
-- Portability : non-portable
--
-------------------------------------------------------------------------------
module Control.Lens.Empty
(
AsEmpty(..)
) where
import Control.Lens.Iso
import Control.Lens.Prism
import Control.Lens.Review
import Data.ByteString as StrictB
import Data.ByteString.Lazy as LazyB
import Data.HashMap.Lazy as HashMap
import Data.HashSet as HashSet
import Data.IntMap as IntMap
import Data.IntSet as IntSet
import Data.Map as Map
import Data.Maybe
import Data.Monoid
import Data.Profunctor.Unsafe
import Data.Sequence as Seq
import Data.Set as Set
import Data.Text as StrictT
import Data.Text.Lazy as LazyT
import Data.Vector as Vector
import Data.Vector.Unboxed as Unboxed
import Data.Vector.Storable as Storable
#ifndef mingw32_HOST_OS
import GHC.Event
#endif
class AsEmpty a where
-- |
--
-- >>> isn't _Empty [1,2,3]
-- True
_Empty :: Prism' a ()
#ifndef HLINT
default _Empty :: (Monoid a, Eq a) => Prism' a ()
_Empty = only mempty
{-# INLINE _Empty #-}
#endif
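-- An illustrative example of working with '_Empty' (added for exposition;
-- not part of the original module):
--
-- > isn't _Empty ([] :: [Int])   -- False: the empty list matches '_Empty'
-- > (_Empty # ()) :: [Int]       -- []: reviewing the prism builds the empty value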
{- Default Monoid instances -}
instance AsEmpty Ordering
instance AsEmpty ()
instance AsEmpty Any
instance AsEmpty All
#ifndef mingw32_HOST_OS
instance AsEmpty Event
#endif
instance (Eq a, Num a) => AsEmpty (Product a)
instance (Eq a, Num a) => AsEmpty (Sum a)
instance AsEmpty (Maybe a) where
_Empty = _Nothing
{-# INLINE _Empty #-}
instance AsEmpty (Last a) where
_Empty = nearly (Last Nothing) (isNothing .# getLast)
{-# INLINE _Empty #-}
instance AsEmpty (First a) where
_Empty = nearly (First Nothing) (isNothing .# getFirst)
{-# INLINE _Empty #-}
instance AsEmpty a => AsEmpty (Dual a) where
_Empty = iso getDual Dual . _Empty
{-# INLINE _Empty #-}
instance (AsEmpty a, AsEmpty b) => AsEmpty (a,b) where
_Empty = prism' (\() -> (_Empty # (), _Empty # ())) $ \(s,s') -> case _Empty Left s of
Left () -> case _Empty Left s' of
Left () -> Just ()
_ -> Nothing
_ -> Nothing
{-# INLINE _Empty #-}
instance (AsEmpty a, AsEmpty b, AsEmpty c) => AsEmpty (a,b,c) where
_Empty = prism' (\() -> (_Empty # (), _Empty # (), _Empty # ())) $ \(s,s',s'') -> case _Empty Left s of
Left () -> case _Empty Left s' of
Left () -> case _Empty Left s'' of
Left () -> Just ()
Right _ -> Nothing
Right _ -> Nothing
Right _ -> Nothing
{-# INLINE _Empty #-}
instance AsEmpty [a] where
_Empty = nearly [] Prelude.null
{-# INLINE _Empty #-}
instance AsEmpty (Map k a) where
_Empty = nearly Map.empty Map.null
{-# INLINE _Empty #-}
instance AsEmpty (HashMap k a) where
_Empty = nearly HashMap.empty HashMap.null
{-# INLINE _Empty #-}
instance AsEmpty (IntMap a) where
_Empty = nearly IntMap.empty IntMap.null
{-# INLINE _Empty #-}
instance AsEmpty (Set a) where
_Empty = nearly Set.empty Set.null
{-# INLINE _Empty #-}
instance AsEmpty (HashSet a) where
_Empty = nearly HashSet.empty HashSet.null
{-# INLINE _Empty #-}
instance AsEmpty IntSet where
_Empty = nearly IntSet.empty IntSet.null
{-# INLINE _Empty #-}
instance AsEmpty (Vector.Vector a) where
_Empty = nearly Vector.empty Vector.null
{-# INLINE _Empty #-}
instance Unbox a => AsEmpty (Unboxed.Vector a) where
_Empty = nearly Unboxed.empty Unboxed.null
{-# INLINE _Empty #-}
instance Storable a => AsEmpty (Storable.Vector a) where
_Empty = nearly Storable.empty Storable.null
{-# INLINE _Empty #-}
instance AsEmpty (Seq a) where
_Empty = nearly Seq.empty Seq.null
{-# INLINE _Empty #-}
instance AsEmpty StrictB.ByteString where
_Empty = nearly StrictB.empty StrictB.null
{-# INLINE _Empty #-}
instance AsEmpty LazyB.ByteString where
_Empty = nearly LazyB.empty LazyB.null
{-# INLINE _Empty #-}
instance AsEmpty StrictT.Text where
_Empty = nearly StrictT.empty StrictT.null
{-# INLINE _Empty #-}
instance AsEmpty LazyT.Text where
_Empty = nearly LazyT.empty LazyT.null
{-# INLINE _Empty #-}
| hvr/lens | src/Control/Lens/Empty.hs | bsd-3-clause | 4,307 | 0 | 18 | 839 | 1,232 | 673 | 559 | 112 | 0 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-|
Module : Finance.Blpapi.Event
Description : An event resulting for subscription or request
Copyright : Bloomberg Finance L.P.
License : MIT
Maintainer : [email protected]
Stability : experimental
Portability : *nix, windows
-}
module Finance.Blpapi.Event where
import Finance.Blpapi.Service
import Control.Monad
import Data.Char
import Data.Map (Map)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Time.Calendar
import Data.Time.LocalTime
import Finance.Blpapi.CorrelationId
import GHC.Float
-- | A single event resulting from a subscription or a request.
--
-- Events are created by the 'API' and passed to the application through the
-- 'SessionHandler'. The event is the basic unit of work provided to
-- applications. Each Event consists of an 'EventType' and a 'Message'.
data Event =
Event { -- | The type of 'Message's contained in this event
eventType :: !EventType,
          -- | The event content
eventContent :: !Message
} deriving (Show, Eq)
-- | The possible types of events
data EventType
-- | Undefined event type (Should never happen)
= EventTypeUndefined
-- | Admin Event
| EventTypeAdmin
-- | Status updates for Session
| EventTypeSessionStatus
-- | Status updates for Subscription
| EventTypeSubscriptionStatus
-- | Status update for Request
| EventTypeRequestStatus
-- | The final (and possibly only) response to a request
| EventTypeResponse
-- | A partial response to a request
| EventTypePartialResponse
-- | Unknown Event Type (Should never happen)
| EventTypeUnknown
-- | Data updates resulting from subscription
| EventTypeSubscriptionData
-- | Status updates for a service
| EventTypeServiceStatus
-- | A timeout event (Should never happen)
| EventTypeTimeout
-- | Status updates for user authorization
| EventTypeAuthorizationStatus
-- | Status updates for a resolution operation
| EventTypeResolutionStatus
-- | Status updates about topics for service providers
| EventTypeTopicStatus
-- | Status updates for a generate token request
| EventTypeTokenStatus
-- | A request event to respond to
| EventTypeRequest
deriving (Show, Eq, Enum, Bounded)
-- | Message is the actual data corresponding to an 'Event'. It is
-- associated with a 'Service' and one or more 'CorrelationId's. The
-- message contents are represented as 'Element'. Each message also contains
-- a fragment type, which indicates whether the message is part of a bigger
-- chunk.
data Message =
Message { -- | The topic name associated with the message.
messageTopicName :: !Text,
-- | The service which the message is from
messageService :: Maybe Service,
-- | The message content
messageData :: !Element,
-- | The message fragment type
messageFragmentType :: !MessageFragmentType,
-- | The list of 'CorrelationId's
messageCorrelationIds :: ![CorrelationId]
} deriving (Eq)
-- | A message could be split into more than one fragment to reduce
-- the size of each message. This enumeration indicates whether a message
-- is part of a fragmented message and, if so, its position among the fragments.
data MessageFragmentType
-- | Message is not fragmented
= MessageFragmentNone
-- | The first fragmented message
| MessageFragmentStart
-- | Intermediate fragmented messages
| MessageFragmentIntermediate
-- | The last fragmented message
| MessageFragmentEnd
deriving (Show, Eq, Enum, Bounded)
-- | An array of Elements
type ElementArray = [Element]
-- | A type determining an 'Element' object. It's a map from the element
-- name to its content, along with the content's schema definition.
type ElementObject = Map Text ElementWithDefinition
-- | The message content type.
-- An Element can represent: a single value of any data type supported by
-- the Bloomberg API; an array of values; a sequence or a choice.
data Element = ElementBool {elementBoolValue :: !Bool}
| ElementChar {elementCharValue :: !Char}
| ElementInt {elementIntValue :: !Int}
| ElementInt64 {elementIntegerValue :: !Integer}
| ElementFloat {elementFloatValue :: !Float}
| ElementDouble {elementDoubleValue :: !Double}
| ElementString {elementStringValue :: !Text}
| ElementDate {elementDateValue :: !Day,
elementDateTimeZone :: Maybe TimeZone}
| ElementTime {elementTimeValue :: !TimeOfDay,
elementTimeTimeZone :: Maybe TimeZone}
| ElementDatetime {elementDatetimeValue :: !LocalTime,
elementDatetimeTimeZone :: Maybe TimeZone}
| ElementEnum {elementEnumValue :: !String}
| ElementSequence {elementSequenceValue :: !ElementObject}
| ElementChoice {elementChoiceValue :: !ElementObject}
| ElementArray {elementArrayValue:: !ElementArray }
| ElementNull
deriving (Show, Eq)
-- | An element along with its schema definition
data ElementWithDefinition = ElementWithDefinition {
elementWithDefinitionSchema :: !SchemaDefinition,
elementWithDefinitionContent :: !Element
} deriving (Eq)
instance Show Message where
show (Message topic _ e ft cl)
= "TopicName: " ++ show topic
++ ", " ++ show e
++ ", MessageFragmentType: " ++ show ft
++ ", Correlations: " ++ show cl
instance Show ElementWithDefinition where
show (ElementWithDefinition _ t)
= "ElementType: { " ++ show t ++ " }"
-- | A class which determines the types that an Element can be
-- converted to
class BlpConversionUtil r where
blpConvert :: Element -> Maybe r
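-- For example (illustrative usage only, not part of the module):
--
-- > blpConvert (ElementInt 42)               :: Maybe Double -- Just 42.0
-- > blpConvert (ElementString (T.pack "px")) :: Maybe Text   -- Just "px"
-- > blpConvert (ElementBool True)            :: Maybe Double -- Nothing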
instance BlpConversionUtil Bool where
blpConvert (ElementBool a) = Just a
blpConvert _ = Nothing
instance BlpConversionUtil Char where
blpConvert (ElementBool a) | a = Just 'T'
| otherwise = Just 'F'
blpConvert (ElementChar c) = Just c
blpConvert _ = Nothing
instance BlpConversionUtil Int where
blpConvert (ElementChar c) = Just (ord c)
blpConvert (ElementInt c) = Just c
blpConvert (ElementFloat c)
| floor c > (minBound :: Int) && floor c < (maxBound :: Int)
= Just $ floor c
| otherwise = Nothing
blpConvert (ElementDouble c)
| floor c > (minBound :: Int) && floor c < (maxBound :: Int)
= Just $ floor c
| otherwise = Nothing
blpConvert _ = Nothing
instance BlpConversionUtil Integer where
blpConvert (ElementChar c) = Just $ (toInteger . ord) c
blpConvert (ElementInt c) = Just $ toInteger c
blpConvert (ElementInt64 c) = Just c
blpConvert (ElementFloat c) = Just $ floor c
blpConvert (ElementDouble c) = Just $ floor c
blpConvert _ = Nothing
instance BlpConversionUtil Float where
blpConvert (ElementChar c) = Just $ (fromIntegral . ord) c
blpConvert (ElementInt c) = Just $ fromIntegral c
blpConvert (ElementFloat c) = Just c
blpConvert _ = Nothing
instance BlpConversionUtil Double where
blpConvert (ElementChar c) = Just $ (fromIntegral . ord) c
blpConvert (ElementInt c) = Just $ fromIntegral c
blpConvert (ElementFloat c) = Just $ float2Double c
blpConvert (ElementDouble c) = Just c
blpConvert _ = Nothing
instance BlpConversionUtil TimeOfDay where
blpConvert (ElementTime c _) = Just c
blpConvert (ElementDatetime c _) = Just $ localTimeOfDay c
blpConvert _ = Nothing
instance BlpConversionUtil Day where
blpConvert (ElementDate c _) = Just c
blpConvert (ElementDatetime c _) = Just $ localDay c
blpConvert _ = Nothing
instance BlpConversionUtil ZonedTime where
blpConvert (ElementDatetime c (Just z)) = Just $ ZonedTime c z
blpConvert _ = Nothing
instance BlpConversionUtil LocalTime where
blpConvert (ElementDatetime c _) = Just c
blpConvert _ = Nothing
instance BlpConversionUtil String where
blpConvert (ElementBool c) = Just $ show c
blpConvert (ElementChar c) = Just $ show c
blpConvert (ElementInt c) = Just $ show c
blpConvert (ElementInt64 c) = Just $ show c
blpConvert (ElementFloat c) = Just $ show c
blpConvert (ElementDouble c) = Just $ show c
blpConvert (ElementString c) = Just $ show c
blpConvert (ElementDate c _) = Just $ show c
blpConvert (ElementTime c _) = Just $ show c
blpConvert (ElementDatetime c (Just z)) = Just $ show $ ZonedTime c z
blpConvert (ElementDatetime c Nothing) = Just $ show c
blpConvert (ElementNull) = Just ""
instance BlpConversionUtil Text where
blpConvert (ElementString c) = Just c
blpConvert v = liftM T.pack $ blpConvert v
| bitemyapp/blpapi-hs | src/Finance/Blpapi/Event.hs | mit | 9,230 | 0 | 13 | 2,481 | 1,826 | 964 | 862 | 198 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GADTs #-}
module Yi.Snippet.Internal
( Snippet (..)
, Var (..)
, VarValue (..)
, SnippetBody
, EditState (..)
, EditAction (..)
, initialEditState
, lit
, line
, nl
, place
, refer
, finish
, mirror
, renderSnippet
, collectVars
, advanceEditState
, expandSnippetE
, filename
) where
import Control.Lens
import Control.Monad.Free
import Control.Monad.State hiding (state)
import Control.Monad.Writer
import Data.Binary (Binary)
import Data.Default
import qualified Data.Map.Strict as M
import Data.Maybe
import Data.Typeable
import GHC.Generics
import Yi.Buffer
import Yi.Editor (withCurrentBuffer)
import Yi.Keymap
import Yi.Keymap.Keys
import qualified Yi.Rope as R
import Yi.Types (YiVariable, EditorM)
data Snippet = Snippet
{ snipTrigger :: R.YiString
, snipBody :: SnippetBody ()
}
data Var
= FilenameVar
| UserVar {fromVar :: Int}
deriving (Show, Eq, Ord, Generic)
data VarValue
= DefaultValue R.YiString
| CustomValue R.YiString
deriving (Show, Eq, Generic)
instance Binary Var
instance Binary VarValue
instance Default VarValue where
def = DefaultValue def
type Vars = M.Map Var VarValue
data SnippetBodyF a
= Lit R.YiString a
| Finish a
| MakeVar R.YiString (Var -> a)
| Mirror Var a
| Refer Var (R.YiString -> a)
deriving Functor
type SnippetBody = Free SnippetBodyF
filename :: Var
filename = FilenameVar
lit :: R.YiString -> SnippetBody ()
lit s = liftF (Lit s ())
line :: R.YiString -> SnippetBody ()
line s = lit (s <> "\n")
nl :: SnippetBody ()
nl = liftF (Lit "\n" ())
finish :: SnippetBody ()
finish = liftF (Finish ())
place :: R.YiString -> SnippetBody Var
place s = do
var <- liftF (MakeVar s id)
mirror var
return var
refer :: Var -> SnippetBody R.YiString
refer var = liftF (Refer var id)
mirror :: Var -> SnippetBody ()
mirror var = liftF (Mirror var ())
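-- An illustrative snippet built from the combinators above (the name
-- 'exampleFnSnippet' and its contents are made up for this sketch and are
-- not part of Yi): expanding the trigger "fn" yields a tiny function
-- skeleton whose name placeholder is mirrored into the defining equation.
exampleFnSnippet :: Snippet
exampleFnSnippet = Snippet "fn" $ do
    name <- place "func"          -- editable placeholder, initially "func"
    lit " :: Int -> Int"
    nl
    mirror name                   -- repeat the placeholder's current value
    lit " x = "
    finish                        -- where the cursor ends up when editing finishes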
data EditState = EditState
{ sesCursorPosition :: (Maybe Var, Int)
, sesVars :: Vars
} deriving (Show, Eq, Generic, Typeable)
instance Binary EditState
instance Default EditState where
def = EditState (Nothing, 0) def
instance YiVariable EditState
initialEditState :: Snippet -> EditState
initialEditState (Snippet _ body) =
EditState
(listToMaybe (M.keys vars), 0)
vars
where
vars = collectVars body
collectVars :: SnippetBody a -> Vars
collectVars body =
snd (runState (iterM run body) mempty)
where
run :: SnippetBodyF (State Vars a) -> State Vars a
run (Lit _ rest) = rest
run (Finish rest) = rest
run (MakeVar s f) = do
vars <- get
let newVar = if M.null vars
then (UserVar 0)
else UserVar (maximum (map fromVar (M.keys vars)) + 1)
newVars = M.insert newVar (DefaultValue s) vars
put newVars
f newVar
run (Mirror _ rest) = rest
run (Refer var f) = do
vars <- get
f (toYiString (vars M.! var))
data EditAction
= SENext
| SEInsertChar Char
| SEBackSpace
| SEEscape
renderSnippet :: Snippet -> EditState -> (Int, R.YiString)
renderSnippet (Snippet _ body) (EditState (maybeActiveVar, offset) vars) =
(either id id epos, string)
where
(((), (_var, epos)), string) = runWriter (runStateT (iterM run body) (UserVar (-1), Right 0))
advance :: MonadState (Var, Either Int Int) m => Int -> m ()
advance n = modify (fmap (fmap (+ n)))
run :: SnippetBodyF ((StateT (Var, Either Int Int) (Writer R.YiString)) a)
-> StateT (Var, Either Int Int) (Writer R.YiString) a
run (Lit s rest) = do
tell s
advance (R.length s)
rest
run (Finish rest) = rest
run (Mirror var rest) = do
let s = toYiString (vars M.! var)
tell s
if Just var == maybeActiveVar
then do
(v, curPos) <- get
case curPos of
Right pos ->
put (v, (Left (pos + offset)))
_ -> return ()
else advance (R.length s)
rest
run (MakeVar _ f) = do
(varName, pos) <- get
let newVar = UserVar (fromVar varName + 1)
put (newVar, pos)
f (newVar)
run (Refer var f) = f (toYiString (vars M.! var))
toYiString :: VarValue -> R.YiString
toYiString (DefaultValue s) = s
toYiString (CustomValue s) = s
advanceEditState :: EditState -> EditAction -> EditState
advanceEditState state@(EditState (Nothing, _) _) SENext = state
advanceEditState (EditState (Just i, pos) vars) (SEInsertChar c) =
let newVars = M.adjust (insertChar c pos) i vars
in EditState (Just i, pos + 1) newVars
advanceEditState (EditState (Just i, pos) vars) SEBackSpace =
let newVars = M.adjust (backspace pos) i vars
in EditState (Just i, pos - 1) newVars
advanceEditState (EditState (Just i, _) vars) SENext =
let nextPlace = listToMaybe (dropWhile (<= i) (M.keys vars))
in EditState (nextPlace, 0) vars
advanceEditState state _ = state
insertChar :: Char -> Int -> VarValue -> VarValue
insertChar c _ (DefaultValue _) = CustomValue (R.singleton c)
insertChar c pos (CustomValue s) = CustomValue (lhs <> R.singleton c <> rhs)
where (lhs, rhs) = R.splitAt pos s
backspace :: Int -> VarValue -> VarValue
backspace _ (DefaultValue _) = CustomValue mempty
backspace 0 v = v
backspace pos (CustomValue s) = CustomValue (lhs <> R.drop 1 rhs)
where (lhs, rhs) = R.splitAt (pos - 1) s
expandSnippetE :: EditorM () -> [Snippet] -> EditorM Bool
expandSnippetE escapeAction snippets = do
trigger <- withCurrentBuffer readPrevWordB
let match = listToMaybe (filter ((== trigger) . snipTrigger) snippets)
case match of
Just snip -> do
beginEditingSnippetE escapeAction snip
return True
_ -> return False
beginEditingSnippetE :: EditorM () -> Snippet -> EditorM ()
beginEditingSnippetE escapeAction snip = do
withCurrentBuffer (deleteB unitWord Backward)
Point origin <- withCurrentBuffer pointB
filenameValue <- withCurrentBuffer (gets identString)
let editState0 =
(\(EditState x vars) ->
EditState x (M.insert filename (DefaultValue (R.fromText filenameValue)) vars))
(initialEditState snip)
withCurrentBuffer (putBufferDyn editState0)
oldKeymap <- withCurrentBuffer (gets (withMode0 modeKeymap))
withCurrentBuffer $ do
let (offset, s) = renderSnippet snip editState0
insertN s
moveTo (Point (origin + offset))
let go SEEscape = do
withCurrentBuffer (modifyMode $ modeKeymapA .~ oldKeymap)
escapeAction
go action = withCurrentBuffer $ do
editState <- getBufferDyn
let nextEditState = advanceEditState editState action
(_, prevS) = renderSnippet snip editState
moveTo (Point origin)
deleteN (R.length prevS)
let (offset, s) = renderSnippet snip nextEditState
insertN s
moveTo (Point (origin + offset))
case nextEditState of
EditState (Just _, _) _ -> putBufferDyn nextEditState
_ -> modifyMode $ modeKeymapA .~ oldKeymap
withCurrentBuffer $ modifyMode $ modeKeymapA .~ topKeymapA .~ choice
[ printableChar >>=! go . SEInsertChar
, Event KEsc [] ?>>! go SEEscape
, Event KTab [] ?>>! go SENext
, Event KBS [] ?>>! go SEBackSpace
, Event (KASCII 'h') [MCtrl] ?>>! go SEBackSpace
, Event (KASCII '[') [MCtrl] ?>>! go SEEscape
, Event (KASCII 'i') [MCtrl] ?>>! go SENext
] | siddhanathan/yi | yi-snippet/src/Yi/Snippet/Internal.hs | gpl-2.0 | 7,869 | 0 | 21 | 2,123 | 2,875 | 1,462 | 1,413 | -1 | -1 |
module Utils.System.Random where
import System.Random
import Control.Monad.State
-- Warning: an empty list leads to a crash.
-- TODO: make more failsafe randomL/randomLs functions.
randomL :: RandomGen g => [a] -> g -> (a, g)
randomL [] _ = error "randomL: empty list"
randomL lst gen =
let len = length lst
(idx, gen') = randomR (0, len-1) gen
in (lst !! idx, gen')
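-- For example (illustrative):
--
-- > fst (randomL "abc" (mkStdGen 1))    -- one element chosen from the list
-- > randomL ([] :: [Int]) (mkStdGen 1)  -- calls 'error' ("randomL: empty list")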
randomLs :: RandomGen g => [[a]] -> g -> ([a], g)
randomLs =
runState . sequence . map (state . randomL)
| charringer/gonimo-back | src/Utils/System/Random.hs | agpl-3.0 | 500 | 0 | 11 | 112 | 183 | 101 | 82 | 12 | 1 |
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
--------------------------------------------------------------------------------
{-|
Module : Timer
Copyright : (c) Daan Leijen 2003
License : wxWindows
Maintainer : [email protected]
Stability : provisional
Portability : portable
Support for millisecond timers.
-}
--------------------------------------------------------------------------------
module Graphics.UI.WX.Timer
( Timer, timer, interval
) where
import Graphics.UI.WXCore.WxcClasses hiding (Timer)
import Graphics.UI.WXCore.Events
import Graphics.UI.WX.Types
import Graphics.UI.WX.Attributes
import Graphics.UI.WX.Classes
import Graphics.UI.WX.Events
import Control.Monad (void)
{--------------------------------------------------------------------
--------------------------------------------------------------------}
-- | A timer generates a 'command' event on a specified millisecond 'interval'.
--
-- * Attributes: 'interval'
--
-- * Instances: 'Able', 'Commanding'
--
type Timer = TimerEx ()
-- | Create a new timer with a 1 second interval. The timer is automatically discarded
-- when the parent is deleted.
timer :: Window a -> [Prop Timer] -> IO Timer
timer parent' props
= do t <- windowTimerCreate parent'
void (timerStart t 1000 False)
set t props
return t
-- | The millisecond interval of the timer.
interval :: Attr Timer Int
interval
= newAttr "timer-interval"
(\t -> timerGetInterval t)
(\t i -> do runs <- timerIsRuning t
if (runs)
then do timerStop t
isone <- timerIsOneShot t
void $ timerStart t i isone
else do void $ timerStart t i True
timerStop t)
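-- An illustrative helper (not exported; the name 'exampleTick' is made up
-- for this sketch): attach a timer to an existing window that prints a
-- line every 500 milliseconds.
exampleTick :: Window a -> IO Timer
exampleTick w
  = timer w [ interval := 500
            , on command := putStrLn "tick" ]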
instance Able Timer where
enabled
= newAttr "enabled"
(\t -> timerIsRuning t)
(\t able -> do runs <- timerIsRuning t
when (runs /= able)
(if able then do i <- get t interval
void $ timerStart t i False
else do timerStop t))
instance Commanding Timer where
command
= newEvent "command" timerGetOnCommand timerOnCommand
| jacekszymanski/wxHaskell | wx/src/Graphics/UI/WX/Timer.hs | lgpl-2.1 | 2,317 | 0 | 17 | 639 | 423 | 224 | 199 | 40 | 2 |
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ExtendedDefaultRules #-}
{-# LANGUAGE LiberalTypeSynonyms #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Test.QuickCheck
import Test.QuickCheck.Function
import Test.QuickCheck.Instances
import Test.Framework
import Test.Framework.TH
import Test.Framework.Providers.QuickCheck2
import Binary
import Inference
import Parser
import Loader
import Syntax
import Var
-- | /NB:/ when adding a test suite here, make sure you add it to
-- the @other-modules:@ block under @test-suite properties@ in
-- @ermine.cabal@ or you'll break @cabal sdist@.
main :: IO ()
main = defaultMain
[ Binary.tests
, Inference.tests
, Loader.tests
, Parser.tests
, Syntax.tests
, Var.tests
]
| PipocaQuemada/ermine | tests/properties.hs | bsd-2-clause | 857 | 0 | 7 | 122 | 115 | 75 | 40 | 28 | 1 |
{-# LANGUAGE RecordWildCards #-}
module Network.HTTP.Download.VerifiedSpec where
import Crypto.Hash
import Control.Monad.IO.Class (MonadIO)
import Control.Monad.Logger (LoggingT, runStdoutLoggingT)
import Control.Monad.Trans.Reader
import Control.Retry (limitRetries)
import Data.Maybe
import Network.HTTP.Client.Conduit
import Network.HTTP.Download.Verified
import Path
import Path.IO
import Test.Hspec
-- TODO: share across test files
withTempDir' :: (Path Abs Dir -> IO a) -> IO a
withTempDir' = withSystemTempDir "NHD_VerifiedSpec"
-- | An example path to download the exampleReq to.
getExamplePath :: Path Abs Dir -> IO (Path Abs File)
getExamplePath dir = do
file <- parseRelFile "cabal-install-1.22.4.0.tar.gz"
return (dir </> file)
-- | An example DownloadRequest that uses a SHA1
exampleReq :: DownloadRequest
exampleReq = fromMaybe (error "exampleReq") $ do
let req = parseRequest_ "http://download.fpcomplete.com/stackage-cli/linux64/cabal-install-1.22.4.0.tar.gz"
return DownloadRequest
{ drRequest = req
, drHashChecks = [exampleHashCheck]
, drLengthCheck = Just exampleLengthCheck
, drRetryPolicy = limitRetries 1
}
exampleHashCheck :: HashCheck
exampleHashCheck = HashCheck
{ hashCheckAlgorithm = SHA1
, hashCheckHexDigest = CheckHexDigestString "b98eea96d321cdeed83a201c192dac116e786ec2"
}
exampleLengthCheck :: LengthCheck
exampleLengthCheck = 302513
-- | The wrong ContentLength for exampleReq
exampleWrongContentLength :: Int
exampleWrongContentLength = 302512
-- | The wrong SHA1 digest for exampleReq
exampleWrongDigest :: CheckHexDigest
exampleWrongDigest = CheckHexDigestString "b98eea96d321cdeed83a201c192dac116e786ec3"
exampleWrongContent :: String
exampleWrongContent = "example wrong content"
isWrongContentLength :: VerifiedDownloadException -> Bool
isWrongContentLength WrongContentLength{} = True
isWrongContentLength _ = False
isWrongDigest :: VerifiedDownloadException -> Bool
isWrongDigest WrongDigest{} = True
isWrongDigest _ = False
data T = T
{ manager :: Manager
}
runWith :: MonadIO m => Manager -> ReaderT Manager (LoggingT m) r -> m r
runWith manager = runStdoutLoggingT . flip runReaderT manager
setup :: IO T
setup = do
manager <- newManager
return T{..}
teardown :: T -> IO ()
teardown _ = return ()
spec :: Spec
spec = beforeAll setup $ afterAll teardown $ do
let exampleProgressHook _ = return ()
describe "verifiedDownload" $ do
-- Preconditions:
-- * the exampleReq server is running
-- * the test runner has working internet access to it
it "downloads the file correctly" $ \T{..} -> withTempDir' $ \dir -> do
examplePath <- getExamplePath dir
doesFileExist examplePath `shouldReturn` False
let go = runWith manager $ verifiedDownload exampleReq examplePath exampleProgressHook
go `shouldReturn` True
doesFileExist examplePath `shouldReturn` True
it "is idempotent, and doesn't redownload unnecessarily" $ \T{..} -> withTempDir' $ \dir -> do
examplePath <- getExamplePath dir
doesFileExist examplePath `shouldReturn` False
let go = runWith manager $ verifiedDownload exampleReq examplePath exampleProgressHook
go `shouldReturn` True
doesFileExist examplePath `shouldReturn` True
go `shouldReturn` False
doesFileExist examplePath `shouldReturn` True
-- https://github.com/commercialhaskell/stack/issues/372
it "does redownload when the destination file is wrong" $ \T{..} -> withTempDir' $ \dir -> do
examplePath <- getExamplePath dir
let exampleFilePath = toFilePath examplePath
writeFile exampleFilePath exampleWrongContent
doesFileExist examplePath `shouldReturn` True
readFile exampleFilePath `shouldReturn` exampleWrongContent
let go = runWith manager $ verifiedDownload exampleReq examplePath exampleProgressHook
go `shouldReturn` True
doesFileExist examplePath `shouldReturn` True
readFile exampleFilePath `shouldNotReturn` exampleWrongContent
it "rejects incorrect content length" $ \T{..} -> withTempDir' $ \dir -> do
examplePath <- getExamplePath dir
let wrongContentLengthReq = exampleReq
{ drLengthCheck = Just exampleWrongContentLength
}
let go = runWith manager $ verifiedDownload wrongContentLengthReq examplePath exampleProgressHook
go `shouldThrow` isWrongContentLength
doesFileExist examplePath `shouldReturn` False
it "rejects incorrect digest" $ \T{..} -> withTempDir' $ \dir -> do
examplePath <- getExamplePath dir
let wrongHashCheck = exampleHashCheck { hashCheckHexDigest = exampleWrongDigest }
let wrongDigestReq = exampleReq { drHashChecks = [wrongHashCheck] }
let go = runWith manager $ verifiedDownload wrongDigestReq examplePath exampleProgressHook
go `shouldThrow` isWrongDigest
doesFileExist examplePath `shouldReturn` False
-- https://github.com/commercialhaskell/stack/issues/240
it "can download hackage tarballs" $ \T{..} -> withTempDir' $ \dir -> do
dest <- fmap (dir </>) $ parseRelFile "acme-missiles-0.3.tar.gz"
let req = parseRequest_ "http://hackage.haskell.org/package/acme-missiles-0.3/acme-missiles-0.3.tar.gz"
let dReq = DownloadRequest
{ drRequest = req
, drHashChecks = []
, drLengthCheck = Nothing
, drRetryPolicy = limitRetries 1
}
let go = runWith manager $ verifiedDownload dReq dest exampleProgressHook
doesFileExist dest `shouldReturn` False
go `shouldReturn` True
doesFileExist dest `shouldReturn` True
| Blaisorblade/stack | src/test/Network/HTTP/Download/VerifiedSpec.hs | bsd-3-clause | 5,664 | 0 | 22 | 1,086 | 1,308 | 667 | 641 | 109 | 1 |
module Weak
{-# DEPRECATED "This module has moved to System.Mem.Weak" #-}
(module System.Mem.Weak) where
import System.Mem.Weak
| FranklinChen/hugs98-plus-Sep2006 | fptools/hslibs/lang/Weak.hs | bsd-3-clause | 133 | 0 | 5 | 21 | 20 | 14 | 6 | 4 | 0 |
module Language.Java.Paragon.TypeCheck.Test where
import Language.Java.Paragon.Syntax
import Language.Java.Paragon.Pretty
import Language.Java.Paragon.Parser
import Language.Java.Paragon.TypeCheck.TcExp
import Language.Java.Paragon.TypeCheck.Monad
import Language.Java.Paragon.TypeCheck.TcEnv
import Language.Java.Paragon.TypeCheck.TcState
import Language.Java.Paragon.TypeCheck.Policy
import Language.Java.Paragon.TypeCheck.Types
import Language.Java.Paragon.TypeCheck.Locks
import qualified Data.Map as Map
testExp :: String -> IO ()
testExp str =
case parser stmtExp str of
Right e -> do res <- runTc testEnv testState $ tcExp e
print res
Left errs -> print errs
testState :: TcState
testState = TcState {
actors = Map.empty,
lockMods = noMods,
exnS = Map.empty
}
testEnv :: TcEnv
testEnv = TcEnv {
typemap = testTypes,
vars = Map.fromList
[(nam "x", vti intT top)],
lockstate = [],
returnI = (intT, bottom),
exnsE = Map.empty,
branchPCE = (Map.empty, bottom)
}
vti t p = VTI t p False False
testTypes :: TypeMap
testTypes = TypeMap {
this = clsType (Ident "This"),
fields = Map.empty,
methods = Map.fromList
[((nam "foo", [intT]), fooInfo)],
constrs = Map.fromList
[((TcClassT [(Ident "Foo", [])], []), cInfo1),
((TcClassT [(Ident "Foo", [])], [intT]), cInfo2)],
locks = Map.fromList [(nam "L", lInfo)],
policies = Map.empty,
typemethods = Map.empty,
types = Map.empty
}
fooInfo :: MTypeInfo
fooInfo = MTI {
mRetType = intT,
mRetPol = bottom,
mPars = [bottom],
mWrites = top,
mExpects = [],
mLMods = noMods,
mExns = []
}
cInfo1, cInfo2 :: CTypeInfo
cInfo1 = CTI {
cPars = [],
cWrites = top,
cExpects = [],
cLMods = noMods,
cExns = []
}
cInfo2 = cInfo1 { cPars = [bottom] }
lInfo :: LTypeInfo
lInfo = LTI { arity = 1, lockPol = top }
nam :: String -> Name
nam str = Name [Ident str] | bvdelft/parac2 | src/Language/Java/Paragon/TypeCheck/Test.hs | bsd-3-clause | 2,351 | 0 | 14 | 824 | 683 | 410 | 273 | 67 | 2 |
-- | Visibility on the 2D plane.
-- Uses an instance of Warnock's algorithm.
-- TODO: animate the line segments, make them spin and move around so we can see
-- that it's a dynamic visibility algorithm -- not pre-computed.
-- Draw lines in random shades of color depending on the index.
-- Make a key to swap between rectangular and polar projections.
-- Allow viewpoint to be set with the mouse.
import Interface
import Draw
import State
import World
import Graphics.Gloss.Game
main :: IO ()
main
= do world <- initialWorld
let state = initialState world
gameInWindow
"Visibility"
(1000, 1000)
(10, 10)
black
100
state
drawState
handleInput
stepState
| mainland/dph | dph-examples/broken/Visibility/Main.hs | bsd-3-clause | 723 | 29 | 7 | 181 | 118 | 69 | 49 | 19 | 1 |
module Control.Monad.Lazy
( mapM'
, forM'
-- , mapM_'
, sequence'
) where
import System.IO.Unsafe (unsafeInterleaveIO)
-- | Lazy variant of 'sequence': actions are performed on demand, in order,
-- as the result list is consumed.
sequence' :: [IO a] -> IO [a]
sequence' (mx:xs) = unsafeInterleaveIO $
    combine xs =<< mx
  where combine rest x = return . (x:) =<< sequence' rest
sequence' [] = return []
-- | Lazy variant of 'mapM'.
mapM' :: (a -> IO b) -> [a] -> IO [b]
mapM' f (x:xs) = unsafeInterleaveIO $ do
  y <- f x
  ys <- mapM' f xs
  return (y : ys)
mapM' _ [] = return []
-- | Lazy variant of 'forM'.
forM' :: [a] -> (a -> IO b) -> IO [b]
forM' = flip mapM'
-- mapM_' f (x:xs) = unsafeInterleaveIO $ do
-- y <- f x
-- mapM_' f xs
-- mapM_' _ [] = return ()
| kawu/nerf-misc | src/Control/Monad/Lazy.hs | bsd-3-clause | 512 | 0 | 10 | 128 | 183 | 96 | 87 | 15 | 1 |
{-# LANGUAGE Haskell2010 #-}
{-# LINE 1 "Data/IP/Op.hs" #-}
module Data.IP.Op where
import Data.Bits
import Data.IP.Addr
import Data.IP.Mask
import Data.IP.Range
----------------------------------------------------------------
{-|
>>> toIPv4 [127,0,2,1] `masked` intToMask 7
126.0.0.0
-}
class Eq a => Addr a where
{-|
The 'masked' function takes an 'Addr' and a contiguous
mask and returned a masked 'Addr'.
-}
masked :: a -> a -> a
{-|
The 'intToMask' function takes an 'Int' representing the number of bits to
be set in the returned contiguous mask. When this integer is positive, the
bits are set starting from the MSB; when it is negative, starting from the LSB.
>>> intToMask 16 :: IPv4
255.255.0.0
>>> intToMask (-16) :: IPv4
0.0.255.255
>>> intToMask 16 :: IPv6
ffff::
>>> intToMask (-16) :: IPv6
::ffff
-}
intToMask :: Int -> a
instance Addr IPv4 where
masked = maskedIPv4
intToMask = maskIPv4
instance Addr IPv6 where
masked = maskedIPv6
intToMask = maskIPv6
----------------------------------------------------------------
{-|
The >:> operator takes two 'AddrRange'. It returns 'True' if
the first 'AddrRange' contains the second 'AddrRange'. Otherwise,
it returns 'False'.
>>> makeAddrRange ("127.0.2.1" :: IPv4) 8 >:> makeAddrRange "127.0.2.1" 24
True
>>> makeAddrRange ("127.0.2.1" :: IPv4) 24 >:> makeAddrRange "127.0.2.1" 8
False
>>> makeAddrRange ("2001:DB8::1" :: IPv6) 16 >:> makeAddrRange "2001:DB8::1" 32
True
>>> makeAddrRange ("2001:DB8::1" :: IPv6) 32 >:> makeAddrRange "2001:DB8::1" 16
False
-}
(>:>) :: Addr a => AddrRange a -> AddrRange a -> Bool
a >:> b = mlen a <= mlen b && (addr b `masked` mask a) == addr a
{-|
The 'isMatchedTo' function takes an 'Addr' address and an 'AddrRange',
and returns 'True' if the range contains the address.
>>> ("127.0.2.0" :: IPv4) `isMatchedTo` makeAddrRange "127.0.2.1" 24
True
>>> ("127.0.2.0" :: IPv4) `isMatchedTo` makeAddrRange "127.0.2.1" 32
False
>>> ("2001:DB8::1" :: IPv6) `isMatchedTo` makeAddrRange "2001:DB8::1" 32
True
>>> ("2001:DB8::" :: IPv6) `isMatchedTo` makeAddrRange "2001:DB8::1" 128
False
-}
isMatchedTo :: Addr a => a -> AddrRange a -> Bool
isMatchedTo a r = a `masked` mask r == addr r
{-|
The 'makeAddrRange' function takes an 'Addr' address and a mask
length. It creates a bit mask from the mask length and masks
the 'Addr' address, then returns 'AddrRange' made of them.
>>> makeAddrRange (toIPv4 [127,0,2,1]) 8
127.0.0.0/8
>>> makeAddrRange (toIPv6 [0x2001,0xDB8,0,0,0,0,0,1]) 8
2000::/8
-}
makeAddrRange :: Addr a => a -> Int -> AddrRange a
makeAddrRange ad len = AddrRange adr msk len
where
msk = intToMask len
adr = ad `masked` msk
-- | Convert IPv4 range to IPV4-embedded-in-IPV6 range
ipv4RangeToIPv6 :: AddrRange IPv4 -> AddrRange IPv6
ipv4RangeToIPv6 range =
makeAddrRange (toIPv6 [0,0,0,0,0,0xffff, (i1 `shift` 8) .|. i2, (i3 `shift` 8) .|. i4]) (masklen + 96)
where
(ip, masklen) = addrRangePair range
[i1,i2,i3,i4] = fromIPv4 ip
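-- For example (illustrative):
--
-- > ipv4RangeToIPv6 ("127.0.0.0/8" :: AddrRange IPv4)
--
-- yields the corresponding IPv4-embedded-in-IPv6 range with a /104 mask length.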
{-|
The 'addrRangePair' function takes an 'AddrRange' and
returns the network address and the mask length.
>>> addrRangePair ("127.0.0.0/8" :: AddrRange IPv4)
(127.0.0.0,8)
>>> addrRangePair ("2000::/8" :: AddrRange IPv6)
(2000::,8)
-}
addrRangePair :: Addr a => AddrRange a -> (a, Int)
addrRangePair (AddrRange adr _ len) = (adr, len)
| phischu/fragnix | tests/packages/scotty/Data.IP.Op.hs | bsd-3-clause | 3,438 | 0 | 11 | 684 | 479 | 261 | 218 | 31 | 1 |
-- |
-- Module : System.Mesos.Executor
-- Copyright : (c) Ian Duncan 2014
-- License : MIT
--
-- Maintainer : [email protected]
-- Stability : unstable
-- Portability : non-portable
--
-- Mesos executor interface and executor driver. An executor is
-- responsible for launching tasks in a framework specific way (i.e.,
-- creating new threads, new processes, etc). One or more executors
-- from the same framework may run concurrently on the same
-- machine. Note that we use the term "executor" fairly loosely to
-- refer to the code that implements an instance of the 'ToExecutor' type class (see
-- below) as well as the program that is responsible for instantiating
-- a new 'ExecutorDriver' (also below).
--
-- In fact, while a Mesos
-- slave is responsible for (forking and) executing the "executor",
-- there is no reason why whatever the slave executed might itself
-- actually execute another program which actually instantiates and
-- runs the 'ExecutorDriver'. The only contract with the slave is
-- that the program that it invokes does not exit until the "executor"
-- has completed. Thus, what the slave executes may be nothing more
-- than a script which actually executes (or forks and waits) the
-- "real" executor.
module System.Mesos.Executor (
-- * Implementing an executor
ToExecutor(..),
-- * Creating an executor
ExecutorDriver,
withExecutorDriver,
-- * Interacting with Mesos
start,
stop,
abort,
await,
run,
sendStatusUpdate,
sendFrameworkMessage,
-- * Primitive executor management
Executor,
createExecutor,
destroyExecutor,
withExecutor,
createDriver,
destroyDriver
) where
import Control.Monad.Managed
import Data.ByteString (ByteString, packCStringLen)
import Data.ByteString.Unsafe (unsafeUseAsCStringLen)
import Foreign.C
import Foreign.Marshal.Safe hiding (with)
import Foreign.Ptr
import Foreign.Storable
import System.Mesos.Internal
import System.Mesos.Raw
import System.Mesos.Raw.Executor
import System.Mesos.Types
withExecutor :: ToExecutor a => a -> (Executor -> IO b) -> IO b
withExecutor e f = do
executor <- createExecutor e
result <- f executor
destroyExecutor executor
return result
withExecutorDriver :: ToExecutor a => a -> (ExecutorDriver -> IO b) -> IO b
withExecutorDriver e f = withExecutor e $ \executor -> do
driver <- createDriver executor
result <- f driver
destroyDriver driver
return result
-- | Callback interface to be implemented by frameworks' executors. Note
-- that only one callback will be invoked at a time, so it is not
-- recommended that you block within a callback because it may cause a
-- deadlock.
class ToExecutor a where
-- | Invoked once the executor driver has been able to successfully
-- connect with Mesos.
registered :: a -> ExecutorDriver -> ExecutorInfo -> FrameworkInfo -> SlaveInfo -> IO ()
registered _ _ _ _ _ = return ()
-- | Invoked when the executor re-registers with a restarted slave.
reRegistered :: a -> ExecutorDriver -> SlaveInfo -> IO ()
reRegistered _ _ _ = return ()
-- | Invoked when the executor becomes "disconnected" from the slave
-- (e.g., the slave is being restarted due to an upgrade).
disconnected :: a -> ExecutorDriver -> IO ()
disconnected _ _ = return ()
-- | Invoked when a task has been launched on this executor (initiated
-- via 'launchTasks'). Note that this task can be realized
-- with a thread, a process, or some simple computation, however, no
-- other callbacks will be invoked on this executor until this
-- callback has returned.
launchTask :: a -> ExecutorDriver -> TaskInfo -> IO ()
launchTask _ _ _ = return ()
-- | Invoked when a task running within this executor has been killed
-- (via 'killTask'). Note that no status update will
-- be sent on behalf of the executor, the executor is responsible
-- for creating a new 'TaskStatus' (i.e., with 'Killed') and
-- invoking 'sendStatusUpdate'.
taskKilled :: a -> ExecutorDriver -> TaskID -> IO ()
taskKilled _ _ _ = return ()
-- | Invoked when a framework message has arrived for this
-- executor. These messages are best effort; do not expect a
-- framework message to be retransmitted in any reliable fashion.
frameworkMessage :: a -> ExecutorDriver -> ByteString -> IO ()
frameworkMessage _ _ _ = return ()
-- | Invoked when the executor should terminate all of its currently
-- running tasks. Note that after Mesos has determined that an
-- executor has terminated, any tasks that the executor did not send
-- terminal status updates for (e.g., 'Killed', 'Finished',
-- 'Failed', etc.) a 'Lost' status update will be created.
shutdown :: a -> ExecutorDriver -> IO ()
shutdown _ _ = return ()
-- | Invoked when a fatal error has occurred with the executor and/or
-- executor driver. The driver will be aborted *before* invoking this
-- callback.
errorMessage :: a
-> ExecutorDriver
-> ByteString -- ^ error message
-> IO ()
errorMessage _ _ _ = return ()
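-- A minimal, illustrative executor (not part of the library; the name
-- 'PrintingExecutor' is made up for this sketch). It relies on the default
-- implementations for every callback except two trivial overrides that log
-- to stdout; a real executor would typically report task state from
-- 'launchTask' via 'sendStatusUpdate'. It could be run with
-- @withExecutorDriver PrintingExecutor run@.
data PrintingExecutor = PrintingExecutor
instance ToExecutor PrintingExecutor where
  registered _ _ _ _ _ = putStrLn "executor registered"
  launchTask _ _ _     = putStrLn "launchTask callback invoked"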
createExecutor :: ToExecutor a => a -> IO Executor
createExecutor c = do
registeredFun <- wrapExecutorRegistered $ \edp eip fip sip -> runManaged $ do
ei <- unmarshal eip
fi <- unmarshal fip
si <- unmarshal sip
liftIO $ registered c (ExecutorDriver edp) ei fi si
reRegisteredFun <- wrapExecutorReRegistered $ \edp sip -> runManaged $ do
si <- unmarshal sip
liftIO $ reRegistered c (ExecutorDriver edp) si
disconnectedFun <- wrapExecutorDisconnected $ \edp -> disconnected c (ExecutorDriver edp)
launchTaskFun <- wrapExecutorLaunchTask $ \edp tip -> runManaged $ do
ti <- unmarshal tip
liftIO $ launchTask c (ExecutorDriver edp) ti
taskKilledFun <- wrapExecutorTaskKilled $ \edp tip -> runManaged $ do
ti <- unmarshal tip
liftIO $ taskKilled c (ExecutorDriver edp) ti
frameworkMessageFun <- wrapExecutorFrameworkMessage $ \edp mcp mlp -> do
bs <- packCStringLen (mcp, fromIntegral mlp)
frameworkMessage c (ExecutorDriver edp) bs
shutdownFun <- wrapExecutorShutdown $ \edp -> shutdown c (ExecutorDriver edp)
errorCallback <- wrapExecutorError $ \edp mcp mlp -> do
bs <- packCStringLen (mcp, fromIntegral mlp)
errorMessage c (ExecutorDriver edp) bs
e <- c_createExecutor registeredFun reRegisteredFun disconnectedFun launchTaskFun taskKilledFun frameworkMessageFun shutdownFun errorCallback
return $ Executor e registeredFun reRegisteredFun disconnectedFun launchTaskFun taskKilledFun frameworkMessageFun shutdownFun errorCallback
destroyExecutor :: Executor -> IO ()
destroyExecutor e = do
c_destroyExecutor $ executorImpl e
freeHaskellFunPtr $ rawExecutorRegistered e
freeHaskellFunPtr $ rawExecutorReRegistered e
freeHaskellFunPtr $ rawExecutorDisconnected e
freeHaskellFunPtr $ rawExecutorLaunchTask e
freeHaskellFunPtr $ rawExecutorTaskKilled e
freeHaskellFunPtr $ rawExecutorFrameworkMessage e
freeHaskellFunPtr $ rawExecutorShutdown e
freeHaskellFunPtr $ rawExecutorErrorCallback e
createDriver :: Executor -> IO ExecutorDriver
createDriver = fmap ExecutorDriver . c_createExecutorDriver . executorImpl
destroyDriver :: ExecutorDriver -> IO ()
destroyDriver = c_destroyExecutorDriver . fromExecutorDriver
-- | Starts the executor driver. This needs to be called before any
-- other driver calls are made.
start :: ExecutorDriver -> IO Status
start = fmap toStatus . c_startExecutorDriver . fromExecutorDriver
-- | Stops the 'ExecutorDriver'.
stop :: ExecutorDriver -> IO Status
stop = fmap toStatus . c_stopExecutorDriver . fromExecutorDriver
-- | Aborts the driver so that no more callbacks can be made to the
-- executor. The semantics of abort and stop have deliberately been
-- separated so that code can detect an aborted driver (i.e., via
-- the return status of @abort@, see below), and
-- instantiate and start another driver if desired (from within the
-- same process ... although this functionality is currently not
-- supported for executors).
abort :: ExecutorDriver -> IO Status
abort = fmap toStatus . c_abortExecutorDriver . fromExecutorDriver
-- | Waits for the driver to be stopped or aborted, possibly
-- *blocking* the current thread indefinitely. The return status of
-- this function can be used to determine if the driver was aborted
-- (see mesos.proto for a description of Status).
await :: ExecutorDriver -> IO Status
await = fmap toStatus . c_joinExecutorDriver . fromExecutorDriver
-- | 'start's and immediately @await@s (i.e., blocks on) the driver.
run :: ExecutorDriver -> IO Status
run = fmap toStatus . c_runExecutorDriver . fromExecutorDriver
-- | Sends a status update to the framework scheduler, retrying as
-- necessary until an acknowledgement has been received or the
-- executor is terminated (in which case, a 'Lost' status update
-- will be sent). See 'System.Mesos.Scheduler.statusUpdate' for more information
-- about status update acknowledgements.
sendStatusUpdate :: ExecutorDriver -> TaskStatus -> IO Status
sendStatusUpdate (ExecutorDriver d) s = with (cppValue s) $ \sp -> do
result <- c_sendExecutorDriverStatusUpdate d sp
return $ toStatus result
-- | Sends a message to the framework scheduler. These messages are
-- best effort; do not expect a framework message to be
-- retransmitted in any reliable fashion.
sendFrameworkMessage :: ExecutorDriver
-> ByteString -- ^ message
-> IO Status
sendFrameworkMessage (ExecutorDriver d) s = with (cstring s) $ \(sp, sl) -> do
result <- c_sendExecutorDriverFrameworkMessage d sp (fromIntegral sl)
return $ toStatus result
| jhedev/hs-mesos | src/System/Mesos/Executor.hs | mit | 9,755 | 0 | 16 | 1,908 | 1,650 | 849 | 801 | 121 | 1 |
-- GSoC 2013 - Communicating with mobile devices.
{-# LANGUAGE FlexibleContexts #-}
-- | This Module define the main data types for sending Push Notifications through Apple Push Notification Service.
module Network.PushNotify.Apns.Types
( -- * APNS Settings
APNSConfig(..)
, APNSManager(..)
, DeviceToken
, Env(..)
-- * APNS Messages
, APNSmessage(..)
, AlertDictionary(..)
-- * APNS Results
, APNSresult(..)
, APNSFeedBackresult(..)
) where
import Network.PushNotify.Apns.Constants
import Network.TLS (PrivateKey)
import Control.Concurrent
import Control.Concurrent.STM.TChan
import Control.Monad.Writer
import Control.Retry
import Data.Aeson.Types
import Data.Certificate.X509 (X509)
import Data.Default
import qualified Data.HashMap.Strict as HM
import qualified Data.HashSet as HS
import Data.IORef
import Data.Text
import Data.Time.Clock
-- | 'Env' represents the three possible working environments. This determines the url and port to connect to.
data Env = Development -- ^ Development environment (by Apple).
| Production -- ^ Production environment (by Apple).
| Local -- ^ Local environment, just to test the service in the \"localhost\".
deriving Show
-- | 'APNSConfig' represents the information needed to send notifications through APNS.
--
-- For loading the certificate and private key you can use: 'Network.TLS.Extra.fileReadCertificate' and 'Network.TLS.Extra.fileReadPrivateKey'.
data APNSConfig = APNSConfig
{ apnsCertificate :: X509 -- ^ Certificate provided by Apple.
, apnsPrivateKey :: PrivateKey -- ^ Private key provided by Apple.
, environment :: Env -- ^ One of the possible environments.
, timeoutLimit :: Int -- ^ The time to wait for a server response. (microseconds)
, apnsRetrySettings :: RetrySettings -- ^ How to retry to connect to APNS servers.
}
instance Default APNSConfig where
def = APNSConfig {
apnsCertificate = undefined
, apnsPrivateKey = undefined
, environment = Development
, timeoutLimit = 200000
, apnsRetrySettings = RetrySettings {
backoff = True
, baseDelay = 200
, numRetries = limitedRetries 5
}
}
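-- For example (illustrative sketch; the certificate/key loading uses
-- 'Network.TLS.Extra.fileReadCertificate' and 'Network.TLS.Extra.fileReadPrivateKey'
-- as suggested above, and the file names are hypothetical):
--
-- > cert <- fileReadCertificate "apns-cert.pem"
-- > key  <- fileReadPrivateKey  "apns-key.pem"
-- > let config = def { apnsCertificate = cert
-- >                  , apnsPrivateKey  = key
-- >                  , environment     = Development }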
data APNSManager = APNSManager
{ mState :: IORef (Maybe ())
, mApnsChannel :: TChan ( MVar (Maybe (Chan Int,Int)) , APNSmessage)
, mWorkerID :: ThreadId
, mTimeoutLimit :: Int
}
-- | Binary token stored in hexadecimal representation as text.
type DeviceToken = Text
-- | 'APNSmessage' represents a message to be sent through APNS.
data APNSmessage = APNSmessage
{ deviceTokens :: HS.HashSet DeviceToken -- ^ Destination.
, expiry :: Maybe UTCTime -- ^ Identifies when the notification is no longer valid and can be discarded.
    , alert :: Either Text AlertDictionary -- ^ For the system to display a standard alert.
, badge :: Maybe Int -- ^ Number to display as the badge of the application icon.
, sound :: Text -- ^ The name of a sound file in the application bundle.
, rest :: Maybe Object -- ^ Extra information.
} deriving Show
instance Default APNSmessage where
def = APNSmessage {
deviceTokens = HS.empty
, expiry = Nothing
, alert = Left empty
, badge = Nothing
, sound = empty
, rest = Nothing
}
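-- For example (illustrative; assumes the OverloadedStrings extension for the
-- 'Text' literals, and the device token shown is a placeholder):
--
-- > let msg = def { deviceTokens = HS.singleton "0123abcd"
-- >               , alert        = Left "Hello from APNS"
-- >               , badge        = Just 1 }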
-- | 'AlertDictionary' represents the possible dictionary in the 'alert' label.
data AlertDictionary = AlertDictionary
{ body :: Text
, action_loc_key :: Text
, loc_key :: Text
, loc_args :: [Text]
, launch_image :: Text
} deriving Show
instance Default AlertDictionary where
def = AlertDictionary{
body = empty
, action_loc_key = empty
, loc_key = empty
, loc_args = []
, launch_image = empty
}
-- | 'APNSresult' represents information about messages after a communication with APNS Servers.
data APNSresult = APNSresult
{ successfulTokens :: HS.HashSet DeviceToken
, toReSendTokens :: HS.HashSet DeviceToken -- ^ Failed tokens that you need to resend the message to,
-- because there was a problem.
} deriving Show
instance Default APNSresult where
def = APNSresult HS.empty HS.empty
-- | 'APNSFeedBackresult' represents information after connecting with the Feedback service.
data APNSFeedBackresult = APNSFeedBackresult
{ unRegisteredTokens :: HM.HashMap DeviceToken UTCTime -- ^ Devices tokens and time indicating when APNS determined
-- that the application no longer exists on the device.
} deriving Show
instance Default APNSFeedBackresult where
def = APNSFeedBackresult HM.empty
ifNotDef :: (ToJSON a,MonadWriter [Pair] m,Eq a,Default b)
=> Text
-> (b -> a)
-> b
-> m ()
ifNotDef label f msg = if f def /= f msg
then tell [(label .= (f msg))]
else tell []
instance ToJSON APNSmessage where
toJSON msg = case rest msg of
                     Nothing  -> object [(cAPPS .= toJSONapps msg)]
                     Just obj -> Object $ HM.insert cAPPS (toJSONapps msg) obj
toJSONapps msg = object $ execWriter $ do
case alert msg of
Left xs -> if xs == empty
then tell []
else tell [(cALERT .= xs)]
Right m -> tell [(cALERT .= (toJSON m))]
ifNotDef cBADGE badge msg
ifNotDef cSOUND sound msg
instance ToJSON AlertDictionary where
toJSON msg = object $ execWriter $ do
ifNotDef cBODY body msg
ifNotDef cACTION_LOC_KEY action_loc_key msg
ifNotDef cLOC_KEY loc_key msg
if loc_key def /= loc_key msg
then ifNotDef cLOC_ARGS loc_args msg
else tell []
ifNotDef cLAUNCH_IMAGE launch_image msg
| MarcosPividori/GSoC-Communicating-with-mobile-devices | push-notify/Network/PushNotify/Apns/Types.hs | mit | 6,801 | 0 | 16 | 2,475 | 1,105 | 637 | 468 | 121 | 3 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Tests for lock allocation.
-}
{-
Copyright (C) 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Test.Ganeti.Locking.Allocation
( testLocking_Allocation
, TestLock
, TestOwner
, requestSucceeded
) where
import Control.Applicative
import qualified Data.Foldable as F
import qualified Data.Map as M
import Data.Maybe (fromMaybe)
import qualified Data.Set as S
import qualified Text.JSON as J
import Test.QuickCheck
import Test.Ganeti.TestCommon
import Test.Ganeti.TestHelper
import Ganeti.BasicTypes
import Ganeti.Locking.Allocation
import Ganeti.Locking.Types
{-
Ganeti.Locking.Allocation is polymorphic in the types of locks
and lock owners. So we can use much simpler types here than Ganeti's
real locks and lock owners, knowing that polymorphic functions cannot
exploit the simplicity of the types they're dealing with.
-}
data TestOwner = TestOwner Int deriving (Ord, Eq, Show)
instance Arbitrary TestOwner where
arbitrary = TestOwner <$> choose (0, 2)
data TestLock = TestBigLock
| TestCollectionLockA
| TestLockA Int
| TestCollectionLockB
| TestLockB Int
deriving (Ord, Eq, Show, Read)
instance Arbitrary TestLock where
arbitrary = frequency [ (1, elements [ TestBigLock
, TestCollectionLockA
, TestCollectionLockB
])
, (2, TestLockA <$> choose (0, 2))
, (2, TestLockB <$> choose (0, 2))
]
instance Lock TestLock where
lockImplications (TestLockA _) = [TestCollectionLockA, TestBigLock]
lockImplications (TestLockB _) = [TestCollectionLockB, TestBigLock]
lockImplications TestBigLock = []
lockImplications _ = [TestBigLock]
{-
All states of a LockAllocation ever available outside the
Ganeti.Locking.Allocation module must be constructed by starting
with emptyAllocation and applying the exported functions.
-}
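-- For example (illustrative only), a state in which @TestOwner 1@ holds
-- @TestBigLock@ exclusively can be built as
--
-- > fst $ updateLocks (TestOwner 1) [requestExclusive TestBigLock] emptyAllocation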
instance Arbitrary OwnerState where
arbitrary = elements [OwnShared, OwnExclusive]
instance Arbitrary a => Arbitrary (LockRequest a) where
arbitrary = LockRequest <$> arbitrary <*> genMaybe arbitrary
data UpdateRequest b a = UpdateRequest b [LockRequest a]
| FreeLockRequest b
deriving Show
instance (Arbitrary a, Arbitrary b) => Arbitrary (UpdateRequest a b) where
arbitrary =
frequency [ (4, UpdateRequest <$> arbitrary <*> (choose (1, 4) >>= vector))
, (1, FreeLockRequest <$> arbitrary)
]
-- | Transform an UpdateRequest into the corresponding state transformer.
asAllocTrans :: (Lock a, Ord b, Show b)
=> LockAllocation a b -> UpdateRequest b a -> LockAllocation a b
asAllocTrans state (UpdateRequest owner updates) =
fst $ updateLocks owner updates state
asAllocTrans state (FreeLockRequest owner) = freeLocks state owner
-- | Fold a sequence of update requests over the empty allocation to obtain a
-- lock allocation. As we consider all exported LockAllocation transformers, any
-- definable LockAllocation is obtained in this way.
foldUpdates :: (Lock a, Ord b, Show b)
=> [UpdateRequest b a] -> LockAllocation a b
foldUpdates = foldl asAllocTrans emptyAllocation
instance (Arbitrary a, Lock a, Arbitrary b, Ord b, Show b)
=> Arbitrary (LockAllocation a b) where
arbitrary = foldUpdates <$> (choose (0, 8) >>= vector)
-- | Basic property of locking: the exclusive locks of one user
-- are disjoint from any locks of any other user.
prop_LocksDisjoint :: Property
prop_LocksDisjoint =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll (arbitrary `suchThat` (/= a)) $ \b ->
let aExclusive = M.keysSet . M.filter (== OwnExclusive) $ listLocks a state
bAll = M.keysSet $ listLocks b state
in counterexample
(show a ++ "'s exclusive lock" ++ " is not respected by " ++ show b)
(S.null $ S.intersection aExclusive bAll)
-- | Verify that the list of active locks indeed contains all locks that
-- are owned by someone.
prop_LockslistComplete :: Property
prop_LockslistComplete =
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll ((arbitrary :: Gen (LockAllocation TestLock TestOwner))
`suchThat` (not . M.null . listLocks a)) $ \state ->
counterexample "All owned locks must be mentioned in the all-locks list" $
let allLocks = listAllLocks state in
all (`elem` allLocks) (M.keys $ listLocks a state)
-- | Verify that the list of all locks with states is contained in the list
-- of all locks.
prop_LocksAllOwnersSubsetLockslist :: Property
prop_LocksAllOwnersSubsetLockslist =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
counterexample "The list of all active locks must contain all locks mentioned\
\ in the locks state" $
S.isSubsetOf (S.fromList . map fst $ listAllLocksOwners state)
(S.fromList $ listAllLocks state)
-- | Verify that all locks of all owners are mentioned in the list of all locks'
-- owner's state.
prop_LocksAllOwnersComplete :: Property
prop_LocksAllOwnersComplete =
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll ((arbitrary :: Gen (LockAllocation TestLock TestOwner))
`suchThat` (not . M.null . listLocks a)) $ \state ->
counterexample "Owned locks must be mentioned in list of all locks' state" $
let allLocksState = listAllLocksOwners state
in flip all (M.toList $ listLocks a state) $ \(lock, ownership) ->
elem (a, ownership) . fromMaybe [] $ lookup lock allLocksState
-- | Verify that all lock owners mentioned in the list of all locks' owner's
-- state actually own their lock.
prop_LocksAllOwnersSound :: Property
prop_LocksAllOwnersSound =
forAll ((arbitrary :: Gen (LockAllocation TestLock TestOwner))
`suchThat` (not . null . listAllLocksOwners)) $ \state ->
counterexample "All locks mentioned in listAllLocksOwners must be owned by\
\ the mentioned owner" .
flip all (listAllLocksOwners state) $ \(lock, owners) ->
flip all owners $ \(owner, ownership) -> holdsLock owner lock ownership state
-- | Verify that exclusive group locks are honored, i.e., verify that if someone
-- holds a lock, then no one else can hold an exclusive lock on an
-- implied lock.
prop_LockImplicationX :: Property
prop_LockImplicationX =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll (arbitrary `suchThat` (/= a)) $ \b ->
let bExclusive = M.keysSet . M.filter (== OwnExclusive) $ listLocks b state
in counterexample "Others cannot have an exclusive lock on an implied lock" .
flip all (M.keys $ listLocks a state) $ \lock ->
flip all (lockImplications lock) $ \impliedlock ->
not $ S.member impliedlock bExclusive
-- | Verify that shared group locks are honored, i.e., verify that if someone
-- holds an exclusive lock, then no one else can hold any form of lock on an
-- implied lock.
prop_LockImplicationS :: Property
prop_LockImplicationS =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll (arbitrary `suchThat` (/= a)) $ \b ->
let aExclusive = M.keys . M.filter (== OwnExclusive) $ listLocks a state
bAll = M.keysSet $ listLocks b state
in counterexample "Others cannot hold locks implied by an exclusive lock" .
flip all aExclusive $ \lock ->
flip all (lockImplications lock) $ \impliedlock ->
not $ S.member impliedlock bAll
-- | Verify that locks can only be modified by updates of the owner.
prop_LocksStable :: Property
prop_LocksStable =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll (arbitrary `suchThat` (/= a)) $ \b ->
forAll (arbitrary :: Gen [LockRequest TestLock]) $ \request ->
let (state', _) = updateLocks b request state
in (listLocks a state ==? listLocks a state')
-- | Verify that a given request is satisfied in the list of owned locks.
requestSucceeded :: Ord a => M.Map a OwnerState -> LockRequest a -> Bool
requestSucceeded owned (LockRequest lock status) = M.lookup lock owned == status
-- | Verify that lock updates are atomic, i.e., either we get all the required
-- locks, or the state is completely unchanged.
prop_LockupdateAtomic :: Property
prop_LockupdateAtomic =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll (arbitrary :: Gen [LockRequest TestLock]) $ \request ->
let (state', result) = updateLocks a request state
in if result == Ok S.empty
then counterexample
("Update succeeded, but in final state " ++ show state'
++ "not all locks are as requested")
$ let owned = listLocks a state'
in all (requestSucceeded owned) request
else counterexample
("Update failed, but state changed to " ++ show state')
(state == state')
-- | Verify that releasing a lock always succeeds.
prop_LockReleaseSucceeds :: Property
prop_LockReleaseSucceeds =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll (arbitrary :: Gen TestLock) $ \lock ->
let (_, result) = updateLocks a [requestRelease lock] state
in counterexample
("Releasing a lock has to suceed uncondiationally, but got "
++ show result)
(isOk result)
-- | Verify the property that only the blocking owners prevent
-- lock allocation. We deliberately go for the expensive variant of
-- restraining by suchThat, as otherwise the number of cases actually
-- covered is too small.
prop_BlockSufficient :: Property
prop_BlockSufficient =
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll (arbitrary :: Gen TestLock) $ \lock ->
forAll (elements [ [requestShared lock]
, [requestExclusive lock]]) $ \request ->
forAll ((arbitrary :: Gen (LockAllocation TestLock TestOwner))
`suchThat` (genericResult (const False) (not . S.null)
. snd . updateLocks a request)) $ \state ->
let (_, result) = updateLocks a request state
blockedOn = genericResult (const S.empty) id result
in counterexample "After all blockers release, a request must succeed"
. isOk . snd . updateLocks a request $ F.foldl freeLocks state blockedOn
-- | Verify the property that every blocking owner is necessary, i.e., even
-- if we only keep the locks of one of the blocking owners, the request still
-- will be blocked. We deliberately use the expensive variant of restraining
-- to ensure good coverage. To make sure the request can always be blocked
-- by two owners, for a shared request we request two different locks.
prop_BlockNecessary :: Property
prop_BlockNecessary =
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll (arbitrary :: Gen TestLock) $ \lock ->
forAll (arbitrary `suchThat` (/= lock)) $ \lock' ->
forAll (elements [ [requestShared lock, requestShared lock']
, [requestExclusive lock]]) $ \request ->
forAll ((arbitrary :: Gen (LockAllocation TestLock TestOwner))
`suchThat` (genericResult (const False) ((>= 2) . S.size)
. snd . updateLocks a request)) $ \state ->
let (_, result) = updateLocks a request state
blockers = genericResult (const S.empty) id result
in counterexample "Each blocker alone must block the request"
. flip all (S.elems blockers) $ \blocker ->
(==) (Ok $ S.singleton blocker) . snd . updateLocks a request
. F.foldl freeLocks state
$ S.filter (/= blocker) blockers
instance J.JSON TestOwner where
showJSON (TestOwner x) = J.showJSON x
readJSON = (>>= return . TestOwner) . J.readJSON
instance J.JSON TestLock where
showJSON = J.showJSON . show
readJSON = (>>= return . read) . J.readJSON
-- | Verify that for LockAllocation we have readJSON . showJSON = Ok.
prop_ReadShow :: Property
prop_ReadShow =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
J.readJSON (J.showJSON state) ==? J.Ok state
-- | Verify that the list of lock owners is complete.
prop_OwnerComplete :: Property
prop_OwnerComplete =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
foldl freeLocks state (lockOwners state) ==? emptyAllocation
-- | Verify that each owner actually owns a lock.
prop_OwnerSound :: Property
prop_OwnerSound =
forAll ((arbitrary :: Gen (LockAllocation TestLock TestOwner))
`suchThat` (not . null . lockOwners)) $ \state ->
counterexample "All subjects listed as owners must own at least one lock"
. flip all (lockOwners state) $ \owner ->
not . M.null $ listLocks owner state
-- | Verify that for LockRequest we have readJSON . showJSON = Ok.
prop_ReadShowRequest :: Property
prop_ReadShowRequest =
forAll (arbitrary :: Gen (LockRequest TestLock)) $ \state ->
J.readJSON (J.showJSON state) ==? J.Ok state
testSuite "Locking/Allocation"
[ 'prop_LocksDisjoint
, 'prop_LockslistComplete
, 'prop_LocksAllOwnersSubsetLockslist
, 'prop_LocksAllOwnersComplete
, 'prop_LocksAllOwnersSound
, 'prop_LockImplicationX
, 'prop_LockImplicationS
, 'prop_LocksStable
, 'prop_LockupdateAtomic
, 'prop_LockReleaseSucceeds
, 'prop_BlockSufficient
, 'prop_BlockNecessary
, 'prop_ReadShow
, 'prop_OwnerComplete
, 'prop_OwnerSound
, 'prop_ReadShowRequest
]
| apyrgio/ganeti | test/hs/Test/Ganeti/Locking/Allocation.hs | bsd-2-clause | 14,952 | 0 | 27 | 3,127 | 3,390 | 1,815 | 1,575 | 231 | 2 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="pt-BR">
<title>SAML Support</title>
<maps>
<homeID>saml</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/saml/src/main/javahelp/help_pt_BR/helpset_pt_BR.hs | apache-2.0 | 958 | 82 | 52 | 156 | 390 | 206 | 184 | -1 | -1 |
module Network.Transport.Test where
import qualified Network.Transport as NT
import Data.Typeable (Typeable)
-- | Extra operations required of transports for the purposes of testing.
data TestTransport = TestTransport
{ -- | The transport to use for testing.
testTransport :: NT.Transport
-- | IO action to perform to simulate losing a connection.
, testBreakConnection :: NT.EndPointAddress -> NT.EndPointAddress -> IO ()
} deriving (Typeable)
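-- A construction sketch (@nt@ and @breakConn@ are assumed to come from the
-- concrete transport implementation under test; they are not defined here):
--
-- > mkTestTransport :: NT.Transport
-- >                 -> (NT.EndPointAddress -> NT.EndPointAddress -> IO ())
-- >                 -> TestTransport
-- > mkTestTransport nt breakConn =
-- >   TestTransport { testTransport = nt, testBreakConnection = breakConn }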
| jeremyjh/distributed-process-lifted | test/Network/Transport/Test.hs | bsd-3-clause | 461 | 0 | 12 | 78 | 75 | 46 | 29 | 7 | 0 |
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE ExtendedDefaultRules #-}
module T12797 where
import Prelude
import Control.Monad.IO.Class
import Data.Kind (Type)
type family FuncArg (m :: (Type -> Type)) :: Maybe Type
test2 :: (MonadIO m, FuncArg m ~ 'Nothing) => m ()
test2 = liftIO $ print 6
| sdiehl/ghc | testsuite/tests/typecheck/should_compile/T12797.hs | bsd-3-clause | 395 | 0 | 8 | 90 | 97 | 57 | 40 | 11 | 1 |
{-# LANGUAGE BangPatterns, CPP, ForeignFunctionInterface, MagicHash, Rank2Types,
RecordWildCards, UnboxedTuples, UnliftedFFITypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
{-@ LIQUID "--c-files=../../cbits/cbits.c" @-}
-- |
-- Module : Data.Text.Array
-- Copyright : (c) 2009, 2010, 2011 Bryan O'Sullivan
--
-- License : BSD-style
-- Maintainer : [email protected], [email protected],
-- [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Packed, unboxed, heap-resident arrays. Suitable for performance
-- critical use, both in terms of large data quantities and high
-- speed.
--
-- This module is intended to be imported @qualified@, to avoid name
-- clashes with "Prelude" functions, e.g.
--
-- > import qualified Data.Text.Array as A
--
-- The names in this module resemble those in the 'Data.Array' family
-- of modules, but are shorter due to the assumption of qualified
-- naming.
module Data.Text.Array
(
-- * Types
--LIQUID added Array dataCon export
Array(..)
--LIQUID , MArray(maBA)
, MArray(..)
-- * Functions
, copyM
, copyI
, empty
, equal
--LIQUID #if defined(ASSERTS)
, length
--LIQUID #endif
, run
, run2
, toList
, unsafeFreeze
, unsafeIndex
, new
, unsafeWrite
--LIQUID
, unsafeIndexF
, unsafeIndexB
) where
#if defined(ASSERTS)
-- This fugly hack is brought by GHC's apparent reluctance to deal
-- with MagicHash and UnboxedTuples when inferring types. Eek!
# define CHECK_BOUNDS(_func_,_len_,_k_) \
if (_k_) < 0 || (_k_) >= (_len_) then error ("Data.Text.Array." ++ (_func_) ++ ": bounds error, offset " ++ show (_k_) ++ ", length " ++ show (_len_)) else
#else
# define CHECK_BOUNDS(_func_,_len_,_k_)
#endif
#include "MachDeps.h"
--LIQUID #if defined(ASSERTS)
import Control.Exception (assert)
--LIQUID #endif
#if __GLASGOW_HASKELL__ >= 702
import Control.Monad.ST.Unsafe (unsafeIOToST)
#else
import Control.Monad.ST (unsafeIOToST)
#endif
import Data.Bits ((.&.), xor)
import Data.Text.Unsafe.Base (inlinePerformIO)
import Data.Text.UnsafeShift (shiftL, shiftR)
#if __GLASGOW_HASKELL__ >= 703
import Foreign.C.Types (CInt(CInt), CSize(CSize))
#else
import Foreign.C.Types (CInt, CSize)
#endif
import GHC.Base (ByteArray#, MutableByteArray#, Int(..),
indexWord16Array#, newByteArray#,
unsafeCoerce#, writeWord16Array#)
import GHC.ST (ST(..), runST)
import GHC.Word (Word16(..))
import Prelude hiding (length, read)
--LIQUID
import Language.Haskell.Liquid.Prelude
{-@ predicate Btwn V X Y = ((X <= V) && (V < Y)) @-}
{-@ predicate BtwnE V X Y = ((X < V) && (V < Y)) @-}
{-@ predicate BtwnI V X Y = ((X <= V) && (V <= Y)) @-}
{-@ predicate BtwnEI V X Y = ((X < V) && (V <= Y)) @-}
{-@ qualif LenDiff(v:List a, i:int, l:int): (len v) = (l - i) @-}
{-@ qualif Diff(v:int, d:int, l:int): v = l - d @-}
-- | Immutable array type.
data Array = Array {
aBA :: ByteArray#
--LIQUID #if defined(ASSERTS)
, aLen :: {-# UNPACK #-} !Int -- length (in units of Word16, not bytes)
--LIQUID #endif
}
{-@ data Array
= Array
(aBA :: ByteArray#)
(aLen :: Nat)
@-}
{-@ measure alen :: Array -> Int
alen (Array aBA aLen) = aLen
@-}
{-@ aLen :: a:Array -> {v:Nat | v = (alen a)} @-}
{-@ type ArrayN N = {v:Array | (alen v) = N} @-}
{-@ type AValidI A = {v:Nat | v < (alen A)} @-}
{-@ type AValidO A = {v:Nat | v <= (alen A)} @-}
{-@ type AValidL O A = {v:Nat | (v+O) <= (alen A)} @-}
{-@ qualif ALen(v:Int, a:Array): v = alen(a) @-}
{-@ qualif ALen(v:Array, i:Int): i = alen(v) @-}
{-@ invariant {v:Array | (alen v) >= 0} @-}
{-@ measure numchars :: Array -> Int -> Int -> Int @-}
-- | Mutable array type, for use in the ST monad.
data MArray s = MArray {
maBA :: MutableByteArray# s
--LIQUID #if defined(ASSERTS)
, maLen :: {-# UNPACK #-} !Int -- length (in units of Word16, not bytes)
--LIQUID #endif
}
{-@ data MArray s = MArray
(maBA :: MutableByteArray# s)
(maLen :: Nat)
@-}
{-@ measure malen :: MArray s -> Int
malen (MArray maBA maLen) = maLen
@-}
{-@ maLen :: ma:(MArray s) -> {v:Nat | v = (malen ma)} @-}
{-@ type MArrayN s N = {v:MArray s | (malen v) = N} @-}
{-@ type MAValidI A = {v:Nat | v < (malen A)} @-}
{-@ type MAValidO A = {v:Nat | v <= (malen A)} @-}
{-@ type MAValidL O A = {v:Nat | (v+O) <= (malen A)} @-}
{-@ qualif MALen(v:Int, a:MArray s): v = malen(a) @-}
{-@ qualif MALen(v:MArray s, i:Int): i = malen(v) @-}
{-@ invariant {v:MArray s | (malen v) >= 0} @-}
{-@ qualif FreezeMArr(v:Array, ma:MArray s):
alen(v) = malen(ma)
@-}
--LIQUID #if defined(ASSERTS)
-- | Operations supported by all arrays.
class IArray a where
-- | Return the length of an array.
length :: a -> Int
instance IArray Array where
length = aLen
{-# INLINE length #-}
instance IArray (MArray s) where
length = maLen
{-# INLINE length #-}
--LIQUID #endif
-- | Create an uninitialized mutable array.
{-@ assume new :: forall s. n:Nat -> ST s (MArrayN s n) @-}
-- TODO: losing information in cast
new :: forall s. Int -> ST s (MArray s)
new n
| n < 0 || n .&. highBit /= 0 = error $ "Data.Text.Array.new: size overflow"
| otherwise = ST $ \s1# ->
case newByteArray# len# s1# of
(# s2#, marr# #) -> (# s2#, MArray marr#
--LIQUID #if defined(ASSERTS)
n
--LIQUID #endif
#)
where !(I# len#) = bytesInArray n
highBit = maxBound `xor` (maxBound `shiftR` 1)
{-# INLINE new #-}
-- | Freeze a mutable array. Do not mutate the 'MArray' afterwards!
{-@ assume unsafeFreeze :: ma:MArray s -> (ST s (ArrayN (malen ma))) @-}
-- TODO: losing information in cast
unsafeFreeze :: MArray s -> ST s Array
unsafeFreeze MArray{..} = ST $ \s# ->
(# s#, Array (unsafeCoerce# maBA)
--LIQUID #if defined(ASSERTS)
maLen
--LIQUID #endif
#)
{-# INLINE unsafeFreeze #-}
-- | Indicate how many bytes would be used for an array of the given
-- size.
bytesInArray :: Int -> Int
bytesInArray n = n `shiftL` 1
{-# INLINE bytesInArray #-}
-- | Unchecked read of an immutable array. May return garbage or
-- crash on an out-of-bounds access.
{-@ unsafeIndex :: a:Array -> AValidI a -> Word16 @-}
unsafeIndex :: Array -> Int -> Word16
unsafeIndex Array{..} i@(I# i#) =
CHECK_BOUNDS("unsafeIndex",aLen,i)
case indexWord16Array# aBA i# of r# -> (W16# r#)
{-# INLINE unsafeIndex #-}
--LIQUID
{-@ predicate SpanChar D A O L I = (((numchars (A) (O) ((I-O)+D)) = (1 + (numchars (A) (O) (I-O))))
&& ((numchars (A) (O) ((I-O)+D)) <= (numchars A O L))
&& (((I-O)+D) <= L))
@-}
{-@ unsafeIndexF :: a:Array -> o:AValidO a -> l:AValidL o a
-> i:{v:Int | (Btwn (v) (o) (o + l))}
-> {v:Word16 | (if (BtwnI v 55296 56319)
then (SpanChar 2 a o l i)
else (SpanChar 1 a o l i))}
@-}
unsafeIndexF :: Array -> Int -> Int -> Int -> Word16
unsafeIndexF a o l i = let x = unsafeIndex a i
in liquidAssume (unsafeIndexFQ x a o l i) x
{-@ unsafeIndexFQ :: x:Word16 -> a:Array -> o:Int -> l:Int -> i:Int
-> {v:Bool | ((Prop v) <=> (if (BtwnI x 55296 56319)
then (SpanChar 2 a o l i)
else (SpanChar 1 a o l i)))}
@-}
unsafeIndexFQ :: Word16 -> Array -> Int -> Int -> Int -> Bool
unsafeIndexFQ = undefined
{-@ unsafeIndexB :: a:Array -> o:AValidO a -> l:AValidL o a
-> i:{v:Int | Btwn v o (o + l)}
-> {v:Word16 | (if (v >= 56320 && v <= 57343)
then ((numchars(a, o, (i - o )+1)
= (1 + numchars(a, o, (i-o)-1)))
&& (numchars(a, o, (i-o-1)) >= 0)
&& (((i-o)-1) >= 0))
else ((numchars(a, o, (i-o)+1)
= (1 + numchars(a, o, i-o)))
&& (numchars(a, o, (i-o)) >= 0)))}
@-}
unsafeIndexB :: Array -> Int -> Int -> Int -> Word16
unsafeIndexB a o l i = let x = unsafeIndex a i
in liquidAssume (unsafeIndexBQ x a o i) x
{-@ unsafeIndexBQ :: x:Word16 -> a:Array -> o:Int -> i:Int
-> {v:Bool | ((Prop v) <=>
(if ((x >= 56320) && (x <= 57343))
then ((numchars(a, o, (i-o)+1)
= (1 + numchars(a, o, (i-o)-1)))
&& (numchars(a, o, (i-o-1)) >= 0)
&& (((i-o)-1) >= 0))
else ((numchars(a, o, (i-o)+1)
= (1 + numchars(a, o, i-o)))
&& (numchars(a, o, (i-o)) >= 0))))}
@-}
unsafeIndexBQ :: Word16 -> Array -> Int -> Int -> Bool
unsafeIndexBQ = undefined
-- | Unchecked write of a mutable array. May return garbage or crash
-- on an out-of-bounds access.
{-@ unsafeWrite :: ma:MArray s -> MAValidI ma -> Word16 -> ST s () @-}
unsafeWrite :: MArray s -> Int -> Word16 -> ST s ()
unsafeWrite MArray{..} i@(I# i#) (W16# e#) = ST $ \s1# ->
CHECK_BOUNDS("unsafeWrite",maLen,i)
case writeWord16Array# maBA i# e# s1# of
s2# -> (# s2#, () #)
{-# INLINE unsafeWrite #-}
-- | Convert an immutable array to a list.
{-@ toList :: a:Array -> o:AValidO a -> l:AValidL o a -> {v:[Word16] | (len v) = l} @-}
toList :: Array -> Int -> Int -> [Word16]
toList ary off len = loop len 0
{- LIQUID WITNESS -}
where loop (d :: Int) i
| i < len = unsafeIndex ary (off+i) : loop (d-1) (i+1)
| otherwise = []
-- | An empty immutable array.
{-@ empty :: {v:Array | (alen v) = 0} @-}
empty :: Array
empty = runST (new 0 >>= unsafeFreeze)
-- | Run an action in the ST monad and return an immutable array of
-- its result.
{-
run :: forall <p :: Int -> Prop>.
(forall s. GHC.ST.ST s (Data.Text.Array.MArray s)<p>)
-> exists[z:Int<p>]. Data.Text.Array.Array<p>
@-}
{- run :: (forall s. GHC.ST.ST s ma:(Data.Text.Array.MArray s))
-> {v:Data.Text.Array.Array | (alen v) = (len ma)}
@-}
run :: (forall s. ST s (MArray s)) -> Array
run k = runST (k >>= unsafeFreeze)
-- | Run an action in the ST monad and return an immutable array of
-- its result paired with whatever else the action returns.
{- run2 :: (forall s. GHC.ST.ST s (ma:Data.Text.Array.MArray s, a:a))
-> ({v:Data.Text.Array.Array | (alen v) = (malen ma)}, {v:a | v = a})
@-}
run2 :: (forall s. ST s (MArray s, a)) -> (Array, a)
run2 k = runST (do
(marr,b) <- k
arr <- unsafeFreeze marr
return (arr,b))
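-- A small usage sketch (illustrative, not part of the original module):
-- build a two-element array and also return its length.
--
-- > pair :: (Array, Int)
-- > pair = run2 (do marr <- new 2
-- >                 unsafeWrite marr 0 65
-- >                 unsafeWrite marr 1 66
-- >                 return (marr, 2))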
-- | Copy some elements of a mutable array.
{-@ copyM :: dest:MArray s -> didx:MAValidO dest
-> src:MArray s -> sidx:MAValidO src
-> {v:Nat | (((v + didx) <= (malen dest))
&& ((v + sidx) <= (malen src)))}
-> ST s ()
@-}
copyM :: MArray s -- ^ Destination
-> Int -- ^ Destination offset
-> MArray s -- ^ Source
-> Int -- ^ Source offset
-> Int -- ^ Count
-> ST s ()
copyM dest didx src sidx count
| count <= 0 = return ()
| otherwise =
--LIQUID #if defined(ASSERTS)
--LIQUID assert (sidx + count <= length src) .
--LIQUID assert (didx + count <= length dest) .
--LIQUID #endif
liquidAssert (sidx + count <= maLen src) .
liquidAssert (didx + count <= maLen dest) .
unsafeIOToST $ memcpyM (maBA dest) (fromIntegral didx)
(maBA src) (fromIntegral sidx)
(fromIntegral count)
{-# INLINE copyM #-}
-- | Copy some elements of an immutable array.
{-@ copyI :: dest:MArray s -> i0:MAValidO dest
-> src:Array -> j0:AValidO src
-> top:{v:MAValidO dest | ((v-i0)+j0) <= (alen src)}
-> GHC.ST.ST s ()
@-}
copyI :: MArray s -- ^ Destination
-> Int -- ^ Destination offset
-> Array -- ^ Source
-> Int -- ^ Source offset
-> Int -- ^ First offset in destination /not/ to
-- copy (i.e. /not/ length)
-> ST s ()
copyI dest i0 src j0 top
| i0 >= top = return ()
| otherwise = unsafeIOToST $
memcpyI (maBA dest) (fromIntegral i0)
(aBA src) (fromIntegral j0)
(fromIntegral (top-i0))
{-# INLINE copyI #-}
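-- A quick sketch of the offset convention (@dest@ and @src@ are hypothetical):
-- to copy three elements of @src@ starting at index 0 into @dest@ starting at
-- index 2, the last argument is the first destination index /not/ written,
-- i.e. @2 + 3 = 5@, rather than the element count @3@:
--
-- > copyI dest 2 src 0 5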
-- | Compare portions of two arrays for equality. No bounds checking
-- is performed.
--LIQUID TODO: this is not correct because we're just comparing sub-arrays
{- equal :: a1:Data.Text.Array.Array
-> o1:{v:Int | ((v >= 0) && (v < (alen a1)))}
-> a2:Data.Text.Array.Array
-> o2:{v:Int | ((v >= 0) && (v < (alen a2)))}
-> cnt:{v:Int | ((v >= 0) && ((v+o1) < (alen a1)) && ((v+o2) < (alen a2)))}
-> {v:Bool | ((Prop v) <=> (a1 = a2))}
@-}
equal :: Array -- ^ First
-> Int -- ^ Offset into first
-> Array -- ^ Second
-> Int -- ^ Offset into second
-> Int -- ^ Count
-> Bool
equal arrA offA arrB offB count = inlinePerformIO $ do
i <- memcmp (aBA arrA) (fromIntegral offA)
(aBA arrB) (fromIntegral offB) (fromIntegral count)
return $! i == 0
{-# INLINE equal #-}
foreign import ccall unsafe "_hs_text_memcpy" memcpyI
:: MutableByteArray# s -> CSize -> ByteArray# -> CSize -> CSize -> IO ()
{-@ memcpyI :: MutableByteArray# s -> CSize -> ByteArray# -> CSize -> CSize -> IO () @-}
foreign import ccall unsafe "_hs_text_memcmp" memcmp
:: ByteArray# -> CSize -> ByteArray# -> CSize -> CSize -> IO CInt
{-@ memcmp :: ByteArray# -> CSize -> ByteArray# -> CSize -> CSize -> IO CInt @-}
foreign import ccall unsafe "_hs_text_memcpy" memcpyM
:: MutableByteArray# s -> CSize -> MutableByteArray# s -> CSize -> CSize
-> IO ()
{-@ memcpyM :: MutableByteArray# s -> CSize -> MutableByteArray# s -> CSize -> CSize -> IO () @-}
| abakst/liquidhaskell | benchmarks/text-0.11.2.3/Data/Text/Array.hs | bsd-3-clause | 14,643 | 0 | 12 | 4,558 | 1,885 | 1,071 | 814 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-
This URI/URL thing is truly confusing, so this is how I'll handle it:
If the URI is a star ("*") then the server will handle it itself (it never gets evaluated),
so the parsing will return Nothing.
If the URI is an absolute path we'll cheat and just throw it into the URL object.
-}
module Smutt.HTTP.URI (
URI
, URL (..)
, fromString
) where
import Smutt.Util.String as Str
import Data.Monoid
import Smutt.Util.URL.Simple (URL)
import qualified Smutt.Util.URL.Simple as SimpleURL
import Data.ByteString.Lazy as BL
type URI = URL BL.ByteString
-- Decided to handle it as follows:
-- If the "URI" is a "*" then the server will use a special handler.
-- If the "URI" is an absolute path, it is parsed into a URL object.
fromString :: (Str.StringData s) => s -> Maybe (URL s)
fromString "" = Nothing
fromString "*" = Nothing
fromString strIn = SimpleURL.fromString strIn
| black0range/Smutt | src/Smutt/HTTP/URI.hs | mit | 894 | 0 | 9 | 189 | 139 | 86 | 53 | 15 | 1 |
module Main where
import HaskellSkeleton
main :: IO ()
main = do
putStrLn getHelloString
| FunTimeCoding/haskell-skeleton | src/Main.hs | mit | 96 | 0 | 7 | 21 | 28 | 15 | 13 | 5 | 1 |
-- | Prefer using the main module. If you manipulate the internals of `Plan`
-- to add fake steps, bad things might happen.
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ApplicativeDo #-}
module Control.Plan.Core (module Control.Plan.Core) where
import Prelude hiding ((.),id)
import qualified Data.Bifunctor as Bifunctor
import Data.Semigroup
import Data.Foldable
import Data.Bifoldable
import Data.Bitraversable
import Data.Bifunctor(Bifunctor,bimap)
import Data.Bifunctor.Clown
import Data.Functor.Identity
import Data.Functor.Compose
import Data.Tree
import Data.List.NonEmpty (NonEmpty((:|)))
import qualified Data.List.NonEmpty
import qualified Data.Sequence as Seq
import Data.Sequence (Seq)
import Data.Profunctor (Profunctor(..),Star(..))
import Control.Category
import Control.Arrow
import Control.Monad
import Control.Comonad
import Control.Monad.IO.Class
import Control.Monad.Trans.Class
import Streaming (hoist)
import qualified Streaming.Prelude
import Streaming.Prelude (Stream,Of(..),yield,next,effects)
{- $setup
>>> :set -XArrows
>>> :set -XTypeApplications
>>> import Control.Applicative
>>> import Control.Plan
>>> import Data.Tree
>>> import Text.Read(readMaybe)
-}
-- | A computation that takes inputs of type @i@ and produces outputs of type
-- @o@ working in the underlying monad @m@. The 'Applicative' instance cares
-- only about the outputs; the 'Arrow' instance cares about both inputs and
-- outputs.
--
-- Parts of the computation can be labeled as steps with tags of type @s@.
--
-- Computations can have monoidal resource annotations of type @w@.
--
-- The structure of steps and the monoidal annotations can be inspected before
-- executing the 'Plan'.
data Plan s w m i o = Plan (Steps s w) (Star (Stream (Of Tick') m) i o) deriving Functor
instance (Semigroup w,Monoid w,Monad m) => Applicative (Plan s w m i) where
pure x = Plan mempty (pure x)
Plan forest1 f <*> Plan forest2 x = Plan (forest1 `mappend` forest2) (f <*> x)
instance (Semigroup w,Monoid w,Monad m) => Category (Plan s w m) where
id = Plan mempty (Star (runKleisli id))
(Plan forest1 (Star f1)) . (Plan forest2 (Star f2)) =
Plan (forest2 `mappend` forest1) (Star (f2 >=> f1))
instance (Semigroup w,Monoid w,Monad m) => Arrow (Plan s w m) where
arr f = Plan mempty (Star (runKleisli (arr f)))
first (Plan forest (Star f)) = Plan forest (Star (runKleisli (first (Kleisli f))))
instance (Semigroup w,Monoid w,Monad m) => Profunctor (Plan s w m) where
lmap f p = f ^>> p
rmap f p = p >>^ f
-- | A 'Data.Tree.Forest' of step tags of type @s@ interspersed with monoidal
-- annotations of type @w@.
data Steps s w = Steps !(Seq (w,s,Mandatoriness,Steps s w)) w
deriving (Functor,Foldable,Traversable,Eq,Show)
{- Steps of 'Plan's constructed in 'Applicative' fashion are always
'Mandatory'. Only steps declared with 'skippable' are optional.
-}
data Mandatoriness = Skippable
| Mandatory
deriving (Show,Eq,Ord)
instance Bifunctor Steps where
first f (Steps steps w) =
let go (w',e,mandatoriness',substeps) = (w',f e,mandatoriness',Bifunctor.first f substeps)
in Steps (fmap go steps) w
second = fmap
-- | 'bifoldMap' allows extracting the steps and the annotations together.
--
instance Bifoldable Steps where
bifoldMap g f (Steps steps w) =
foldMap (\(w',s,_,substeps) -> f w'
`mappend`
g s
`mappend`
bifoldMap g f substeps) steps
`mappend`
f w
instance Bitraversable Steps where
bitraverse g f (Steps steps w) =
Steps <$> traverse innertraverse steps <*> f w
where
innertraverse (w',e,mandatoriness',substeps) =
(,,,) <$> f w' <*> g e <*> pure mandatoriness' <*> bitraverse g f substeps
instance Semigroup w => Semigroup (Steps s w) where
Steps s1 w1 <> Steps s2 w2 =
case Seq.viewl s2 of
Seq.EmptyL -> Steps s1 (w1 <> w2)
(w',s,mandatoriness',substeps) Seq.:< s2' ->
Steps (s1 <> ((w1 <> w',s,mandatoriness',substeps) Seq.<| s2')) w2
instance (Semigroup w,Monoid w) => Monoid (Steps s w) where
mempty = Steps mempty mempty
mappend = (<>)
-- | A catamorphism on 'Steps' that "destroys" the 'Steps' value from the
-- leaves upwards.
--
-- Unlike 'foldMap' or 'bifoldMap', it allows a more structured analysis of the
-- annotations, by preserving their relationship with the hierarchy of steps.
--
foldSteps :: ([(w,s,Mandatoriness,r)] -> w -> r) -- ^ A function that consumes a list of step tags of type @s@, surrounded and interleaved with annotations of type @w@. Each step is also annotated with its mandatoriness and with the result @r@ of consuming its substeps, if there were any.
-> Steps s w
-> r
foldSteps f = foldSteps' (\steps -> f (toList steps))
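-- For instance, a sketch that counts every declared step in a 'Steps' value:
--
-- > countSteps :: Steps s w -> Int
-- > countSteps = foldSteps (\substeps _ -> sum [1 + r | (_, _, _, r) <- substeps])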
foldSteps' :: (Seq (w,s,Mandatoriness,r) -> w -> r) -> Steps s w -> r
foldSteps' f = go
where
go (Steps steps w) =
f (fmap (\(w',e',mandatoriness',substeps) -> (w',e',mandatoriness',go substeps)) steps) w
-- | Adapt the 'Step' value inside a 'Plan' without extracting it.
bimapSteps :: (s -> s') -> (w -> w') -> Plan s w m i o -> Plan s' w' m i o
bimapSteps f g (Plan steps star) = Plan (Bifunctor.bimap f g steps) star
-- | Use a lens setter to "zoom" the monoidal annotations of a 'Plan' into a
-- wider monoidal context.
zoomSteps :: Monoid w' => ((w -> Identity w) -> w' -> Identity w') -> Plan s w m i o -> Plan s w' m i o
zoomSteps setter = bimapSteps id (\w -> set' w mempty)
where
set' w = runIdentity . setter (Identity . const w)
-- | Change the underlying monad of a 'Plan'.
hoistPlan :: Monad m => (forall x. m x -> n x) -> Plan s w m i o -> Plan s w n i o
hoistPlan trans (Plan steps (Star f)) = Plan steps (Star (hoist trans . f))
data Tick' = Skipped' | Started' | Finished' deriving (Eq,Ord,Enum,Show)
-- | Inspect a plan without executing it.
getSteps :: Plan s w m i o -> Steps s w
getSteps (Plan steps _) = steps
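-- A minimal sketch of inspecting a plan before running it (the step names and
-- actions are made up):
--
-- > demo :: Plan String () IO () ()
-- > demo = step "prepare" (plan (putStrLn "prep"))
-- >     *> step "work"    (plan (putStrLn "work"))
--
-- @toForest (getSteps demo)@ should be a two-node forest, one node per
-- declared step, which can be rendered with 'Data.Tree.drawForest'.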
-- | Decorate each step tag with its mandatoriness. Useful in combination with 'toForest'.
mandatoriness :: Steps s w -> Steps (Mandatoriness,s) w
mandatoriness (Steps steps w) = Steps (fmap go steps) w
where
go (w',s,mandatory,substeps) = (w',(mandatory,s),mandatory,mandatoriness substeps)
-- | Declare a step by wrapping an existing plan (which may contain substeps).
step :: (Monoid w,Monad m) => s -> Plan s w m i o -> Plan s w m i o
step s (Plan forest (Star f)) =
Plan (Steps (Seq.singleton (mempty,s,Mandatory,forest)) mempty)
(Star (\x -> yield Started' *> f x <* yield Finished'))
{-| Declare an optional step by wrapping an existing arrow plan. The step will only
be executed when the input is 'Just'.
This function only makes sense when using the 'Arrow' instance of 'Plan',
because for 'Applicative's an effect cannot depend on previously obtained
values.
>>> :{
let example :: Plan String () IO () ()
example = proc () -> do
i <- step "reading" (plan (readMaybe @Int <$> getLine)) -< ()
skippable "writing" (plan' print) -< i
in putStr . drawForest . fmap (fmap show) . toForest . mandatoriness . getSteps $ example
:}
(Mandatory,"reading")
<BLANKLINE>
(Skippable,"writing")
<BLANKLINE>
-}
skippable :: (Monoid w,Monad m) => s -> Plan s w m i o -> Plan s w m (Maybe i) ()
skippable s (Plan forest (Star f)) =
Plan (Steps (Seq.singleton (mempty,s,Skippable,forest)) mempty)
(Star (\m -> case m of
Just x -> yield Started' *> f x *> yield Finished'
Nothing -> yield Skipped'))
-- | Declare a monoidal annotation. The annotation can be later inspected
-- without having to run the 'Plan'.
--
-- Usually the annotations will represent resources that the 'Plan' is expected
-- to require.
foretell :: (Monad m) => w -> Plan s w m i ()
foretell w = Plan (Steps mempty w) (pure ())
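-- For instance, a sketch where the annotation lists files the plan is expected
-- to touch (the file name is made up):
--
-- > reportPlan :: Plan String [FilePath] IO () ()
-- > reportPlan = foretell ["/tmp/report.txt"] *> plan (putStrLn "writing report")
--
-- The annotation can then be recovered with 'getSteps' (for example via
-- 'foldMap') without running the plan.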
-- | Lift a monadic action to a 'Plan'. The input type @i@ remains polymorphic; usually it will become @()@.
plan :: (Semigroup w,Monoid w,Monad m) => m o -> Plan s w m i o
plan x = Plan mempty (Star (const (lift x)))
-- | Lift a Kleisli arrow to a 'Plan'.
plan' :: (Semigroup w,Monoid w,Monad m) => (i -> m o) -> Plan s w m i o
plan' f = Plan mempty (Star (lift . f))
{-# DEPRECATED kplan "Use plan' instead." #-}
kplan :: (Semigroup w,Monoid w,Monad m) => (i -> m o) -> Plan s w m i o
kplan = plan'
-- | Lift an 'IO' action to a 'Plan'. The input type @i@ remains polymorphic; usually it will become @()@.
planIO :: (Semigroup w,Monoid w,MonadIO m) => IO o -> Plan s w m i o
planIO x = Plan mempty (Star (const (liftIO x)))
-- | Lift a Kleisli arrow working in 'IO' to a 'Plan'.
planIO' :: (Semigroup w,Monoid w,MonadIO m) => (i -> IO o) -> Plan s w m i o
planIO' f = Plan mempty (Star (liftIO . f))
{-# DEPRECATED kplanIO "Use planIO' instead." #-}
kplanIO :: (Semigroup w,Monoid w,MonadIO m) => (i -> IO o) -> Plan s w m i o
kplanIO = planIO'
zipStepsi :: Forest a -> Steps r w -> Maybe (Steps (a,r) w)
zipStepsi forest (Steps substeps w)
| length forest == length substeps =
let paired = Seq.zipWith (\(Node a subforest) (w',s,mandatory,substeps') ->
(w',(a,s),mandatory,zipStepsi subforest substeps'))
(Seq.fromList forest)
substeps
go (w',s,mandatory,ms) = fmap (\x -> (w',s,mandatory,x)) ms
in flip Steps w <$> traverse go paired
| otherwise = Nothing
-- | Pair each step tag @s@ inside a 'Plan' with the corresponding element of the 'Forest'.
--
-- If the forest doesn't have the same structure as the steps, the function
-- fails with 'Nothing'.
--
-- This function can be useful to annotate each step tag with some information,
-- for example the time duration of the step in a previous execution of the
-- plan. See 'Timeline', 'instants', and 'toForest'.
zipSteps :: Forest s' -> Plan s w m i o -> Maybe (Plan (s',s) w m i o)
zipSteps forest (Plan steps star) = Plan <$> zipStepsi forest steps <*> pure star
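-- For instance, a sketch that tags each step with the span recorded in a
-- previous run (assuming the step structure has not changed since then):
--
-- > withDurations :: Timeline s t -> Plan s w m i o
-- >               -> Maybe (Plan ((Either t (t, t), s), s) w m i o)
-- > withDurations previous = zipSteps (toForest (instants previous))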
-- | A given step might not have been reached yet. If it has been reached,
-- either it has been skipped at a certain time, or started at a certain time.
-- If it has been started, maybe it has already finished, too.
--
-- This function can be used in combination with 'toForest' and
-- 'Data.Tree.drawForest' to render the state of each step for a 'Tick'.
completedness :: Tick s t -> Tick (Maybe (Either t (t,Maybe t)),s) t
completedness (Tick (Context {completed,current,pending}:|contexts) progress) =
let startingTime = extract completed
(progress',time') = progressCompletedness startingTime progress
in Tick (Context (adapt (instants completed))
(time',current)
(fmap (fmap (\s -> (Nothing,s))) pending)
:| map (contextCompletedness (\t -> Right (t,Nothing))) contexts)
progress'
contextCompletedness :: (t -> (Either t (t,Maybe t)))
-> Context s t
-> Context (Maybe (Either t (t,Maybe t)),s) t
contextCompletedness tf (Context {completed,current,pending}) =
Context (adapt (instants completed))
(Just (tf (extract completed)),current)
(fmap (fmap (\s -> (Nothing,s))) pending)
adapt :: Timeline (Either t (t,t),s) t -> Timeline (Maybe (Either t (t,Maybe t)),s) t
adapt timeline =
let go = Bifunctor.first (Just . bimap id (fmap Just))
in Bifunctor.first go timeline
progressCompletedness :: t -> Progress s t -> (Progress (Maybe (Either t (t,Maybe t)),s) t, Maybe (Either t (t,Maybe t)))
progressCompletedness startingTime = \case
Skipped forest -> (Skipped $ fmap (fmap (\s -> (Just (Left startingTime),s))) forest
,Just (Left startingTime))
Started forest -> (Started $ fmap (fmap (\s -> (Nothing,s))) forest
,Just (Right (startingTime,Nothing)))
Finished timeline -> (Finished $ adapt (instants timeline)
,Just (Right (startingTime,Just (extract timeline))))
-- | Forget that there is a plan and get the underlying monadic action.
unliftPlan :: Monad m => Plan s w m () o -> m o
unliftPlan p = extract <$> effects (runKPlan (pure ()) p ())
-- | Forget that there is a plan and get the underlying Kleisli arrow.
unliftPlan' :: Monad m => Plan s w m i o -> i -> m o
unliftPlan' p i = extract <$> effects (runKPlan (pure ()) p i)
{-# DEPRECATED unliftKPlan "Use unliftPlan' instead." #-}
unliftKPlan :: Monad m => Plan s w m i o -> i -> m o
unliftKPlan = unliftPlan'
-- | A 'Data.Tree.Forest' of step tags of type @s@ interspersed with
-- measurements of type @t@.
data Timeline s t = Timeline !(Seq (t,s,Either (Forest s) (Timeline s t))) t
deriving (Functor,Foldable,Traversable,Eq,Show)
instance Bifunctor Timeline where
first f (Timeline steps w) =
let go (w',e,substeps) = (w',f e,bimap (fmap (fmap f)) (Bifunctor.first f) substeps)
in Timeline (fmap go steps) w
second = fmap
instance Bifoldable Timeline where
bifoldMap g f (Timeline steps w) =
foldMap (\(w',e,substeps) -> f w'
`mappend`
g e
`mappend`
bifoldMap (mconcat . map (foldMap g)) (bifoldMap g f) substeps) steps
`mappend`
f w
instance Bitraversable Timeline where
bitraverse g f (Timeline steps w) =
Timeline <$> traverse innertraverse steps <*> f w
where
innertraverse (w',e,substeps) = (,,)
<$> f w'
<*> g e
<*> bitraverse (traverse (traverse g)) (bitraverse g f) substeps
-- | 'Timeline's always have at least one measurement. 'extract' gives the final measurement.
instance Comonad (Timeline s) where
extract (Timeline _ t) = t
duplicate tip@(Timeline steps _) =
let go steps' = case Seq.viewr steps' of
Seq.EmptyR -> error "should never happen"
lefto Seq.:> (t',c',timeline') -> ((Timeline lefto t'),c',fmap duplicate timeline')
in Timeline (fmap go (Seq.inits steps)) tip
-- | Decorate each step tag with either the time the step was skipped, or the
-- time it was started and finished. Useful in combination with 'toForest'.
instants :: Timeline s t -> Timeline (Either t (t,t),s) t
instants (Timeline past limit) = Timeline (fmap go past) limit
where
go (t',c',Left forest) = (t',(Left t',c') ,Left (fmap (fmap (\x -> (Left t',x))) forest))
go (t',c',Right timeline') = (t',(Right (t',extract timeline'),c'),Right (instants timeline'))
-- | A catamorphism on 'Timeline's that "destroys" the 'Timeline' value from the
-- leaves upwards.
--
foldTimeline :: ([(t,s,Either (Forest s) r)] -> t -> r) -- ^ A function that consumes a list of step tags of type @s@, surrounded and interleaved with measurements of type @t@. Each step is also annotated with either its substeps, if it the step was skipped, or the results of consuming the substeps, if it was executed.
-> Timeline s t
-> r
foldTimeline f = foldTimeline' (\steps -> f (toList steps))
foldTimeline' :: (Seq (t,c,Either (Forest c) r) -> t -> r) -> Timeline c t -> r
foldTimeline' f = go
where
go (Timeline steps t) = f (fmap (\(t',c',foreste) -> (t',c',fmap go foreste)) steps) t
-- | Represents how far we are along a sequence of sibling steps.
--
-- For the already completed steps, a 'Timeline' of measurements is provided. 'extract' for the 'Timeline' returns the starting measurement of the current step.
data Context s t = Context
{
completed :: Timeline s t
, current :: s
, pending :: Forest s
} deriving (Functor,Foldable,Traversable,Eq,Show)
instance Bifunctor Context where
first f (Context {completed,current,pending}) =
Context (Bifunctor.first f completed)
(f current)
(fmap (fmap f) pending)
second = fmap
-- | Represents some kind of progress through the 'Steps' of a 'Plan' while the
-- plan executes.
--
-- The ascending list of contexts provides the current position of the
-- execution along the hierarchy of steps.
--
-- If the plan only has a linear sequence of steps, the list will have only one
-- 'Context'.
data Tick s t = Tick (NonEmpty (Context s t)) (Progress s t)
deriving (Functor,Foldable,Traversable,Eq,Show)
instance Bifunctor Tick where
first f (Tick contexts progress) =
Tick (fmap (Bifunctor.first f) contexts) (Bifunctor.first f progress)
second = fmap
instance Bifoldable Tick where
bifoldMap g f (Tick contexts progress) =
foldMap (\(Context {completed,current}) -> bifoldMap g f completed `mappend` g current)
(Data.List.NonEmpty.reverse contexts)
`mappend`
bifoldMap g f progress
`mappend`
foldMap (\(Context {pending}) -> foldMap (foldMap g) pending)
contexts
instance Bitraversable Tick where
bitraverse g f (Tick contexts progress) = do
phase1r <- traverse (\(Context {completed,current}) -> (,)
<$> bitraverse g f completed
<*> g current)
(Data.List.NonEmpty.reverse contexts)
progress' <- bitraverse g f progress
phase2 <- traverse (\(Context {pending}) -> traverse (traverse g) pending)
contexts
pure (Tick (fmap (\((completed',current'),pending') -> Context completed' current' pending')
(Data.List.NonEmpty.zip (Data.List.NonEmpty.reverse phase1r) phase2))
progress')
instance Sylvan Tick where
toForest (Tick contexts progress) = foldl ctx2forest (toForest progress) contexts
where
ctx2forest below (Context {completed,current,pending}) =
toForest completed ++ [Node current below] ++ pending
-- | The execution of a 'Plan' can make progress by skipping a step, starting a
-- step, or finishing a step.
data Progress s t = Skipped (Forest s) -- ^ Provides the substeps that were skipped.
| Started (Forest s) -- ^ Provides the substeps that will be executed next.
| Finished (Timeline s t) -- ^ Provides a 'Timeline' of measurements for the completed substeps. 'extract' for the 'Timeline' gives the finishing measurement for the current step.
deriving (Functor,Foldable,Traversable,Eq,Show)
instance Sylvan Progress where
toForest progress =
case progress of
Skipped forest -> forest
Started forest -> forest
Finished timeline -> toForest timeline
instance Bifunctor Progress where
first f (Skipped forest) = Skipped (fmap (fmap f) forest)
first f (Started forest) = Skipped (fmap (fmap f) forest)
first f (Finished timeline) = Finished (bimap f id timeline)
second = fmap
instance Bifoldable Progress where
bifoldMap g _ (Skipped forest) = foldMap (foldMap g) forest
bifoldMap g _ (Started forest) = foldMap (foldMap g) forest
bifoldMap g f (Finished timeline) = bifoldMap g f timeline
instance Bitraversable Progress where
bitraverse g _ (Skipped forest) = Skipped <$> traverse (traverse g) forest
bitraverse g _ (Started forest) = Started <$> traverse (traverse g) forest
bitraverse g f (Finished timeline) = Finished <$> (bitraverse g f) timeline
-- | Specify a monadic callback for processing each 'Tick' update.
onTick :: Monad m => (tick -> m ()) -> Stream (Of tick) m r -> m r
onTick = Streaming.Prelude.mapM_
-- | Runs a plan that doesn't need input. It returns a 'Stream' of 'Tick'
-- updates that are emitted every time the execution advances through the
-- 'Steps'.
--
-- For each 'Tick' update, a monadic measurement of type @t@ is taken. Usually
-- the measurement consists in getting the current time.
--
-- When the execution finishes, a 'Timeline' with the measurements for each
-- 'Tick' is returned, along with the result value.
--
-- Even if the plan didn't have any steps, the 'Timeline' will contain a
-- measurement taken when the computation finished.
runPlan :: Monad m
=> m t -- ^ Monadic measurement to be taken on each tick.
-> Plan s w m () o -- ^ Plan without input.
-> Stream (Of (Tick s t)) m (Timeline s t,o)
runPlan measurement p = runPlan' measurement p ()
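-- A usage sketch (the step name, the actions, and the use of
-- 'Data.Time.getCurrentTime' as the measurement are illustrative):
--
-- > import Data.Time (getCurrentTime)
-- >
-- > demoRun :: IO ()
-- > demoRun = do
-- >   let p = step "work" (plan (putStrLn "working")) :: Plan String () IO () ()
-- >   (timeline, ()) <- onTick print (runPlan getCurrentTime p)
-- >   print (extract timeline) -- the final measurement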
-- | Like 'runPlan', but for 'Arrow'-like 'Plan's that take inputs.
runPlan' :: Monad m
=> m t -- ^ Monadic measurement to be taken on each tick.
-> Plan s w m i o -- ^ Plan that takes input.
-> i
-> Stream (Of (Tick s t)) m (Timeline s t,o)
runPlan' makeMeasure (Plan steps (Star f)) initial =
let go state stream =
do n <- lift (next stream)
measure <- lift makeMeasure
case (n,state) of
(Left b,
RunState completed [] []) -> do
return (Timeline completed measure,b)
(Right (Skipped',stream'),
RunState previous (Node root subforest:forest) upwards) -> do
yield (Tick (Context (Timeline previous measure) root forest :| upwards)
(Skipped subforest))
go (RunState (previous Seq.|> (measure,root,Left subforest)) forest upwards)
stream'
(Right (Started',stream'),
RunState previous (Node root subforest:forest) upwards) -> do
yield (Tick (Context (Timeline previous measure) root forest :| upwards)
(Started subforest))
go (RunState mempty subforest (Context (Timeline previous measure) root forest : upwards))
stream'
(Right (Finished',stream'),
RunState previous' [] (ctx@(Context {completed,current,pending}) : upwards)) -> do
let subtimeline = Timeline previous' measure
Timeline previous'' instant = completed
yield (Tick (ctx :| upwards)
(Finished subtimeline))
go (RunState (previous'' Seq.|> (instant,current,Right subtimeline)) pending upwards)
stream'
_ -> error "should never happen"
in go (RunState mempty (toForest steps) []) (f initial)
{-# DEPRECATED runKPlan "Use runPlan' instead." #-}
runKPlan :: Monad m
=> m t -- ^ Monadic measurement to be taken on each tick.
-> Plan s w m i o -- ^ Plan that takes input.
-> i
-> Stream (Of (Tick s t)) m (Timeline s t,o)
runKPlan = runPlan'
data RunState s t = RunState !(Seq (t,s,Either (Forest s) (Timeline s t)))
!(Forest s)
![Context s t]
-- | Instances of 'Sylvan' are 'Data.Tree.Forest's with nodes of type @n@,
-- interspersed with annotations of type @a@, and perhaps some other extra
-- information.
--
-- They must satisfy
--
-- > bifoldMap f (\_ -> mempty) s == foldMap (foldMap f) (toForest s)
class (Bitraversable l) => Sylvan l where
-- | Forget about the annotations and return the underlying 'Data.Tree.Forest'.
toForest :: l n a -> Forest n
-- | 'toForest' forgets about the annotations and returns a 'Forest' of step
-- tags.
instance Sylvan Steps where
toForest (Steps steps _) =
map (\(_,e,_,steps') -> Node e (toForest steps')) (toList steps)
-- | 'toForest' forgets about the measurements and returns a 'Forest' of step
-- tags.
instance Sylvan Timeline where
toForest (Timeline past _) = fmap (\(_,c,timeline') -> Node c (either id toForest timeline')) (toList past)
-- | A 'Data.Tree.Forest' is a 'Sylvan' for which no annotations exist.
instance Sylvan (Clown (Compose [] Tree)) where
toForest (Clown (Compose forest)) = forest
| danidiaz/plan-applicative | lib/Control/Plan/Core.hs | mit | 24,885 | 0 | 23 | 6,732 | 7,216 | 3,794 | 3,422 | 353 | 5 |
----------------------------------------------------------------
--
-- Module: Quark.Layout
-- Author: Stefan Peterson
-- License: MIT License
--
-- Maintainer: Stefan Peterson ([email protected])
-- Stability: Stable
-- Portability: Unknown
--
-- ----------------------------------------------------------
--
-- Module for frontend-agnostic layout functions.
--
----------------------------------------------------------------
module Quark.Layout ( firstL
, secondL
, thirdL
, bimapL
, primary
, ) where
import Quark.Frontend.HSCurses ( Window
, Layout ( MinimalLayout
, BasicLayout
, VSplitLayout
, HSplitLayout ))
firstL :: (Window -> Window) -> Layout -> Layout
firstL f (MinimalLayout t u p w) = MinimalLayout t u (f p) w
firstL f (BasicLayout t u d p) = BasicLayout t u d (f p)
firstL f (VSplitLayout t u d p s) = VSplitLayout t u d (f p) s
firstL f (HSplitLayout t u d p s) = HSplitLayout t u d (f p) s
secondL :: (Window -> Window) -> Layout -> Layout
secondL f (VSplitLayout t u d p s) = VSplitLayout t u d p (f s)
secondL f (HSplitLayout t u d p s) = HSplitLayout t u d p (f s)
thirdL :: (Window -> Window) -> Layout -> Layout
thirdL f (BasicLayout t u d p) = BasicLayout t u (f d) p
thirdL f (VSplitLayout t u d p s) = VSplitLayout t u (f d) p s
thirdL f (HSplitLayout t u d p s) = HSplitLayout t u (f d) p s
bimapL :: (Window -> Window) -> (Window -> Window) -> Layout -> Layout
bimapL f0 f1 (VSplitLayout t u d p s) = VSplitLayout t u d (f0 p) (f1 s)
bimapL f0 f1 (HSplitLayout t u d p s) = HSplitLayout t u d (f0 p) (f1 s)
primary :: Layout -> Window
primary (MinimalLayout _ _ p _) = p
primary (BasicLayout _ _ _ p) = p
primary (VSplitLayout _ _ _ p _) = p
primary (HSplitLayout _ _ _ p _) = p | sjpet/quark | src/Quark/Layout.hs | mit | 2,003 | 0 | 8 | 604 | 702 | 368 | 334 | 30 | 1 |
module Core.Parser
( ParseError
, parseFile
, parseProgram
) where
import Control.Exception (Exception(), throw)
import Data.Functor
import Data.Maybe
import Text.Parsec hiding (spaces)
import Text.Parsec.Language
import qualified Text.Parsec.Token as P
import qualified Text.Parsec.Expr as P
import Text.Parsec.Char (digit)
import Text.Parsec.String (Parser())
import Core.Language
instance Exception ParseError
doParseProgram :: SourceName -> String -> Either ParseError CoreProgram
doParseProgram = parse pProgram
parseFile :: FilePath -> IO CoreProgram
parseFile f = either throw id . doParseProgram f <$> readFile f
parseProgram :: String -> Either ParseError CoreProgram
parseProgram = doParseProgram ""
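-- A usage sketch (the program text is illustrative; the exact syntax accepted
-- depends on the keywords and operators defined in "Core.Language"):
--
-- > parseProgram "double x = x + x ; main = double 21"
--
-- should yield either a 'CoreProgram' with two supercombinators or a
-- 'ParseError'.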
-- * Lexing and utils
coreDef = haskellStyle
{ P.reservedNames = keywords
, P.reservedOpNames = reservedOps
}
coreLexer = P.makeTokenParser coreDef
parens = P.parens coreLexer
braces = P.braces coreLexer
identifier = P.identifier coreLexer
reserved = P.reserved coreLexer
reservedOp = P.reservedOp coreLexer
integer = P.integer coreLexer
symbol = P.symbol coreLexer
operator = P.operator coreLexer
spaces = P.whiteSpace coreLexer
semi = P.semi coreLexer
int = read <$> many1 digit
binary op = P.Infix e P.AssocLeft where
e = do { o <- symbol op; return (\x y -> EAp (EAp (EVar o) x) y) }
--prefix op = Prefix (do { reservedOp op; return fun })
sequence1 :: Parser a -> Parser [a]
sequence1 = P.semiSep1 coreLexer
-- * Parser
pProgram :: Parser CoreProgram
pProgram = spaces *> sequence1 pSc
-- Supercombinators
pSc :: Parser CoreScDefn
pSc = do
name <- identifier
args <- many identifier
reservedOp "="
body <- pCoreExpr
return (name, args, body)
pCoreExpr :: Parser CoreExpr
pCoreExpr = choice [pLet, pCase, pLam, expr1] where
expr1 = P.buildExpressionParser table term
table = [ map binary ["*", "/"]
, map binary ["+", "-"]
, map binary relOps
, [ binary "&" ]
, [ binary "|" ]
]
-- App or single Aexpr
term = foldl1 EAp <$> many1 pAexpr
pLam = do
reservedOp "\\"
params <- many1 identifier
reservedOp "."
expr <- pCoreExpr
return (ELam params expr)
pCase = do
reserved "case"
e <- pCoreExpr
reserved "of"
alts <- sequence1 pAlt1
return (ECase e alts)
where
pAlt1 = do
i <- reservedOp "<" *> int <* reservedOp ">"
vars <- many identifier
reservedOp "->"
expr <- pCoreExpr
return (i, vars, expr)
pLet = do
isrec <- try (reserved "let" $> False) <|> (reserved "letrec" $> True)
binds <- sequence1 (try bind1)
reserved "in"
expr <- pCoreExpr
return (ELet isrec binds expr)
where
bind1 = do
name <- identifier
reservedOp "="
expr <- pCoreExpr
return (name, expr)
pCtor = do
reserved "Pack"
braces $ EConstr <$> int <* reservedOp "," <*> int
pAexpr = spaces *> choice [pVar, pNum, pCtor, parens pCoreExpr] <* spaces
pNum = ENum <$> int
pVar = EVar <$> identifier
| themattchan/core | src/Core/Parser.hs | mit | 3,102 | 0 | 16 | 771 | 1,013 | 514 | 499 | 93 | 1 |
{-# LANGUAGE OverloadedStrings #-}
import Numeric
import Control.Monad
import qualified Data.ByteString.Char8 as S
import Data.ByteString.Read
import Test.Tasty
import Test.Tasty.QuickCheck
(=~~) :: Double -> Double -> Bool
(=~~) a b = a == b || abs (a - b) <= max (abs a) (abs b) * 1e20
-- Word
newtype Word8 = Word8 String
deriving Show
instance Arbitrary Word8 where
arbitrary = do
i <- choose (1, 50)
n <- replicateM i $ choose ('0', '7')
return $ Word8 $ "0o" ++ n
newtype Word10 = Word10 String
deriving Show
instance Arbitrary Word10 where
arbitrary = do
i <- choose (1, 50)
n <- replicateM i $ choose ('0', '9')
return $ Word10 n
newtype Word16 = Word16 String
deriving Show
instance Arbitrary Word16 where
arbitrary = do
i <- choose (1, 50)
n <- replicateM i $ oneof [choose ('0', '9'), choose ('a', 'f'), choose ('A', 'F')]
return $ Word16 $ "0x" ++ n
-- Int
newtype Int8 = Int8 String
deriving Show
instance Arbitrary Int8 where
arbitrary = do
sign <- oneof $ map return ["", "-"]
Word8 w <- arbitrary
return . Int8 $ sign ++ w
newtype Int10 = Int10 String
deriving Show
instance Arbitrary Int10 where
arbitrary = do
sign <- oneof $ map return ["", "-"]
Word10 w <- arbitrary
return . Int10 $ sign ++ w
newtype Int16 = Int16 String
deriving Show
instance Arbitrary Int16 where
arbitrary = do
sign <- oneof $ map return ["", "-"]
Word16 w <- arbitrary
return . Int16 $ sign ++ w
-- Float
newtype Float10 = Float10 String
deriving Show
instance Arbitrary Float10 where
arbitrary = do
Int10 q <- arbitrary
Word10 r <- arbitrary
return . Float10 $ q ++ '.': r
newtype SmallFloat10 = SmallFloat10 String
deriving Show
instance Arbitrary SmallFloat10 where
arbitrary = do
sign <- oneof $ map return ["", "-"]
i <- choose (0, 100)
Word10 r <- arbitrary
return . SmallFloat10 $ sign ++ "0." ++ replicate i '0' ++ r
newtype Float10Exp = Float10Exp String
deriving Show
instance Arbitrary Float10Exp where
arbitrary = do
Float10 f <- arbitrary
c <- oneof $ map return "eE"
s <- oneof $ map return ["", "+", "-"]
e <- choose (0, 10000 :: Int)
return . Float10Exp $ f ++ c: s ++ show e
main :: IO ()
main = defaultMain $ testGroup "read . show == id"
[ testGroup "Integral"
[ testProperty "Int" $ \i ->
let Just (i', "") = signed integral . S.pack $ show i
in (i :: Int) == i'
]
, testGroup "Fractional"
[ testProperty "showEFloat" $ \d ->
let Just (d', "") = signed fractional . S.pack $ showEFloat Nothing d ""
in (d :: Double) =~~ d'
, testProperty "showFFloat" $ \d ->
let Just (d', "") = signed fractional . S.pack $ showFFloat Nothing d ""
in (d :: Double) =~~ d'
, testProperty "showGFloat" $ \d ->
let Just (d', "") = signed fractional . S.pack $ showGFloat Nothing d ""
in (d :: Double) =~~ d'
, testProperty "showHex" $ \i ->
let Just (i', "") = signed fractional . (S.append "0x") . S.pack $ showHex (abs i) ""
in fromIntegral (abs i :: Int) =~~ i'
, testProperty "showOct" $ \i ->
let Just (i', "") = signed fractional . (S.append "0o") . S.pack $ showOct (abs i) ""
in fromIntegral (abs i :: Int) =~~ i'
, testProperty "Word8" $ \(Word8 d) ->
let Just (d', "") = signed fractional (S.pack d)
in d' =~~ (read d :: Double)
, testProperty "Word10" $ \(Word10 d) ->
let Just (d', "") = signed fractional (S.pack d)
in d' =~~ (read d :: Double)
, testProperty "Word16" $ \(Word16 d) ->
let Just (d', "") = signed fractional (S.pack d)
in d' =~~ (read d :: Double)
, testProperty "Int8" $ \(Int8 d) ->
let Just (d', "") = signed fractional (S.pack d)
in d' =~~ (read d :: Double)
, testProperty "Int10" $ \(Int10 d) ->
let Just (d', "") = signed fractional (S.pack d)
in d' =~~ (read d :: Double)
, testProperty "Int16" $ \(Int16 d) ->
let Just (d', "") = signed fractional (S.pack d)
in d' =~~ (read d :: Double)
, testProperty "Float10" $ \(Float10 d) ->
let Just (d', "") = signed fractional (S.pack d)
in d' =~~ (read d :: Double)
, testProperty "SmallFloat10" $ \(SmallFloat10 d) ->
let Just (d', "") = signed fractional (S.pack d)
in d' =~~ (read d :: Double)
, testProperty "Float10Exp" $ \(Float10Exp d) ->
let Just (d', "") = signed fractional (S.pack d)
in d' =~~ (read d :: Double)
]
]
| philopon/bytestring-read | tests/tasty.hs | mit | 4,961 | 0 | 21 | 1,635 | 1,952 | 989 | 963 | 124 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module OpaleyeDemo.Utils
( -- * Exports
printSql
, runQueryDebug
, runQ
, todoToString
) where
import Data.List (intercalate)
import Data.Profunctor.Product.Default (Default)
import Database.PostgreSQL.Simple (Connection)
import Opaleye (Query, QueryRunner,
Unpackspec, runQuery,
showSqlForPostgres)
import OpaleyeDemo.Flags
import qualified OpaleyeDemo.Hashtag as H
import qualified OpaleyeDemo.Ids as I
import qualified OpaleyeDemo.Todo as T
printSql :: Default Unpackspec a a => Query a -> IO ()
printSql = maybe (pure ()) putStrLn . showSqlForPostgres
runQueryDebug :: (Default Unpackspec columns columns,
Default QueryRunner columns haskells) =>
Connection -> Query columns -> IO [haskells]
runQueryDebug c q = printSql q >> runQuery c q
runQ :: (Default Unpackspec columns columns,
Default QueryRunner columns haskells) =>
Connection -> Query columns -> [Flag] -> IO [haskells]
runQ c q flags = if Debug `elem` flags then
runQueryDebug c q
else
runQuery c q
todoToString :: T.Todo -> [H.Hashtag] -> String
todoToString todo hashtags
= unwords [ show ((I.todoId . T._id) todo) ++ "."
, T._title todo
, "(due by: " ++ show (T._dueDate todo) ++ ")"
, "(prio: " ++ maybe "-" show (I.prio (T._prio todo)) ++ ")"
, intercalate ", " (map (I.hashtagStr . H._hashtag) hashtags)
]
| charlydagos/haskell-sql-edsl-demo | code/opaleye/src/OpaleyeDemo/Utils.hs | mit | 1,805 | 0 | 14 | 651 | 461 | 251 | 210 | 37 | 2 |
module Language.Doczen.PrettyPrint (
prettyPrintDocument
) where
import Text.PrettyPrint
import Language.Doczen.Types
prettyPrintDocument :: Document -> String
prettyPrintDocument = render . document
document (Document h ss) = (header h) <> separator <> body ss
separator = text "---\n"
header (Header "") = empty
header (Header t) = text t <> char '\n'
body ss = sepJoin $ map section ss
sepJoin (s:t:ss) = s <> separator <> sepJoin (t:ss)
sepJoin [s] = if isEmpty s then separator else s
sepJoin [] = empty
section (Section opts is) = sectionOpts opts <> items is
sectionOpts = hcat . map sectionOpt
sectionOpt (NoAttachedRepl) = text "!norepl\n"
sectionOpt (SectionId i) = text $ "!id: " ++ i ++ "\n"
items = hcat . map item
item (Heading _ []) = empty
item (Heading hl c) = headingPounds hl <+> enhancedText c <> char '\n'
item (Code os c) = text "```" <> codeOptions os <> char '\n' <> codeText c <> text "```\n"
item (Paragraph []) = empty
item (Paragraph c) = enhancedText c <> char '\n'
codeOptions = hcat . map codeOption
codeOption Runnable = char '>'
codeOption Hidden = char '~'
codeOption (CodeId i) = text $ " (" ++ i ++ ")"
codeText = hcat . map codeTextNode
codeTextNode (RCC c) = char c
codeTextNode (Blanks NormalBlank) = text "____"
codeTextNode (Blanks LargeBlank) = text "______"
enhancedText = hcat . map enhancedTextNode
enhancedTextNode (RC c) = char c
enhancedTextNode (Small et) = "::" `wrapped` enhancedText et
enhancedTextNode (Em et) = "__" `wrapped` enhancedText et
enhancedTextNode (Strong et) = "**" `wrapped` enhancedText et
enhancedTextNode (InlineCode s) = "`" `wrapped` text s
enhancedTextNode (Tt et) = "++" `wrapped` enhancedText et
enhancedTextNode (Smile) = text ":)"
enhancedTextNode (SmileP) = text ":P"
enhancedTextNode (Link et s) = char '[' <> enhancedText et <> text "](" <> text s <> char ')'
enhancedTextNode (Html s) = text s
wrapped s t = text s <> t <> text s
headingPounds H1 = text "#"
headingPounds H2 = text "##"
headingPounds H3 = text "###"
headingPounds H4 = text "####" | yanatan16/doczen-generator | src/Language/Doczen/PrettyPrint.hs | mit | 2,039 | 0 | 9 | 352 | 861 | 422 | 439 | 48 | 2 |
module Types where
-- TODO: include all languages supported by pandoc or pygments or whatnot.
data Language = Bash
| Haskell
| Scala
| Python
| Html
| Css
| CoffeeScript
| JavaScript
| Handlebars
| Markdown
| JSON
| None
deriving (Enum, Ord, Eq, Show)
languages = [Bash .. None]
data Delimiter = Begin | End | Pause | Continue | Nil deriving (Enum, Ord, Eq, Show)
delimiters = [Begin .. Nil]
data Fence = Fence { lineno :: Int
, delim :: Delimiter
, lang :: Language
, fname :: FilePath } deriving (Eq, Show)
| sshastry/litany | Types.hs | mit | 739 | 0 | 8 | 327 | 168 | 103 | 65 | 21 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE ViewPatterns #-}
module Database.Persist.Quasi
( parse
, PersistSettings (..)
, upperCaseSettings
, lowerCaseSettings
, nullable
#if TEST
, Token (..)
, tokenize
, parseFieldType
#endif
) where
import Prelude hiding (lines)
import Database.Persist.Types
import Data.Char
import Data.Maybe (mapMaybe, fromMaybe, maybeToList)
import Data.Text (Text)
import qualified Data.Text as T
import Control.Arrow ((&&&))
import qualified Data.Map as M
import Data.List (foldl')
import Data.Monoid (mappend)
import Control.Monad (msum, mplus)
data ParseState a = PSDone | PSFail String | PSSuccess a Text deriving Show
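-- Two hand-worked examples for 'parseFieldType' below (illustrative only, not
-- doctests):
--
-- > parseFieldType "Maybe Int" == Right (FTApp (FTTypeCon Nothing "Maybe") (FTTypeCon Nothing "Int"))
-- > parseFieldType "[Int]" == Right (FTList (FTTypeCon Nothing "Int"))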
parseFieldType :: Text -> Either String FieldType
parseFieldType t0 =
case parseApplyFT t0 of
PSSuccess ft t'
| T.all isSpace t' -> Right ft
PSFail err -> Left $ "PSFail " ++ err
other -> Left $ show other
where
parseApplyFT t =
case goMany id t of
PSSuccess (ft:fts) t' -> PSSuccess (foldl' FTApp ft fts) t'
PSSuccess [] _ -> PSFail "empty"
PSFail err -> PSFail err
PSDone -> PSDone
parseEnclosed :: Char -> (FieldType -> FieldType) -> Text -> ParseState FieldType
parseEnclosed end ftMod t =
let (a, b) = T.break (== end) t
in case parseApplyFT a of
PSSuccess ft t' -> case (T.dropWhile isSpace t', T.uncons b) of
("", Just (c, t'')) | c == end -> PSSuccess (ftMod ft) (t'' `mappend` t')
(x, y) -> PSFail $ show (b, x, y)
x -> PSFail $ show x
parse1 t =
case T.uncons t of
Nothing -> PSDone
Just (c, t')
| isSpace c -> parse1 $ T.dropWhile isSpace t'
| c == '(' -> parseEnclosed ')' id t'
| c == '[' -> parseEnclosed ']' FTList t'
| isUpper c ->
let (a, b) = T.break (\x -> isSpace x || x `elem` ("()[]"::String)) t
in PSSuccess (getCon a) b
| otherwise -> PSFail $ show (c, t')
getCon t =
case T.breakOnEnd "." t of
(_, "") -> FTTypeCon Nothing t
("", _) -> FTTypeCon Nothing t
(a, b) -> FTTypeCon (Just $ T.init a) b
goMany front t =
case parse1 t of
PSSuccess x t' -> goMany (front . (x:)) t'
PSFail err -> PSFail err
PSDone -> PSSuccess (front []) t
-- _ ->
data PersistSettings = PersistSettings
{ psToDBName :: !(Text -> Text)
, psStrictFields :: !Bool
-- ^ Whether fields are by default strict. Default value: @True@.
--
-- Since 1.2
, psIdName :: !Text
-- ^ The name of the id column. Default value: @id@
-- The name of the id column can also be changed on a per-model basis
-- <https://github.com/yesodweb/persistent/wiki/Persistent-entity-syntax>
--
-- Since 2.0
}
defaultPersistSettings, upperCaseSettings, lowerCaseSettings :: PersistSettings
defaultPersistSettings = PersistSettings
{ psToDBName = id
, psStrictFields = True
, psIdName = "id"
}
upperCaseSettings = defaultPersistSettings
lowerCaseSettings = defaultPersistSettings
{ psToDBName =
let go c
| isUpper c = T.pack ['_', toLower c]
| otherwise = T.singleton c
in T.dropWhile (== '_') . T.concatMap go
}
-- | Parses a quasi-quoted syntax into a list of entity definitions.
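-- An illustrative call (the input is the usual persistent entity-definition
-- syntax; the exact contents of the resulting 'EntityDef' are elided here):
--
-- > parse lowerCaseSettings "Person\n  name Text\n  age Int Maybe\n"
--
-- produces a one-element list: an entity named @Person@ (DB name @person@ under
-- 'lowerCaseSettings') with a non-nullable @name Text@ field and a nullable
-- @age Int@ field.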
parse :: PersistSettings -> Text -> [EntityDef]
parse ps = parseLines ps
. removeSpaces
. filter (not . empty)
. map tokenize
. T.lines
-- | A token used by the parser.
data Token = Spaces !Int -- ^ @Spaces n@ are @n@ consecutive spaces.
| Token Text -- ^ @Token tok@ is token @tok@ already unquoted.
deriving (Show, Eq)
-- | Tokenize a string.
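-- Illustrative results, worked out from the rules below (not doctests):
--
-- > tokenize "  name Text" == [Spaces 2, Token "name", Spaces 1, Token "Text"]
-- > tokenize "name sql=\"full name\"" == [Token "name", Spaces 1, Token "sql=full name"]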
tokenize :: Text -> [Token]
tokenize t
| T.null t = []
| "--" `T.isPrefixOf` t = [] -- Comment until the end of the line.
| "#" `T.isPrefixOf` t = [] -- Also comment to the end of the line, needed for a CPP bug (#110)
| T.head t == '"' = quotes (T.tail t) id
| T.head t == '(' = parens 1 (T.tail t) id
| isSpace (T.head t) =
let (spaces, rest) = T.span isSpace t
in Spaces (T.length spaces) : tokenize rest
-- support mid-token quotes and parens
| Just (beforeEquals, afterEquals) <- findMidToken t
, not (T.any isSpace beforeEquals)
, Token next : rest <- tokenize afterEquals =
Token (T.concat [beforeEquals, "=", next]) : rest
| otherwise =
let (token, rest) = T.break isSpace t
in Token token : tokenize rest
where
findMidToken t' =
case T.break (== '=') t' of
(x, T.drop 1 -> y)
| "\"" `T.isPrefixOf` y || "(" `T.isPrefixOf` y -> Just (x, y)
_ -> Nothing
quotes t' front
| T.null t' = error $ T.unpack $ T.concat $
"Unterminated quoted string starting with " : front []
| T.head t' == '"' = Token (T.concat $ front []) : tokenize (T.tail t')
| T.head t' == '\\' && T.length t' > 1 =
quotes (T.drop 2 t') (front . (T.take 1 (T.drop 1 t'):))
| otherwise =
let (x, y) = T.break (`elem` ['\\','\"']) t'
in quotes y (front . (x:))
parens count t' front
| T.null t' = error $ T.unpack $ T.concat $
"Unterminated parens string starting with " : front []
| T.head t' == ')' =
if count == (1 :: Int)
then Token (T.concat $ front []) : tokenize (T.tail t')
else parens (count - 1) (T.tail t') (front . (")":))
| T.head t' == '(' =
parens (count + 1) (T.tail t') (front . ("(":))
| T.head t' == '\\' && T.length t' > 1 =
parens count (T.drop 2 t') (front . (T.take 1 (T.drop 1 t'):))
| otherwise =
let (x, y) = T.break (`elem` ['\\','(',')']) t'
in parens count y (front . (x:))
-- | A string of tokens is empty when it has only spaces. There
-- can't be two consecutive 'Spaces', so this takes /O(1)/ time.
empty :: [Token] -> Bool
empty [] = True
empty [Spaces _] = True
empty _ = False
-- | A line. We don't care about spaces in the middle of the
-- line. Also, we don't care about the amount of indentation.
data Line = Line { lineIndent :: Int
, tokens :: [Text]
}
-- | Remove leading spaces and remove spaces in the middle of the
-- tokens.
removeSpaces :: [[Token]] -> [Line]
removeSpaces =
map toLine
where
toLine (Spaces i:rest) = toLine' i rest
toLine xs = toLine' 0 xs
toLine' i = Line i . mapMaybe fromToken
fromToken (Token t) = Just t
fromToken Spaces{} = Nothing
-- | Divide lines into blocks and make entity definitions.
parseLines :: PersistSettings -> [Line] -> [EntityDef]
parseLines ps lines =
fixForeignKeysAll $ toEnts lines
where
toEnts (Line indent (name:entattribs) : rest) =
let (x, y) = span ((> indent) . lineIndent) rest
in mkEntityDef ps name entattribs x : toEnts y
toEnts (Line _ []:rest) = toEnts rest
toEnts [] = []
fixForeignKeysAll :: [UnboundEntityDef] -> [EntityDef]
fixForeignKeysAll unEnts = map fixForeignKeys unEnts
where
ents = map unboundEntityDef unEnts
entLookup = M.fromList $ map (\e -> (entityHaskell e, e)) ents
fixForeignKeys :: UnboundEntityDef -> EntityDef
fixForeignKeys (UnboundEntityDef foreigns ent) =
ent { entityForeigns = map (fixForeignKey ent) foreigns }
-- check the count and the sqltypes match and update the foreignFields with the names of the primary columns
fixForeignKey :: EntityDef -> UnboundForeignDef -> ForeignDef
fixForeignKey ent (UnboundForeignDef foreignFieldTexts fdef) =
case M.lookup (foreignRefTableHaskell fdef) entLookup of
Just pent -> case entityPrimary pent of
Just pdef ->
if length foreignFieldTexts /= length (compositeFields pdef)
then lengthError pdef
else let fds_ffs = zipWith (toForeignFields pent)
foreignFieldTexts
(compositeFields pdef)
in fdef { foreignFields = map snd fds_ffs
, foreignNullable = setNull $ map fst fds_ffs
}
Nothing ->
error $ "no explicit primary key fdef="++show fdef++ " ent="++show ent
Nothing ->
error $ "could not find table " ++ show (foreignRefTableHaskell fdef)
++ " fdef=" ++ show fdef ++ " allnames="
++ show (map (unHaskellName . entityHaskell . unboundEntityDef) unEnts)
++ "\n\nents=" ++ show ents
where
setNull :: [FieldDef] -> Bool
setNull [] = error "setNull: impossible!"
setNull (fd:fds) = let nullSetting = isNull fd in
if all ((nullSetting ==) . isNull) fds then nullSetting
else error $ "foreign key columns must all be nullable or non-nullable"
++ show (map (unHaskellName . fieldHaskell) (fd:fds))
isNull = (NotNullable /=) . nullable . fieldAttrs
toForeignFields pent fieldText pfd =
case chktypes fd haskellField (entityFields pent) pfh of
Just err -> error err
Nothing -> (fd, ((haskellField, fieldDB fd), (pfh, pfdb)))
where
fd = getFd (entityFields ent) haskellField
haskellField = HaskellName fieldText
(pfh, pfdb) = (fieldHaskell pfd, fieldDB pfd)
chktypes :: FieldDef -> HaskellName -> [FieldDef] -> HaskellName -> Maybe String
chktypes ffld _fkey pflds pkey =
if fieldType ffld == fieldType pfld then Nothing
else Just $ "fieldType mismatch: " ++ show (fieldType ffld) ++ ", " ++ show (fieldType pfld)
where
pfld = getFd pflds pkey
entName = entityHaskell ent
getFd [] t = error $ "foreign key constraint for: " ++ show (unHaskellName entName)
++ " unknown column: " ++ show t
getFd (f:fs) t
| fieldHaskell f == t = f
| otherwise = getFd fs t
lengthError pdef = error $ "found " ++ show (length foreignFieldTexts) ++ " fkeys and " ++ show (length (compositeFields pdef)) ++ " pkeys: fdef=" ++ show fdef ++ " pdef=" ++ show pdef
data UnboundEntityDef = UnboundEntityDef
{ _unboundForeignDefs :: [UnboundForeignDef]
, unboundEntityDef :: EntityDef
}
lookupKeyVal :: Text -> [Text] -> Maybe Text
lookupKeyVal key = lookupPrefix $ key `mappend` "="
lookupPrefix :: Text -> [Text] -> Maybe Text
lookupPrefix prefix = msum . map (T.stripPrefix prefix)
-- | Construct an entity definition.
mkEntityDef :: PersistSettings
-> Text -- ^ name
-> [Attr] -- ^ entity attributes
-> [Line] -- ^ indented lines
-> UnboundEntityDef
mkEntityDef ps name entattribs lines =
UnboundEntityDef foreigns $
EntityDef
entName
(DBName $ getDbName ps name' entattribs)
-- idField is the user-specified Id
-- otherwise useAutoIdField
-- but, adjust it if the user specified a Primary
(setComposite primaryComposite $ fromMaybe autoIdField idField)
entattribs
cols
uniqs
[]
derives
extras
isSum
where
entName = HaskellName name'
(isSum, name') =
case T.uncons name of
Just ('+', x) -> (True, x)
_ -> (False, name)
(attribs, extras) = splitExtras lines
attribPrefix = flip lookupKeyVal entattribs
    idName | Just _ <- attribPrefix "id" = error "id= is deprecated, add a field named 'Id' and use sql="
| otherwise = Nothing
(idField, primaryComposite, uniqs, foreigns) = foldl' (\(mid, mp, us, fs) attr ->
let (i, p, u, f) = takeConstraint ps name' cols attr
squish xs m = xs `mappend` maybeToList m
in (just1 mid i, just1 mp p, squish us u, squish fs f)) (Nothing, Nothing, [],[]) attribs
derives = concat $ mapMaybe takeDerives attribs
cols :: [FieldDef]
cols = mapMaybe (takeColsEx ps) attribs
autoIdField = mkAutoIdField ps entName (DBName `fmap` idName) idSqlType
idSqlType = maybe SqlInt64 (const $ SqlOther "Primary Key") primaryComposite
setComposite Nothing fd = fd
setComposite (Just c) fd = fd { fieldReference = CompositeRef c }
just1 :: (Show x) => Maybe x -> Maybe x -> Maybe x
just1 (Just x) (Just y) = error $ "expected only one of: "
`mappend` show x `mappend` " " `mappend` show y
just1 x y = x `mplus` y
mkAutoIdField :: PersistSettings -> HaskellName -> Maybe DBName -> SqlType -> FieldDef
mkAutoIdField ps entName idName idSqlType = FieldDef
{ fieldHaskell = HaskellName "Id"
-- this should be modeled as a Maybe
-- but that sucks for non-ID field
-- TODO: use a sumtype FieldDef | IdFieldDef
, fieldDB = fromMaybe (DBName $ psIdName ps) idName
, fieldType = FTTypeCon Nothing $ keyConName $ unHaskellName entName
, fieldSqlType = idSqlType
-- the primary field is actually a reference to the entity
, fieldReference = ForeignRef entName defaultReferenceTypeCon
, fieldAttrs = []
, fieldStrict = True
}
defaultReferenceTypeCon :: FieldType
defaultReferenceTypeCon = FTTypeCon (Just "Data.Int") "Int64"
keyConName :: Text -> Text
keyConName entName = entName `mappend` "Id"
splitExtras :: [Line] -> ([[Text]], M.Map Text [[Text]])
splitExtras [] = ([], M.empty)
splitExtras (Line indent [name]:rest)
| not (T.null name) && isUpper (T.head name) =
let (children, rest') = span ((> indent) . lineIndent) rest
(x, y) = splitExtras rest'
in (x, M.insert name (map tokens children) y)
splitExtras (Line _ ts:rest) =
let (x, y) = splitExtras rest
in (ts:x, y)
takeColsEx :: PersistSettings -> [Text] -> Maybe FieldDef
takeColsEx = takeCols (\ft perr -> error $ "Invalid field type " ++ show ft ++ " " ++ perr)
takeCols :: (Text -> String -> Maybe FieldDef) -> PersistSettings -> [Text] -> Maybe FieldDef
takeCols _ _ ("deriving":_) = Nothing
takeCols onErr ps (n':typ:rest)
| not (T.null n) && isLower (T.head n) =
case parseFieldType typ of
Left err -> onErr typ err
Right ft -> Just FieldDef
{ fieldHaskell = HaskellName n
, fieldDB = DBName $ getDbName ps n rest
, fieldType = ft
, fieldSqlType = SqlOther $ "SqlType unset for " `mappend` n
, fieldAttrs = rest
, fieldStrict = fromMaybe (psStrictFields ps) mstrict
, fieldReference = NoReference
}
where
(mstrict, n)
| Just x <- T.stripPrefix "!" n' = (Just True, x)
| Just x <- T.stripPrefix "~" n' = (Just False, x)
| otherwise = (Nothing, n')
takeCols _ _ _ = Nothing
getDbName :: PersistSettings -> Text -> [Text] -> Text
getDbName ps n [] = psToDBName ps n
getDbName ps n (a:as) = fromMaybe (getDbName ps n as) $ T.stripPrefix "sql=" a
takeConstraint :: PersistSettings
-> Text
-> [FieldDef]
-> [Text]
-> (Maybe FieldDef, Maybe CompositeDef, Maybe UniqueDef, Maybe UnboundForeignDef)
takeConstraint ps tableName defs (n:rest) | not (T.null n) && isUpper (T.head n) = takeConstraint'
where
takeConstraint'
| n == "Unique" = (Nothing, Nothing, Just $ takeUniq ps tableName defs rest, Nothing)
| n == "Foreign" = (Nothing, Nothing, Nothing, Just $ takeForeign ps tableName defs rest)
| n == "Primary" = (Nothing, Just $ takeComposite defs rest, Nothing, Nothing)
| n == "Id" = (Just $ takeId ps tableName (n:rest), Nothing, Nothing, Nothing)
| otherwise = (Nothing, Nothing, Just $ takeUniq ps "" defs (n:rest), Nothing) -- retain compatibility with original unique constraint
takeConstraint _ _ _ _ = (Nothing, Nothing, Nothing, Nothing)
-- TODO: this is hacky (the double takeCols, the setFieldDef stuff, and setIdName).
-- need to re-work takeCols function
takeId :: PersistSettings -> Text -> [Text] -> FieldDef
takeId ps tableName (n:rest) = fromMaybe (error "takeId: impossible!") $ setFieldDef $
takeCols (\_ _ -> addDefaultIdType) ps (field:rest `mappend` setIdName)
where
field = case T.uncons n of
Nothing -> error "takeId: empty field"
Just (f, ield) -> toLower f `T.cons` ield
addDefaultIdType = takeColsEx ps (field : keyCon : rest `mappend` setIdName)
setFieldDef = fmap (\fd ->
let refFieldType = if fieldType fd == FTTypeCon Nothing keyCon
then defaultReferenceTypeCon
else fieldType fd
in fd { fieldReference = ForeignRef (HaskellName tableName) $ refFieldType
})
keyCon = keyConName tableName
-- this will be ignored if there is already an existing sql=
-- TODO: I think there is a ! ignore syntax that would screw this up
setIdName = ["sql=" `mappend` psIdName ps]
takeId _ tableName _ = error $ "empty Id field for " `mappend` show tableName
takeComposite :: [FieldDef]
-> [Text]
-> CompositeDef
takeComposite fields pkcols
= CompositeDef
(map (getDef fields) pkcols)
attrs
where
(_, attrs) = break ("!" `T.isPrefixOf`) pkcols
getDef [] t = error $ "Unknown column in primary key constraint: " ++ show t
getDef (d:ds) t
| fieldHaskell d == HaskellName t =
if nullable (fieldAttrs d) /= NotNullable
then error $ "primary key column cannot be nullable: " ++ show t
else d
| otherwise = getDef ds t
-- Unique UppercaseConstraintName list of lowercasefields
takeUniq :: PersistSettings
-> Text
-> [FieldDef]
-> [Text]
-> UniqueDef
takeUniq ps tableName defs (n:rest)
| not (T.null n) && isUpper (T.head n)
= UniqueDef
(HaskellName n)
(DBName $ psToDBName ps (tableName `T.append` n))
(map (HaskellName &&& getDBName defs) fields)
attrs
where
(fields,attrs) = break ("!" `T.isPrefixOf`) rest
getDBName [] t = error $ "Unknown column in unique constraint: " ++ show t
getDBName (d:ds) t
| fieldHaskell d == HaskellName t = fieldDB d
| otherwise = getDBName ds t
takeUniq _ tableName _ xs = error $ "invalid unique constraint on table[" ++ show tableName ++ "] expecting an uppercase constraint name xs=" ++ show xs
data UnboundForeignDef = UnboundForeignDef
{ _unboundFields :: [Text] -- ^ fields in other entity
, _unboundForeignDef :: ForeignDef
}
takeForeign :: PersistSettings
-> Text
-> [FieldDef]
-> [Text]
-> UnboundForeignDef
takeForeign ps tableName _defs (refTableName:n:rest)
| not (T.null n) && isLower (T.head n)
= UnboundForeignDef fields $ ForeignDef
(HaskellName refTableName)
(DBName $ psToDBName ps refTableName)
(HaskellName n)
(DBName $ psToDBName ps (tableName `T.append` n))
[]
attrs
False
where
(fields,attrs) = break ("!" `T.isPrefixOf`) rest
takeForeign _ tableName _ xs = error $ "invalid foreign key constraint on table[" ++ show tableName ++ "] expecting a lower case constraint name xs=" ++ show xs
takeDerives :: [Text] -> Maybe [Text]
takeDerives ("deriving":rest) = Just rest
takeDerives _ = Nothing
nullable :: [Text] -> IsNullable
nullable s
| "Maybe" `elem` s = Nullable ByMaybeAttr
| "nullable" `elem` s = Nullable ByNullableAttr
| otherwise = NotNullable
| pseudonom/persistent | persistent/Database/Persist/Quasi.hs | mit | 20,289 | 0 | 20 | 6,243 | 6,359 | 3,283 | 3,076 | 401 | 13 |
module Language.Lambda.HspecUtils where
import Data.Map (empty)
import Test.Hspec
import Language.Lambda
shouldEvalTo :: String -> String -> Expectation
shouldEvalTo s1 = shouldBe (eval s1) . eval
eval :: String -> Either ParseError (LambdaExpr String)
eval = fmap fst . evalString empty
| sgillespie/lambda-calculus | test/Language/Lambda/HspecUtils.hs | mit | 292 | 0 | 8 | 44 | 96 | 51 | 45 | 8 | 1 |
module Position where
data Position = Position { row :: Int, col :: Int }
-- Translating a position
translate :: Position -> Position -> Position
translate (Position row col) (Position drow dcol) =
Position (row + drow) (col + dcol)
| crockeo/maze | src/Position.hs | mit | 237 | 0 | 8 | 45 | 84 | 47 | 37 | 5 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving, DeriveDataTypeable #-}
module Hablog.Data.Slug
( Slug(..)
, slugify
) where
import Data.Char (toLower)
import Data.Data (Data, Typeable)
import Database.Persist (PersistField(..))
import Web.Routes (PathInfo(..))
newtype Slug = Slug { unSlug :: String }
deriving (Eq, Ord, Read, Show, Data, Typeable, PathInfo, PersistField)
slugChars :: [Char]
slugChars = ['a'..'z'] ++ ['0'..'9'] ++ ['-']
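-- | Build a slug from an arbitrary string: characters are lowercased, anything
-- outside 'slugChars' is replaced by a dash, and consecutive replacements are
-- collapsed into a single dash. For example (worked out from the code below):
--
-- > unSlug (slugify "Hello World") == "hello-world"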
slugify :: String -> Slug
slugify = Slug . (mkSlug True)
where
mkSlug _ [] = []
mkSlug replace (x:xs)
| toLower x `elem` slugChars = toLower x:mkSlug True xs
| otherwise = (if replace then ['-'] else []) ++ mkSlug False xs
| garrettpauls/Hablog | src/Hablog/Data/Slug.hs | mit | 734 | 0 | 11 | 176 | 271 | 153 | 118 | 18 | 3 |
import Utils.ListOps (minimumUsing)
import Utils.IntegerOps (divide)
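-- Project Euler problem 71: among the reduced proper fractions n/d with
-- d <= 1,000,000, find the numerator of the fraction immediately to the left
-- of 3/7. The expression below scans denominators d that are not multiples of 7,
-- uses 'minimumUsing' to pick the d whose value 3*d/7 has the smallest fractional
-- part, and returns (3*d) `quot` 7, i.e. the numerator of the closest fraction
-- below 3/7 for that denominator. ('minimumUsing' and 'divide' come from this
-- project's Utils modules; their exact behaviour is assumed here.)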
main = print problem71Value
problem71Value :: Int
problem71Value = (`quot` 7) $ (*3) $ minimumUsing (\x -> ((x*3) `divide` 7) - (fromIntegral (x*3 `quot` 7))) $ filter (\x -> x `rem` 7 /= 0) [5..1000000]
| jchitel/ProjectEuler.hs | Problems/Problem0071.hs | mit | 275 | 0 | 15 | 43 | 139 | 81 | 58 | 5 | 1 |
{-# LANGUAGE Rank2Types #-}
{- |
Module : Summoner.Tui.Field
Copyright : (c) 2018-2022 Kowainik
SPDX-License-Identifier : MPL-2.0
Maintainer : Kowainik <[email protected]>
Stability : Stable
Portability : Portable
This modules adds necessary functions for Forms and Form fields
that are not covered in @brick@ library.
-}
module Summoner.Tui.Field
( strField
, checkboxField
, activeCheckboxField
, radioField
, disabledAttr
) where
import Brick (BrickEvent (..), EventM, Location (..), Widget, clickable, showCursor, str, vBox,
withAttr, withDefAttr, (<+>))
import Brick.AttrMap (AttrName)
import Brick.Forms (FormField (..), FormFieldState (..), checkboxCustomField, focusedFormInputAttr,
radioCustomField)
import Lens.Micro (Lens', lens, (^.))
import qualified Graphics.Vty as V
-- | A form field with a given text value which can not be modified or changed
-- via any events. It is always valid.
strField :: forall s e n . String -> s -> FormFieldState s e n
strField t _ = FormFieldState
{ formFieldState = ()
, formFieldLens = fakeLens
, formFields = []
, formFieldRenderHelper = renderString
, formFieldConcat = vBox
, formFieldUpdate = flip const
}
where
-- looool
fakeLens :: Lens' s ()
fakeLens = lens (const ()) (\s () -> s)
renderString :: Widget n -> Widget n
renderString w = str t <+> w
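-- A hypothetical usage sketch with 'Brick.Forms.newForm' (the lens, resource
-- name and state used below are made up for illustration):
--
-- > mkForm = newForm
-- >     [ strField "Project settings"
-- >     , checkboxField libLens LibraryBox "Library"
-- >     ]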
{- | Custom checkbox with unique fancy style.
__Example:__
@
⟦✔⟧ Library
⟦ ⟧ Executable
@
-}
checkboxField
:: (Ord n, Show n)
=> Lens' s Bool -- ^ The state lens for this value.
-> n -- ^ The resource name for the input field.
-> Text -- ^ The label for the check box, to appear at its right.
-> s -- ^ The initial form state.
-> FormFieldState s e n
checkboxField = checkboxCustomField '⟦' '✔' '⟧'
{- | Custom radio button with unique fancy style.
__Example:__
@
❮◆❯ Enable ❮ ❯ Disable
@
-}
radioField
:: (Ord n, Show n, Eq a)
=> Lens' s a -- ^ The state lens for this value.
-> [(a, n, Text)] -- ^ The available choices, in order.
-> s -- ^ The initial form state.
-> FormFieldState s e n
radioField = radioCustomField '❮' '◆' '❯'
-- | Checkbox that can be disabled.
activeCheckboxField
:: forall n s e . Ord n
=> Lens' s Bool
-> (s -> n -> Bool) -- ^ Function should return 'False' if checkbox should be disabled.
-> n
-> String -- ^ The label for the check box, to appear at its right.
-> s -- ^ The initial form state.
-> FormFieldState s e n
activeCheckboxField stLens isActive name label initialState = FormFieldState
{ formFieldState = initVal
, formFields = [checkboxFormField]
, formFieldLens = stLens
, formFieldRenderHelper = id
, formFieldConcat = vBox
, formFieldUpdate = flip const
}
where
initVal, isEnabled :: Bool
initVal = initialState ^. stLens
isEnabled = isActive initialState name
handleEvent :: BrickEvent n e -> Bool -> EventM n Bool
handleEvent (MouseDown n _ _ _)
| isEnabled && n == name = pure . not
handleEvent (VtyEvent (V.EvKey (V.KChar ' ') [])) = pure . not
handleEvent _ = pure
checkboxFormField :: FormField Bool Bool e n
checkboxFormField = FormField
{ formFieldName = name
, formFieldValidate = Just
, formFieldExternallyValid = True
, formFieldRender = renderCheckbox isEnabled label name
, formFieldHandleEvent = handleEvent
}
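-- A hypothetical usage sketch (all names below are made up): disable the
-- "Tests" checkbox whenever the "Library" flag in the form state is off.
--
-- > activeCheckboxField testsL (\s _ -> s ^. libraryL) TestsBox "Tests"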
-- | Renders checkbox depending on its state.
renderCheckbox :: Bool -> String -> n -> Bool -> Bool -> Widget n
renderCheckbox isEnabled label n foc val =
let addAttr = if foc then withDefAttr focusedFormInputAttr else id
csr = if foc then showCursor n (Location (1,0)) else id
in if isEnabled
then clickable n $ addAttr $ csr $ str $
"⟦" <> (if val then "✔" else " ") <> "⟧" <> " " <> label
else withAttr disabledAttr $ str $ "⟦ ⟧ " <> label
-- | Attribute for disabled checkboxes.
disabledAttr :: AttrName
disabledAttr = "disabled"
| vrom911/hs-init | summoner-tui/src/Summoner/Tui/Field.hs | mit | 4,309 | 0 | 16 | 1,202 | 938 | 529 | 409 | 81 | 5 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalyticsv2-applicationcloudwatchloggingoption-cloudwatchloggingoption.html
module Stratosphere.ResourceProperties.KinesisAnalyticsV2ApplicationCloudWatchLoggingOptionCloudWatchLoggingOption where
import Stratosphere.ResourceImports
-- | Full data type definition for
-- KinesisAnalyticsV2ApplicationCloudWatchLoggingOptionCloudWatchLoggingOption.
-- See
-- 'kinesisAnalyticsV2ApplicationCloudWatchLoggingOptionCloudWatchLoggingOption'
-- for a more convenient constructor.
data KinesisAnalyticsV2ApplicationCloudWatchLoggingOptionCloudWatchLoggingOption =
KinesisAnalyticsV2ApplicationCloudWatchLoggingOptionCloudWatchLoggingOption
{ _kinesisAnalyticsV2ApplicationCloudWatchLoggingOptionCloudWatchLoggingOptionLogStreamARN :: Val Text
} deriving (Show, Eq)
instance ToJSON KinesisAnalyticsV2ApplicationCloudWatchLoggingOptionCloudWatchLoggingOption where
toJSON KinesisAnalyticsV2ApplicationCloudWatchLoggingOptionCloudWatchLoggingOption{..} =
object $
catMaybes
[ (Just . ("LogStreamARN",) . toJSON) _kinesisAnalyticsV2ApplicationCloudWatchLoggingOptionCloudWatchLoggingOptionLogStreamARN
]
-- | Constructor for
-- 'KinesisAnalyticsV2ApplicationCloudWatchLoggingOptionCloudWatchLoggingOption'
-- containing required fields as arguments.
kinesisAnalyticsV2ApplicationCloudWatchLoggingOptionCloudWatchLoggingOption
:: Val Text -- ^ 'kavacwlocwloLogStreamARN'
-> KinesisAnalyticsV2ApplicationCloudWatchLoggingOptionCloudWatchLoggingOption
kinesisAnalyticsV2ApplicationCloudWatchLoggingOptionCloudWatchLoggingOption logStreamARNarg =
KinesisAnalyticsV2ApplicationCloudWatchLoggingOptionCloudWatchLoggingOption
{ _kinesisAnalyticsV2ApplicationCloudWatchLoggingOptionCloudWatchLoggingOptionLogStreamARN = logStreamARNarg
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalyticsv2-applicationcloudwatchloggingoption-cloudwatchloggingoption.html#cfn-kinesisanalyticsv2-applicationcloudwatchloggingoption-cloudwatchloggingoption-logstreamarn
kavacwlocwloLogStreamARN :: Lens' KinesisAnalyticsV2ApplicationCloudWatchLoggingOptionCloudWatchLoggingOption (Val Text)
kavacwlocwloLogStreamARN = lens _kinesisAnalyticsV2ApplicationCloudWatchLoggingOptionCloudWatchLoggingOptionLogStreamARN (\s a -> s { _kinesisAnalyticsV2ApplicationCloudWatchLoggingOptionCloudWatchLoggingOptionLogStreamARN = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/KinesisAnalyticsV2ApplicationCloudWatchLoggingOptionCloudWatchLoggingOption.hs | mit | 2,588 | 0 | 13 | 168 | 178 | 104 | 74 | 23 | 1 |
{-# OPTIONS_GHC -fno-warn-missing-methods #-}
{-# LANGUAGE CPP, OverloadedStrings, TypeSynonymInstances, MultiParamTypeClasses #-}
-- Copyright (C) 2010 Petr Rockai
--
-- Permission is hereby granted, free of charge, to any person
-- obtaining a copy of this software and associated documentation
-- files (the "Software"), to deal in the Software without
-- restriction, including without limitation the rights to use, copy,
-- modify, merge, publish, distribute, sublicense, and/or sell copies
-- of the Software, and to permit persons to whom the Software is
-- furnished to do so, subject to the following conditions:
--
-- The above copyright notice and this permission notice shall be
-- included in all copies or substantial portions of the Software.
--
-- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-- EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-- MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-- NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
-- BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
-- ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-- CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-- SOFTWARE.
-- |
-- Module : Darcs.Patch.Annotate
-- Copyright : 2010 Petr Rockai
-- License : MIT
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
module Darcs.Patch.Annotate
(
annotate
, annotateDirectory
, format
, machineFormat
, AnnotateResult
) where
import Prelude hiding ( pi )
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as BC
import qualified Data.Map as M
import qualified Data.Vector as V
import Data.Function ( on )
import Data.List( nub, groupBy )
import Data.Maybe( isJust, mapMaybe )
import Control.Monad.State ( modify, when, gets, State, execState )
import Control.Applicative( (<$>) )
import Darcs.Patch.ApplyMonad( ApplyMonad(..) )
import Darcs.Patch.Apply ( Apply, apply, ApplyState )
import Darcs.Patch.Info ( PatchInfo(..), showPatchInfoUI, piAuthor, makePatchname )
import Darcs.Patch.PatchInfoAnd( info, PatchInfoAnd )
import Darcs.Patch.Witnesses.Ordered
import Storage.Hashed.Tree( Tree )
import Darcs.Util.Path ( FileName, movedirfilename, fn2ps, ps2fn )
import Darcs.Util.Printer( renderString, RenderMode(..) )
import Darcs.Util.ByteString ( linesPS, unlinesPS )
import Darcs.Util.Diff ( getChanges )
import qualified Darcs.Util.Diff as D ( DiffAlgorithm )
#include "impossible.h"
data FileOrDirectory = File
| Directory
deriving (Show, Eq)
data Annotated = Annotated
{ annotated :: V.Vector (Maybe PatchInfo, B.ByteString)
, current :: [(Int, B.ByteString)]
, path :: Maybe FileName
, what :: FileOrDirectory
, currentInfo :: PatchInfo
, diffAlgorithm :: D.DiffAlgorithm
} deriving Show
type AnnotateResult = V.Vector (Maybe PatchInfo, B.ByteString)
type AnnotatedM = State Annotated
-- XXX: No explicit method nor default method for 'editFile', 'editDirectory'
instance ApplyMonad AnnotatedM Tree where
type ApplyMonadBase AnnotatedM = AnnotatedM
nestedApply _ _ = undefinedFun "nestedApply"
liftApply _ _ = undefinedFun "liftApply"
getApplyState = undefinedFun "getApplyState"
putApplyState _ = undefinedFun "putApplyState"
mReadFilePS = undefinedFun "mReadFilePS"
mDoesFileExist _ = return True
mDoesDirectoryExist _ = return True
mCreateDirectory _ = return ()
mCreateFile _ = return ()
mRemoveFile f = do
p <- gets path
when (p == Just f) $ modify (\x -> x { path = Nothing })
updateDirectory f
mRemoveDirectory = mRemoveFile
mRename a b = do
p <- gets path
w <- gets what
when (isJust p) $
modify $ \st -> st { path = Just $ movedirfilename a b (fromJust p) }
when (w == Directory) $ do
let fix (i, x) = (i, fn2ps $ movedirfilename a b (ps2fn x))
modify $ \st -> st { current = map fix $ current st }
mModifyFilePS f job = do
p <- gets path
when (p == Just f) $ updateFile (fmap linesPS . job . unlinesPS)
mModifyFilePSs f job = do
p <- gets path
when (p == Just f) $ updateFile job
undefinedFun :: Monad m
=> String
-> m a
undefinedFun name = fail $ name ++ " undefined for Annotated"
updateFile :: ([B.ByteString]
-> AnnotatedM [B.ByteString])
-> AnnotatedM ()
updateFile job = (==File) <$> gets what >>= flip when go
where
go = do
before <- map snd `fmap` gets current
after <- job before
da <- gets diffAlgorithm
reannotate $ getChanges da before after
reannotate [] = return ()
reannotate ((off, remove, add):rest) = do
i <- gets currentInfo
c <- gets current
a <- gets annotated
modify $ \s -> s { current = take off c ++ [ (-1, x) | x <- add ] ++
drop (off + length remove) c
, annotated = merge i a $ take (length remove) $ drop off c
}
reannotate rest
merge i a l = a V.// [ (line, (Just i, B.empty))
| (line, _) <- l, line >= 0 && line < V.length a]
updateDirectory :: FileName -> AnnotatedM ()
updateDirectory p = (==Directory) <$> gets what >>= flip when go
where
go = do let line = fn2ps p
files <- gets current
case filter ((==line) . snd) files of
[match@(ident, _)] -> reannotate ident match line
_ -> return ()
reannotate ident match line =
modify $ \x -> x { annotated = annotated x V.// [ (ident, update line $ currentInfo x) ]
, current = filter (/= match) $ current x }
update line inf = (Just inf, BC.concat [ " -- created as: ", line ])
complete :: Annotated -> Bool
complete x = V.all (isJust . fst) $ annotated x
annotate' :: (Apply p, ApplyState p ~ Tree)
=> FL (PatchInfoAnd p) wX wY
-> Annotated
-> Annotated
annotate' NilFL ann = ann
annotate' (p :>: ps) ann
| complete ann = ann
| otherwise = annotate' ps $ execState (apply p) (ann { currentInfo = info p })
annotate :: (Apply p, ApplyState p ~ Tree)
=> D.DiffAlgorithm
-> FL (PatchInfoAnd p) wX wY
-> FileName
-> B.ByteString
-> AnnotateResult
annotate da patches inipath inicontent = annotated $ annotate' patches initial
where
initial = Annotated { path = Just inipath
, currentInfo = error "There is no currentInfo."
, current = zip [0..] (linesPS inicontent)
, what = File
, annotated = V.replicate (length $ breakLines inicontent)
(Nothing, B.empty)
, diffAlgorithm = da
}
annotateDirectory :: (Apply p, ApplyState p ~ Tree)
=> D.DiffAlgorithm
-> FL (PatchInfoAnd p) wX wY
-> FileName
-> [FileName]
-> AnnotateResult
annotateDirectory da patches inipath inicontent = annotated $ annotate' patches initial
where
initial = Annotated { path = Just inipath
, currentInfo = error "There is no currentInfo."
, current = zip [0..] (map fn2ps inicontent)
, what = Directory
, annotated = V.replicate (length inicontent) (Nothing, B.empty)
, diffAlgorithm = da
}
machineFormat :: B.ByteString -> AnnotateResult -> String
machineFormat d a = unlines [ case i of
Just inf -> show $ makePatchname inf
Nothing -> -- make unknowns uniform, for easier parsing
take 40 ( repeat '0' ) -- fake hash of the right size
++ " | " ++ BC.unpack line ++ " " ++ BC.unpack add
| ((i, add), line) <- zip (V.toList a) (breakLines d) ]
format :: B.ByteString
-> AnnotateResult
-> String
format d a = pi_list ++ "\n" ++ numbered
where
numberedLines = zip [(1 :: Int)..] . lines $ file
prependNum (lnum, annLine) =
let maxDigits = length . show . length $ numberedLines
lnumStr = show lnum
paddingNum = maxDigits - length lnumStr
in replicate paddingNum ' ' ++ lnumStr ++ ": " ++ annLine
numbered = unlines . map prependNum $ numberedLines
pi_list = unlines [ show n ++ ": " ++ renderString Encode (showPatchInfoUI i)
| (n :: Int, i) <- zip [1..] pis ]
file = concat [ annotation (fst $ head chunk) ++ " | " ++ line (head chunk) ++
"\n" ++ unlines [ indent 25 (" | " ++ line l) | l <- tail chunk ]
| chunk <- file_ann ]
pis = nub $ mapMaybe fst $ V.toList a
pi_map = M.fromList (zip pis [1 :: Int ..])
file_ann = groupBy ((==) `on` fst) $ zip (V.toList a) (breakLines d)
line ((_, add), l) = BC.unpack $ BC.concat [l, " ", add]
annotation (Just i, _) | Just n <- M.lookup i pi_map =
pad 20 (piMail i) ++ " " ++ pad 4 ('#' : show n)
annotation _ = pad 25 "unknown"
pad n str = replicate (n - length str) ' ' ++ take n str
indent n str = replicate n ' ' ++ str
piMail pi
| '<' `elem` piAuthor pi = takeWhile (/= '>') . drop 1 . dropWhile (/= '<') $ piAuthor pi
| otherwise = piAuthor pi
breakLines :: BC.ByteString
-> [BC.ByteString]
breakLines s = case BC.split '\n' s of
[] -> []
split | BC.null (last split) -> init split
| otherwise -> split
| DavidAlphaFox/darcs | src/Darcs/Patch/Annotate.hs | gpl-2.0 | 9,955 | 0 | 18 | 3,051 | 2,909 | 1,532 | 1,377 | -1 | -1 |
{-| Implementation of the Ganeti Confd client functionality.
-}
{-
Copyright (C) 2012 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.Confd.Client
( getConfdClient
, query
) where
import Control.Concurrent
import Control.Monad
import Data.List
import Data.Maybe
import qualified Network.Socket as S
import System.Posix.Time
import qualified Text.JSON as J
import Ganeti.BasicTypes
import Ganeti.Confd.Types
import Ganeti.Confd.Utils
import qualified Ganeti.Constants as C
import Ganeti.Hash
import Ganeti.Ssconf
-- | Builds a properly initialized ConfdClient.
-- The parameters (an IP address and the port number for the Confd client
-- to connect to) are mainly meant for testing purposes. If they are not
-- provided, the list of master candidates and the default port number will
-- be used.
getConfdClient :: Maybe String -> Maybe Int -> IO ConfdClient
getConfdClient addr portNum = S.withSocketsDo $ do
hmac <- getClusterHmac
candList <- getMasterCandidatesIps Nothing
peerList <-
case candList of
(Ok p) -> return p
(Bad msg) -> fail msg
let addrList = maybe peerList (:[]) addr
port = fromMaybe C.defaultConfdPort portNum
return . ConfdClient hmac addrList $ fromIntegral port
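-- A minimal usage sketch; the request type and query value are placeholders,
-- see 'Ganeti.Confd.Types' for the actual constructors:
--
-- > example = do
-- >   client <- getConfdClient Nothing Nothing
-- >   mReply <- query client someRequestType someQuery
-- >   case mReply of
-- >     Just r | confdReplyStatus r == ReplyStatusOk -> putStrLn "got a valid reply"
-- >     _ -> putStrLn "no usable reply"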
-- | Sends a query to all the Confd servers the client is connected to.
-- Returns the most up-to-date result according to the serial number,
-- chosen between those received before the timeout.
query :: ConfdClient -> ConfdRequestType -> ConfdQuery -> IO (Maybe ConfdReply)
query client crType cQuery = do
semaphore <- newMVar ()
answer <- newMVar Nothing
let dest = [(host, serverPort client) | host <- peers client]
hmac = hmacKey client
jobs = map (queryOneServer semaphore answer crType cQuery hmac) dest
watchdog reqAnswers = do
threadDelay $ 1000000 * C.confdClientExpireTimeout
_ <- swapMVar reqAnswers 0
putMVar semaphore ()
waitForResult reqAnswers = do
_ <- takeMVar semaphore
l <- takeMVar reqAnswers
unless (l == 0) $ do
putMVar reqAnswers $ l - 1
waitForResult reqAnswers
reqAnswers <- newMVar . min C.confdDefaultReqCoverage $ length dest
workers <- mapM forkIO jobs
watcher <- forkIO $ watchdog reqAnswers
waitForResult reqAnswers
mapM_ killThread $ watcher:workers
takeMVar answer
-- | Updates the reply to the query. As per the Confd design document,
-- only the reply with the highest serial number is kept.
updateConfdReply :: ConfdReply -> Maybe ConfdReply -> Maybe ConfdReply
updateConfdReply newValue Nothing = Just newValue
updateConfdReply newValue (Just currentValue) = Just $
if confdReplyStatus newValue == ReplyStatusOk
&& (confdReplyStatus currentValue /= ReplyStatusOk
|| confdReplySerial newValue > confdReplySerial currentValue)
then newValue
else currentValue
-- | Send a query to a single server, waits for the result and stores it
-- in a shared variable. Then, sends a signal on another shared variable
-- acting as a semaphore.
-- This function is meant to be used as one of multiple threads querying
-- multiple servers in parallel.
queryOneServer
:: MVar () -- ^ The semaphore that will be signalled
-> MVar (Maybe ConfdReply) -- ^ The shared variable for the result
-> ConfdRequestType -- ^ The type of the query to be sent
-> ConfdQuery -- ^ The content of the query
-> HashKey -- ^ The hmac key to sign the message
-> (String, S.PortNumber) -- ^ The address and port of the server
-> IO ()
queryOneServer semaphore answer crType cQuery hmac (host, port) = do
request <- newConfdRequest crType cQuery
timestamp <- fmap show epochTime
let signedMsg =
signMessage hmac timestamp (J.encodeStrict request)
completeMsg = C.confdMagicFourcc ++ J.encodeStrict signedMsg
s <- S.socket S.AF_INET S.Datagram S.defaultProtocol
hostAddr <- S.inet_addr host
_ <- S.sendTo s completeMsg $ S.SockAddrInet port hostAddr
replyMsg <- S.recv s C.maxUdpDataSize
parsedReply <-
if C.confdMagicFourcc `isPrefixOf` replyMsg
then return . parseReply hmac (drop 4 replyMsg) $ confdRqRsalt request
else fail "Invalid magic code!"
reply <-
case parsedReply of
Ok (_, r) -> return r
Bad msg -> fail msg
modifyMVar_ answer $! return . updateConfdReply reply
putMVar semaphore ()
| damoxc/ganeti | src/Ganeti/Confd/Client.hs | gpl-2.0 | 5,059 | 0 | 17 | 1,058 | 1,003 | 495 | 508 | 86 | 3 |
{-# LANGUAGE NoImplicitPrelude #-}
module Lib.Directory
( getMFileStatus
, catchDoesNotExist
, removeFileOrDirectory
, removeFileOrDirectoryOrNothing
, createDirectories
, getDirectoryContents
, makeAbsolutePath
) where
import Prelude.Compat hiding (FilePath)
import Control.Monad
import Lib.Exception (bracket)
import Lib.FilePath (FilePath, (</>))
import System.IO.Error
import qualified Control.Exception as E
import qualified Data.ByteString.Char8 as BS8
import qualified Lib.FilePath as FilePath
import qualified System.Directory as Dir
import qualified System.Posix.ByteString as Posix
catchDoesNotExist :: IO a -> IO a -> IO a
catchDoesNotExist act handler =
act `E.catch` \e ->
if isDoesNotExistErrorType (ioeGetErrorType e)
then handler
else E.throwIO e
getMFileStatus :: FilePath -> IO (Maybe Posix.FileStatus)
getMFileStatus path = do
doesExist <- FilePath.exists path
if doesExist
then (Just <$> Posix.getFileStatus path) `catchDoesNotExist` return Nothing
else return Nothing
createDirectories :: FilePath -> IO ()
createDirectories path
| BS8.null path = return ()
| otherwise = do
doesExist <- FilePath.exists path
unless doesExist $ do
createDirectories $ FilePath.takeDirectory path
Posix.createDirectory path 0o777
removeFileByStat :: IO () -> FilePath -> IO ()
removeFileByStat notExist path = do
mFileStat <- getMFileStatus path
case mFileStat of
Nothing -> notExist
Just fileStat
| Posix.isRegularFile fileStat -> Posix.removeLink path
| Posix.isSymbolicLink fileStat -> Posix.removeLink path
| Posix.isDirectory fileStat -> Dir.removeDirectoryRecursive $ BS8.unpack path
| otherwise -> error $ "removeFileOrDirectoryOrNothing: unsupported filestat " ++ show path
removeFileOrDirectoryOrNothing :: FilePath -> IO ()
removeFileOrDirectoryOrNothing = removeFileByStat $ return ()
removeFileOrDirectory :: FilePath -> IO ()
removeFileOrDirectory path =
removeFileByStat
-- Try to remove the file when it doesn't exist in order to generate
    -- a meaningful IO exception:
(Posix.removeLink path) path
getDirectoryContents :: FilePath -> IO [FilePath]
getDirectoryContents path =
bracket (Posix.openDirStream path) Posix.closeDirStream go
where
go dirStream = do
fn <- Posix.readDirStream dirStream
if BS8.null fn
then return []
else (fn :) <$> go dirStream
makeAbsolutePath :: FilePath -> IO FilePath
makeAbsolutePath path = (</> path) <$> Posix.getWorkingDirectory
| da-x/buildsome | src/Lib/Directory.hs | gpl-2.0 | 2,538 | 0 | 14 | 455 | 686 | 351 | 335 | 65 | 2 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
module TinyMicro.Board where
import MOS6502.Utils
import TinyMicro.Video
import Language.KansasLava
import Data.Sized.Unsigned
import Data.Bits
import qualified Data.ByteString as BS
import Language.KansasLava.Signal
import Control.Applicative
-- Memory layout:
--
-- 0x0000 - 0x3FFF: 16K x 8 RAM
-- 0x0200 - 0x05FF: 1K x 4 VRAM (mirrored in RAM)
-- 0xF000 - 0xFFFF: 4K x 8 ROM
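-- A worked example of the decoding implemented in 'boardCircuit' below: a CPU
-- write to address 0x0205 goes to RAM as usual and, because 0x0200 <= 0x0205 < 0x0600,
-- the written value (narrowed to a nybble) is also routed to VRAM at offset 0x0005.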
type Byte = U8
type Addr = U16
type RAMAddr = U14
type ROMAddr = U13
programToROM :: Addr -> BS.ByteString -> (ROMAddr -> Byte)
programToROM startingAddr bs addr
| offset < 0 = 0
| offset >= BS.length bs = 0
| otherwise = fromIntegral $ BS.index bs offset
where
offset = fromIntegral $ addr - fromIntegral startingAddr
data CPUSocketIn clk = CPUSocketIn
{ csMemR :: Signal clk Byte
}
data CPUSocketOut clk = CPUSocketOut
{ csMemA :: Signal clk Addr
, csMemW :: Signal clk (Enabled Byte)
}
boardCircuit :: forall clk. (Clock clk)
=> (CPUSocketIn clk -> CPUSocketOut clk)
-> (ROMAddr -> Byte)
-> Signal clk (Pipe VAddr Nybble)
boardCircuit cpu romContents = vpipe
where
CPUSocketOut{..} = cpu CPUSocketIn{..}
mpipe :: Signal clk (Pipe RAMAddr Byte)
mpipe = packEnabled (isEnabled csMemW .&. isRAM) $
pack (unsigned csMemA, enabledVal csMemW)
ram = writeMemory mpipe
ramR = syncRead ram (unsigned csMemA)
vpipe :: Signal clk (Pipe VAddr U4)
vpipe = packEnabled (isEnabled csMemW .&&. isVideo) $
pack (unsigned (csMemA - 0x0200), unsigned $ enabledVal csMemW)
romR = rom (unsigned csMemA) (Just . romContents)
isVideo = 0x0200 .<=. csMemA .&&. csMemA .<. 0x0600
isRAM = csMemA .<. 0x4000
isROM = 0xF000 .<=. csMemA
csMemR = forceDefined 0 $
memoryMapping [ (isRAM, ramR)
, (isROM, romR)
]
forceDefined :: (Clock clk, Rep a) => a -> Signal clk a -> Signal clk a
forceDefined def = shallowMapS (fmap (optX . (<|> Just def) . unX))
| gergoerdi/tinymicro-mos6502-kansas-lava | lava/TinyMicro/Board.hs | gpl-2.0 | 2,102 | 0 | 12 | 534 | 642 | 343 | 299 | 49 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
module Lambia.Index (nil, indexing, append, ixDecl, ixExpr, Indexed) where
import Prelude hiding (lookup, foldr)
import Control.Monad.State.Strict
import Control.Monad.Trans.Except
import Control.Applicative hiding (empty)
import Data.Char
import Data.ByteString.Char8 hiding (append,empty,reverse,foldr,last,elemIndex,head,length)
import qualified Data.ByteString.Char8 as B
import Data.List (elemIndex)
import Data.Map.Strict hiding (split)
import Data.Traversable
import Data.Maybe (isJust, fromMaybe)
import Lambia.Types
import Lambia.Prim
type Local s a = ExceptT ByteString (State (Status s)) a
type Indexed s = Either ByteString (Maybe s,(Save s,Save s))
none :: Primitive s
none = const Nothing
nil :: Save s
nil = Save (empty,none)
ini :: Store s => Status s
ini = Status prim prim
pass :: Status s -> (Save s, Save s)
pass (Status a b) = (a,b)
indexing :: Store s => Source -> Indexed s
indexing (Source decls e) = let
(e',s) = flip runState ini $ runExceptT $ do
mapM_ ixDecl decls
traverse ixExpr e
in case e' of
Left l -> Left l
Right d -> Right (d,pass s)
append :: ByteString -> Maybe ByteString -> s -> Save s -> Save s
append v sc e (Save (s,p)) = Save $ (,p) $ let
u = lookup v s
in case u of
Just w -> insert v (fst w,Just (e,sc)) s
Nothing -> insert v (nil,Just (e,sc)) s
match :: [ByteString] -> Save s -> Maybe (Entity s)
match [] e = Nothing
match [x] (Save (e,u)) = lookup x e <|> u x
match (x:xs) (Save (e,u)) = case lookup x e <|> u x of
Just (y,_) -> match xs y
Nothing -> Nothing
merge :: [ByteString] -> Save s -> Save s -> Local s (Save s)
merge [] (Save (l,lp)) (Save (r,rp)) = do
let
meld :: ByteString ->
Entity s ->
Entity s ->
Maybe (Either ([ByteString] -> [ByteString]) (Entity s))
meld key (Save (a,ap),v) (Save (b,bp),w) = Just $ let
c = mu a b
c' = mapMaybe right c
x = v <|> w
p' = mixP ap bp
in case j c of
Just e -> Left e
Nothing -> case isJust v && isJust w of
False -> Right (Save (c',p'),x)
True -> case (v,w) of
(Just (_,vs), Just (_,ws))
| vs == ws -> Right (Save (c',p'),x)
| otherwise -> let
i = fromMaybe "[Outer]"
in Left (B.concat ["{",i vs,"|",i ws,"}.",key]:)
mu = mergeWithKey meld (fmap Right) (fmap Right)
u = mu l r
j = foldr f Nothing where
f (Left x) (Just xs) = Just $ x.xs
f (Left x) Nothing = Just x
f (Right _) (Just xs) = Just xs
f (Right m) Nothing = Nothing
right :: Either ([ByteString] -> [ByteString]) (Entity s) -> Maybe (Entity s)
right (Left _) = Nothing
right (Right x) = Just x
u' = mapMaybe right u
mixE (Save (s,sp),v) (Save (t,tp),w) = (Save (mixM s t, mixP sp tp), v <|> w)
mixM = unionWith mixE
mixP p q s = case (p s, q s) of
(Nothing, Nothing) -> Nothing
(Just e, Nothing) -> Just e
(Nothing, Just e) -> Just e
(Just e, Just e') -> Just $ mixE e e'
p = mixP lp rp
case j u of
Just e -> throwE $ B.append "Duplicate variable : " $ B.intercalate ", " $ e []
Nothing -> return $ Save (u',p)
merge (x:xs) l@(Save (_,lp)) (Save (r,rp)) = Save <$> case lookup x r of
Just (e,v) -> do
u <- merge xs l e
return (insert x (u,v) r, rp)
Nothing -> do
u <- merge xs l nil
return (insert x (u,Nothing) r, rp)
ixDecl :: Store s => Declare -> Local s ()
ixDecl (Decl str scope e) = do
Status g l <- get
m <- ixExpr e
let
g' = if isLower $ B.head str
then g
else append str scope m g
l' = append str scope m l
put $ Status g' l'
ixDecl (Scope False str ds) = do
Status g l <- get
put $ Status nil l
mapM_ ixDecl ds
Status g' _ <- get
l' <- merge [str] g' l
g'' <- merge [str] g' g
put $ Status g'' l'
ixDecl (Scope True str ds) = do
Status g l <- get
put $ Status nil l
mapM_ ixDecl ds
Status g' l' <- get
l'' <- merge [str] g' l'
g'' <- merge [str] g' g
g''' <- merge [] g' g''
put $ Status g''' l''
ixDecl (Open u) = do
Status g l <- get
let
us = split '.' u
r = match us l <|> match us g
case r of
Just (e,v) -> do
let n = last us
l' <- merge [] e l
g' <- merge [] e g
let
(l'',g'') = case v of
Just (v',sc)
| length us > 1 -> (append n sc v' l', append n sc v' g')
| otherwise -> (l',g')
Nothing -> (l',g')
put $ Status g'' l''
Nothing -> throwE $ "Not a scope name : "`B.append`u
ixExpr :: Store s => Expr -> Local s s
ixExpr e = snd . simple 100 . fromSyn <$> iE [] e
iE :: Store s => [ByteString] -> Expr -> Local s (Syn s)
iE us (Expr decls t) = do
s <- get
forM_ decls $ \(Decl str sc e) -> do
Status g l <- get
m <- iE us e
let
g' = append str Nothing (fromSyn m) g
l' = append str Nothing (fromSyn m) l
put $ Status g' l'
i <- iT us t
put s
return i
iT :: Store s => [ByteString] -> Term -> Local s (Syn s)
iT us (Abst args e) = d args <$> iE (reverse args ++ us) e where
d (x:xs) = Lm . d xs
d [] = id
iT us (Apply a b) = do
x <- iT us a
y <- iT us b
return $ Ap x y
iT us (Wrap e) = iE us e
iT us (Var v) = case elemIndex v us of
Just u -> return $ Ix u
Nothing -> do
Status _ l <- get
let
us = split '.' v
err = throwE $ "Not in scope : "`B.append`v
-- search :: [ByteString] -> Save s -> Local s s
search [] s = err
search [x] (Save (s,u)) = case lookup x s <|> u x of
Just (_,Just (e,_)) -> return e
_ -> err
search (x:xs) (Save (s,u)) = case lookup x s <|> u x of
Just (s',_) -> search xs s'
_ -> err
Og <$> search us l
| phi16/Lambia | src/Lambia/Index.hs | gpl-3.0 | 5,847 | 152 | 18 | 1,788 | 2,913 | 1,543 | 1,370 | 181 | 12 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Main
( main
) where
import Control.Applicative ((<*>), Applicative)
import Control.Monad (Monad, return)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Reader (MonadReader, ReaderT, ask, runReaderT)
import Data.Aeson ((.=), ToJSON, toJSON)
import Data.Char (toLower, toUpper)
import Data.Default.Class (def)
import Data.Either (Either(..))
import Data.Function (($))
import Data.Functor ((<$>), Functor, fmap)
import Data.Int (Int)
import Data.List ((++))
import Data.Maybe (Maybe(..), fromJust, listToMaybe)
import Data.String (String)
import Data.Text.Lazy (Text, fromStrict)
import Data.Time.Clock (UTCTime)
import Network.HTTP.Types.Status (status404)
import Network.Wai.Middleware.Static ((>->))
import Prelude (error)
import System.IO (IO, putStrLn)
import Text.Mustache ((~>), Template, ToMustache, automaticCompile, substitute)
import Text.Show (show)
import Web.Scotty.Trans (ActionT, ScottyT, scottyOptsT)
import qualified Data.Aeson as Aeson
import qualified Database.SQLite.Simple as Simple
import qualified Network.Wai.Middleware.Static as Static
import qualified Text.Mustache as Mustache
import qualified Web.Scotty.Trans as Scotty
-- =============================================================================
-- Config
-- =============================================================================
data Config = Config { _configConnection :: Simple.Connection }
newtype ConfigM a = ConfigM
{ runConfigM :: ReaderT Config IO a }
deriving (Applicative, Functor, Monad, MonadIO, MonadReader Config)
-- =============================================================================
-- Database
-- =============================================================================
databaseName :: String
databaseName = "db/portfolio.db"
data PostField = PostField
Int -- ^ id
Text -- ^ title
Text -- ^ description
UTCTime -- ^ created_at
UTCTime -- ^ updated_at
Text -- ^ slug
instance Simple.FromRow PostField where
fromRow = PostField
<$> Simple.field
<*> Simple.field
<*> Simple.field
<*> Simple.field
<*> Simple.field
<*> Simple.field
instance ToJSON PostField where
toJSON (PostField a b c d e f) = Aeson.object
[ "id" .= a
, "title" .= b
, "description" .= c
, "created_at" .= d
, "updated_at" .= e
, "slug" .= f
]
initialize :: (MonadReader Config m, MonadIO m) => m ()
initialize = do
config <- ask
let conn = _configConnection config
liftIO $ Simple.execute_ conn " \
\ CREATE TABLE IF NOT EXISTS Post \
\ ( id INTEGER PRIMARY KEY \
\ , title TEXT \
\ , description TEXT \
\ , created_at TEXT \
\ , updated_at TEXT \
\ , slug TEXT UNIQUE \
\ ); "
-- =============================================================================
-- Verbs
-- =============================================================================
-- | Specify types here for error message type resolution.
get :: (MonadIO m)
=> Scotty.RoutePattern
-> Scotty.ActionT Text m ()
-> Scotty.ScottyT Text m ()
get = Scotty.get
-- =============================================================================
-- Actions
-- =============================================================================
getPost :: (MonadReader Config m, MonadIO m) => String -> m (Maybe PostField)
getPost slug = do
config <- ask
let conn = _configConnection config
results <- liftIO $ query conn slug
return $ listToMaybe results
where
query :: Simple.Connection -> String -> IO [PostField]
query c slug =
Simple.query c "SELECT * FROM POST WHERE slug = ?" (Simple.Only slug)
getPostList :: (MonadReader Config m, MonadIO m) => m [PostField]
getPostList = do
config <- ask
let conn = _configConnection config
liftIO $ query conn
where
query :: Simple.Connection -> IO [PostField]
query c = Simple.query_ c "SELECT * FROM Post ORDER BY created_at DESC"
-- =============================================================================
-- Server
-- =============================================================================
newtype Script = Script { runScript :: String }
instance ToMustache Script where
toMustache (Script value) = Mustache.object ["script" ~> value]
-- | Compile various mustache file.
--
-- Should avoid using this in any dynamic sense - compilation is slow.
loadHTML :: String -> IO (Maybe Template)
loadHTML filename = do
compiled <- automaticCompile [".", "./dist"] filename
return $ case compiled of
Left err -> Nothing
Right template -> Just template
main :: IO ()
main = do
  -- Because a missing index template would mean the site couldn't load, try loading it
-- before we bother starting up the server at all. Force unwrap to raise an
-- error if it cannot be found.
indexTemplate <- loadHTML "index.html"
let index = fromJust indexTemplate
-- Initialize our database before running the Scotty app to avoid this
-- being hit everytime we process an action.
conn <- Simple.open databaseName
let config = Config { _configConnection = conn }
runReaderT initialize config
-- Allow passing in the config throughout our various Scotty actions. Example
-- pulled from:
-- https://github.com/scotty-web/scotty/blob/master/examples/reader.hs
scottyOptsT def (\m -> runReaderT (runConfigM m) config) $ do
Scotty.middleware $ Static.staticPolicyWithOptions
Static.defaultOptions $ Static.addBase "dist"
-- Retrieve HTML of our index page.
get "" $ do
Scotty.html $ fromStrict $ substitute index (Script "index")
-- Retrieve HTML of a specific post.
get "/post/:slug" $ do
slug <- Scotty.param "slug"
Scotty.html $ fromStrict $ substitute index (Script slug)
-- Retrieve a list of all posts currently on our blog. This is used within
-- our homepage.
get "/api/posts" $ do
posts <- getPostList
Scotty.json posts
-- Retrieve details of a specific post.
get "/api/post/:slug" $ do
slug <- Scotty.param "slug"
post <- getPost slug
case post of
Just r -> Scotty.json r
Nothing -> Scotty.status status404
| jrpotter/portfolio | src/Main.hs | gpl-3.0 | 6,417 | 0 | 18 | 1,256 | 1,416 | 777 | 639 | 122 | 2 |
{-# LANGUAGE NoImplicitPrelude, TemplateHaskell, DeriveFunctor, DeriveFoldable, DeriveTraversable #-}
-- TODO: Move to more general place?
module Lamdu.Compiler.Flatten
( Composite(..), tags, rest
, case_, Case
, recExtend, Record
) where
import qualified Control.Lens as Lens
import Control.Lens.Operators
import Data.Map (Map)
import qualified Lamdu.Calc.Type as T
import Lamdu.Calc.Val.Annotated (Val(..))
import qualified Lamdu.Calc.Val as V
import Prelude.Compat
data Composite p a = Composite
{ _tags :: Map T.Tag a
, _rest :: Maybe a
} deriving (Functor, Foldable, Traversable)
Lens.makeLenses ''Composite
type Case = Composite T.SumTag
type Record = Composite T.ProductTag
case_ :: V.Case (Val pl) -> Case (Val pl)
case_ (V.Case tag handler r) =
caseVal r
& tags . Lens.at tag ?~ handler
where
caseVal val@(Val _ body) =
case body of
V.BLeaf V.LAbsurd -> Composite mempty Nothing
V.BCase x -> case_ x
_ -> Composite mempty (Just val)
recExtend :: V.RecExtend (Val pl) -> Record (Val pl)
recExtend (V.RecExtend tag field r) =
recExtendVal r
& tags . Lens.at tag ?~ field
where
recExtendVal val@(Val _ body) =
case body of
V.BLeaf V.LRecEmpty -> Composite mempty Nothing
V.BRecExtend x -> recExtend x
_ -> Composite mempty (Just val)
| da-x/lamdu | Lamdu/Compiler/Flatten.hs | gpl-3.0 | 1,450 | 0 | 12 | 398 | 465 | 248 | 217 | -1 | -1 |
module Inkvizitor.LocFile (
locFile
) where
import Geocode
( Coords(..)
, Location(..)
)
locFile :: [Location] -> String
locFile locations = header ++ concat (map locEntry locations) ++ footer
where locEntry :: Location -> String
locEntry loc =
"<waypoint>\n"
++ " <name id=\"" ++ getLocId loc ++ "\">"
      ++ "<![CDATA[" ++ getFullAddress loc ++ "]]>"
++ "</name>\n"
++ " <coord"
++ " lat=\"" ++ (show . getLatitude . getCoords $ loc) ++ "\""
++ " lon=\"" ++ (show . getLongitude . getCoords $ loc) ++ "\"/>\n"
++ "</waypoint>\n\n"
footer = "</loc>\n"
header = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
++ "<loc src=\"Geocode\" version=\"1.0\">\n\n"
| honzasp/inkvizitor | Inkvizitor/LocFile.hs | gpl-3.0 | 800 | 0 | 21 | 246 | 198 | 106 | 92 | 20 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Gmail.Users.Settings.Delegates.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists the delegates for the specified account. This method is only
-- available to service account clients that have been delegated
-- domain-wide authority.
--
-- /See:/ <https://developers.google.com/gmail/api/ Gmail API Reference> for @gmail.users.settings.delegates.list@.
module Network.Google.Resource.Gmail.Users.Settings.Delegates.List
(
-- * REST Resource
UsersSettingsDelegatesListResource
-- * Creating a Request
, usersSettingsDelegatesList
, UsersSettingsDelegatesList
-- * Request Lenses
, usdlXgafv
, usdlUploadProtocol
, usdlAccessToken
, usdlUploadType
, usdlUserId
, usdlCallback
) where
import Network.Google.Gmail.Types
import Network.Google.Prelude
-- | A resource alias for @gmail.users.settings.delegates.list@ method which the
-- 'UsersSettingsDelegatesList' request conforms to.
type UsersSettingsDelegatesListResource =
"gmail" :>
"v1" :>
"users" :>
Capture "userId" Text :>
"settings" :>
"delegates" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListDelegatesResponse
-- | Lists the delegates for the specified account. This method is only
-- available to service account clients that have been delegated
-- domain-wide authority.
--
-- /See:/ 'usersSettingsDelegatesList' smart constructor.
data UsersSettingsDelegatesList =
UsersSettingsDelegatesList'
{ _usdlXgafv :: !(Maybe Xgafv)
, _usdlUploadProtocol :: !(Maybe Text)
, _usdlAccessToken :: !(Maybe Text)
, _usdlUploadType :: !(Maybe Text)
, _usdlUserId :: !Text
, _usdlCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'UsersSettingsDelegatesList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'usdlXgafv'
--
-- * 'usdlUploadProtocol'
--
-- * 'usdlAccessToken'
--
-- * 'usdlUploadType'
--
-- * 'usdlUserId'
--
-- * 'usdlCallback'
usersSettingsDelegatesList
:: UsersSettingsDelegatesList
usersSettingsDelegatesList =
UsersSettingsDelegatesList'
{ _usdlXgafv = Nothing
, _usdlUploadProtocol = Nothing
, _usdlAccessToken = Nothing
, _usdlUploadType = Nothing
, _usdlUserId = "me"
, _usdlCallback = Nothing
}
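-- A minimal usage sketch (illustrative only; assumes the lens operators
-- re-exported by the library prelude are in scope, and a made-up address):
--
-- > usersSettingsDelegatesList & usdlUserId .~ "user@example.com"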
-- | V1 error format.
usdlXgafv :: Lens' UsersSettingsDelegatesList (Maybe Xgafv)
usdlXgafv
= lens _usdlXgafv (\ s a -> s{_usdlXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
usdlUploadProtocol :: Lens' UsersSettingsDelegatesList (Maybe Text)
usdlUploadProtocol
= lens _usdlUploadProtocol
(\ s a -> s{_usdlUploadProtocol = a})
-- | OAuth access token.
usdlAccessToken :: Lens' UsersSettingsDelegatesList (Maybe Text)
usdlAccessToken
= lens _usdlAccessToken
(\ s a -> s{_usdlAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
usdlUploadType :: Lens' UsersSettingsDelegatesList (Maybe Text)
usdlUploadType
= lens _usdlUploadType
(\ s a -> s{_usdlUploadType = a})
-- | User\'s email address. The special value \"me\" can be used to indicate
-- the authenticated user.
usdlUserId :: Lens' UsersSettingsDelegatesList Text
usdlUserId
= lens _usdlUserId (\ s a -> s{_usdlUserId = a})
-- | JSONP
usdlCallback :: Lens' UsersSettingsDelegatesList (Maybe Text)
usdlCallback
= lens _usdlCallback (\ s a -> s{_usdlCallback = a})
instance GoogleRequest UsersSettingsDelegatesList
where
type Rs UsersSettingsDelegatesList =
ListDelegatesResponse
type Scopes UsersSettingsDelegatesList =
'["https://mail.google.com/",
"https://www.googleapis.com/auth/gmail.modify",
"https://www.googleapis.com/auth/gmail.readonly",
"https://www.googleapis.com/auth/gmail.settings.basic"]
requestClient UsersSettingsDelegatesList'{..}
= go _usdlUserId _usdlXgafv _usdlUploadProtocol
_usdlAccessToken
_usdlUploadType
_usdlCallback
(Just AltJSON)
gmailService
where go
= buildClient
(Proxy :: Proxy UsersSettingsDelegatesListResource)
mempty
| brendanhay/gogol | gogol-gmail/gen/Network/Google/Resource/Gmail/Users/Settings/Delegates/List.hs | mpl-2.0 | 5,383 | 0 | 19 | 1,266 | 718 | 422 | 296 | 110 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DFAReporting.CreativeFieldValues.Patch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates an existing creative field value. This method supports patch
-- semantics.
--
-- /See:/ <https://developers.google.com/doubleclick-advertisers/ Campaign Manager 360 API Reference> for @dfareporting.creativeFieldValues.patch@.
module Network.Google.Resource.DFAReporting.CreativeFieldValues.Patch
(
-- * REST Resource
CreativeFieldValuesPatchResource
-- * Creating a Request
, creativeFieldValuesPatch
, CreativeFieldValuesPatch
-- * Request Lenses
, cfvpCreativeFieldId
, cfvpXgafv
, cfvpUploadProtocol
, cfvpAccessToken
, cfvpUploadType
, cfvpProFileId
, cfvpPayload
, cfvpId
, cfvpCallback
) where
import Network.Google.DFAReporting.Types
import Network.Google.Prelude
-- | A resource alias for @dfareporting.creativeFieldValues.patch@ method which the
-- 'CreativeFieldValuesPatch' request conforms to.
type CreativeFieldValuesPatchResource =
"dfareporting" :>
"v3.5" :>
"userprofiles" :>
Capture "profileId" (Textual Int64) :>
"creativeFields" :>
Capture "creativeFieldId" (Textual Int64) :>
"creativeFieldValues" :>
QueryParam "id" (Textual Int64) :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] CreativeFieldValue :>
Patch '[JSON] CreativeFieldValue
-- | Updates an existing creative field value. This method supports patch
-- semantics.
--
-- /See:/ 'creativeFieldValuesPatch' smart constructor.
data CreativeFieldValuesPatch =
CreativeFieldValuesPatch'
{ _cfvpCreativeFieldId :: !(Textual Int64)
, _cfvpXgafv :: !(Maybe Xgafv)
, _cfvpUploadProtocol :: !(Maybe Text)
, _cfvpAccessToken :: !(Maybe Text)
, _cfvpUploadType :: !(Maybe Text)
, _cfvpProFileId :: !(Textual Int64)
, _cfvpPayload :: !CreativeFieldValue
, _cfvpId :: !(Textual Int64)
, _cfvpCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'CreativeFieldValuesPatch' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cfvpCreativeFieldId'
--
-- * 'cfvpXgafv'
--
-- * 'cfvpUploadProtocol'
--
-- * 'cfvpAccessToken'
--
-- * 'cfvpUploadType'
--
-- * 'cfvpProFileId'
--
-- * 'cfvpPayload'
--
-- * 'cfvpId'
--
-- * 'cfvpCallback'
creativeFieldValuesPatch
:: Int64 -- ^ 'cfvpCreativeFieldId'
-> Int64 -- ^ 'cfvpProFileId'
-> CreativeFieldValue -- ^ 'cfvpPayload'
-> Int64 -- ^ 'cfvpId'
-> CreativeFieldValuesPatch
creativeFieldValuesPatch pCfvpCreativeFieldId_ pCfvpProFileId_ pCfvpPayload_ pCfvpId_ =
CreativeFieldValuesPatch'
{ _cfvpCreativeFieldId = _Coerce # pCfvpCreativeFieldId_
, _cfvpXgafv = Nothing
, _cfvpUploadProtocol = Nothing
, _cfvpAccessToken = Nothing
, _cfvpUploadType = Nothing
, _cfvpProFileId = _Coerce # pCfvpProFileId_
, _cfvpPayload = pCfvpPayload_
, _cfvpId = _Coerce # pCfvpId_
, _cfvpCallback = Nothing
}
-- | CreativeField ID.
cfvpCreativeFieldId :: Lens' CreativeFieldValuesPatch Int64
cfvpCreativeFieldId
= lens _cfvpCreativeFieldId
(\ s a -> s{_cfvpCreativeFieldId = a})
. _Coerce
-- | V1 error format.
cfvpXgafv :: Lens' CreativeFieldValuesPatch (Maybe Xgafv)
cfvpXgafv
= lens _cfvpXgafv (\ s a -> s{_cfvpXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
cfvpUploadProtocol :: Lens' CreativeFieldValuesPatch (Maybe Text)
cfvpUploadProtocol
= lens _cfvpUploadProtocol
(\ s a -> s{_cfvpUploadProtocol = a})
-- | OAuth access token.
cfvpAccessToken :: Lens' CreativeFieldValuesPatch (Maybe Text)
cfvpAccessToken
= lens _cfvpAccessToken
(\ s a -> s{_cfvpAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
cfvpUploadType :: Lens' CreativeFieldValuesPatch (Maybe Text)
cfvpUploadType
= lens _cfvpUploadType
(\ s a -> s{_cfvpUploadType = a})
-- | User profile ID associated with this request.
cfvpProFileId :: Lens' CreativeFieldValuesPatch Int64
cfvpProFileId
= lens _cfvpProFileId
(\ s a -> s{_cfvpProFileId = a})
. _Coerce
-- | Multipart request metadata.
cfvpPayload :: Lens' CreativeFieldValuesPatch CreativeFieldValue
cfvpPayload
= lens _cfvpPayload (\ s a -> s{_cfvpPayload = a})
-- | CreativeFieldValue ID.
cfvpId :: Lens' CreativeFieldValuesPatch Int64
cfvpId
= lens _cfvpId (\ s a -> s{_cfvpId = a}) . _Coerce
-- | JSONP
cfvpCallback :: Lens' CreativeFieldValuesPatch (Maybe Text)
cfvpCallback
= lens _cfvpCallback (\ s a -> s{_cfvpCallback = a})
instance GoogleRequest CreativeFieldValuesPatch where
type Rs CreativeFieldValuesPatch = CreativeFieldValue
type Scopes CreativeFieldValuesPatch =
'["https://www.googleapis.com/auth/dfatrafficking"]
requestClient CreativeFieldValuesPatch'{..}
= go _cfvpProFileId _cfvpCreativeFieldId
(Just _cfvpId)
_cfvpXgafv
_cfvpUploadProtocol
_cfvpAccessToken
_cfvpUploadType
_cfvpCallback
(Just AltJSON)
_cfvpPayload
dFAReportingService
where go
= buildClient
(Proxy :: Proxy CreativeFieldValuesPatchResource)
mempty
| brendanhay/gogol | gogol-dfareporting/gen/Network/Google/Resource/DFAReporting/CreativeFieldValues/Patch.hs | mpl-2.0 | 6,516 | 0 | 22 | 1,566 | 1,008 | 580 | 428 | 145 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.BackendBuckets.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves the list of BackendBucket resources available to the specified
-- project.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.backendBuckets.list@.
module Network.Google.Resource.Compute.BackendBuckets.List
(
-- * REST Resource
BackendBucketsListResource
-- * Creating a Request
, backendBucketsList
, BackendBucketsList
-- * Request Lenses
, bblReturnPartialSuccess
, bblOrderBy
, bblProject
, bblFilter
, bblPageToken
, bblMaxResults
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.backendBuckets.list@ method which the
-- 'BackendBucketsList' request conforms to.
type BackendBucketsListResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"global" :>
"backendBuckets" :>
QueryParam "returnPartialSuccess" Bool :>
QueryParam "orderBy" Text :>
QueryParam "filter" Text :>
QueryParam "pageToken" Text :>
QueryParam "maxResults" (Textual Word32) :>
QueryParam "alt" AltJSON :>
Get '[JSON] BackendBucketList
-- | Retrieves the list of BackendBucket resources available to the specified
-- project.
--
-- /See:/ 'backendBucketsList' smart constructor.
data BackendBucketsList =
BackendBucketsList'
{ _bblReturnPartialSuccess :: !(Maybe Bool)
, _bblOrderBy :: !(Maybe Text)
, _bblProject :: !Text
, _bblFilter :: !(Maybe Text)
, _bblPageToken :: !(Maybe Text)
, _bblMaxResults :: !(Textual Word32)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'BackendBucketsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'bblReturnPartialSuccess'
--
-- * 'bblOrderBy'
--
-- * 'bblProject'
--
-- * 'bblFilter'
--
-- * 'bblPageToken'
--
-- * 'bblMaxResults'
backendBucketsList
:: Text -- ^ 'bblProject'
-> BackendBucketsList
backendBucketsList pBblProject_ =
BackendBucketsList'
{ _bblReturnPartialSuccess = Nothing
, _bblOrderBy = Nothing
, _bblProject = pBblProject_
, _bblFilter = Nothing
, _bblPageToken = Nothing
, _bblMaxResults = 500
}
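-- A minimal usage sketch (illustrative only; assumes the lens operators are in
-- scope and a made-up project name):
--
-- > backendBucketsList "my-project" & bblMaxResults .~ 100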
-- | Opt-in for partial success behavior which provides partial results in
-- case of failure. The default value is false.
bblReturnPartialSuccess :: Lens' BackendBucketsList (Maybe Bool)
bblReturnPartialSuccess
= lens _bblReturnPartialSuccess
(\ s a -> s{_bblReturnPartialSuccess = a})
-- | Sorts list results by a certain order. By default, results are returned
-- in alphanumerical order based on the resource name. You can also sort
-- results in descending order based on the creation timestamp using
-- \`orderBy=\"creationTimestamp desc\"\`. This sorts results based on the
-- \`creationTimestamp\` field in reverse chronological order (newest
-- result first). Use this to sort resources like operations so that the
-- newest operation is returned first. Currently, only sorting by \`name\`
-- or \`creationTimestamp desc\` is supported.
bblOrderBy :: Lens' BackendBucketsList (Maybe Text)
bblOrderBy
= lens _bblOrderBy (\ s a -> s{_bblOrderBy = a})
-- | Project ID for this request.
bblProject :: Lens' BackendBucketsList Text
bblProject
= lens _bblProject (\ s a -> s{_bblProject = a})
-- | A filter expression that filters resources listed in the response. The
-- expression must specify the field name, a comparison operator, and the
-- value that you want to use for filtering. The value must be a string, a
-- number, or a boolean. The comparison operator must be either \`=\`,
-- \`!=\`, \`>\`, or \`\<\`. For example, if you are filtering Compute
-- Engine instances, you can exclude instances named \`example-instance\`
-- by specifying \`name != example-instance\`. You can also filter nested
-- fields. For example, you could specify \`scheduling.automaticRestart =
-- false\` to include instances only if they are not scheduled for
-- automatic restarts. You can use filtering on nested fields to filter
-- based on resource labels. To filter on multiple expressions, provide
-- each separate expression within parentheses. For example: \`\`\`
-- (scheduling.automaticRestart = true) (cpuPlatform = \"Intel Skylake\")
-- \`\`\` By default, each expression is an \`AND\` expression. However,
-- you can include \`AND\` and \`OR\` expressions explicitly. For example:
-- \`\`\` (cpuPlatform = \"Intel Skylake\") OR (cpuPlatform = \"Intel
-- Broadwell\") AND (scheduling.automaticRestart = true) \`\`\`
bblFilter :: Lens' BackendBucketsList (Maybe Text)
bblFilter
= lens _bblFilter (\ s a -> s{_bblFilter = a})
-- | Specifies a page token to use. Set \`pageToken\` to the
-- \`nextPageToken\` returned by a previous list request to get the next
-- page of results.
bblPageToken :: Lens' BackendBucketsList (Maybe Text)
bblPageToken
= lens _bblPageToken (\ s a -> s{_bblPageToken = a})
-- | The maximum number of results per page that should be returned. If the
-- number of available results is larger than \`maxResults\`, Compute
-- Engine returns a \`nextPageToken\` that can be used to get the next page
-- of results in subsequent list requests. Acceptable values are \`0\` to
-- \`500\`, inclusive. (Default: \`500\`)
bblMaxResults :: Lens' BackendBucketsList Word32
bblMaxResults
= lens _bblMaxResults
(\ s a -> s{_bblMaxResults = a})
. _Coerce
instance GoogleRequest BackendBucketsList where
type Rs BackendBucketsList = BackendBucketList
type Scopes BackendBucketsList =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/compute.readonly"]
requestClient BackendBucketsList'{..}
= go _bblProject _bblReturnPartialSuccess _bblOrderBy
_bblFilter
_bblPageToken
(Just _bblMaxResults)
(Just AltJSON)
computeService
where go
= buildClient
(Proxy :: Proxy BackendBucketsListResource)
mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/BackendBuckets/List.hs | mpl-2.0 | 7,172 | 0 | 19 | 1,550 | 758 | 454 | 304 | 109 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
-- |
-- Module : Network.Google.RuntimeConfig
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- The Runtime Configurator allows you to dynamically configure and expose
-- variables through Google Cloud Platform. In addition, you can also set
-- Watchers and Waiters that will watch for changes to your data and return
-- based on certain conditions.
--
-- /See:/ <https://cloud.google.com/deployment-manager/runtime-configurator/ Cloud Runtime Configuration API Reference>
module Network.Google.RuntimeConfig
(
-- * Service Configuration
runtimeConfigService
-- * OAuth Scopes
, cloudPlatformScope
, cloudruntimeConfigScope
-- * API Declaration
, RuntimeConfigAPI
-- * Resources
-- ** runtimeconfig.operations.cancel
, module Network.Google.Resource.RuntimeConfig.Operations.Cancel
-- ** runtimeconfig.operations.delete
, module Network.Google.Resource.RuntimeConfig.Operations.Delete
-- ** runtimeconfig.operations.list
, module Network.Google.Resource.RuntimeConfig.Operations.List
-- * Types
-- ** Status
, Status
, status
, sDetails
, sCode
, sMessage
-- ** ListOperationsResponse
, ListOperationsResponse
, listOperationsResponse
, lorNextPageToken
, lorOperations
-- ** CancelOperationRequest
, CancelOperationRequest
, cancelOperationRequest
-- ** Operation
, Operation
, operation
, oDone
, oError
, oResponse
, oName
, oMetadata
-- ** Empty
, Empty
, empty
-- ** StatusDetailsItem
, StatusDetailsItem
, statusDetailsItem
, sdiAddtional
-- ** Xgafv
, Xgafv (..)
-- ** OperationMetadata
, OperationMetadata
, operationMetadata
, omAddtional
-- ** OperationResponse
, OperationResponse
, operationResponse
, orAddtional
) where
import Network.Google.Prelude
import Network.Google.Resource.RuntimeConfig.Operations.Cancel
import Network.Google.Resource.RuntimeConfig.Operations.Delete
import Network.Google.Resource.RuntimeConfig.Operations.List
import Network.Google.RuntimeConfig.Types
{- $resources
TODO
-}
-- | Represents the entirety of the methods and resources available for the Cloud Runtime Configuration API service.
type RuntimeConfigAPI =
OperationsListResource :<|> OperationsCancelResource
:<|> OperationsDeleteResource
| brendanhay/gogol | gogol-runtimeconfig/gen/Network/Google/RuntimeConfig.hs | mpl-2.0 | 2,786 | 0 | 6 | 573 | 237 | 177 | 60 | 52 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Cloudbuild.Projects.Triggers.Webhook
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- ReceiveTriggerWebhook [Experimental] is called when the API receives a
-- webhook request targeted at a specific trigger.
--
-- /See:/ <https://cloud.google.com/cloud-build/docs/ Cloud Build API Reference> for @cloudbuild.projects.triggers.webhook@.
module Network.Google.Resource.Cloudbuild.Projects.Triggers.Webhook
(
-- * REST Resource
ProjectsTriggersWebhookResource
-- * Creating a Request
, projectsTriggersWebhook
, ProjectsTriggersWebhook
-- * Request Lenses
, ptwXgafv
, ptwUploadProtocol
, ptwAccessToken
, ptwUploadType
, ptwSecret
, ptwPayload
, ptwName
, ptwTrigger
, ptwProjectId
, ptwCallback
) where
import Network.Google.ContainerBuilder.Types
import Network.Google.Prelude
-- | A resource alias for @cloudbuild.projects.triggers.webhook@ method which the
-- 'ProjectsTriggersWebhook' request conforms to.
type ProjectsTriggersWebhookResource =
"v1" :>
"projects" :>
Capture "projectId" Text :>
"triggers" :>
CaptureMode "trigger" "webhook" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "secret" Text :>
QueryParam "name" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] HTTPBody :>
Post '[JSON] ReceiveTriggerWebhookResponse
-- | ReceiveTriggerWebhook [Experimental] is called when the API receives a
-- webhook request targeted at a specific trigger.
--
-- /See:/ 'projectsTriggersWebhook' smart constructor.
data ProjectsTriggersWebhook =
ProjectsTriggersWebhook'
{ _ptwXgafv :: !(Maybe Xgafv)
, _ptwUploadProtocol :: !(Maybe Text)
, _ptwAccessToken :: !(Maybe Text)
, _ptwUploadType :: !(Maybe Text)
, _ptwSecret :: !(Maybe Text)
, _ptwPayload :: !HTTPBody
, _ptwName :: !(Maybe Text)
, _ptwTrigger :: !Text
, _ptwProjectId :: !Text
, _ptwCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsTriggersWebhook' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ptwXgafv'
--
-- * 'ptwUploadProtocol'
--
-- * 'ptwAccessToken'
--
-- * 'ptwUploadType'
--
-- * 'ptwSecret'
--
-- * 'ptwPayload'
--
-- * 'ptwName'
--
-- * 'ptwTrigger'
--
-- * 'ptwProjectId'
--
-- * 'ptwCallback'
projectsTriggersWebhook
:: HTTPBody -- ^ 'ptwPayload'
-> Text -- ^ 'ptwTrigger'
-> Text -- ^ 'ptwProjectId'
-> ProjectsTriggersWebhook
projectsTriggersWebhook pPtwPayload_ pPtwTrigger_ pPtwProjectId_ =
ProjectsTriggersWebhook'
{ _ptwXgafv = Nothing
, _ptwUploadProtocol = Nothing
, _ptwAccessToken = Nothing
, _ptwUploadType = Nothing
, _ptwSecret = Nothing
, _ptwPayload = pPtwPayload_
, _ptwName = Nothing
, _ptwTrigger = pPtwTrigger_
, _ptwProjectId = pPtwProjectId_
, _ptwCallback = Nothing
}
-- | V1 error format.
ptwXgafv :: Lens' ProjectsTriggersWebhook (Maybe Xgafv)
ptwXgafv = lens _ptwXgafv (\ s a -> s{_ptwXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
ptwUploadProtocol :: Lens' ProjectsTriggersWebhook (Maybe Text)
ptwUploadProtocol
= lens _ptwUploadProtocol
(\ s a -> s{_ptwUploadProtocol = a})
-- | OAuth access token.
ptwAccessToken :: Lens' ProjectsTriggersWebhook (Maybe Text)
ptwAccessToken
= lens _ptwAccessToken
(\ s a -> s{_ptwAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
ptwUploadType :: Lens' ProjectsTriggersWebhook (Maybe Text)
ptwUploadType
= lens _ptwUploadType
(\ s a -> s{_ptwUploadType = a})
-- | Secret token used for authorization if an OAuth token isn\'t provided.
ptwSecret :: Lens' ProjectsTriggersWebhook (Maybe Text)
ptwSecret
= lens _ptwSecret (\ s a -> s{_ptwSecret = a})
-- | Multipart request metadata.
ptwPayload :: Lens' ProjectsTriggersWebhook HTTPBody
ptwPayload
= lens _ptwPayload (\ s a -> s{_ptwPayload = a})
-- | The name of the \`ReceiveTriggerWebhook\` to retrieve. Format:
-- \`projects\/{project}\/locations\/{location}\/triggers\/{trigger}\`
ptwName :: Lens' ProjectsTriggersWebhook (Maybe Text)
ptwName = lens _ptwName (\ s a -> s{_ptwName = a})
-- | Name of the trigger to run the payload against
ptwTrigger :: Lens' ProjectsTriggersWebhook Text
ptwTrigger
= lens _ptwTrigger (\ s a -> s{_ptwTrigger = a})
-- | Project in which the specified trigger lives
ptwProjectId :: Lens' ProjectsTriggersWebhook Text
ptwProjectId
= lens _ptwProjectId (\ s a -> s{_ptwProjectId = a})
-- | JSONP
ptwCallback :: Lens' ProjectsTriggersWebhook (Maybe Text)
ptwCallback
= lens _ptwCallback (\ s a -> s{_ptwCallback = a})
instance GoogleRequest ProjectsTriggersWebhook where
type Rs ProjectsTriggersWebhook =
ReceiveTriggerWebhookResponse
type Scopes ProjectsTriggersWebhook = '[]
requestClient ProjectsTriggersWebhook'{..}
= go _ptwProjectId _ptwTrigger _ptwXgafv
_ptwUploadProtocol
_ptwAccessToken
_ptwUploadType
_ptwSecret
_ptwName
_ptwCallback
(Just AltJSON)
_ptwPayload
containerBuilderService
where go
= buildClient
(Proxy :: Proxy ProjectsTriggersWebhookResource)
mempty
| brendanhay/gogol | gogol-containerbuilder/gen/Network/Google/Resource/Cloudbuild/Projects/Triggers/Webhook.hs | mpl-2.0 | 6,509 | 0 | 21 | 1,562 | 1,022 | 592 | 430 | 145 | 1 |
{-| Implementation of the Ganeti Query2 node queries.
-}
{-
Copyright (C) 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Query.Node
( Runtime
, fieldsMap
, collectLiveData
) where
import Control.Applicative
import Data.List
import Data.Maybe
import qualified Text.JSON as J
import Ganeti.Config
import Ganeti.Common
import Ganeti.Objects
import Ganeti.JSON (jsonHead)
import Ganeti.Rpc
import Ganeti.Types
import Ganeti.Query.Language
import Ganeti.Query.Common
import Ganeti.Query.Types
import Ganeti.Storage.Utils
import Ganeti.Utils (niceSort)
-- | Runtime is the resulting type for NodeInfo call.
type Runtime = Either RpcError RpcResultNodeInfo
-- | List of node live fields.
nodeLiveFieldsDefs :: [(FieldName, FieldTitle, FieldType, String, FieldDoc)]
nodeLiveFieldsDefs =
[ ("bootid", "BootID", QFTText, "bootid",
"Random UUID renewed for each system reboot, can be used\
\ for detecting reboots by tracking changes")
, ("cnodes", "CNodes", QFTNumber, "cpu_nodes",
"Number of NUMA domains on node (if exported by hypervisor)")
, ("cnos", "CNOs", QFTNumber, "cpu_dom0",
"Number of logical processors used by the node OS (dom0 for Xen)")
, ("csockets", "CSockets", QFTNumber, "cpu_sockets",
"Number of physical CPU sockets (if exported by hypervisor)")
, ("ctotal", "CTotal", QFTNumber, "cpu_total",
"Number of logical processors")
, ("dfree", "DFree", QFTUnit, "storage_free",
"Available storage space on storage unit")
, ("dtotal", "DTotal", QFTUnit, "storage_size",
"Total storage space on storage unit for instance disk allocation")
, ("spfree", "SpFree", QFTNumber, "spindles_free",
"Available spindles in volume group (exclusive storage only)")
, ("sptotal", "SpTotal", QFTNumber, "spindles_total",
"Total spindles in volume group (exclusive storage only)")
, ("mfree", "MFree", QFTUnit, "memory_free",
"Memory available for instance allocations")
, ("mnode", "MNode", QFTUnit, "memory_dom0",
"Amount of memory used by node (dom0 for Xen)")
, ("mtotal", "MTotal", QFTUnit, "memory_total",
"Total amount of memory of physical machine")
]
-- | Helper function to extract an attribute from a Maybe StorageInfo
getAttrFromStorageInfo :: (J.JSON a) => (StorageInfo -> Maybe a)
-> Maybe StorageInfo -> J.JSValue
getAttrFromStorageInfo attr_fn (Just info) =
case attr_fn info of
Just val -> J.showJSON val
Nothing -> J.JSNull
getAttrFromStorageInfo _ Nothing = J.JSNull
-- | Check whether the given storage info fits to the given storage type
isStorageInfoOfType :: StorageType -> StorageInfo -> Bool
isStorageInfoOfType stype sinfo = storageInfoType sinfo ==
storageTypeToRaw stype
-- | Get storage info for the default storage unit
getStorageInfoForDefault :: [StorageInfo] -> Maybe StorageInfo
getStorageInfoForDefault sinfos = listToMaybe $ filter
(not . isStorageInfoOfType StorageLvmPv) sinfos
-- | Gets the storage info for a storage type
-- FIXME: This needs to be extended when storage pools are implemented,
-- because storage types are not necessarily unique then
getStorageInfoForType :: [StorageInfo] -> StorageType -> Maybe StorageInfo
getStorageInfoForType sinfos stype = listToMaybe $ filter
(isStorageInfoOfType stype) sinfos
-- | Map each name to a function that extracts that value from
-- the RPC result.
nodeLiveFieldExtract :: FieldName -> RpcResultNodeInfo -> J.JSValue
nodeLiveFieldExtract "bootid" res =
J.showJSON $ rpcResNodeInfoBootId res
nodeLiveFieldExtract "cnodes" res =
jsonHead (rpcResNodeInfoHvInfo res) hvInfoCpuNodes
nodeLiveFieldExtract "cnos" res =
jsonHead (rpcResNodeInfoHvInfo res) hvInfoCpuDom0
nodeLiveFieldExtract "csockets" res =
jsonHead (rpcResNodeInfoHvInfo res) hvInfoCpuSockets
nodeLiveFieldExtract "ctotal" res =
jsonHead (rpcResNodeInfoHvInfo res) hvInfoCpuTotal
nodeLiveFieldExtract "dfree" res =
getAttrFromStorageInfo storageInfoStorageFree (getStorageInfoForDefault
(rpcResNodeInfoStorageInfo res))
nodeLiveFieldExtract "dtotal" res =
getAttrFromStorageInfo storageInfoStorageSize (getStorageInfoForDefault
(rpcResNodeInfoStorageInfo res))
nodeLiveFieldExtract "spfree" res =
getAttrFromStorageInfo storageInfoStorageFree (getStorageInfoForType
(rpcResNodeInfoStorageInfo res) StorageLvmPv)
nodeLiveFieldExtract "sptotal" res =
getAttrFromStorageInfo storageInfoStorageSize (getStorageInfoForType
(rpcResNodeInfoStorageInfo res) StorageLvmPv)
nodeLiveFieldExtract "mfree" res =
jsonHead (rpcResNodeInfoHvInfo res) hvInfoMemoryFree
nodeLiveFieldExtract "mnode" res =
jsonHead (rpcResNodeInfoHvInfo res) hvInfoMemoryDom0
nodeLiveFieldExtract "mtotal" res =
jsonHead (rpcResNodeInfoHvInfo res) hvInfoMemoryTotal
nodeLiveFieldExtract _ _ = J.JSNull
-- | Helper for extracting field from RPC result.
nodeLiveRpcCall :: FieldName -> Runtime -> Node -> ResultEntry
nodeLiveRpcCall fname (Right res) _ =
case nodeLiveFieldExtract fname res of
J.JSNull -> rsNoData
x -> rsNormal x
nodeLiveRpcCall _ (Left err) _ =
ResultEntry (rpcErrorToStatus err) Nothing
-- | Builder for node live fields.
nodeLiveFieldBuilder :: (FieldName, FieldTitle, FieldType, String, FieldDoc)
-> FieldData Node Runtime
nodeLiveFieldBuilder (fname, ftitle, ftype, _, fdoc) =
( FieldDefinition fname ftitle ftype fdoc
, FieldRuntime $ nodeLiveRpcCall fname
, QffNormal)
-- | The docstring for the node role. Note that we use 'reverse' in
-- order to keep the same order as Python.
nodeRoleDoc :: String
nodeRoleDoc =
"Node role; " ++
intercalate ", "
(map (\nrole ->
"\"" ++ nodeRoleToRaw nrole ++ "\" for " ++ roleDescription nrole)
(reverse [minBound..maxBound]))
-- | Get node powered status.
getNodePower :: ConfigData -> Node -> ResultEntry
getNodePower cfg node =
case getNodeNdParams cfg node of
Nothing -> rsNoData
Just ndp -> if null (ndpOobProgram ndp)
then rsUnavail
else rsNormal (nodePowered node)
-- | List of all node fields.
nodeFields :: FieldList Node Runtime
nodeFields =
[ (FieldDefinition "drained" "Drained" QFTBool "Whether node is drained",
FieldSimple (rsNormal . nodeDrained), QffNormal)
, (FieldDefinition "master_candidate" "MasterC" QFTBool
"Whether node is a master candidate",
FieldSimple (rsNormal . nodeMasterCandidate), QffNormal)
, (FieldDefinition "master_capable" "MasterCapable" QFTBool
"Whether node can become a master candidate",
FieldSimple (rsNormal . nodeMasterCapable), QffNormal)
, (FieldDefinition "name" "Node" QFTText "Node name",
FieldSimple (rsNormal . nodeName), QffHostname)
, (FieldDefinition "offline" "Offline" QFTBool
"Whether node is marked offline",
FieldSimple (rsNormal . nodeOffline), QffNormal)
, (FieldDefinition "vm_capable" "VMCapable" QFTBool
"Whether node can host instances",
FieldSimple (rsNormal . nodeVmCapable), QffNormal)
, (FieldDefinition "pip" "PrimaryIP" QFTText "Primary IP address",
FieldSimple (rsNormal . nodePrimaryIp), QffNormal)
, (FieldDefinition "sip" "SecondaryIP" QFTText "Secondary IP address",
FieldSimple (rsNormal . nodeSecondaryIp), QffNormal)
, (FieldDefinition "master" "IsMaster" QFTBool "Whether node is master",
FieldConfig (\cfg node ->
rsNormal (uuidOf node ==
clusterMasterNode (configCluster cfg))),
QffNormal)
, (FieldDefinition "group" "Group" QFTText "Node group",
FieldConfig (\cfg node ->
rsMaybeNoData (groupName <$> getGroupOfNode cfg node)),
QffNormal)
, (FieldDefinition "group.uuid" "GroupUUID" QFTText "UUID of node group",
FieldSimple (rsNormal . nodeGroup), QffNormal)
, (FieldDefinition "ndparams" "NodeParameters" QFTOther
"Merged node parameters",
FieldConfig ((rsMaybeNoData .) . getNodeNdParams), QffNormal)
, (FieldDefinition "custom_ndparams" "CustomNodeParameters" QFTOther
"Custom node parameters",
FieldSimple (rsNormal . nodeNdparams), QffNormal)
-- FIXME: the below could be generalised a bit, like in Python
, (FieldDefinition "pinst_cnt" "Pinst" QFTNumber
"Number of instances with this node as primary",
FieldConfig (\cfg -> rsNormal . getNumInstances fst cfg), QffNormal)
, (FieldDefinition "sinst_cnt" "Sinst" QFTNumber
"Number of instances with this node as secondary",
FieldConfig (\cfg -> rsNormal . getNumInstances snd cfg), QffNormal)
, (FieldDefinition "pinst_list" "PriInstances" QFTOther
"List of instances with this node as primary",
FieldConfig (\cfg -> rsNormal . niceSort . mapMaybe instName . fst .
getNodeInstances cfg . uuidOf), QffNormal)
, (FieldDefinition "sinst_list" "SecInstances" QFTOther
"List of instances with this node as secondary",
FieldConfig (\cfg -> rsNormal . niceSort . mapMaybe instName . snd .
getNodeInstances cfg . uuidOf), QffNormal)
, (FieldDefinition "role" "Role" QFTText nodeRoleDoc,
FieldConfig ((rsNormal .) . getNodeRole), QffNormal)
, (FieldDefinition "powered" "Powered" QFTBool
"Whether node is thought to be powered on",
FieldConfig getNodePower, QffNormal)
-- FIXME: the two fields below are incomplete in Python, part of the
-- non-implemented node resource model; they are declared just for
-- parity, but are not functional
, (FieldDefinition "hv_state" "HypervisorState" QFTOther "Hypervisor state",
FieldSimple (const rsUnavail), QffNormal)
, (FieldDefinition "disk_state" "DiskState" QFTOther "Disk state",
FieldSimple (const rsUnavail), QffNormal)
] ++
map nodeLiveFieldBuilder nodeLiveFieldsDefs ++
map buildNdParamField allNDParamFields ++
timeStampFields ++
uuidFields "Node" ++
serialFields "Node" ++
tagsFields
-- | Helper function to retrieve the number of (primary or secondary) instances
getNumInstances :: (([Instance], [Instance]) -> [Instance])
-> ConfigData -> Node -> Int
getNumInstances get_fn cfg = length . get_fn . getNodeInstances cfg . uuidOf
-- | The node fields map.
fieldsMap :: FieldMap Node Runtime
fieldsMap = fieldListToFieldMap nodeFields
-- | Create an RPC result for a broken node
rpcResultNodeBroken :: Node -> (Node, Runtime)
rpcResultNodeBroken node = (node, Left (RpcResultError "Broken configuration"))
-- | Storage-related query fields
storageFields :: [String]
storageFields = ["dtotal", "dfree", "spfree", "sptotal"]
-- | Hypervisor-related query fields
hypervisorFields :: [String]
hypervisorFields = ["mnode", "mfree", "mtotal",
"cnodes", "csockets", "cnos", "ctotal"]
-- | Check if it is required to include domain-specific entities (for example
-- storage units for storage info, hypervisor specs for hypervisor info)
-- in the node_info call
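-- For example (illustrative):
--
-- > queryDomainRequired storageFields ["name", "dfree"] == True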
queryDomainRequired :: -- domain-specific fields to look for (storage, hv)
[String]
-- list of requested fields
-> [String]
-> Bool
queryDomainRequired domain_fields fields = any (`elem` fields) domain_fields
-- | Collect live data from RPC query if enabled.
collectLiveData :: Bool
-> ConfigData
-> [String]
-> [Node]
-> IO [(Node, Runtime)]
collectLiveData False _ _ nodes =
return $ zip nodes (repeat $ Left (RpcResultError "Live data disabled"))
collectLiveData True cfg fields nodes = do
let hvs = [getDefaultHypervisorSpec cfg |
queryDomainRequired hypervisorFields fields]
good_nodes = nodesWithValidConfig cfg nodes
storage_units n = if queryDomainRequired storageFields fields
then getStorageUnitsOfNode cfg n
else []
rpcres <- executeRpcCalls
[(n, RpcCallNodeInfo (storage_units n) hvs) | n <- good_nodes]
return $ fillUpList (fillPairFromMaybe rpcResultNodeBroken pickPairUnique)
nodes rpcres
| yiannist/ganeti | src/Ganeti/Query/Node.hs | bsd-2-clause | 13,353 | 0 | 21 | 2,589 | 2,477 | 1,354 | 1,123 | 223 | 3 |
module Horbits.UI.VisibilityToggle where
import Graphics.UI.Gtk
visibilityToggleButton :: WidgetClass w => String -> w -> IO ToggleButton
visibilityToggleButton toggleText widget = do
button <- toggleButtonNewWithLabel toggleText
_ <- on button toggled $ set widget [ widgetVisible :~ not ]
return button
| chwthewke/horbits | src/horbits/Horbits/UI/VisibilityToggle.hs | bsd-3-clause | 329 | 0 | 11 | 64 | 90 | 44 | 46 | 7 | 1 |
module Linear.Grammar.Types.Syntax where
import Data.String (IsString (fromString))
import Test.QuickCheck (Arbitrary (arbitrary), Gen, sized, resize, scale, oneof, choose)
-- | User-facing abstract syntax tree, polymorphic in the numeric type used.
data LinAst k a =
-- | Variable names
EVar k
| -- | Numeric literals
ELit a
| -- | A literal coefficient multiplied by some abstract syntax tree
ECoeff (LinAst k a) a
| -- | Two abstract syntax trees added together
EAdd (LinAst k a) (LinAst k a)
deriving (Show, Eq)
instance (Arbitrary a, Arbitrary k) => Arbitrary (LinAst k a) where
arbitrary = sized go
where
go :: Arbitrary a => Arbitrary k => Int -> Gen (LinAst k a)
go s
| s <= 1 = oneof
[ EVar <$> arbitrary
, ELit <$> arbitrary
]
| otherwise = oneof
[ EVar <$> arbitrary
, ELit <$> arbitrary
, ECoeff <$> scale (subtract 1) arbitrary <*> arbitrary
, do
n <- choose (0,s-1)
EAdd <$> resize n arbitrary <*> resize n arbitrary
]
instance IsString k => IsString (LinAst k a) where
fromString = EVar . fromString
-- | Pushes 'ECoeff' down the tree, leaving 'EAdd' at the top level.
-- After using this function, all 'ECoeff' constructors\' 'LinAst' parameter will
-- be an 'EVar'.
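--
-- A small illustrative example:
--
-- > multLin (ECoeff (EAdd (EVar "x") (ELit 2)) 3)
-- >   == EAdd (ECoeff (EVar "x") 3) (ELit 6)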
multLin :: Num a => LinAst k a -> LinAst k a
multLin (EVar n) = EVar n
multLin (ELit x) = ELit x
multLin (ECoeff e x) = case multLin e of
(ELit y) -> ELit (y * x) -- evaluate coefficient multiplied by literal
(EVar n) -> ECoeff (EVar n) x -- do nothing
(ECoeff e' y) -> ECoeff e' (y * x) -- redundant coefficients
(EAdd e1 e2) -> EAdd (multLin (ECoeff e1 x)) (multLin (ECoeff e2 x)) -- recurse
multLin (EAdd e1 e2) = EAdd (multLin e1) (multLin e2)
| athanclark/cassowary-haskell | src/Linear/Grammar/Types/Syntax.hs | bsd-3-clause | 1,829 | 0 | 16 | 500 | 599 | 312 | 287 | -1 | -1 |
module Cards(
-- Data types
Suit (..),
Face (..),
Card (..),
SuitColor (..),
-- functions
suitColor,
sameColor,
pack36,
pack52,
shuffle
) where
import System.Random
import Data.List
import Test.QuickCheck
data Suit = Spades
| Clubs
| Diamonds
| Hearts
deriving Eq
instance Show Suit where
show = showSuit
instance Arbitrary Suit where
arbitrary = oneof $ map (return) [Spades, Clubs, Diamonds, Hearts]
showSuit :: Suit -> String
showSuit Spades = "s"
showSuit Clubs = "c"
showSuit Diamonds = "D"
showSuit Hearts = "H"
data SuitColor = Black
| Red
deriving (Eq, Show)
suitColor :: Suit -> SuitColor
suitColor Spades = Black
suitColor Clubs = Black
suitColor _ = Red
sameColor :: Suit -> Suit -> Bool
sameColor a b = (suitColor a) == (suitColor b)
data Face = Two
| Three
| Four
| Five
| Six
| Seven
| Eight
| Nine
| Ten
| Jack
| Queen
| King
| Ace
deriving (Eq, Enum)
instance Show Face where
show = showFace
instance Arbitrary Face where
arbitrary = oneof $ map (return) [Two, Three, Four, Five, Six, Seven,
Eight, Nine, Ten, Jack, Queen, King, Ace]
showFace :: Face -> String
showFace Two = " 2"
showFace Three = " 3"
showFace Four = " 4"
showFace Five = " 5"
showFace Six = " 6"
showFace Seven = " 7"
showFace Eight = " 8"
showFace Nine = " 9"
showFace Ten = "10"
showFace Jack = " J"
showFace Queen = " Q"
showFace King = " K"
showFace Ace = " A"
data Card = Card Face Suit
deriving Eq
instance Show Card where
show = showCard
showCard :: Card -> String
showCard (Card face suit) = show face ++ show suit
-- Cartesian product of two lists
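-- For example (illustrative):
--   cartProduct [1,2] "ab" == [(1,'a'),(1,'b'),(2,'a'),(2,'b')]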
cartProduct :: [a] -> [b] -> [(a, b)]
cartProduct a b =
let a' = concat $ map (replicate (length b)) a
b' = concat $ replicate (length a) b
in zip a' b'
prop_cartProduct len1 len2 =
let a = [1 .. len1]
b = [1 .. len2]
prod = cartProduct a b
sameLength = length prod == (length a) * (length b)
onlyOnce = length prod == length (nub prod)
known = and $ map (\(x, y) -> (elem x a) && (elem y b)) prod
in len1 > 1 && len2 > 1 ==>
sameLength && onlyOnce && known
-- Make a pack of cards.
mkPack :: [Face] -> [Suit] -> [Card]
mkPack faces suits =
let vs = cartProduct faces suits
in map (\(v, s) -> Card v s) vs
-- The standard pack of 36 cards.
pack36 :: [Card]
pack36 =
let faces = [Six, Seven, Eight, Nine, Ten, Jack, Queen, King, Ace]
suits = [Spades, Clubs, Diamonds, Hearts]
in mkPack faces suits
-- The standard pack of 52 cards.
pack52 :: [Card]
pack52 =
let faces = [Two, Three, Four, Five, Six, Seven, Eight, Nine, Ten,
Jack, Queen, King, Ace]
suits = [Spades, Clubs, Diamonds, Hearts]
in mkPack faces suits
-- Shuffle the given pack of cards, using a random seed.
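-- For example, shuffle 42 [1..5] yields a fixed permutation of [1..5],
-- determined entirely by the seed and mkStdGen.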
shuffle :: Int -> [a] -> [a]
shuffle seed pack =
let stdGen = mkStdGen seed
-- A list of random indices.
rndIdx 0 _ = []
rndIdx len gen = [a] ++ (rndIdx (len - 1) gen')
where
(a, gen') = randomR (0, len - 1) gen
      -- Extract random cards from the pack pk.
result [] _ = []
result pk idx = [card] ++ result pk' (tail idx)
where
i = head idx -- current index
card = pk !! i -- chosen random card
pk' = take i pk ++ drop (i+1) pk -- pack without chosen card
rndIdx' = rndIdx (length pack) stdGen
in result pack rndIdx'
prop_shuffle i l =
let b = shuffle i a
a = [1 .. l]
sameLength = length a == length b
sameElements = null $ b \\ a
changed = or $ zipWith (/=) a b
in l > 1 ==>
sameLength && sameElements && changed
| sakhnik/FreeCell | Cards.hs | bsd-3-clause | 4,056 | 0 | 15 | 1,400 | 1,402 | 771 | 631 | 125 | 3 |
module Modify where
import MTable
import BruijnTerm hiding (incFree)
import LambdaF
-- TODO switch result around peek
--
-- | 'peek' applies the modifications stored in the 'MTable' to the first node
-- of the AST and gives back a new 'MTable' that can be used to view the subtrees.
--
-- You should only use the new 'MTable' on the found subtrees, because:
--
-- * the 'MTable' keeps track of the depth of the subtree,
--
-- * if a variable is substituted by a new subtree, then that subtree needs to
-- be adjusted; the 'MTable' keeps track of this.
--
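-- To apply the modifications to an entire term, unfold with 'peek', as
-- 'proces' does below (@unfold peek m t@).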
peek :: MTable -> BruijnTerm () () -> (LamTermF () () Bound (BruijnTerm () () ), MTable)
peek modifications term = case term of
Val i v -> (ValF i v,modifications)
Var _ b -> case peekVar modifications b of
(Left newB) -> (VarF () newB,modifications)
(Right (t,newM)) -> peek newM t
Appl t1 t2 -> (ApplF t1 t2,modifications)
Lambda i n t -> (LambdaF i n t,extraSparceInsertUndefind 1 modifications)
Let i defs t -> (LetF i defs t,extraSparceInsertUndefind (length defs) modifications )
-- | Apply the modifications stored in the 'MTable' to the whole term.
proces :: MTable -> BruijnTerm () () -> BruijnTerm () ()
proces m t = if isNull m
then t
else unfold peek m t
| kwibus/myLang | src/Modify.hs | bsd-3-clause | 1,234 | 0 | 13 | 270 | 338 | 178 | 160 | 17 | 6 |
--------------------------------------------------------------------------------
module Language.Haskell.Stylish
( -- * Run
runSteps
-- * Steps
, imports
, languagePragmas
, records
, tabs
, trailingWhitespace
, unicodeSyntax
-- ** Data types
, Imports.Align (..)
, LanguagePragmas.Style (..)
-- ** Helpers
, stepName
-- * Config
, module Language.Haskell.Stylish.Config
-- * Misc
, module Language.Haskell.Stylish.Verbose
, version
, Lines
, Step
) where
--------------------------------------------------------------------------------
import Control.Applicative ((<$>))
import Control.Monad (foldM)
--------------------------------------------------------------------------------
import Language.Haskell.Stylish.Config
import Language.Haskell.Stylish.Step
import Language.Haskell.Stylish.Verbose
import Language.Haskell.Stylish.Parse
import Paths_stylish_haskell (version)
import qualified Language.Haskell.Stylish.Step.Imports as Imports
import qualified Language.Haskell.Stylish.Step.LanguagePragmas as LanguagePragmas
import qualified Language.Haskell.Stylish.Step.Records as Records
import qualified Language.Haskell.Stylish.Step.Tabs as Tabs
import qualified Language.Haskell.Stylish.Step.TrailingWhitespace as TrailingWhitespace
import qualified Language.Haskell.Stylish.Step.UnicodeSyntax as UnicodeSyntax
--------------------------------------------------------------------------------
imports :: Int -- ^ columns
-> Imports.Align
-> Step
imports = Imports.step
--------------------------------------------------------------------------------
languagePragmas :: Int -- ^ columns
-> LanguagePragmas.Style
-> Bool -- ^ remove redundant?
-> Step
languagePragmas = LanguagePragmas.step
--------------------------------------------------------------------------------
records :: Step
records = Records.step
--------------------------------------------------------------------------------
tabs :: Int -- ^ number of spaces
-> Step
tabs = Tabs.step
--------------------------------------------------------------------------------
trailingWhitespace :: Step
trailingWhitespace = TrailingWhitespace.step
--------------------------------------------------------------------------------
unicodeSyntax :: Bool -- ^ add language pragma?
-> Step
unicodeSyntax = UnicodeSyntax.step
--------------------------------------------------------------------------------
runStep :: Extensions -> Maybe FilePath -> Lines -> Step -> Either String Lines
runStep exts mfp ls step =
stepFilter step ls <$> parseModule exts mfp (unlines ls)
--------------------------------------------------------------------------------
runSteps :: Extensions -> Maybe FilePath -> [Step] -> Lines
-> Either String Lines
runSteps exts mfp steps ls = foldM (runStep exts mfp) ls steps
| eigengrau/stylish-haskell | src/Language/Haskell/Stylish.hs | bsd-3-clause | 2,966 | 0 | 9 | 457 | 452 | 286 | 166 | 55 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecursiveDo #-}
{-# LANGUAGE TypeSynonymInstances #-}
module RazorsLambda.TypeCheck where
import Data.Map (Map)
import qualified Data.Map as Map
import MonadLib hiding (Id)
import MonadLib.Derive
import MonadLib.Monads hiding (Id)
import Text.PrettyPrint.Annotated.Leijen
import RazorsLambda.AST
import RazorsLambda.PP
data TCError
  = TCMismatch Expr Type Type -- ^ expr, expected type, actual type
  | TCNonFun Expr Type -- ^ expr expected to be a function, and its actual type
| TCUnbound Id -- ^ unbound id
deriving (Show, Eq)
instance PP TCError where
pp = \case
TCMismatch e tExp tAct ->
"Type mismatch for expression:" <+> pp e <+>
"expected:" <+> pp tExp <+> "but found:" <+> pp tAct
TCNonFun e t ->
"Not a function, or applied to too many arguments:" <+> pp e <+>
"found type:" <+> pp t
TCUnbound x ->
"Unbound variable:" <+> pp x
type TCEnv = Map Id Type
newtype TypeCheck a = TC { unTC :: ExceptionT TCError (Reader TCEnv) a }
tcIso :: Iso (ExceptionT TCError (Reader TCEnv)) TypeCheck
tcIso = Iso TC unTC
instance Functor TypeCheck where
fmap = derive_fmap tcIso
instance Applicative TypeCheck where
pure = derive_pure tcIso
(<*>) = derive_apply tcIso
instance Monad TypeCheck where
(>>=) = derive_bind tcIso
instance ExceptionM TypeCheck TCError where
raise = derive_raise tcIso
instance RunExceptionM TypeCheck TCError where
try = derive_try tcIso
instance ReaderM TypeCheck TCEnv where
ask = derive_ask tcIso
instance RunReaderM TypeCheck TCEnv where
local = derive_local tcIso
runTC :: TypeCheck a -> Either TCError a
runTC m = runReader Map.empty (runExceptionT (unTC m))
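-- For example (illustrative):
--
-- > runTC (typeCheckExpr (EConst (CInteger 1))) == Right TInteger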
runTCIn :: TCEnv -> TypeCheck a -> Either TCError a
runTCIn env m = runTC (local env m)
typeCheckConst :: Const -> TypeCheck Type
typeCheckConst = \case
CInteger _ -> return TInteger
CBool _ -> return TBool
CUnit -> return TUnit
typeCheckUnop :: Unop -> TypeCheck Type
typeCheckUnop = \case
BNot -> return (TBool ~> TBool)
typeCheckBinop :: Binop -> TypeCheck Type
typeCheckBinop = \case
BAnd -> return b2
BOr -> return b2
BXor -> return b2
IPlus -> return i2
IMinus -> return i2
ITimes -> return i2
IDiv -> return i2
IEq -> return (TInteger ~> TInteger ~> TBool)
where b2 = (TBool ~> TBool ~> TBool)
i2 = (TInteger ~> TInteger ~> TInteger)
typeCheckExpr :: Expr -> TypeCheck Type
typeCheckExpr = \case
EVar x -> do
env <- ask
case Map.lookup x env of
Just varTy -> return varTy
Nothing -> raise (TCUnbound x)
ELam x tArg body -> do
tRet <- mapReader (Map.insert x tArg) (typeCheckExpr body)
return (TFun tArg tRet)
EApp e1 e2 -> do
t1 <- typeCheckExpr e1
t2 <- typeCheckExpr e2
case t1 of
TFun tArg tRet | tArg == t2 -> return tRet
| otherwise -> raise (TCMismatch e2 tArg t2)
_ -> raise (TCNonFun e1 t1)
EConst c -> typeCheckConst c
EUnop u e -> do
tOp <- typeCheckUnop u
t <- typeCheckExpr e
case tOp of
TFun tArg tRet | tArg == t -> return tRet
| otherwise -> raise (TCMismatch e tArg t)
_ -> error "impossible: unop not a function type"
EBinop b e1 e2 -> do
tOp <- typeCheckBinop b
t1 <- typeCheckExpr e1
t2 <- typeCheckExpr e2
case tOp of
TFun tArg1 (TFun tArg2 tRet)
| tArg1 == t1 && tArg2 == t2 -> return tRet
| tArg1 == t1 -> raise (TCMismatch e2 tArg2 t2)
| tArg2 == t2 -> raise (TCMismatch e1 tArg1 t1)
_ -> error "impossible: binop not a function type"
EIfThenElse e1 e2 e3 -> do
t1 <- typeCheckExpr e1
unless (t1 == TBool) $ raise (TCMismatch e1 TBool t1)
t2 <- typeCheckExpr e2
t3 <- typeCheckExpr e3
unless (t2 == t3) $ raise (TCMismatch e3 t2 t3)
return t2
typeCheckDecl :: Decl -> TypeCheck Type
typeCheckDecl = \case
Decl x args ret e -> do
let t = foldr TFun ret (map snd args)
extend env = Map.insert x t (Map.union env (Map.fromList args))
ret' <- mapReader extend (typeCheckExpr e)
unless (ret == ret') $ raise (TCMismatch e ret ret')
return t
typeCheckModule :: Module -> TypeCheck TCEnv
typeCheckModule = \case
Module _ _is decls -> do
let tcDecl d@(Decl x _ _ _) = mapReader (Map.delete x) $ typeCheckDecl d
tDecl (Decl x args ret _) = (x, foldr TFun ret (map snd args))
tDecls = map tDecl decls
-- shadow existing bindings
extend env = Map.union (Map.fromList tDecls) env
_ <- mapReader extend (mapM tcDecl decls)
return (extend Map.empty)
| acfoltzer/RazorsLambda | src/RazorsLambda/TypeCheck.hs | bsd-3-clause | 4,750 | 0 | 18 | 1,212 | 1,656 | 799 | 857 | 132 | 11 |
module Examples.ProxyToUpper (main) where
import Pipes
import Pipes.Network.TCP
import Control.Concurrent.Async
import Control.Monad
main :: IO ()
main = serve (Host "127.0.0.1") "4002" $ \(client, _) ->
connect "127.0.0.1" "4000" $ \(server, _) ->
do let act1 = runEffect $ fromSocket client 4096 >-> toSocket server
act2 = runEffect $ fromSocket server 4096 >-> toSocket client
concurrently act1 act2
return ()
-- ------------------------------------------------------------------
-- {-# LANGUAGE OverloadedStrings #-}
-- import Conduit
-- import Control.Concurrent.Async (concurrently)
-- import Control.Monad (void)
-- import Data.Conduit.Network
-- main :: IO ()
-- main =
-- runTCPServer (serverSettings 4002 "*") $ \client ->
-- runTCPClient (clientSettings 4000 "localhost") $ \server -> void $ concurrently
-- (appSource server $$ appSink client)
-- (appSource client $$ appSink server)
| michaelt/pipes-network-tcp-examples | Examples/ProxyToUpper.hs | bsd-3-clause | 1,016 | 0 | 16 | 240 | 166 | 92 | 74 | 12 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{- | A module providing access to internals (in case you really need them).
Can change at any time, though probably won't.
-}
module Fmt.Internal
(
-- * Classes
FormatAsHex(..),
FormatAsBase64(..),
-- * Reexports
module Fmt.Internal.Core,
module Fmt.Internal.Formatters,
module Fmt.Internal.Template,
module Fmt.Internal.Tuple,
module Fmt.Internal.Numeric,
module Fmt.Internal.Generic,
)
where
-- Text
import qualified Data.Text.Encoding as T
import qualified Data.Text.Lazy.Encoding as TL
-- 'Buildable' and raw 'Builder' formatters
import qualified Formatting.Internal.Raw as F
-- Text 'Builder'
import Data.Text.Lazy.Builder hiding (fromString)
-- Bytestring
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BSL
-- Formatting bytestrings
import qualified Data.ByteString.Builder as BB
import qualified Data.ByteString.Base64 as B64
import qualified Data.ByteString.Base64.Lazy as B64L
import qualified Data.ByteString.Base64.URL as B64U
import qualified Data.ByteString.Base64.URL.Lazy as B64UL
import Fmt.Internal.Core
import Fmt.Internal.Formatters
import Fmt.Internal.Template
import Fmt.Internal.Tuple
import Fmt.Internal.Numeric
import Fmt.Internal.Generic
-- $setup
-- >>> import Fmt
----------------------------------------------------------------------------
-- Hex
----------------------------------------------------------------------------
class FormatAsHex a where
{- |
Format a number or bytestring as hex:
>>> hexF 3635
"e33"
>>> hexF ("\0\50\63\80" :: BS.ByteString)
"00323f50"
-}
hexF :: a -> Builder
instance FormatAsHex BS.ByteString where
hexF = fromLazyText . TL.decodeLatin1 . BB.toLazyByteString . BB.byteStringHex
instance FormatAsHex BSL.ByteString where
hexF = fromLazyText . TL.decodeLatin1 . BB.toLazyByteString . BB.lazyByteStringHex
instance {-# OVERLAPPABLE #-} Integral a => FormatAsHex a where
hexF i = sgn <> F.hex (abs i)
where
sgn = if i<0 then "-" else ""
----------------------------------------------------------------------------
-- Base64
----------------------------------------------------------------------------
class FormatAsBase64 a where
{- |
Convert a bytestring to base64:
>>> base64F ("\0\50\63\80" :: BS.ByteString)
"ADI/UA=="
-}
base64F :: a -> Builder
{- |
Convert a bytestring to base64url (a variant of base64 which omits @\/@ and
thus can be used in URLs):
>>> base64UrlF ("\0\50\63\80" :: BS.ByteString)
"ADI_UA=="
-}
base64UrlF :: a -> Builder
instance FormatAsBase64 BS.ByteString where
base64F = fromText . T.decodeLatin1 . B64.encode
base64UrlF = fromText . T.decodeLatin1 . B64U.encode
instance FormatAsBase64 BSL.ByteString where
base64F = fromLazyText . TL.decodeLatin1 . B64L.encode
base64UrlF = fromLazyText . TL.decodeLatin1 . B64UL.encode
| aelve/fmt | lib/Fmt/Internal.hs | bsd-3-clause | 2,972 | 0 | 9 | 453 | 487 | 308 | 179 | 48 | 0 |
module Main where
main :: IO ()
main = putStrLn "test"
| chengzh2008/hpffp | src/ch14-Testing/exercise/app/Main.hs | bsd-3-clause | 56 | 0 | 6 | 12 | 22 | 12 | 10 | 3 | 1 |
{-# LANGUAGE FlexibleInstances #-}
module Guide.Api.Error
(
ErrorResponse,
)
where
import Imports
import Data.Swagger
import GHC.TypeLits
import Servant
import Servant.Swagger
-- Taken from https://github.com/haskell-servant/servant-swagger/issues/59
data ErrorResponse (code :: Nat) (description :: Symbol)
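-- Illustrative use in an API type ('Item' and the route are hypothetical):
--
-- > type ItemApi = ErrorResponse 404 "Item not found" :> Get '[JSON] Item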
instance
( HasSwagger api
, KnownNat code
, KnownSymbol desc )
=> HasSwagger (ErrorResponse code desc :> api) where
toSwagger _ = toSwagger (Proxy :: Proxy api)
& setResponse (fromInteger code) (return responseSchema)
where
code = natVal (Proxy :: Proxy code)
desc = symbolVal (Proxy :: Proxy desc)
responseSchema = mempty
& description .~ toText desc
instance HasLink sub => HasLink (ErrorResponse code desc :> sub) where
type MkLink (ErrorResponse code desc :> sub) a = MkLink sub a
toLink f _ l = toLink f (Proxy :: Proxy sub) l
instance HasServer api ctx => HasServer (ErrorResponse code desc :> api) ctx where
type ServerT (ErrorResponse code desc :> api) m = ServerT api m
route _ = route (Proxy :: Proxy api)
hoistServerWithContext _ pc nt s = hoistServerWithContext (Proxy :: Proxy api) pc nt s
| aelve/guide | back/src/Guide/Api/Error.hs | bsd-3-clause | 1,183 | 0 | 10 | 242 | 381 | 200 | 181 | -1 | -1 |
{-# LANGUAGE TypeFamilies ,KindSignatures #-}
module Local where
import Linear.V1
import Linear.V2
import Linear.V3
import Linear.V4
type family CLocal (f :: *) :: * -> *
type family Local (f :: * -> * ) :: * -> *
type instance Local V1 = V1
type instance Local V2 = V2
type instance Local V3 = V3
type instance Local V4 = V4
| massudaw/mtk | Local.hs | bsd-3-clause | 331 | 0 | 7 | 68 | 107 | 66 | 41 | 12 | 0 |
-- Copyright (c) 2012 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
{-# OPTIONS_GHC -funbox-strict-fields -Wall -Werror #-}
{-# LANGUAGE NoImplicitPrelude #-}
-- | This module contains an implementation of multivariate
-- polynomials, with basic instances and mathematical operations. At
present, this uses the (likely inefficient) implementation of
-- rings in NumericPrelude.
module Data.Polynomial.Multivariate(
Polynomial
) where
import Control.Applicative
import Data.Array(Array)
import Data.Foldable
import Data.Map(Map)
import Data.List(intercalate)
import Data.Traversable
import Data.Word
import NumericPrelude hiding (concat, mapM, sequence,
foldr, foldl, foldr1, foldl1)
import qualified Data.Array as Array
import qualified Data.Map as Map
import qualified MathObj.Algebra as Algebra
import qualified Algebra.Additive as Additive
import qualified Algebra.Monoid as Monoid
import qualified Algebra.Ring as Ring
-- | An internal type for monomials, parameterizable on the exponent.
data Monomial e = M (Map Word e)
instance Functor Monomial where
fmap f (M m) = M (fmap f m)
instance (Eq e, Additive.C e) => Monoid.C (Monomial e) where
idt = M Map.empty
(M m1) <*> (M m2) =
M (Map.filter (/= zero) (Map.unionWith (+) m1 m2))
instance Show e => Show (Monomial e) where
show (M m) = concat (map (\(k, e) -> "x_" ++ show k ++ "^" ++ show e)
(Map.toDescList m))
instance Ord e => Ord (Monomial e) where
compare (M m1) (M m2) = compare (Map.toDescList m1) (Map.toDescList m2)
instance Eq e => Eq (Monomial e) where
(M m1) == (M m2) = m1 == m2
-- | Multivariate polynomials. Uses a map to only store one copy of
-- the term in question, and uses indexes to refer to it throughout
-- the actual algebraic structure.
data Polynomial c e t = P (Algebra.T (Monomial e) c) (Array Word t)
instance (Show t, Show c, Show e) => Show (Polynomial t c e) where
show (P alg terms) =
show alg ++ " where " ++
intercalate ", " (map (\(k, t) -> "x_" ++ show k ++ " = " ++ show t)
(Array.assocs terms))
-- Deal with the politics of remapping variables, when combining two
-- polynomials with a binary function.
combine :: (Ord t, Ord e) =>
(Algebra.T (Monomial e) c -> Algebra.T (Monomial e) c ->
Algebra.T (Monomial e) c) ->
Polynomial c e t -> Polynomial c e t -> Polynomial c e t
combine op (P (Algebra.Cons map1) t1) (P (Algebra.Cons map2) t2) =
let
mapMonoVars :: (Word -> Word) -> Monomial e -> Monomial e
mapMonoVars f (M m) = M (Map.mapKeys f m)
-- Merge two sorted arrays of terms, mapping words to terms.
-- Produce a merged array, and two mappings.
merge :: Ord a => Array Word a -> Array Word a ->
(Array Word a, Array Word Word, Array Word Word)
merge a1 a2 =
let
-- Helper function
merge' :: Ord a => ([a], [Word], [Word], Word) -> [a] -> [a] ->
(Array Word a, Array Word Word, Array Word Word)
-- Dual induction case, we compare the first elements of the lists.
merge' (merged, amap, bmap, ind) (a : as) (b : bs) =
-- Pick the lesser of the two and append it to the merged
-- list. Also, build two renaming lists, which map indexes
-- in each of the original lists to indexes in the new list.
case compare a b of
LT -> merge' (a : merged, ind : amap, bmap, ind + 1) as (b : bs)
GT -> merge' (b : merged, amap, ind : bmap, ind + 1) (a : as) bs
EQ -> merge' (a : merged, ind : amap, ind : bmap, ind + 1) as bs
-- Single induction cases, if we run out of one list, append
-- the other, building the renaming maps accordingly.
merge' (merged, amap, bmap, ind) (a : as) [] =
merge' (a : merged, ind : amap, bmap, ind + 1) as []
merge' (merged, amap, bmap, ind) [] (b : bs)=
merge' (b : merged, amap, ind : bmap, ind + 1) [] bs
-- When both are empty, build the arrays.
merge' (merged, amap, bmap, ind) [] [] =
(Array.listArray (0, ind) (reverse merged),
Array.listArray (0, (fromIntegral (length amap)) - 1) (reverse amap),
Array.listArray (0, (fromIntegral (length bmap)) - 1) (reverse bmap))
in
merge' ([], [], [], 0) (Array.elems a1) (Array.elems a2)
-- Merge the arrays
(mergedarr, rename1, rename2) = merge t1 t2
-- Rename the variables in each polynomial to point into the merged term array
p1 = Algebra.Cons (Map.mapKeys (mapMonoVars (rename1 Array.!)) map1)
p2 = Algebra.Cons (Map.mapKeys (mapMonoVars (rename2 Array.!)) map2)
in
P (op p1 p2) mergedarr
instance (Ord c, Ord e, Ord t, Additive.C c, Additive.C e) =>
Additive.C (Polynomial c e t) where
zero = P zero (Array.listArray (1, 0) [])
negate (P alg terms) = P (negate alg) terms
(+) = combine (+)
(-) = combine (-)
instance (Ord c, Ord e, Ord t, Ring.C c, Additive.C e) =>
Ring.C (Polynomial c e t) where
one = P one (Array.listArray (1, 0) [])
fromInteger n = P (fromInteger n) (Array.listArray (1, 0) [])
(P alg terms) ^ e = P (alg ^ e) terms
(*) = combine (*)
instance Functor (Polynomial c e) where
fmap f (P alg terms) = P alg (fmap f terms)
instance Foldable (Polynomial c e) where
fold (P _ terms) = fold terms
foldMap f (P _ terms) = foldMap f terms
foldr f i (P _ terms) = foldr f i terms
foldl f i (P _ terms) = foldl f i terms
foldr1 f (P _ terms) = foldr1 f terms
foldl1 f (P _ terms) = foldl1 f terms
instance Traversable (Polynomial c e) where
traverse f (P alg terms) = P alg <$> traverse f terms
sequenceA (P alg terms) = P alg <$> sequenceA terms
mapM f (P alg terms) = mapM f terms >>= return . P alg
sequence (P alg terms) = sequence terms >>= return . P alg
{-
instance Applicative (Polynomial c e) where
pure x = P (Array.listArray (0, 0) [x])
(Algebra.monomial (Map.singleton 1 one) one)
instance Monad (Polynomial c e) where
return x = P (Algebra.monomial (Map.singleton 1 one) one)
(Array.listArray (0, 0) [x])
(P alg terms) >>= f = mapM f terms >>= return . P alg
-} | emc2/proglang-util | Data/Polynomial/Multivariate.hs | bsd-3-clause | 7,693 | 0 | 20 | 1,882 | 2,263 | 1,210 | 1,053 | 97 | 6 |
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleContexts #-}
module Main where
import Control.Monad
import Control.Monad.Trans.Free
import Control.Monad.Free.TH
import Control.Monad.IO.Class
import Control.Monad.Trans.Maybe
import qualified Data.Foldable as F
import Text.Read (readMaybe)
-- | A data type representing basic commands for a retriable eDSL.
data RetryF m next where
Output :: String -> next -> RetryF m next
Input :: Read a => (a -> next) -> RetryF m next
WithRetry :: (RetryT m) m a -> (a -> next) -> RetryF m next
Retry :: RetryF m next
-- | Unfortunately this Functor instance cannot yet be derived
-- automatically by GHC.
instance Functor (RetryF m) where
fmap f (Output s x) = Output s (f x)
fmap f (Input g) = Input (f . g)
fmap f (WithRetry block g) = WithRetry block (f . g)
fmap _ Retry = Retry
-- | The monad for a retriable eDSL.
type RetryT m = FreeT (RetryF m)
-- | Simple output command.
makeFreeCon 'Output
-- | Get anything readable from input.
makeFreeCon 'Input
-- | Force retry command (retries innermost retriable block).
makeFreeCon 'Retry
makeFreeCon 'WithRetry
-- | Run a retryable block.
-- withRetry :: MonadFree (RetryF m) m =>
-- (RetryT m) m a -- ^ Computation to retry.
-- -> m a -- ^ Computation that retries until succeeds.
-- | We can run a retriable program in any MonadIO.
runRetryT :: MonadIO m => (RetryT m) m a -> m a
runRetryT = iterT run
where
run :: MonadIO m => (RetryF m) (m a) -> m a
run (Output s next) = do
liftIO $ putStrLn s
next
run (Input next) = do
s <- liftIO getLine
case readMaybe s of
Just x -> next x
Nothing -> fail "invalid input"
run (WithRetry block next) = do
-- Here we use
-- runRetryT :: MonadIO m => (RetryT m) m a -> MaybeT (m a)
-- to control failure with MaybeT.
-- We repeatedly run retriable block until we get it to work.
--Just x <- runMaybeT . F.msum $ repeat (runRetryT block)
--
-- XXX dhess: note that the actual retry mechanism is disabled.
-- Because RetryT is a transformer, the monad which it wraps
-- may perform side effects in the retried block; how do
-- we un-do those effects when we retry, or how could we
-- prevent the monad from performing side effects in the first
-- place?
--
-- One way to do this might be to only wrap STM, so that the
-- transaction could be rolled back upon failure. Anyway, the
-- point of this DSL is only to make a 'with'-style command
-- available, such that the interpreter could be run within the
-- interpreter. Therefore, we don't address the larger problem
-- of retrying with side effects and we simply punt here.
x <- runRetryT block
next x
run Retry = fail "forced retry"
-- | Sample program.
test :: (Monad m) => (RetryT m) m ()
test = do
n <- withRetry $ do
output "Enter any positive number: "
n <- input
when (n <= 0) $ do
output "The number should be positive."
retry
return n
output $ "You've just entered " ++ show (n :: Int)
main :: IO ()
main = runRetryT test
| dhess/free-experiments | src/RetryTransTH.hs | bsd-3-clause | 3,263 | 0 | 14 | 850 | 669 | 350 | 319 | 54 | 5 |
{-# LANGUAGE TemplateHaskell #-}
module Monto.DeregisterService where
import Data.Aeson.TH
import qualified Data.Text as T
import Monto.Types
data DeregisterService =
DeregisterService
{ deregisterServiceID :: ServiceID
} deriving (Eq)
$(deriveJSON (defaultOptions {
fieldLabelModifier = \s -> case s of
"deregisterServiceID" -> "deregister_service_id"
label -> label
}) ''DeregisterService)
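-- Assuming 'ServiceID' encodes as a JSON string, the derived instances are
-- expected to produce output along these lines (the value is illustrative):
--
-- > encode (DeregisterService "myService")
-- >   ==> "{\"deregister_service_id\":\"myService\"}"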
instance Show DeregisterService where
show (DeregisterService i) =
concat [ "{", T.unpack i
, "}"
]
| wpmp/monto-broker | src/Monto/DeregisterService.hs | bsd-3-clause | 566 | 0 | 14 | 135 | 133 | 75 | 58 | 18 | 0 |
{- Compile, Run and Clean.
A helper program for running standalone tests for haskell-mpi.
Intended to be used in conjunction with shelltestrunner.
Use like so:
haskell-mpi-comprunclean -np 2 Pi.hs
The last argument is the name of a haskell file to compile
(should be the Main module). All other arguments are given
to mpirun.
The program is compiled. The resulting executable is run
underneath mpirun.
The executable is deleted and so are temporary files.
XXX should allow the program being run to accept its own
command-line arguments.
-}
module Main where
import System.Environment (getArgs)
import System.Process (system)
import System.Exit (ExitCode (..), exitWith)
import Control.Monad (when)
import Data.List (isSuffixOf)
main :: IO ()
main = do
args <- getArgs
when (length args > 0) $ do
let mpirunFlags = init args
(sourceFile, exeFile) = getFileNames $ last args
run $ "ghc -v0 --make -O2 " ++ sourceFile
run $ "mpirun " ++ unwords (mpirunFlags ++ [exeFile])
run $ "rm -f *.o *.hi " ++ exeFile
run :: String -> IO ()
run cmd = do
-- putStrLn cmd
status <- system cmd
if status /= ExitSuccess
then do
putStrLn $ "Command failed with status: " ++ show status
exitWith status
else return ()
getFileNames :: String -> (String, String)
getFileNames str
| isSuffixOf ".hs" str = (str, take (length str - 3) str)
| otherwise = error $ "Not a Haskell filename: " ++ str
| bjpop/haskell-mpi | test/CompileRunClean.hs | bsd-3-clause | 1,505 | 0 | 14 | 371 | 326 | 166 | 160 | 27 | 2 |
-- | Classic word count MapReduce algorithm written with the MapReduce monad
--
-- Takes as argument:
--
-- * The name of file of word-based text
--
-- * (Optional) the number of mappers to use in the first stage
-- (defaults to 16)
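--
-- Example invocation (the file name and mapper count are illustrative):
--
-- > ./wordcount words.txt 32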
module Main where
import System.IO
import System.Environment (getArgs)
import Parallel.MapReduce.WordCount
showNice :: [(String,Int)] -> IO()
showNice [] = return ()
showNice (x:xs) = do
putStrLn $ fst x ++ " occurs "++ show ( snd x) ++ " times"
showNice xs
main::IO()
main = do
args <- getArgs
out <- case length args of
0 -> error "Usage: wordcount [filename] ([num mappers])"
_ -> do
let nMap = case length args of
1 -> 16
_ -> read $ args!!1
state <- getLines (head args)
let res = mapReduce nMap state
return res
showNice out
-- put data
putLines :: FilePath -> [String] -> IO ()
putLines file text = do
h <- openFile file WriteMode
hPutStr h $ unwords text
hClose h
return ()
-- get input
getLines :: FilePath -> IO [String]
getLines file = do
h <- openFile file ReadMode
text <- hGetContents h
return $ words text
| Julianporter/Haskell-MapReduce | src/WordCount.hs | bsd-3-clause | 1,437 | 0 | 20 | 588 | 379 | 184 | 195 | 33 | 3 |
module Hyph_UTF8.Language (tags) where
tags :: [String]
tags =
[
"af"
, "hy"
, "as"
, "eu"
, "bn"
, "bg"
, "ca"
, "zh-latn-pinyin"
, "cop"
, "hr"
, "cs"
, "da"
, "nl"
, "en-gb"
, "en-us"
, "eo"
, "et"
, "mul-ethi"
, "fi"
, "fr"
, "fur"
, "gl"
, "ka"
, "de-1901"
, "de-1996"
, "de-ch-1901"
, "grc"
, "el-monoton"
, "el-polyton"
, "gu"
, "hi"
, "hu"
, "is"
, "id"
, "ia"
, "ga"
, "it"
, "kn"
, "kmr"
, "la"
, "la-x-classic"
, "lv"
, "lt"
, "ml"
, "mr"
, "mn-cyrl"
, "nb"
, "nn"
, "oc"
, "or"
, "pa"
, "pms"
, "pl"
, "pt"
, "ro"
, "rm"
, "ru"
, "sa"
, "sr-cyrl"
, "sh-cyrl"
, "sh-latn"
, "cu"
, "sk"
, "sl"
, "es"
, "sv"
, "ta"
, "te"
, "th"
, "tr"
, "tk"
, "uk"
, "hsb"
, "cy"
]
| ndr-qef/hyph-utf8.json | src/Hyph_UTF8/Language.hs | bsd-3-clause | 832 | 0 | 5 | 313 | 246 | 163 | 83 | 78 | 1 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
-- Module : Network.AWS
-- Copyright : (c) 2013-2015 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
-- | The core module for making requests to the various AWS services.
module Network.AWS
(
-- * Requests
-- ** Synchronous
send
-- ** Paginated
, paginate
-- ** Eventual consistency
, await
-- ** Pre-signing URLs
, presign
, presignURL
-- * Environment
, Env
-- ** Lenses
, envRegion
, envLogger
, envRetryCheck
, envRetryPolicy
, envManager
, envAuth
-- ** Creating the environment
, newEnv
, getEnv
-- ** Specifying credentials
, Credentials (..)
, fromKeys
, fromSession
, getAuth
, accessKey
, secretKey
-- * Logging
, newLogger
-- ** Streaming body helpers
, sourceBody
, sourceHandle
, sourceFile
, sourceFileIO
-- * Types
, module Network.AWS.Types
, module Network.AWS.Error
) where
import Control.Applicative
import Control.Monad
import Control.Monad.Catch
import Control.Monad.IO.Class
import Control.Monad.Trans.Class
import Control.Monad.Trans.Except
import Control.Monad.Trans.Resource
import Data.ByteString (ByteString)
import Data.Conduit hiding (await)
import Data.Monoid
import Data.Time (getCurrentTime)
import Network.AWS.Data
import Network.AWS.Error
import Network.AWS.Internal.Auth
import Network.AWS.Internal.Body
import Network.AWS.Internal.Env
import Network.AWS.Internal.Log
import Network.AWS.Internal.Retry
import qualified Network.AWS.Signing as Sign
import Network.AWS.Types
import Network.AWS.Waiters
import Network.HTTP.Conduit hiding (Request, Response)
import qualified Network.HTTP.Conduit as Client
-- | This creates a new environment without debug logging and uses 'getAuth'
-- to expand/discover the supplied 'Credentials'.
--
-- Lenses such as 'envLogger' can be used to modify the 'Env' with a debug logger.
newEnv :: (Functor m, MonadIO m)
=> Region
-> Credentials
-> Manager
-> ExceptT String m Env
newEnv r c m = Env r logger check Nothing m `liftM` getAuth m c
where
logger _ _ = return ()
check _ _ = return True
-- | Create a new environment in the specified 'Region' with silent log output
-- and a new 'Manager'.
--
-- Any errors are thrown using 'error'.
--
-- /See:/ 'newEnv' for safe 'Env' instantiation.
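--
-- A minimal sketch (the region is an assumption, and the key literals may
-- need explicit 'AccessKey' \/ 'SecretKey' wrapping):
--
-- > env <- getEnv Ireland (fromKeys "AKIA..." "secret...")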
getEnv :: Region -> Credentials -> IO Env
getEnv r c = do
m <- newManager conduitManagerSettings
runExceptT (newEnv r c m) >>= either error return
-- | Send a data type which is an instance of 'AWSRequest', returning either the
-- associated 'Rs' response type in the success case, or the related service's
-- 'Er' type in the error case.
--
-- This includes 'HTTPExceptions', serialisation errors, and any service
-- errors returned as part of the 'Response'.
--
-- /Note:/ Requests will be retried depending upon each service's respective
-- strategy. This can be overridden using 'envRetry'. Requests which contain
-- streaming request bodies (such as S3's 'PutObject') are never considered for retries.
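--
-- For example, issuing a request inside 'runResourceT' (the 'env' and 'req'
-- values are assumptions):
--
-- > rs <- runResourceT $ send env req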
send :: (MonadCatch m, MonadResource m, AWSRequest a)
=> Env
-> a
-> m (Response a)
send e@Env{..} (request -> rq) = fmap snd <$> retrier e rq (raw e rq)
-- | Poll the API until a predefined condition is fulfilled using the
-- supplied 'Wait' specification from the respective service.
--
-- The response will be either the first error returned that is not handled
-- by the specification, or the successful response from the await request.
--
-- /Note:/ You can find any available 'Wait' specifications under the
-- @Network.AWS.<ServiceName>.Waiters@ namespace for supported services.
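--
-- A usage sketch (the waiter and request values are assumptions):
--
-- > r <- runResourceT $ await env someWaiter someRequest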
await :: (MonadCatch m, MonadResource m, AWSRequest a)
=> Env
-> Wait a
-> a
-> m (Response a)
await e w (request -> rq) = fmap snd <$> waiter e w rq (raw e rq)
-- | Send a data type which is an instance of 'AWSPager' and paginate over
-- the associated 'Rs' response type in the success case, or the related service's
-- 'Er' type in the error case.
--
-- /Note:/ The 'ResumableSource' will close when there are no more results or the
-- 'ResourceT' computation is unwrapped. See: 'runResourceT' for more information.
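--
-- A sketch of draining the source, assuming conduit's '$$' operator and a
-- qualified import of @Data.Conduit.List@ as @CL@ (the request value is an
-- assumption):
--
-- > runResourceT $ paginate env req $$ CL.mapM_ (liftIO . print)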
paginate :: (MonadCatch m, MonadResource m, AWSPager a)
=> Env
-> a
-> Source m (Response a)
paginate e = go
where
go x = do
y <- lift (send e x)
yield y
either (const (return ()))
(maybe (return ()) go . page x)
y
-- | Presign an HTTP request that expires at the specified amount of time
-- in the future.
--
-- /Note:/ Requires the service's signer to be an instance of 'AWSPresigner'.
-- Not all signing processes support this.
presign :: (MonadIO m, AWSRequest a, AWSPresigner (Sg (Sv a)))
=> Env
-> a -- ^ Request to presign.
-> UTCTime -- ^ Signing time.
-> Integer -- ^ Expiry time in seconds.
-> m Client.Request
presign Env{..} (request -> rq) t ex =
_sgRequest `liftM` Sign.presign _envAuth _envRegion rq t ex
-- | Presign a URL that expires at the specified amount of time in the future.
--
-- /See:/ 'presign'
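--
-- For example, a link valid for one hour (the request value is an
-- assumption):
--
-- > ts  <- liftIO getCurrentTime
-- > url <- presignURL env req ts 3600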
presignURL :: (MonadIO m, AWSRequest a, AWSPresigner (Sg (Sv a)))
=> Env
-> a -- ^ Request to presign.
-> UTCTime -- ^ Signing time.
-> Integer -- ^ Expiry time in seconds.
-> m ByteString
presignURL e x t ex = (toBS . uri) `liftM` presign e x t ex
where
uri rq =
scheme (secure rq)
<> build (host rq)
<> port' (port rq)
<> build (path rq)
<> build (queryString rq)
scheme True = "https://"
scheme _ = "http://"
port' = \case
80 -> ""
443 -> ""
n -> build ':' <> build n
raw :: (MonadCatch m, MonadResource m, AWSRequest a)
=> Env
-> Request a
-> m (Response' a)
raw Env{..} rq = catch go err >>= response _envLogger rq
where
go = do
trace _envLogger (build rq)
t <- liftIO getCurrentTime
Signed m s <- Sign.sign _envAuth _envRegion rq t
debug _envLogger (build s)
trace _envLogger (build m)
rs <- liftResourceT (http s _envManager)
debug _envLogger (build rs)
return (Right rs)
err ex = return (Left (ex :: HttpException))
| romanb/amazonka | amazonka/src/Network/AWS.hs | mpl-2.0 | 7,314 | 0 | 15 | 2,049 | 1,378 | 762 | 616 | 136 | 4 |
-- Usage:
-- cabal build Holostress && ./dist/build/Holostress/Holostress +RTS -T -RTS
--
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UnicodeSyntax #-}
{-# OPTIONS_GHC -Wextra -Wno-unticked-promoted-constructors -Wno-type-defaults #-}
import qualified Data.ByteString.Char8 as SB
import qualified Data.List
import qualified Data.Map.Strict as Map
import qualified Data.Vector as V
import Linear hiding (trace)
import qualified GI.PangoCairo.Functions as GIPC
import qualified Graphics.Rendering.Cairo as GRC
import qualified Graphics.GL.Core33 as GL
import qualified "GLFW-b" Graphics.UI.GLFW as GLFW
import qualified LambdaCube.GL.Mesh as GL
import qualified LambdaCube.GL.Input as GL
import qualified LambdaCube.Linear as LCLin
import LambdaCube.Mesh as LC
import qualified Data.Text as T
import qualified Data.Text.Zipper as T
import Flatland
import HoloTypes
import qualified HoloCairo as Cr
import qualified Holo.System as HOS
import HoloPort
main ∷ IO ()
main = do
HOS.unbufferStdout
_ ← GLFW.init
GLFW.defaultWindowHints
mapM_ GLFW.windowHint
[ GLFW.WindowHint'ContextVersionMajor 3
, GLFW.WindowHint'ContextVersionMinor 3
, GLFW.WindowHint'OpenGLProfile GLFW.OpenGLProfile'Core
, GLFW.WindowHint'OpenGLForwardCompat True
]
Just win ← GLFW.createWindow 1024 768 "repro" Nothing Nothing
GLFW.makeContextCurrent $ Just win
GL.glEnable GL.GL_FRAMEBUFFER_SRGB
GLFW.swapInterval 0
let (,) osName uniName = ("portStream", "portMtl")
schema = pipelineSchema [(osName, uniName)]
portGLStorage ← liftIO $ GL.allocStorage schema
let ObjectStream{..} = ObjectStream portGLStorage osName uniName
-- * Holo
let Settings{..} = defaultSettings
timeStart ← HOS.fromSec <$> HOS.getTime
let text n = [ T.pack $ printf "Object #%d:" n
, " Esc: quit"
, " F1: toggle per-frame object stream"
, " Editing keys: edit"
, ""
, "Yay!"] ∷ [T.Text]
zipper = T.textZipper $ text (42 ∷ Int)
dim@(Di (V2 w h)) = di 256 256
navg = 10
loop (iterN, timePre) avgPre preKB = do
dSurface ← GRC.createImageSurface GRC.FormatARGB32 w h
cairo ← Cr.cairoCreate dSurface
dGIC ← Cr.cairoToGICairo cairo
let (_dx, _dy) = (fromIntegral w, fromIntegral $ -h)
_position = V.fromList [ LCLin.V2 0 _dy, LCLin.V2 0 0, LCLin.V2 _dx 0, LCLin.V2 0 _dy, LCLin.V2 _dx 0, LCLin.V2 _dx _dy ]
_texcoord = V.fromList [ LCLin.V2 0 1, LCLin.V2 0 0, LCLin.V2 1 0, LCLin.V2 0 1, LCLin.V2 1 0, LCLin.V2 1 1 ]
_dMesh = LC.Mesh { mPrimitive = P_Triangles
, mAttributes = Map.fromList [ ("position", A_V2F _position)
, ("uv", A_V2F _texcoord) ] }
-- _ ← GL.uploadMeshToGPU _dMesh
-- (GIP.Context _gipc@(GI.ManagedPtr _fptr ownedR))
-- owned ← IO.readIORef ownedR
_ ← GIPC.createContext dGIC
-- cDrawable ← makeDrawable stream $ fromIntegral <$> dim
do
dSurface ← GRC.createImageSurface GRC.FormatARGB32 w h
dCairo ← Cr.cairoCreate dSurface
dGIC ← Cr.cairoToGICairo dCairo
let (dx, dy) = (fromIntegral w, fromIntegral $ -h)
position = V.fromList [ LCLin.V2 0 dy, LCLin.V2 0 0, LCLin.V2 dx 0, LCLin.V2 0 dy, LCLin.V2 dx 0, LCLin.V2 dx dy ]
texcoord = V.fromList [ LCLin.V2 0 1, LCLin.V2 0 0, LCLin.V2 1 0, LCLin.V2 0 1, LCLin.V2 1 0, LCLin.V2 1 1 ]
dMesh = LC.Mesh { mPrimitive = P_Triangles
, mAttributes = Map.fromList [ ("position", A_V2F position)
, ("uv", A_V2F texcoord) ] }
dGPUMesh ← GL.uploadMeshToGPU dMesh
dGLObject ← GL.addMeshToObjectArray osStorage (fromOANS osObjArray) [SB.unpack $ fromUNS osUniform, "viewProj"] dGPUMesh
GL.removeObject osStorage dGLObject
GL.disposeMesh dGPUMesh
-- Canvas (RRect T.Text)
-- let cStyle@(In (CanvasS cFontKey) innerStyle) = style
-- innerContent = zipperText zipper
-- vis ← do
-- cPSpace ← sPin (po 0 0) <$> query stts innerStyle innerContent
-- cDrawable ← makeDrawable stream $ spaceDim cPSpace
-- cFont ← bindFont (lookupFont' fontmap cFontKey) dGIC
-- let w = Canvas{..} where cInner = (⊥) -- resolve circularity due to *ToInner..
-- cInner ← make stts (CW w) innerStyle innerContent cPSpace
-- pure w { cInner = cInner }
-- render vis
--- do stats
timePreGC ← HOS.fromSec <$> HOS.getTime
HOS.gc
new ← HOS.gcKBytesUsed
timePost ← HOS.fromSec <$> HOS.getTime
let dt = timePost - timePre
nonGCt = timePreGC - timePre
avgPost@(avgVal, _) = avgStep dt avgPre
when (0 == mod iterN 40) $
printf " frame used dFrMem avgFrMem avgFrTime frTimeNonGC\n"
-- when (preKB /= new) $
printf "%5dn %dk %4ddK %5dK/f %4.2fms %4.2fms\n"
iterN new (new - preKB) (ceiling $ (fromIntegral new / fromIntegral iterN) ∷ Int)
(avgVal ⋅ 1000) (nonGCt ⋅ 1000)
loop (iterN + 1, timePost) avgPost new
loop (0 ∷ Integer, timeStart) (0.0, (navg, 0, [])) =<< HOS.gcKBytesUsed
type Avg a = (Int, Int, [a])
avgStep ∷ Fractional a ⇒ a → (a, Avg a) → (a, Avg a)
avgStep x (_, (lim, cur, xs)) =
let (ncur, nxs) = if cur < lim
then (cur + 1, x:xs)
else (lim, x:Data.List.init xs)
in ((sum nxs) / fromIntegral ncur, (lim, ncur, nxs))
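-- For instance, with a window of 3 and an empty accumulator, a single step
-- behaves like this:
--
-- > avgStep 2.0 (0.0, (3, 0, []))  ==  (2.0, (3, 1, [2.0]))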
| deepfire/mood | tests/Holostress.hs | agpl-3.0 | 6,847 | 5 | 22 | 2,312 | 1,565 | 848 | 717 | 112 | 2 |
{-
- Copyright (c) 2017 The Agile Monkeys S.L. <[email protected]>
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-}
module HaskellDo.Compilation.View where
import Control.Monad.IO.Class
import Control.Monad (when)
import Prelude hiding (div, id)
import AxiomUtils
import Foreign.Highlight
import GHCJS.HPlay.View hiding (atr, id)
import qualified Ulmus
import HaskellDo.Compilation.Types
import Foreign.JQuery
outputDisplay :: State -> Widget ()
outputDisplay state = rawHtml $
div ! id "output-frame" $ noHtml `setContents` compiledOutput state
errorDisplay :: State -> Widget ()
errorDisplay state
| null (compilationError state) = return ()
| otherwise = rawHtml $
pre
! atr "class" "card-panel red darken-1 white-text"
! atr "role" "alert"
$ code (compilationError state)
updateDisplays :: State -> Widget ()
updateDisplays state =
when (dirtyCompile state) $ do
Ulmus.newWidget "outputDisplay" (outputDisplay state)
Ulmus.newWidget "errorDisplay" (errorDisplay state)
liftIO $ activateScriptTags "#output-frame"
liftIO $ setHeightFromElement ".error-placeholder" "#errorDisplay"
liftIO highlightCode
| J2RGEZ/haskell-do | src/common/HaskellDo/Compilation/View.hs | apache-2.0 | 1,789 | 0 | 10 | 408 | 302 | 154 | 148 | 29 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Database.DSH.VSL.Opt.Rewrite.PruneEmpty(pruneEmpty) where
-- import Control.Monad
import Database.DSH.Common.Opt
import Database.DSH.Common.VectorLang
import Database.DSH.VSL.Opt.Properties.Types
import Database.DSH.VSL.Opt.Rewrite.Common
-- import Database.Algebra.Dag.Common
-- import Database.DSH.VSL.Lang
pruneEmpty :: VSLRewrite TExpr TExpr Bool
pruneEmpty = applyToAll inferBottomUp emptyRules
emptyRules :: VSLRuleSet TExpr TExpr BottomUpProps
emptyRules = [ -- emptyAppendLeftR1
-- , emptyAppendLeftR2
-- , emptyAppendLeftR3
-- , emptyAppendRightR1
-- , emptyAppendRightR2
-- , emptyAppendRightR3
]
-- FIXME pruning data vectors (R1) alone is not sufficient when
-- dealing with natural keys. We need to treat R2 and R3 outputs as
-- well, because otherwise inner vectors will be re-keyed and no
-- longer be aligned with the outer vector.
-- isEmpty :: AlgNode -> VSLMatch BottomUpProps Bool
-- isEmpty q = do
-- ps <- liftM emptyProp $ properties q
-- case ps of
-- VProp b -> return b
-- x -> error $ "PruneEmpty.isEmpty: non-vector input " ++ show x
-- {- If the left input is empty and the other is not, the resulting value vector
-- is simply the right input. -}
-- emptyAppendLeftR1 :: VSLRule BottomUpProps
-- emptyAppendLeftR1 q =
-- $(dagPatMatch 'q "R1 ((q1) [Append | AppendS] (q2))"
-- [| do
-- predicate =<< ((&&) <$> (isEmpty $(v "q1")) <*> (not <$> isEmpty $(v "q2")))
-- return $ do
-- logRewrite "Empty.Append.Left.R1" q
-- replace q $(v "q2") |])
-- FIXME re-add rules when
{-
-- If the left input is empty, renaming will make the inner vector
-- empty as well.
emptyAppendLeftR2 :: VSLRule BottomUpProps
emptyAppendLeftR2 q =
$(dagPatMatch 'q "(R2 ((q1) Append (q2))) PropRename (qv)"
[| do
predicate =<< ((&&) <$> (isEmpty $(v "q1")) <*> (not <$> isEmpty $(v "q2")))
VProp (ValueVector w) <- vectorTypeProp <$> properties $(v "qv")
return $ do
logRewrite "Empty.Append.Left.R2" q
void $ replaceWithNew q (NullaryOp $ Empty w) |])
-- If the left input is empty, the rename vector for the right inner
-- vectors is simply identity
emptyAppendLeftR3 :: VSLRule BottomUpProps
emptyAppendLeftR3 q =
$(dagPatMatch 'q "(R3 ((q1) Append (q2))) PropRename (qv)"
[| do
predicate =<< ((&&) <$> (isEmpty $(v "q1")) <*> (not <$> isEmpty $(v "q2")))
return $ do
logRewrite "Empty.Append.Left.R3" q
replace q $(v "qv") |])
-}
-- emptyAppendRightR1 :: VSLRule BottomUpProps
-- emptyAppendRightR1 q =
-- $(dagPatMatch 'q "R1 ((q1) [Append | AppendS] (q2))"
-- [| do
-- predicate =<< ((&&) <$> (isEmpty $(v "q2")) <*> (not <$> isEmpty $(v "q1")))
-- return $ do
-- logRewrite "Empty.Append.Right.R1" q
-- replace q $(v "q1") |])
{-
-- If the right input is empty, renaming will make the inner vector
-- empty as well.
emptyAppendRightR3 :: VSLRule BottomUpProps
emptyAppendRightR3 q =
$(dagPatMatch 'q "(R3 ((q1) Append (q2))) PropRename (qv)"
[| do
predicate =<< ((&&) <$> (not <$> isEmpty $(v "q1")) <*> (isEmpty $(v "q2")))
VProp (ValueVector w) <- vectorTypeProp <$> properties $(v "qv")
return $ do
logRewrite "Empty.Append.Right.R3" q
void $ replaceWithNew q $ NullaryOp $ Empty w |])
-- If the right input is empty, the rename vector for the left inner
-- vectors is simply identity
emptyAppendRightR2 :: VSLRule BottomUpProps
emptyAppendRightR2 q =
$(dagPatMatch 'q "(R2 ((q1) Append (q2))) PropRename (qv)"
[| do
predicate =<< ((&&) <$> (isEmpty $(v "q2")) <*> (not <$> isEmpty $(v "q1")))
return $ do
logRewrite "Empty.Append.Right.R2" q
void $ replace q $(v "qv") |])
-}
| ulricha/dsh | src/Database/DSH/VSL/Opt/Rewrite/PruneEmpty.hs | bsd-3-clause | 3,986 | 0 | 5 | 998 | 129 | 97 | 32 | 10 | 1 |
module Halfs.Directory
( DirHandle(..)
, FileStat(..)
, FileMode(..)
, AccessRight(..)
, FileType(..)
, addDirEnt
, addDirEnt_lckd
, addDirEnt_lckd'
, closeDirectory
, find
, findInDir
, getDHINR_lckd
, makeDirectory
, newDirHandle
, openDirectory
, removeDirectory
, rmDirEnt
, rmDirEnt_lckd
, syncDirectory
, syncDirectory_lckd
, withDirectory
-- * for testing
, DirectoryEntry(..)
, DirectoryState(..)
)
where
import Control.Exception (assert)
import qualified Data.ByteString as BS
import qualified Data.Map as M
import Data.Serialize
import Foreign.C.Error
import Halfs.BlockMap
import Halfs.Classes
import Halfs.Errors
import Halfs.HalfsState
import Halfs.Monad
import Halfs.MonadUtils
import Halfs.Inode ( Inode(..)
, atomicReadInode
, blockAddrToInodeRef
, buildEmptyInodeEnc
, drefInode
, freeInode
, inodeRefToBlockAddr
, readStream
, withLockedInode
, writeStream
)
import Halfs.Protection
import Halfs.Types
import Halfs.Utils
import System.Device.BlockDevice
-- import Debug.Trace
type HalfsM b r l m a = HalfsT HalfsError (Maybe (HalfsState b r l m)) m a
--------------------------------------------------------------------------------
-- Directory manipulation and query functions
-- | Given a parent directory's inoderef, its owner, and its group,
-- generate a new, empty directory with the given name.
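--
-- A hypothetical call site (all identifiers below are assumptions):
--
-- > dirIR <- makeDirectory parentIR "docs" usr grp perms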
makeDirectory :: HalfsCapable b t r l m =>
InodeRef -- ^ inr to parent directory
-> String -- ^ directory name
-> UserID -- ^ user id for created directory
-> GroupID -- ^ group id for created directory
-> FileMode -- ^ initial perms for new directory
-> HalfsM b r l m InodeRef -- ^ on success, the inode ref to the
-- created directory
makeDirectory parentIR dname user group perms =
withDirectory parentIR $ \pdh -> do
withDHLock pdh $ do
-- Begin critical section over parent's DirHandle
contents <- readRef (dhContents pdh)
if M.member dname contents
then throwError $ HE_ObjectExists dname
else do
bm <- hasks hsBlockMap
mir <- fmap blockAddrToInodeRef `fmap` alloc1 bm
case mir of
Nothing -> throwError HE_AllocFailed
Just thisIR -> do
-- Build the directory inode and persist it
dev <- hasks hsBlockDev
bstr <- lift $ buildEmptyInodeEnc
dev
Directory
perms
thisIR
parentIR
user
group
assert (BS.length bstr == fromIntegral (bdBlockSize dev)) $ do
lift $ bdWriteBlock dev (inodeRefToBlockAddr thisIR) bstr
-- Add 'dname' to parent directory's contents
addDirEnt_lckd pdh dname thisIR user group perms Directory
return thisIR
-- End critical section over parent's DirHandle
-- | Given a parent directory's inode ref, remove the directory with the given name.
removeDirectory :: HalfsCapable b t r l m =>
Maybe String -- ^ name to remove from parent
-- directory's content map (when Nothing,
-- leaves the the parent directory's
-- content map alone)
-> InodeRef -- ^ inr of directory to remove
-> HalfsM b r l m ()
removeDirectory mdname inr = do
-- TODO: Perms check (write perms on parent directory, etc.)
dhMap <- hasks hsDHMap
-- We lock the dirhandle map so (a) there's no contention for
-- dirhandle lookup/creation for the directory we're removing and (b)
-- we can ensure that the directory is empty.
withLockedRscRef dhMap $ \dhMapRef -> do
dh <- lookupRM inr dhMapRef >>= maybe (newDirHandle inr) return
withDHLock dh $ do
-- begin dirhandle critical section
contents <- readRef (dhContents dh)
unless (M.null contents) $ HE_DirectoryNotEmpty `annErrno` eNOTEMPTY
-- When we've been given a directory name, purge this dir's dirent from
-- the parent directory.
case mdname of
Nothing -> return ()
Just dname ->
withLockedInode inr $ do
pinr <- inoParent `fmap` drefInode inr
pdh <- lookupRM pinr dhMapRef >>= maybe (newDirHandle pinr) return
rmDirEnt pdh dname
-- Invalidate dh so that all subsequent DH-mediated access fails
writeRef (dhInode dh) Nothing
deleteRM inr dhMapRef
freeInode inr
-- end dirhandle critical section
-- | Syncs directory contents to disk
syncDirectory :: HalfsCapable b t r l m =>
DirHandle r l
-> HalfsM b r l m ()
syncDirectory dh = withDHLock dh $ syncDirectory_lckd dh
syncDirectory_lckd :: HalfsCapable b t r l m =>
DirHandle r l
-> HalfsM b r l m ()
syncDirectory_lckd dh = do
-- Precond: (dhLock dh) is currently held (can we assert this? TODO)
state <- readRef $ dhState dh
-- TODO: Currently, we overwrite the entire DirectoryEntry list, truncating
-- the directory's inode data stream as needed. This is _braindead_, however.
-- For OnlyAdded, we can just append to the stream; for OnlyDeleted, we can
-- write only invalidating entries and employ incremental coalescing, etc.
-- overwriteAll should be reserved for the VeryDirty case only.
case state of
Clean -> return ()
OnlyAdded -> overwriteAll
OnlyDeleted -> overwriteAll
VeryDirty -> overwriteAll
where
overwriteAll = do
inr <- getDHINR_lckd dh
writeStream inr 0 True
=<< (encode . M.elems) `fmap` readRef (dhContents dh)
lift . bdFlush =<< hasks hsBlockDev
modifyRef (dhState dh) dirStTransClean
-- | Obtains an active directory handle for the directory at the given InodeRef
openDirectory :: HalfsCapable b t r l m =>
InodeRef
-> HalfsM b r l m (DirHandle r l)
openDirectory inr = do
-- TODO FIXME permissions checks!
dhMap <- hasks hsDHMap
mdh <- withLockedRscRef dhMap (lookupRM inr)
case mdh of
Just dh -> return dh
Nothing -> do
dh <- newDirHandle inr
withLockedRscRef dhMap $ \ref -> do
-- If there's now a DirHandle in the map for our inode ref, prefer it to
-- the one we just created; this is to safely avoid race conditions
-- without extending the critical section over this entire function,
-- which performs a potentially expensive BlockDevice read.
mdh' <- lookupRM inr ref
case mdh' of
Just dh' -> return dh'
Nothing -> do
insertRM inr dh ref
return dh
closeDirectory :: HalfsCapable b t r l m =>
DirHandle r l
-> HalfsM b r l m ()
closeDirectory dh = do
syncDirectory dh
return ()
-- | Add a directory entry for a file, directory, or symlink; expects
-- that the item does not already exist in the directory. Thread-safe.
addDirEnt :: HalfsCapable b t r l m =>
DirHandle r l
-> String
-> InodeRef
-> UserID
-> GroupID
-> FileMode
-> FileType
-> HalfsM b r l m ()
addDirEnt dh name ir u g mode ftype =
withDHLock dh $ addDirEnt_lckd dh name ir u g mode ftype
addDirEnt_lckd :: HalfsCapable b t r l m =>
DirHandle r l
-> String
-> InodeRef
-> UserID
-> GroupID
-> FileMode
-> FileType
-> HalfsM b r l m ()
addDirEnt_lckd dh name inr u g mode ftype =
addDirEnt_lckd' False dh $ DirEnt name inr u g mode ftype
addDirEnt_lckd' :: HalfsCapable b t r l m =>
Bool
-> DirHandle r l
-> DirectoryEntry
-> HalfsM b r l m ()
addDirEnt_lckd' replaceOK dh de = do
-- Precond: (dhLock dh) is currently held (can we assert this? TODO)
when (not replaceOK) $ do
mfound <- lookupDE name dh
maybe (return ()) (const $ throwError $ HE_ObjectExists name) mfound
insertRM name de (dhContents dh)
modifyRef (dhState dh) dirStTransAdd
where
name = deName de
-- | Remove a directory entry for a file, directory, or symlink; expects
-- that the item exists in the directory. Thread-safe.
rmDirEnt :: HalfsCapable b t r l m =>
DirHandle r l
-> String
-> HalfsM b r l m ()
rmDirEnt dh name =
withDHLock dh $ rmDirEnt_lckd dh name
rmDirEnt_lckd :: HalfsCapable b t r l m =>
DirHandle r l
-> String
-> HalfsM b r l m ()
rmDirEnt_lckd dh name = do
-- Precond: (dhLock dh) is currently held (can we assert this? TODO)
-- begin sanity check
mfound <- lookupDE name dh
maybe (throwError $ HE_ObjectDNE name) (const $ return ()) mfound
-- end sanity check
deleteRM name (dhContents dh)
modifyRef (dhState dh) dirStTransRm
-- | Finds a directory, file, or symlink given a starting inode
-- reference (i.e., the directory inode at which to begin the search)
-- and a list of path components. Success is denoted using the DF_Found
-- constructor of the DirFindRslt type.
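--
-- A usage sketch (the root inode reference is an assumption):
--
-- > r <- find rootIR Directory ["usr", "local"]
-- > case r of
-- >   DF_Found (inr, _)  -> ...
-- >   DF_WrongFileType t -> ...
-- >   DF_NotFound        -> ...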
find :: HalfsCapable b t r l m =>
InodeRef -- ^ The starting inode reference
-> FileType -- ^ A match must be of this filetype
-> [FilePath] -- ^ Path components
-> HalfsM b r l m (DirFindRslt InodeRef)
--
find startINR ftype [] = do
ft <- atomicReadInode startINR inoFileType
return $ foundRslt startINR ft ftype
--
find startINR ftype (pathComp:rest) = do
dh <- openDirectory startINR
sr <- findDE dh pathComp (if null rest then ftype else Directory)
case sr of
DF_NotFound -> return $ DF_NotFound
DF_WrongFileType ft -> return $ DF_WrongFileType ft
DF_Found (de, _) -> find (deInode de) ftype rest
-- | Locate the given directory entry typed file by filename in the
-- DirHandle's content map
findDE :: HalfsCapable b t r l m =>
DirHandle r l
-> String
-> FileType
-> HalfsM b r l m (DirFindRslt DirectoryEntry)
findDE dh fname ftype = do
mde <- withDHLock dh $ lookupDE fname dh
case mde of
Nothing -> return DF_NotFound
Just de -> return $ foundRslt de (deType de) ftype
-- Exportable version of findDE; doesn't expose DirectoryEntry to caller
findInDir :: HalfsCapable b t r l m =>
DirHandle r l
-> String
-> FileType
-> HalfsM b r l m (DirFindRslt InodeRef)
findInDir dh fname ftype = fmap deInode `fmap` findDE dh fname ftype
foundRslt :: a -> FileType -> FileType -> DirFindRslt a
foundRslt inr ft ftype =
if ft `isFileType` ftype
then DF_Found (inr, ft)
else DF_WrongFileType ft
--------------------------------------------------------------------------------
-- Utility functions
newDirHandle :: HalfsCapable b t r l m =>
InodeRef
-> HalfsM b r l m (DirHandle r l)
newDirHandle inr = do
rawDirBytes <- readStream inr 0 Nothing
dirEnts <- if BS.null rawDirBytes
then do return []
else case decode rawDirBytes of
Left msg -> throwError $ HE_DecodeFail_Directory msg
Right x -> return x
DirHandle
`fmap` newRef (Just inr)
`ap` newRef (M.fromList $ map deName dirEnts `zip` dirEnts)
`ap` newRef Clean
`ap` newLock
-- Get directory handle's inode reference...
getDHINR_lckd :: HalfsCapable b t r l m =>
DirHandle r l
-> HalfsM b r l m InodeRef
getDHINR_lckd dh = do
-- Precond: (dhLock dh) has been acquired (TODO: can we assert this?)
readRef (dhInode dh) >>= maybe (throwError HE_InvalidDirHandle) return
withDirectory :: HalfsCapable b t r l m =>
InodeRef
-> (DirHandle r l -> HalfsM b r l m a)
-> HalfsM b r l m a
withDirectory ir = hbracket (openDirectory ir) closeDirectory
isFileType :: FileType -> FileType -> Bool
isFileType _ AnyFileType = True
isFileType t1 t2 = t1 == t2
_showDH :: HalfsCapable b t r l m => DirHandle r l -> HalfsM b r l m String
_showDH dh = do
withDHLock dh $ do
state <- readRef $ dhState dh
contents <- readRef $ dhContents dh
inr <- getDHINR_lckd dh
return $ "DirHandle { dhInode = " ++ show inr
++ ", dhContents = " ++ show contents
++ ", dhState = " ++ show state
dirStTransAdd :: DirectoryState -> DirectoryState
dirStTransAdd Clean = OnlyAdded
dirStTransAdd OnlyAdded = OnlyAdded
dirStTransAdd _ = VeryDirty
dirStTransRm :: DirectoryState -> DirectoryState
dirStTransRm Clean = OnlyDeleted
dirStTransRm OnlyDeleted = OnlyDeleted
dirStTransRm _ = VeryDirty
dirStTransClean :: DirectoryState -> DirectoryState
dirStTransClean = const Clean
| hackern/halfs | Halfs/Directory.hs | bsd-3-clause | 13,422 | 0 | 26 | 4,300 | 3,043 | 1,515 | 1,528 | 278 | 4 |
-- Compile this with 'ghc -o Game Game.hs' and run it with './Game'.
import Graphics.Gloss.Game
-- A sprite representing our character
slimeSprite = bmp "Slime.bmp"
-- Our game world consists purely of the location of our character.
data World = World Point
-- This starts our game in a window with a given size, running at 30 frames per second.
--
-- The argument 'World (0, 0)' is the initial state of our game world, where our character is at the centre of the
-- window.
--
main = play (InWindow "Slime is here!" (600, 400) (50, 50)) white 30 (World (0, 0)) draw handle []
-- To draw a frame, we position the character sprite at the location as determined by the current state of the world.
-- We shrink the sprite by 50%.
draw (World (x, y)) = translate x y (scale 0.5 0.5 slimeSprite)
-- Whenever any of the keys 'a', 'd', 'w', or 's' has been pushed down, move our character in the corresponding
-- direction.
handle (EventKey (Char 'a') Down _ _) (World (x, y)) = World (x - 10, y)
handle (EventKey (Char 'd') Down _ _) (World (x, y)) = World (x + 10, y)
handle (EventKey (Char 'w') Down _ _) (World (x, y)) = World (x, y + 10)
handle (EventKey (Char 's') Down _ _) (World (x, y)) = World (x, y - 10)
handle event world = world -- don't change the world in case of any other events
| mchakravarty/lets-program | step1/Game.hs | bsd-3-clause | 1,303 | 0 | 9 | 263 | 352 | 193 | 159 | 10 | 1 |
import Language.Haskell.Pretty (prettyPrint)
import Language.Haskell.Parser (ParseResult(ParseOk, ParseFailed), parseModule)
import System.Environment (getArgs)
import System.Exit (exitWith, ExitCode(ExitFailure))
import System.IO (hPutStrLn, stderr)
import Control.Monad (when)
import Text.PrettyPrint.HughesPJ (render, text, (<+>), hsep)
import Language.C (parseCFile)
import Language.C.System.GCC (newGCC)
usageMsg :: String -> String
usageMsg prg = render $ text "Usage:" <+> text prg <+> hsep (map text ["CPP_OPTIONS","input_file.c"])
main :: IO ()
main = do
let usageErr = (hPutStrLn stderr (usageMsg "./ParseAndPrint") >> exitWith (ExitFailure 1))
args <- getArgs
when (length args < 1) usageErr
let (opts,input_file) = (init args, last args)
ast <- errorOnLeftM "Parse Error" $ parseCFile (newGCC "gcc") Nothing opts input_file
putStrLn $ (decorate (shows (fmap (const ShowPlaceholder) ast)) "")
errorOnLeft :: (Show a) => String -> (Either a b) -> IO b
errorOnLeft msg = either (error . ((msg ++ ": ")++).show) return
errorOnLeftM :: (Show a) => String -> IO (Either a b) -> IO b
errorOnLeftM msg action = action >>= errorOnLeft msg
data ShowPlaceholder = ShowPlaceholder
instance Show ShowPlaceholder where
showsPrec _ ShowPlaceholder = showString "_"
decorate app = showString "(" . app . showString ")"
| llelf/language-c | examples/DumpAst.hs | bsd-3-clause | 1,332 | 0 | 15 | 196 | 515 | 272 | 243 | 27 | 1 |
-- A variant of T5654 where instead of evaluating directly to a
-- function, f evaluates to a new PAP. This exposes a slightly
-- different but related bug, where when we create a new PAP by
-- applying arguments to an existing PAP, we should take into account
-- the stack on the original PAP.
-- The stack we should see is main->f->g->h, but if we get this wrong
-- (GHC 7.10) then the stack is main->f->h.
{-# NOINLINE f #-}
f :: Int -> Int
f = g 3
{-# NOINLINE g #-}
g :: Int -> Int -> Int
g = h 4
{-# NOINLINE h #-}
h :: Int -> Int -> Int -> Int
h x y z = x + y + z
main = return $! f 5
| nushio3/ghc | testsuite/tests/profiling/should_run/T5654b.hs | bsd-3-clause | 599 | 0 | 7 | 142 | 98 | 55 | 43 | 10 | 1 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
************************************************************************
* *
\section[FloatIn]{Floating Inwards pass}
* *
************************************************************************
The main purpose of @floatInwards@ is floating into branches of a
case, so that we don't allocate things, save them on the stack, and
then discover that they aren't needed in the chosen branch.
-}
{-# LANGUAGE CPP #-}
module FloatIn ( floatInwards ) where
#include "HsVersions.h"
import CoreSyn
import MkCore
import CoreUtils ( exprIsDupable, exprIsExpandable, exprType,
exprOkForSideEffects, mkTicks )
import CoreFVs ( CoreExprWithFVs, freeVars, freeVarsOf, idRuleAndUnfoldingVars )
import Id ( isOneShotBndr, idType )
import Var
import Type ( Type, isUnLiftedType, isFunTy, splitFunTy, applyTy )
import VarSet
import Util
import UniqFM
import DynFlags
import Outputable
import Data.List( mapAccumL )
{-
Top-level interface function, @floatInwards@. Note that we do not
actually float any bindings downwards from the top-level.
-}
floatInwards :: DynFlags -> CoreProgram -> CoreProgram
floatInwards dflags = map fi_top_bind
where
fi_top_bind (NonRec binder rhs)
= NonRec binder (fiExpr dflags [] (freeVars rhs))
fi_top_bind (Rec pairs)
= Rec [ (b, fiExpr dflags [] (freeVars rhs)) | (b, rhs) <- pairs ]
{-
************************************************************************
* *
\subsection{Mail from Andr\'e [edited]}
* *
************************************************************************
{\em Will wrote: What??? I thought the idea was to float as far
inwards as possible, no matter what. This is dropping all bindings
every time it sees a lambda of any kind. Help! }
You are assuming we DO DO full laziness AFTER floating inwards! We
have to [not float inside lambdas] if we don't.
If we indeed do full laziness after the floating inwards (we could
check the compilation flags for that) then I agree we could be more
aggressive and do float inwards past lambdas.
Actually we are not doing a proper full laziness (see below), which
was another reason for not floating inwards past a lambda.
This can easily be fixed. The problem is that we float lets outwards,
but there are a few expressions which are not let bound, like case
scrutinees and case alternatives. After floating inwards the
simplifier could decide to inline the let and the laziness would be
lost, e.g.
\begin{verbatim}
let a = expensive ==> \b -> case expensive of ...
in \ b -> case a of ...
\end{verbatim}
The fix is
\begin{enumerate}
\item
to let bind the algebraic case scrutinees (done, I think) and
the case alternatives (except the ones with an
unboxed type)(not done, I think). This is best done in the
SetLevels.hs module, which tags things with their level numbers.
\item
do the full laziness pass (floating lets outwards).
\item
simplify. The simplifier inlines the (trivial) lets that were
created but were not floated outwards.
\end{enumerate}
With the fix I think Will's suggestion that we can gain even more from
strictness by floating inwards past lambdas makes sense.
We still gain even without going past lambdas, as things may be
strict in the (new) context of a branch (where it was floated to) or
of a let rhs, e.g.
\begin{verbatim}
let a = something            case x of
in case x of                   alt1 -> case something of a -> a + a
     alt1 -> a + a      ==>    alt2 -> b
     alt2 -> b

let a = something            let b = case something of a -> a + a
in let b = a + a        ==>   in (b,b)
in (b,b)
\end{verbatim}
Also, even if a is not found to be strict in the new context and is
still left as a let, if the branch is not taken (or b is not entered)
the closure for a is not built.
************************************************************************
* *
\subsection{Main floating-inwards code}
* *
************************************************************************
-}
type FreeVarSet = IdSet
type BoundVarSet = IdSet
data FloatInBind = FB BoundVarSet FreeVarSet FloatBind
-- The FreeVarSet is the free variables of the binding. In the case
-- of recursive bindings, the set doesn't include the bound
-- variables.
type FloatInBinds = [FloatInBind]
-- In reverse dependency order (innermost binder first)
fiExpr :: DynFlags
-> FloatInBinds -- Binds we're trying to drop
-- as far "inwards" as possible
-> CoreExprWithFVs -- Input expr
-> CoreExpr -- Result
fiExpr _ to_drop (_, AnnLit lit) = ASSERT( null to_drop ) Lit lit
fiExpr _ to_drop (_, AnnType ty) = ASSERT( null to_drop ) Type ty
fiExpr _ to_drop (_, AnnVar v) = wrapFloats to_drop (Var v)
fiExpr _ to_drop (_, AnnCoercion co) = wrapFloats to_drop (Coercion co)
fiExpr dflags to_drop (_, AnnCast expr (fvs_co, co))
= wrapFloats (drop_here ++ co_drop) $
Cast (fiExpr dflags e_drop expr) co
where
[drop_here, e_drop, co_drop] = sepBindsByDropPoint dflags False [freeVarsOf expr, fvs_co] to_drop
{-
Applications: we do float inside applications, mainly because we
need to get at all the arguments. The next simplifier run will
pull out any silly ones.
-}
fiExpr dflags to_drop ann_expr@(_,AnnApp {})
= mkTicks ticks $ wrapFloats drop_here $ wrapFloats extra_drop $
mkApps (fiExpr dflags fun_drop ann_fun)
(zipWith (fiExpr dflags) arg_drops ann_args)
where
(ann_fun@(fun_fvs, _), ann_args, ticks)
= collectAnnArgsTicks tickishFloatable ann_expr
fun_ty = exprType (deAnnotate ann_fun)
((_,extra_fvs), arg_fvs) = mapAccumL mk_arg_fvs (fun_ty, emptyVarSet) ann_args
-- All this faffing about is so that we can get hold of
-- the types of the arguments, to pass to noFloatIntoRhs
mk_arg_fvs :: (Type, FreeVarSet) -> CoreExprWithFVs -> ((Type, FreeVarSet), FreeVarSet)
mk_arg_fvs (fun_ty, extra_fvs) (_, AnnType ty)
= ((applyTy fun_ty ty, extra_fvs), emptyVarSet)
mk_arg_fvs (fun_ty, extra_fvs) (arg_fvs, ann_arg)
| ASSERT( isFunTy fun_ty ) noFloatIntoRhs ann_arg arg_ty
= ((res_ty, extra_fvs `unionVarSet` arg_fvs), emptyVarSet)
| otherwise
= ((res_ty, extra_fvs), arg_fvs)
where
(arg_ty, res_ty) = splitFunTy fun_ty
drop_here : extra_drop : fun_drop : arg_drops
= sepBindsByDropPoint dflags False (extra_fvs : fun_fvs : arg_fvs) to_drop
{-
Note [Do not destroy the let/app invariant]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Watch out for
f (x +# y)
We don't want to float bindings into here
f (case ... of { x -> x +# y })
because that might destroy the let/app invariant, which requires
unlifted function arguments to be ok-for-speculation.
Note [Floating in past a lambda group]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* We must be careful about floating inside a value lambda.
That risks losing laziness.
The float-out pass might rescue us, but then again it might not.
* We must be careful about type lambdas too. At one time we did, and
there is no risk of duplicating work thereby, but we do need to be
careful. In particular, here is a bad case (it happened in the
cichelli benchmark):
let v = ...
in let f = /\t -> \a -> ...
==>
let f = /\t -> let v = ... in \a -> ...
This is bad as now f is an updatable closure (update PAP)
and has arity 0.
* Hack alert! We only float in through one-shot lambdas,
not (as you might guess) through lone big lambdas.
Reason: we float *out* past big lambdas (see the test in the Lam
case of FloatOut.floatExpr) and we don't want to float straight
back in again.
It *is* important to float into one-shot lambdas, however;
see the remarks with noFloatIntoRhs.
So we treat lambda in groups, using the following rule:
Float in if (a) there is at least one Id,
and (b) there are no non-one-shot Ids
Otherwise drop all the bindings outside the group.
This is what the 'go' function in the AnnLam case is doing.
Urk! if all are tyvars, and we don't float in, we may miss an
opportunity to float inside a nested case branch
-}
fiExpr dflags to_drop lam@(_, AnnLam _ _)
| okToFloatInside bndrs -- Float in
-- NB: Must line up with noFloatIntoRhs (AnnLam...); see Trac #7088
= mkLams bndrs (fiExpr dflags to_drop body)
| otherwise -- Dump it all here
= wrapFloats to_drop (mkLams bndrs (fiExpr dflags [] body))
where
(bndrs, body) = collectAnnBndrs lam
{-
We don't float lets inwards past an SCC.
ToDo: keep info on current cc, and when passing
one, if it is not the same, annotate all lets in binds with current
cc, change current cc to the new one and float binds into expr.
-}
fiExpr dflags to_drop (_, AnnTick tickish expr)
| tickish `tickishScopesLike` SoftScope
= Tick tickish (fiExpr dflags to_drop expr)
| otherwise -- Wimp out for now - we could push values in
= wrapFloats to_drop (Tick tickish (fiExpr dflags [] expr))
{-
For @Lets@, the possible ``drop points'' for the \tr{to_drop}
bindings are: (a)~in the body, (b1)~in the RHS of a NonRec binding,
or~(b2), in each of the RHSs of the pairs of a @Rec@.
Note that we do {\em weird things} with this let's binding. Consider:
\begin{verbatim}
let
w = ...
in {
let v = ... w ...
in ... v .. w ...
}
\end{verbatim}
Look at the inner \tr{let}. As \tr{w} is used in both the bind and
body of the inner let, we could panic and leave \tr{w}'s binding where
it is. But \tr{v} is floatable further into the body of the inner let, and
{\em then} \tr{w} will also be only in the body of that inner let.
So: rather than drop \tr{w}'s binding here, we add it onto the list of
things to drop in the outer let's body, and let nature take its
course.
Note [extra_fvs (1): avoid floating into RHS]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider let x=\y....t... in body. We do not necessarily want to float
a binding for t into the RHS, because it'll immediately be floated out
again. (It won't go inside the lambda else we risk losing work.)
In letrec, we need to be more careful still. We don't want to transform
let x# = y# +# 1#
in
letrec f = \z. ...x#...f...
in ...
into
letrec f = let x# = y# +# 1# in \z. ...x#...f... in ...
because now we can't float the let out again, because a letrec
can't have unboxed bindings.
So we make "extra_fvs" which is the rhs_fvs of such bindings, and
arrange to dump bindings that bind extra_fvs before the entire let.
Note [extra_fvs (2): free variables of rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
let x{rule mentioning y} = rhs in body
Here y is not free in rhs or body; but we still want to dump bindings
that bind y outside the let. So we augment extra_fvs with the
idRuleAndUnfoldingVars of x. No need for type variables, hence not using
idFreeVars.
-}
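-- Added note (not part of the original source), tracing the letrec example
-- above through the code below: f's RHS is a multi-shot lambda, so
-- noFloatIntoExpr holds for it and its free variables (including x#) are added
-- to extra_fvs; sepBindsByDropPoint then sees the x# binding needed at more
-- than one drop point, so it is dumped outside the whole letrec rather than
-- pushed into \z. ...x#...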
fiExpr dflags to_drop (_,AnnLet (AnnNonRec id rhs@(rhs_fvs, ann_rhs)) body)
= fiExpr dflags new_to_drop body
where
body_fvs = freeVarsOf body `delVarSet` id
rhs_ty = idType id
rule_fvs = idRuleAndUnfoldingVars id -- See Note [extra_fvs (2): free variables of rules]
extra_fvs | noFloatIntoRhs ann_rhs rhs_ty = rule_fvs `unionVarSet` rhs_fvs
| otherwise = rule_fvs
-- See Note [extra_fvs (1): avoid floating into RHS]
-- No point in floating in only to float straight out again
-- Ditto ok-for-speculation unlifted RHSs
[shared_binds, extra_binds, rhs_binds, body_binds]
= sepBindsByDropPoint dflags False [extra_fvs, rhs_fvs, body_fvs] to_drop
new_to_drop = body_binds ++ -- the bindings used only in the body
[FB (unitVarSet id) rhs_fvs'
(FloatLet (NonRec id rhs'))] ++ -- the new binding itself
extra_binds ++ -- bindings from extra_fvs
shared_binds -- the bindings used both in rhs and body
-- Push rhs_binds into the right hand side of the binding
rhs' = fiExpr dflags rhs_binds rhs
rhs_fvs' = rhs_fvs `unionVarSet` floatedBindsFVs rhs_binds `unionVarSet` rule_fvs
-- Don't forget the rule_fvs; the binding mentions them!
fiExpr dflags to_drop (_,AnnLet (AnnRec bindings) body)
= fiExpr dflags new_to_drop body
where
(ids, rhss) = unzip bindings
rhss_fvs = map freeVarsOf rhss
body_fvs = freeVarsOf body
-- See Note [extra_fvs (1,2)]
rule_fvs = mapUnionVarSet idRuleAndUnfoldingVars ids
extra_fvs = rule_fvs `unionVarSet`
unionVarSets [ fvs | (fvs, rhs) <- rhss
, noFloatIntoExpr rhs ]
(shared_binds:extra_binds:body_binds:rhss_binds)
= sepBindsByDropPoint dflags False (extra_fvs:body_fvs:rhss_fvs) to_drop
new_to_drop = body_binds ++ -- the bindings used only in the body
[FB (mkVarSet ids) rhs_fvs'
(FloatLet (Rec (fi_bind rhss_binds bindings)))] ++
-- The new binding itself
extra_binds ++ -- Note [extra_fvs (1,2)]
shared_binds -- Used in more than one place
rhs_fvs' = unionVarSets rhss_fvs `unionVarSet`
unionVarSets (map floatedBindsFVs rhss_binds) `unionVarSet`
rule_fvs -- Don't forget the rule variables!
-- Push rhs_binds into the right hand side of the binding
fi_bind :: [FloatInBinds] -- one per "drop pt" conjured w/ fvs_of_rhss
-> [(Id, CoreExprWithFVs)]
-> [(Id, CoreExpr)]
fi_bind to_drops pairs
= [ (binder, fiExpr dflags to_drop rhs)
| ((binder, rhs), to_drop) <- zipEqual "fi_bind" pairs to_drops ]
{-
For @Case@, the possible ``drop points'' for the \tr{to_drop}
bindings are: (a)~inside the scrutinee, (b)~inside one of the
alternatives/default [default FVs always {\em first}!].
Floating case expressions inward was added to fix Trac #5658: strict bindings
not floated in. In particular, this change allows array indexing operations,
which have a single DEFAULT alternative without any binders, to be floated
inward. SIMD primops for unpacking SIMD vectors into an unboxed tuple of unboxed
scalars also need to be floated inward, but unpacks have a single non-DEFAULT
alternative that binds the elements of the tuple. We now therefore also support
floating in cases with a single alternative that may bind values.
-}
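-- Added sketch (not from the original source): the single-alternative equation
-- below fires for, e.g.,
--     case indexIntArray# arr i of n# -> ...n#...
-- (unlifted case binder, side-effect-free scrutinee, exactly one alternative);
-- the case itself becomes a FloatCase and the remaining to_drop bindings keep
-- being pushed into the alternative's RHS.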
fiExpr dflags to_drop (_, AnnCase scrut case_bndr _ [(con,alt_bndrs,rhs)])
| isUnLiftedType (idType case_bndr)
, exprOkForSideEffects (deAnnotate scrut)
-- See PrimOp, Note [PrimOp can_fail and has_side_effects]
= wrapFloats shared_binds $
fiExpr dflags (case_float : rhs_binds) rhs
where
case_float = FB (mkVarSet (case_bndr : alt_bndrs)) scrut_fvs
(FloatCase scrut' case_bndr con alt_bndrs)
scrut' = fiExpr dflags scrut_binds scrut
[shared_binds, scrut_binds, rhs_binds]
= sepBindsByDropPoint dflags False [scrut_fvs, rhs_fvs] to_drop
rhs_fvs = freeVarsOf rhs `delVarSetList` (case_bndr : alt_bndrs)
scrut_fvs = freeVarsOf scrut
fiExpr dflags to_drop (_, AnnCase scrut case_bndr ty alts)
= wrapFloats drop_here1 $
wrapFloats drop_here2 $
Case (fiExpr dflags scrut_drops scrut) case_bndr ty
(zipWith fi_alt alts_drops_s alts)
where
-- Float into the scrut and alts-considered-together just like App
[drop_here1, scrut_drops, alts_drops]
= sepBindsByDropPoint dflags False [scrut_fvs, all_alts_fvs] to_drop
-- Float into the alts with the is_case flag set
(drop_here2 : alts_drops_s) = sepBindsByDropPoint dflags True alts_fvs alts_drops
scrut_fvs = freeVarsOf scrut
alts_fvs = map alt_fvs alts
all_alts_fvs = unionVarSets alts_fvs
alt_fvs (_con, args, rhs) = foldl delVarSet (freeVarsOf rhs) (case_bndr:args)
-- Delete case_bndr and args from free vars of rhs
-- to get free vars of alt
fi_alt to_drop (con, args, rhs) = (con, args, fiExpr dflags to_drop rhs)
okToFloatInside :: [Var] -> Bool
okToFloatInside bndrs = all ok bndrs
where
ok b = not (isId b) || isOneShotBndr b
-- Push the floats inside if there are no non-one-shot value binders
noFloatIntoRhs :: AnnExpr' Var (UniqFM Var) -> Type -> Bool
-- ^ True if it's a bad idea to float bindings into this RHS
-- Precondition: rhs :: rhs_ty
noFloatIntoRhs rhs rhs_ty
= isUnLiftedType rhs_ty -- See Note [Do not destroy the let/app invariant]
|| noFloatIntoExpr rhs
noFloatIntoExpr :: AnnExpr' Var (UniqFM Var) -> Bool
noFloatIntoExpr (AnnLam bndr e)
= not (okToFloatInside (bndr:bndrs))
-- NB: Must line up with fiExpr (AnnLam...); see Trac #7088
where
(bndrs, _) = collectAnnBndrs e
-- IMPORTANT: don't say 'True' for a RHS with a one-shot lambda at the top.
-- This makes a big difference for things like
-- f x# = let x = I# x#
-- in let j = \() -> ...x...
-- in if <condition> then normal-path else j ()
-- If x is used only in the error case join point, j, we must float the
-- boxing constructor into it, else we box it every time which is very bad
-- news indeed.
noFloatIntoExpr rhs = exprIsExpandable (deAnnotate' rhs)
-- We'd just float right back out again...
-- Should match the test in SimplEnv.doFloatFromRhs
{-
************************************************************************
* *
\subsection{@sepBindsByDropPoint@}
* *
************************************************************************
This is the crucial function. The idea is: We have a wad of bindings
that we'd like to distribute inside a collection of {\em drop points};
inside the alternatives of a \tr{case} would be one example of some
drop points; the RHS and body of a non-recursive \tr{let} binding
would be another (2-element) collection.
So: We're given a list of sets-of-free-variables, one per drop point,
and a list of floating-inwards bindings. If a binding can go into
only one drop point (without suddenly making something out-of-scope),
in it goes. If a binding is used inside {\em multiple} drop points,
then it has to go in a you-must-drop-it-above-all-these-drop-points
point.
We have to maintain the order on these drop-point-related lists.
-}
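-- Added worked example (not from the original source): with drop-point FV sets
--     [ {x}, {x,y} ]
-- and candidate floaters [x = ..., y = ...], the y binding is needed only by
-- the second drop point and is sent there, while the x binding is needed at
-- both, so (roughly) it comes back in the first, "drop it above all drop
-- points" component of the result.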
sepBindsByDropPoint
:: DynFlags
-> Bool -- True <=> is case expression
-> [FreeVarSet] -- One set of FVs per drop point
-> FloatInBinds -- Candidate floaters
-> [FloatInBinds] -- FIRST one is bindings which must not be floated
-- inside any drop point; the rest correspond
-- one-to-one with the input list of FV sets
-- Every input floater is returned somewhere in the result;
-- none are dropped, not even ones which don't seem to be
-- free in *any* of the drop-point fvs. Why? Because, for example,
-- a binding (let x = E in B) might have a specialised version of
-- x (say x') stored inside x, but x' isn't free in E or B.
type DropBox = (FreeVarSet, FloatInBinds)
sepBindsByDropPoint _ _is_case drop_pts []
= [] : [[] | _ <- drop_pts] -- cut to the chase scene; it happens
sepBindsByDropPoint dflags is_case drop_pts floaters
= go floaters (map (\fvs -> (fvs, [])) (emptyVarSet : drop_pts))
where
go :: FloatInBinds -> [DropBox] -> [FloatInBinds]
-- The *first* one in the argument list is the drop_here set
-- The FloatInBinds in the lists are in the reverse of
-- the normal FloatInBinds order; that is, they are the right way round!
go [] drop_boxes = map (reverse . snd) drop_boxes
go (bind_w_fvs@(FB bndrs bind_fvs bind) : binds) drop_boxes@(here_box : fork_boxes)
= go binds new_boxes
where
-- "here" means the group of bindings dropped at the top of the fork
(used_here : used_in_flags) = [ fvs `intersectsVarSet` bndrs
| (fvs, _) <- drop_boxes]
drop_here = used_here || not can_push
-- For case expressions we duplicate the binding if it is
-- reasonably small, and if it is not used in all the RHSs
-- This is good for situations like
-- let x = I# y in
-- case e of
-- C -> error x
-- D -> error x
-- E -> ...not mentioning x...
n_alts = length used_in_flags
n_used_alts = count id used_in_flags -- returns number of Trues in list.
can_push = n_used_alts == 1 -- Used in just one branch
|| (is_case && -- We are looking at case alternatives
n_used_alts > 1 && -- It's used in more than one
n_used_alts < n_alts && -- ...but not all
floatIsDupable dflags bind) -- and we can duplicate the binding
new_boxes | drop_here = (insert here_box : fork_boxes)
| otherwise = (here_box : new_fork_boxes)
new_fork_boxes = zipWithEqual "FloatIn.sepBinds" insert_maybe fork_boxes used_in_flags
insert :: DropBox -> DropBox
insert (fvs,drops) = (fvs `unionVarSet` bind_fvs, bind_w_fvs:drops)
insert_maybe box True = insert box
insert_maybe box False = box
go _ _ = panic "sepBindsByDropPoint/go"
floatedBindsFVs :: FloatInBinds -> FreeVarSet
floatedBindsFVs binds = mapUnionVarSet fbFVs binds
fbFVs :: FloatInBind -> VarSet
fbFVs (FB _ fvs _) = fvs
wrapFloats :: FloatInBinds -> CoreExpr -> CoreExpr
-- Remember FloatInBinds is in *reverse* dependency order
wrapFloats [] e = e
wrapFloats (FB _ _ fl : bs) e = wrapFloats bs (wrapFloat fl e)
floatIsDupable :: DynFlags -> FloatBind -> Bool
floatIsDupable dflags (FloatCase scrut _ _ _) = exprIsDupable dflags scrut
floatIsDupable dflags (FloatLet (Rec prs)) = all (exprIsDupable dflags . snd) prs
floatIsDupable dflags (FloatLet (NonRec _ r)) = exprIsDupable dflags r
| da-x/ghc | compiler/simplCore/FloatIn.hs | bsd-3-clause | 23,054 | 0 | 17 | 6,139 | 2,976 | 1,612 | 1,364 | 190 | 4 |
module BigNum where
{-@ type Foo = { v : Integer | 0 <= v && v < 4611686018427387903 * 8 } @-}
{-@ f :: i : Foo -> { o : Foo | i < o } @-}
f :: Integer -> Integer
f i = i * 2
| abakst/liquidhaskell | tests/neg/BigNum.hs | bsd-3-clause | 177 | 0 | 5 | 53 | 27 | 16 | 11 | 3 | 1 |
module HAD.Y2014.M04.D16.Exercise where
{- | reverseMap
No definition, look to types
-}
reverseMap :: [a -> b] -> a -> [b]
reverseMap = undefined
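-- Added usage note (not part of the exercise file): presumably the intended
-- behaviour is, e.g., reverseMap [(+1), (*2)] 3 == [4,6], i.e. apply every
-- function in the list to the one argument.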
| 1HaskellADay/1HAD | exercises/HAD/Y2014/M04/D16/Exercise.hs | mit | 151 | 0 | 7 | 29 | 38 | 24 | 14 | 3 | 1 |
{-# LANGUAGE CPP #-}
module Lib
( someFunc
) where
someFunc :: IO ()
someFunc = putStrLn "someFunc"
#if !WORK
#error Not going to work, sorry
#endif
| AndreasPK/stack | test/integration/tests/335-multi-package-flags/files/src/Lib.hs | bsd-3-clause | 159 | 0 | 6 | 37 | 31 | 19 | 12 | -1 | -1 |
module Language.HPHP.AST (
PHP(..),
Expr(..)
) where
import Data.Text
data PHP = Expr Expr
deriving Show
data Expr = Var Expr
| StringLiteral Text
| Concat Expr Expr
deriving Show
| pikajude/hphp | Language/HPHP/AST.hs | mit | 230 | 0 | 6 | 80 | 64 | 40 | 24 | 10 | 0 |
-- Warm-up and review
-- For the following set of exercises, you are not expected to use folds. These
-- are intended to review material from previous chapters. Feel free to use any
-- syntax or structure from previous chapters that seems appropriate.
-- 1. Given the following sets of consonants and vowels:
stops = "pbtdkg"
vowels = "aeiou"
-- a) Write a function that takes inputs from stops and vowels and makes
-- 3-tuples of all possible stop-vowel-stop combinations. These will not all
-- correspond to real words in English, although the stop-vowel-stop pattern is
-- common enough that many of them will.
combos = [(a,b,c)
| a <- stops,
b <- vowels,
c <- stops]
-- b) Modify that function so that it only returns the combinations that begin
-- with a p.
combos2 = [(a,b,c)
| a <- stops,
b <- vowels,
c <- stops,
a == 'p']
-- c) Now set up lists of nouns and verbs (instead of stops and vowels) and
-- modify the function to make tuples representing possible noun-verb-noun
-- sentences.
combos3 = [(a,b,c)
| a <- nouns,
b <- verbs,
c <- nouns]
where nouns = ["person", "place", "puppy"]
verbs = ["run", "sleep", "punch"]
-- 2. What does the following mystery function do? What is its type? Try to get
-- a good sense of what it does before you test it in the REPL to verify it.
seekritFunc x = div (sum (map length (words x))) (length (words x))
-- this gives a crappy average word length: the integer division throws away
-- the remainder.
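-- Added worked example: seekritFunc "hello world foo"
-- = div (5 + 5 + 3) 3 = div 13 3 = 4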
-- 3. We’d really like the answer to be more precise. Can you rewrite that using
-- fractional division?
seekritFuncDouble x = (/) (fromIntegral (sum (map length (words x)))) (fromIntegral (length (words x)))
-- Rewriting functions using folds
-- In the previous chapter, you wrote these functions using direct recur- sion
-- over lists. The goal now is to rewrite them using folds. Where possible, to
-- gain a deeper understanding of folding, try rewriting the fold version so
-- that it is point-free.
-- The goal here is to converge on the final version where possible. You don’t
-- need to write all variations for each example, but the more variations you
-- write, the deeper your understanding of these functions will become.
-- 1. myOr returns True if any Bool in the list is True.
myOr :: [Bool] -> Bool
myOr [] = False
myOr (x:xs) = x || myOr xs
myOrFold :: [Bool] -> Bool
myOrFold = foldr (||) False
-- 2. myAny returns True if a -> Bool applied to any of the values in the list
-- returns True.
myAny :: (a -> Bool) -> [a] -> Bool
myAny _ [] = False
myAny f (x:xs) = f x || myAny f xs
myAnyFold :: (a -> Bool) -> [a] -> Bool
myAnyFold f = foldr (\x y -> f x || y) False
-- 3. In addition to the recursive and fold based myElem, write a version that
-- uses any.
myElem :: Eq a => a -> [a] -> Bool
myElem _ [] = False
myElem a (x:xs) = x == a || myElem a xs
myElemFold :: Eq a => a -> [a] -> Bool
myElemFold a = foldr (\x y -> x == a || y) False
myElemAny :: Eq a => a -> [a] -> Bool
myElemAny a = myAnyFold (==a)
-- 4. Implement myReverse, don’t worry about trying to make it lazy.
myReverse :: [a] -> [a]
myReverse [] = []
myReverse (x:xs) = myReverse xs ++ [x]
myReverseFold :: [a] -> [a]
myReverseFold = foldr (\x y -> y ++ [x]) []
-- 5. Write myMap in terms of foldr. It should have the same behavior as the
-- built-in map.
myMap :: (a -> b) -> [a] -> [b]
myMap f = foldr (\x y -> f x : y) []
-- 6. Write myFilter in terms of foldr. It should have the same behavior as the
-- built-in filter.
myFilter :: (a -> Bool) -> [a] -> [a]
myFilter f = foldr (\x y -> case f x of
True -> x : y
False -> y) []
-- 7. squish flattens a list of lists into a list
squish :: [[a]] -> [a]
squish = foldr (++) []
-- 8. squishMap maps a function over a list and concatenates the results.
squishMap :: (a -> [b]) -> [a] -> [b]
squishMap f = foldr (\x y -> f x ++ y) []
-- 9. squishAgain flattens a list of lists into a list. This time re-use the
-- squishMap function.
squishAgain :: [[a]] -> [a]
squishAgain = squishMap id
-- 10. myMaximumBy takes a comparison function and a list and returns the
-- greatest element of the list based on the last value that the comparison
-- returned GT for.
myMaximumBy :: (a -> a -> Ordering) -> [a] -> a
myMaximumBy f (x:xs) = foldr (\x y -> case f x y of
LT -> y
_ -> x) x xs
-- 11. myMinimumBy takes a comparison function and a list and returns the least
-- element of the list based on the last value that the comparison returned LT
-- for.
myMinimumBy :: (a -> a -> Ordering) -> [a] -> a
myMinimumBy f (x:xs) = foldr (\x y -> case f x y of
LT -> x
_ -> y) x xs
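-- Added usage examples (not part of the original file):
-- myMaximumBy compare [1, 53, 9001, 10] == 9001
-- myMinimumBy compare [1, 53, 9001, 10] == 1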
| diminishedprime/.org | reading-list/haskell_programming_from_first_principles/10_10.hs | mit | 4,919 | 0 | 13 | 1,280 | 1,177 | 653 | 524 | 61 | 2 |
module Language.Imperia.Compiler.Operation (perform) where
import Processor.Sprockell (Assembly (..), Value (..), OpCode (..))
import Language.Imperia.Compiler.Store
perform :: (Store -> a -> (Store, [Assembly])) -> Store -> OpCode -> a -> a -> (Store, [Assembly])
perform compile store opCode expr1 expr2 =
let
offset = registerOffset store
store' = store { registerOffset = offset + 1 }
store'' = store { registerOffset = offset + 2 }
in
( -- Leave the store as is
store,
-- Evaluate the first expression
-- The result will be at offset + 1
(snd $ compile store' expr1) ++
-- Likewise for the second expression
-- The result for this expression will be at offset + 2
(snd $ compile store'' expr2) ++
-- Perform calculation and store the result into the register
[ Calc opCode (offset + 1) (offset + 2) offset
]
)
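-- Added note (not part of the original source): from the code above, the
-- combined result of the operation appears to end up in register 'offset',
-- with offset + 1 and offset + 2 used as scratch slots for the two operands.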
| thomasbrus/imperia | src/Language/Imperia/Compiler/Operation.hs | mit | 906 | 0 | 12 | 231 | 236 | 138 | 98 | 14 | 1 |
{-# OPTIONS_GHC -fno-warn-missing-fields #-}
{-# LANGUAGE TupleSections, TemplateHaskell #-}
module IHaskell.Display.Rlangqq
( module RlangQQ,
rDisp,
rDisplayAll,
rOutputParsed,
rOutput,
getPlotNames,
getCaptions,
) where
import RlangQQ
import RlangQQ.ParseKnitted
import System.Directory
import System.FilePath
import Data.Maybe
import Data.List
import Text.Read
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as Char
import qualified Data.ByteString.Base64 as Base64
import IHaskell.Display
import IHaskell.Display.Blaze () -- to confirm it's installed
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as H
import Data.Monoid
import Data.Char
import Control.Monad
import Data.Ord
import Data.List.Split
import Text.XFormat.Show hiding ((<>))
import Control.Applicative
import Control.Concurrent
import Data.Monoid
import Data.Typeable
import Control.Concurrent.STM
import Language.Haskell.TH.Quote
-- | same as 'RlangQQ.r', but displays plots at the end too
rDisp = QuasiQuoter { quoteExp = \s -> [| do
result <- $(quoteExp r s)
p <- rDisplayAll
printDisplay p
return result
|] }
rOutput :: IO [Int]
rOutput = do
fs <- mapMaybe (readMaybe <=< stripPrefix "raw" <=< stripSuffix ".md")
<$> getDirectoryContents "Rtmp"
fs' <- forM fs $ \f -> (,f) <$> getModificationTime (showf ("Rtmp/raw"%Int%".md") f)
return $ map snd $ sortBy (flip (comparing fst)) fs'
-- | like 'stripPrefix' except on the end
stripSuffix :: String -> String -> Maybe String
stripSuffix s x = fmap reverse $ stripPrefix (reverse s) $ reverse x
rOutputParsed :: IO [KnitInteraction]
rOutputParsed = do
ns <- rOutput
case ns of
[] -> return []
n : _ -> parseKnitted <$> readFile (showf ("Rtmp/raw"%Int%".md") n)
getPlotNames :: IO [String]
getPlotNames = do
interactions <- rOutputParsed
return [ p | KnitInteraction _ is <- interactions, KnitImage _ p <- is ]
getCaptions :: IO [String]
getCaptions = do
interactions <- rOutputParsed
return [ c | KnitInteraction _ is <- interactions,
KnitImage c _ <- is,
not (isBoringCaption c) ]
-- | true when the caption name looks like one knitr will automatically
-- generate
isBoringCaption :: String -> Bool
isBoringCaption s = maybe False
(all isDigit)
(stripPrefix "plot of chunk unnamed-chunk-" s)
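-- Added examples (not part of the original source):
-- isBoringCaption "plot of chunk unnamed-chunk-3" == True
-- isBoringCaption "sales by month" == False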
rDisplayAll :: IO Display
rDisplayAll = do
ns <- rOutputParsed
imgs <- sequence [ displayInteraction o | KnitInteraction _ os <- ns, o <- os]
display (mconcat imgs)
displayInteraction :: KnitOutput -> IO Display
displayInteraction (KnitPrint c) = display (plain c)
displayInteraction (KnitWarning c) = display (plain c)
displayInteraction (KnitError c) = display (plain c)
displayInteraction (KnitAsIs c) = display (plain c)
displayInteraction (KnitImage cap img) = do
let caption
| isBoringCaption cap = mempty
| otherwise = H.p (H.toMarkup cap)
encoded <- Base64.encode <$> B.readFile img
display $ H.img H.! H.src (H.unsafeByteStringValue
-- assumes you use the default device which is png
(Char.pack "data:image/png;base64," <> encoded))
<> caption
| aostiles/LiveHaskell | ihaskell-display/ihaskell-rlangqq/IHaskell/Display/Rlangqq.hs | mit | 3,295 | 0 | 16 | 691 | 942 | 497 | 445 | 87 | 2 |
module MyGraphics (initializeWorld, callShowWorld) where
import Data.Array.IArray
import Data.Fixed (mod')
import Data.IORef
import Graphics.UI.GLUT
import qualified Parameters as P
import Fitness
import Types
callShowWorld :: IO ()
callShowWorld = mainLoopEvent >> postRedisplay Nothing
initializeWorld :: IORef World -> IO ()
initializeWorld worldRef = do
_ <- getArgsAndInitialize
let pixelsPerUnit = 10
w = pixelsPerUnit * fromIntegral P.width
h = pixelsPerUnit * fromIntegral P.height
initialDisplayMode $= [RGBMode, DoubleBuffered]
initialWindowSize $= Size w h
(Size screenSizeX screenSizeY) <- get screenSize
let initialPos = Position
(fromIntegral (screenSizeX - w) `div` 2)
(fromIntegral (screenSizeY - h) `div` 2)
initialWindowPosition $= initialPos
_ <- createWindow "Evolverbetert v1"
matrixMode $= Projection
loadIdentity
ortho2D 0 (fromIntegral w / fromIntegral pixelsPerUnit)
0 (fromIntegral h / fromIntegral pixelsPerUnit)
displayCallback $= showWorld worldRef
actionOnWindowClose $= MainLoopReturns
showWorld :: IORef World -> IO ()
showWorld worldRef = do
clear [ColorBuffer]
world <- readIORef worldRef
drawSquares world
swapBuffers
drawSquares :: World -> IO ()
drawSquares world = renderPrimitive Quads $ mapM_ drawQuad P.worldCoods
where
ags = agents world
drawQuad :: (Int,Int) -> IO ()
drawQuad (x, y) = do
currentColor $= c
vertex $ Vertex2 x0 y0
vertex $ Vertex2 x1 y0
vertex $ Vertex2 x1 y1
vertex $ Vertex2 x0 y1
where
x0 :: GLint
x0 = fromIntegral x
x1 = fromIntegral x + 1
y0 = fromIntegral y
y1 = fromIntegral y + 1
c = colorHammDist (ags!(x,y)) (env world)
colorHammDist :: Agent -> Env -> Color4 Float
colorHammDist NoAgent _ = Color4 0 0 0 0
colorHammDist ag e = myHSV (1 - relHammDist) 0.5 1
where relHammDist = fromIntegral (hammDist e ag) / fromIntegral P.nrGeneTypes
_colorFit :: Agent -> Env -> Color4 Float
_colorFit NoAgent _ = Color4 0 0 0 0
_colorFit ag e = myHSV (realToFrac fit) 0.7 1
where fit = fitness e ag
-- | Colors from red to green, purple if fitness is optimal
myHSV :: Float -> Float -> Float -> Color4 Float
myHSV h s v
| h' < 1 = plusM (x,0,c)
| h' < 2 = plusM (c,0,x)
| h' < 3 = plusM (c,x,0)
| h' < 4 = plusM (x,c,0)
| otherwise = plusM (0,x,c)
where
plusM (i, j, k) = Color4 (i+m) (j+m) (k+m) 1
h' = h * 4
x = c * (1- abs (h' `mod'` 2 - 1))
c = s * v
m = v - c
-- hsv :: Int -> Float -> Float -> Color4 Float
-- hsv h s v
-- | h' < 1 = plusM (c,x,0) --Color4 (c+m) (x+m) m 1
-- | h' < 2 = plusM (x,c,0) --Color4 (x+m) (c+m) m 1
-- | h' < 3 = plusM (0,c,x) --Color4 m (c+m) (x+m) 1
-- | h' < 4 = plusM (0,x,c) --Color4 m (x+m) (c+m) 1
-- | h' < 5 = plusM (x,0,c) --Color4 (x+m) m (c+m) 1
-- | otherwise = plusM (c,0,x) --Color4 (c+m) m (x+m) 1
-- where
-- plusM (a, b, c) = Color4 (a+m) (b+m) (c+m) 1
-- h' = fromIntegral h / 60
-- c = s * v
-- x = c * (1- abs (h' `mod'` 2 - 1))
-- m = v - c
-- hSVtoRGB :: Int -> Double -> Double -> Color4 GLfloat
-- hSVtoRGB h s v = case h' of
-- 0 -> Color4 c x 0 1
-- 1 -> Color4 x c 0 1
-- 2 -> Color4 0 c x 1
-- 3 -> Color4 0 x c 1
-- 4 -> Color4 x 0 c 1
-- 5 -> Color4 c 0 x 1
-- where
-- h' = h `div` 60
-- c = fromInteger $ floor $ s * v
-- x = c * fromIntegral (1 - abs ((h' `mod` 2) - 1))
| KarimxD/Evolverbetert | src/MyGraphics.hs | mit | 3,861 | 0 | 15 | 1,297 | 1,024 | 528 | 496 | 72 | 1 |
{-# LANGUAGE
ScopedTypeVariables
, TypeApplications
, FlexibleContexts
#-}
module Main
( main
) where
import Data.Word
import Data.Time.Clock
import qualified Graphics.Image.Interface as HIP
import qualified Graphics.Image as HIP
import qualified Graphics.Image.IO as HIP
import qualified Repa
type Pixel = HIP.Pixel HIP.RGBA HIP.Word8
type Image = HIP.Image HIP.VS HIP.RGBA HIP.Word8
type ImageGS = HIP.Image HIP.VS HIP.Y Double
type PixelGS = HIP.Pixel HIP.Y Double
saturate x
| x > 1 = 1
| x < 0 = 0
| otherwise = x
toGrayscale :: Image -> ImageGS
toGrayscale img = HIP.makeImage imgDims (cv . HIP.index img)
where
imgDims = HIP.dims img
cv :: Pixel -> PixelGS
cv p =
-- https://docs.opencv.org/3.4/de/d25/imgproc_color_conversions.html
HIP.PixelY $ saturate $ (fI r / 255) * 0.299 + (fI g / 255) * 0.587 + (fI b / 255) * 0.114
where
fI = fromIntegral @Word8 @Double
r = HIP.getPxC p HIP.RedRGBA
g = HIP.getPxC p HIP.GreenRGBA
b = HIP.getPxC p HIP.BlueRGBA
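-- Added sanity note (not part of the original source): with these weights a
-- pure-white pixel (255,255,255) maps to 0.299 + 0.587 + 0.114 = 1.0 and pure
-- black to 0, so the value is already within the [0,1] range that 'saturate'
-- enforces.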
computeCcorr :: ImageGS -> ImageGS -> HIP.Image HIP.RPU HIP.Y Double
computeCcorr img tmpl = HIP.makeImage resultDims computePx
where
computePx :: (Int, Int) -> HIP.Pixel HIP.Y Double
computePx coord = HIP.PixelY $ computePxNumer coord / sqrt (sumTmpl * sumImg)
where
sumTmpl = computeSqSum tmpl (0,0)
sumImg = computeSqSum img coord
computePxNumer :: (Int, Int) -> Double
computePxNumer (dRow, dCol) =
sum $ do
r <- [0 .. tmplRows - 1]
c <- [0 .. tmplCols - 1]
let imgPx = HIP.getPxC (HIP.index img (dRow + r, dCol + c)) HIP.LumaY
let tmplPx = HIP.getPxC (HIP.index tmpl (r, c)) HIP.LumaY
pure $ imgPx * tmplPx
resultDims = (imgRows - tmplRows + 1, imgCols - tmplCols + 1)
(imgRows, imgCols) = HIP.dims img
(tmplRows, tmplCols) = HIP.dims tmpl
computeSqSum imgX (offRow, offCol) = sum $ do
r <- [0 .. tmplRows - 1]
c <- [0 .. tmplCols - 1]
let v = HIP.getPxC (HIP.index imgX (offRow + r, offCol + c)) HIP.LumaY
pure $ v * v
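-- Added note (not from the original source): this appears to compute
-- normalised cross-correlation, i.e. for each offset (x,y)
--     R(x,y) = sum T(r,c) * I(x+r,y+c)
--              / sqrt( (sum T(r,c)^2) * (sum I(x+r,y+c)^2) )
-- which corresponds to OpenCV's TM_CCORR_NORMED template-matching mode.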
main :: IO ()
main = do
Right (sample :: Image) <- HIP.readImageExact HIP.PNG "sample.png"
Right (templ :: Image) <- HIP.readImageExact HIP.PNG "templ.png"
tBefore <- getCurrentTime
let sampleG = toGrayscale sample
templG = toGrayscale templ
computed = Repa.computeCcorr sampleG templG
resultDims = HIP.dims computed
computed' = HIP.toManifest computed
{-
maxVal = maximumBy (comparing snd) $ do
let (rows, cols) = resultDims
r <- [0 .. rows - 1]
c <- [0 .. cols - 1]
let v = HIP.getPxC (HIP.index computed (r, c)) HIP.LumaY
pure ((r,c), v) -}
tAfter <- HIP.deepSeqImage computed' getCurrentTime
putStrLn $ "Total time in seconds: " <> show (realToFrac @_ @Double (diffUTCTime tAfter tBefore))
HIP.displayImage computed'
_ <- getLine
pure ()
| Javran/misc | hip-match-template/src/Main.hs | mit | 2,954 | 0 | 17 | 761 | 997 | 512 | 485 | 67 | 1 |
module ControlFlow (
-- optionsWindow
---,visualize
makeAnalViewInterface
,convertSimFileWindow
,getRestriction
, ResultType ( CSV, SScanner, Accident, SScrim)
,ioLogger
-- , WindowTypes (NetworkAnalysis, SchemAnalysis, AccidentAnalysis
-- , SchemeBuilder, AnalyseOrExtract, DNAdisplay)
) where
import qualified Graphics.UI.Gtk as G
import qualified Data.ByteString.Lazy.Char8 as L
--import Data.List (find,foldl',nub)
import Data.List (sort,find,delete,foldl', delete,findIndex, nub)
import Data.Maybe(isJust, fromJust,listToMaybe)
import System.Random (newStdGen)
import Data.Char (isAlpha,isDigit,isSpace,isAlphaNum)
import Control.Monad (liftM,liftM2)
import Control.Concurrent (forkIO)
import Data.IORef
--import Diaplay (DataType (..))
import System.Directory (doesFileExist, doesDirectoryExist,createDirectory,removeFile,
getPermissions,setPermissions,writable, readable,getDirectoryContents,createDirectoryIfMissing)
-- my definitions
import DrawingFrame (fileBrowser,mkDnaDisplayFrame,isValid)
--import InterfaceQAWindows (mkResultBox)
import FileIO (getAlreadyExtracted)
import RprGD2 (mml2DsGD)
import LMisc (mkIntervals,tokens)
import Parallel (forkProcess,forkProcessWithScroll,processWithScroll,myTry)
--import Parallel (forkProcess,forkProcessWithScroll,processWithScro,ll,myTry,forkProcessWithID)
import ProcessFile (Alignment (..),display,SchemeRec (..), SchemeInRec (..)
,readSchemeRec1,schemeRec2String)
import HMDIFPrelude (conditions, typesAndDetails,string2SCODE ,Defect (..),
bs2Int',scode2align,comma,splitAtChunkBS,bsReadChr) -- , typesAndDetails,scode2align,(<>))
import ReadFromDataFiles2 (bs2HMDIF2bs_aux)
import RprGD2 (OptimisationType(..))
import FileIO (checkIfExtracted,removeExtracted,csv2HMDIF_aux)
import RprMix2 (OModel) -- ,printElm,Mmodel (MkModel),r_n)
import NiceFork
import GlobalParameters
{--
--
import ControlFlow (makeAnalViewInterface, convertSimFileWindow, ResultType (..)) --- ,W
data LoadFile = HMDIF
| SchemeFile -- HMDIF files
| CommaSeparated Int -- comma separated file, indicating the number of commas
-- in the file
--}
data ResultType = CSV | SScanner | Accident | SScrim -- NScanner | SScanner | Accident | NScrim | SScrim
--- files used in the analysis
--getHmDifpath = do
--hmDifPath = "./SimulationFiles/hmdifPath.txt" --- "./FilePaths/pathsFile.txt"
-- mPath <- liftM (listToMaybe . L.lines ) $ L.readFile
mixfile = "./mmlTdsFiles/mixture.txt"
cutsfile = "./mmlTdsFiles/onlycuts.txt"
seedfile = "./mmlTdsFiles/seed.txt"
roadClasses = "./roadAttributes/roadClasses.txt"
--
-- paths
defectsDir = "./Defects"
hmdifPath = "./SimulationFiles/hmdifPath.txt"
{-
scannerPath = "./SimulationFiles/scannerPath.txt"
scrimPath = "./SimulationFiles/scrimPath.txt"
csvPath = "./SimulationFiles/csvPath.txt"
--
resultsFile = "./SectionResults/results.txt"
--}
roadNamesPath = "./SimulationFiles/roadNamesPath.txt"
--mixfile = "./mmlTdsFiles/mixture.txt"
--
data Scheme = Scheme {
--comboBox :: G.HBox,
condSCmb :: G.ComboBox,
rClassCmb :: G.ComboBox,
rNameCmb :: G.ComboBox,
sCodeCmb :: G.ComboBox,
rCodeCmb :: G.ComboBox,
sStart :: G.Entry,
sEnd :: G.Entry
}
--- get the restriction
-- get the negative slope option
getRestriction :: IO (Maybe Bool)
getRestriction = do
let setRestr bs = if bs == L.pack "2" then Nothing else (Just (bs == L.pack "0"))
(liftM (head . head) $ bsReadChr ',' mixfile) >>= return . setRestr
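-- Added note (not part of the original source): judging from setRestr above,
-- the first comma-separated field of mixfile encodes the slope restriction as
-- "0" -> Just True, "2" -> Nothing, and anything else -> Just False.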
----
getNewScheme :: IO Scheme
getNewScheme = do
combos <- sequence $ replicate 5 G.comboBoxNewText
entries <- sequence $ replicate 2 G.entryNew
let [a,b,c,d,e] = combos
mapM_ (\(a,n) -> G.widgetSetSizeRequest a n (-1)) $ zip combos [260,100,130,100,100,100]
mapM_ (\ent -> do
G.entrySetWidthChars ent 6
G.entrySetMaxLength ent 6) entries
let [e1,e2] = entries
return $ Scheme a b c d e e1 e2
--- get the combos from a scheme
schemeCombos :: Scheme -> [G.ComboBox]
schemeCombos schems = [condSCmb schems,
rClassCmb schems,
rNameCmb schems,
sCodeCmb schems,
rCodeCmb schems]
--schemeEnries
schemeEntries :: Scheme -> [G.Entry]
schemeEntries scm = [sStart scm, sEnd scm]
--
scheme2Strings :: Scheme -> IO [String]
scheme2Strings scm = do
let mStr = maybe "" id
comboStrings <- mapM ((liftM mStr). G.comboBoxGetActiveText) $ schemeCombos scm
liftM (comboStrings ++) . mapM G.entryGetText $ schemeEntries scm
--
ioLogger :: IO (IORef (String -> IO()))
ioLogger = newIORef (\_ -> return ())
--
data Analysis = Analysis {
condsCmb :: G.ComboBox,
cwCmb :: G.ComboBox,
mfYrs :: G.Entry,
trLen :: G.Entry
}
--
anal2Strings :: Analysis -> IO [String]
anal2Strings anl = do
let mStr = maybe "" id
comStr <- liftM mStr . G.comboBoxGetActiveText $ condsCmb anl
liftM (comStr : ) . mapM G.entryGetText $ [mfYrs anl, trLen anl]
--
getNewAnalysis :: IO Analysis
getNewAnalysis = do
[combo, cr] <- sequence $ replicate 2 G.comboBoxNewText
G.widgetSetSizeRequest combo 160 (-1)
entries <- sequence $ replicate 2 G.entryNew
mapM_ (\ent -> do
G.entrySetWidthChars ent 6
G.entrySetMaxLength ent 6) entries
return $ Analysis combo cr (entries !! 0) (entries !! 1)
--------
--- the options for analysis
--data CondType = SCANNER | SCRIM deriving Show
data AnalType = NetworkAnalysis | SchemeAnalysis | SectionAnalysis deriving (Eq, Show)
data ForAnalysis = ForAnalysis {
atype :: AnalType,
fDefect :: String,
fSgSrg :: Maybe (String,String),
fCway :: Maybe String, -- [String],
yrsDst :: (Double,Double),
nSlope :: Maybe Bool,
optType :: OptimisationType -- Rapid , FromData
} deriving (Eq,Show)
-- fDefect,fCway
data Three a b c = Lft a | Mid b | Rgt c
---------------------
-- applying the analysis function to a list of selections
applyAnalysis :: (String -> IO ()) -> (String -> IO ()) -> FilePath -> [ForAnalysis] -> IO ()
applyAnalysis screenLogger write path = mapM_ fanl2AnlFun
--createDirectoryIfMissing False defectsDir
--
where
---
--mkInc a = if a then (L.pack "1") else (L.pack "0")
---
getAlnAndDefect :: String -> Maybe (String, Alignment)
getAlnAndDefect str =
case (string2SCODE str) of
Nothing -> Nothing
Just scd -> Just (str, if scd `elem` crackingParms then Avg else Max)
where
crackingParms = [LTRC,LWCL,LWCR,LECR,LRCR,LMAP,LSUR,LOVD]
---
fanl2AnlFun :: ForAnalysis -> IO ()
fanl2AnlFun fa =
case (getAlnAndDefect $ fDefect fa) of
Nothing -> return ()
Just faa -> do
--L.writeFile mixfile (mkInc $ nSlope fa) -- the negative slope flag
bs2HMDIF2bs_aux (fSgSrg fa)
path
[write, screenLogger, \_ -> return ()] -- a printing function
Nothing -- we do not analyze schemes
(nSlope fa)
faa
(yrsDst fa)
(fCway fa)
{--
Maybe (String, String) -> --- section and sorrogate
FilePath -> -- the filepath with the HMDif file
(String -> IO ()) -> -- function to write to a progress dialog
Maybe SchemeRec -> -- determine whether of not u are analysing schemes
(String,Alignment) ->
(Double, Double) -> -- years and distance
Maybe String -> --- if should use both carraige ways
--}
-----------------------------------------------------------------------
{-- file Paths
hmdifPath = "./FilePaths/hmdif.txt"
schemePath = "./FilePaths/schemeFile.txt"
pathsFile = "./FilePaths/pathsFile.txt"
toAnalFile = "./defectYearsDistance.txt"
--}
setFrameTitle :: G.Frame -> String -> IO ()
setFrameTitle frame str = G.set frame [G.frameLabel G.:= str ]
----------------------------------------------------------------------------------------------------
--- User Interface for analysis
----------------------------------------------------------------------------------------------------
--mkAnalysisDia --- continue from here ---
mkAnalysisDia :: [G.TextBuffer] -> Bool -> Bool -> [String] -> IO MyDialog
mkAnalysisDia buffs analysis isScanner forCmbo = do
mdia <- myDialogNew
modifyIORef (title mdia) (++ "Analysis Options")
anlRef <- newIORef []
myDialogAddWidget mdia =<< mkAnalysysBox anlRef forCmbo
buttons <- mapM G.buttonNewWithMnemonic ["_Analyse","_Cancel"]
mapM (myDialogAddButton mdia) buttons
--- handles for buttons
G.on (buttons !! 1) G.buttonActivated $ G.widgetDestroy (myDia mdia)
---
G.on (buttons !! 0) G.buttonActivated $ do
-- applyAnalysis :: FilePath -> [ForAnalysis] -> IO ()
mPath <- liftM (listToMaybe . L.lines ) $ L.readFile hmdifPath
case mPath of
Nothing -> runMsgDialog (Just "Path Retrieve Error") "Error retrieving HMDIF source path " Error
Just bs -> do
let remJust xs = [a | ys <- xs , isJust ys, let (Just a) = ys]
anlList <- liftM remJust $ readIORef anlRef
if null anlList then
runMsgDialog (Just "Nothing to Analyse") "No options set for analysis" Error
else do
let (deft, pth) = splitAtChunkBS (L.pack ",") bs -- deft
let path = L.unpack pth
let def = L.unpack deft
-- print ("path is: "++ path)
pathExist <- doesFileExist path
if pathExist then do
-- "SCANNER HMDIF" , "SCRIM HMDIF"
let aType = if isScanner then "SCANNER HMDIF" else "SCRIM HMDIF"
if def /= (filter isAlpha aType) then do
let str = "The current HMDIF file is not valid for " ++ aType ++ " files.\
\ Please load a valid HMDIF file using the \
\ 'update source file' option, and try again. "
runMsgDialog (Just "Invalid HMDIF File ") str Error
else do
G.widgetDestroy $ myDia mdia
labl <- G.labelNew Nothing
let wrt string = G.postGUIAsync $ G.labelSetText labl string
let screenLogger = maybe (\_ -> return ()) (\bf str -> G.postGUIAsync $ updateBufferAtEnd bf str) (listToMaybe buffs)
let discp = "Running mml2ds on selected conditions" -- ++ defect
proGdia <- myDialogNew
G.set (myDia proGdia) [G.windowTitle G.:= discp]
processWithScroll (Just proGdia) (Just discp) labl wrt $ applyAnalysis screenLogger wrt path anlList
else do
let str = "HMDIF file does not exist. Please point to a HMDIF file,\
\ using the 'update source file' option, and try again. "
runMsgDialog (Just "File Read Error") str Error
--G.on nextButt G.buttonActivated $ do
--}
return mdia
where
-- get Analysis options from selections
getAnalisiOptions :: String -> -- Bool -> -- Maybe (String, String) ->
Maybe String -> (Int,Int) -> IO (Maybe ForAnalysis)
getAnalisiOptions selectedString cWays (y,d) = do
nSlp <- getRestriction
if anlTyp == "Network" then
return . Just $ ForAnalysis NetworkAnalysis deft Nothing cWays (yy,dd) nSlp optY
else if anlTyp == "Scheme" then
return . Just $ ForAnalysis SchemeAnalysis deft (mSSorg 3) cWays (yy,dd) nSlp optY
else if anlTyp == "Section" then
return . Just $ ForAnalysis SectionAnalysis deft mssN cWays (yy,dd) nSlp optY
else return Nothing
where
optY = FromData
(anlTyp, rest) = break (== ':') (filter (not . isSpace) selectedString)
rss = tokens (dropWhile (not . isAlphaNum) rest) ','
deft = takeWhile isAlphaNum $ rss !! 0
(yy,dd) = (fromIntegral y, fromIntegral d)
splitAtChe = break (== '-')
mssN = Just (rss !! 2, "")
---
mSSorg n = Just (a,tail b) -- ) (
where
(a,b) = break (== '-') (rss !! n)
---
mkAnalysysBox :: IORef [Maybe ForAnalysis] -> [String] -> IO G.VBox
mkAnalysysBox nextRef choices = do
anl <- getNewAnalysis
--negSlopesCB <- G.checkButtonNewWithLabel "Include Negative Slopes"
--- <- G.frameNew
[mainBox,buttonRow] <- sequence . replicate 2 $ G.hBoxNew False 0
G.widgetSetSizeRequest mainBox 700 (-1)
optionsBox <- G.vBoxNew False 1
[entryFrame, optionsFrame] <- sequence $ replicate 2 G.frameNew
G.containerAdd optionsFrame optionsBox
(swn, tBuff) <- makeScrolledEntryArea BScroll False True
G.containerAdd entryFrame swn
seps <- sequence $ replicate 2 G.hSeparatorNew
---
let strList = ["Average treatment length for selected condition ",
"Average years between maintenance interventions "] -- Enter number to limit entry file: "]
let labelEnt a b = labelLeftofWidgetE' a (6 * length a) b True
let clearEntry entry = G.entrySetText entry ""
let entries = [mfYrs anl, trLen anl]
let combo = condsCmb anl
--let options = ["Enter options individually ", "Enter options for all selections "]
--let optimisations = ["Optimise from data: ", "Rapid selection: "]
let mkHTable tf homo sp w = tableWithWidgetList tf w homo sp
--
mapM_ (G.comboBoxAppendText combo) choices
mapM_ (G.comboBoxAppendText $ cwCmb anl) ["CR1","CL1"]
ydCol <- (mkHTable True False 6) =<< mapM (uncurry labelEnt) (zip strList entries)
--- buttons
let buttonNames = ["Advanced Options", " Next "]
[adv, nextButt] <- mapM G.buttonNewWithLabel buttonNames
--mapM_ (\b -> G.widgetSetSizeRequest b 30 (-1)) [adv, nextButt]
mapM_ (\w -> G.boxPackEnd buttonRow w G.PackNatural 3) [nextButt] --, adv]
(analBothCW,nsBox) <- checkBoxWithLeftLabel "Analyse both carriage ways"
----
ckButtons <- makeRadioButtonGroupFromStrings ["","Restrain to positive ",
"Restrain to negative",
"Unrestrained "]
--
let setRst (b,n) = G.on b G.toggled $ do
isTogg <- G.toggleButtonGetActive b
if isTogg then L.writeFile mixfile (L.pack $ show n) else return ()
mapM_ setRst $ zip (tail ckButtons) [0,1 .. ]
----}
selectWayLabel <- G.labelNew (Just "Select carriage way: ")
----
(optsRow,_) <- horizontalWidgetList True (tail ckButtons) True
let setCWoption = G.widgetSetSensitive (cwCmb anl) =<< (liftM not $ G.toggleButtonGetActive analBothCW )
setCWoption >> G.on analBothCW G.toggled setCWoption
--G.boxPackEnd nsBox chkButton G.PackNatural 3
--G.boxPackEnd nsBox (chkButtons !! 1) G.PackNatural 3
G.boxPackEnd nsBox (cwCmb anl) G.PackNatural 3
G.boxPackEnd nsBox selectWayLabel G.PackNatural 3
-- packing into the entrie box --- condsCmb
G.boxPackStart optionsBox combo G.PackNatural 3
--G.boxPackStart optionsBox radiosRow G.PackNatural 3
G.boxPackStart optionsBox (seps !! 0) G.PackNatural 0
G.boxPackStart optionsBox ydCol G.PackNatural 3
G.boxPackStart optionsBox nsBox G.PackNatural 3 -- optRadiosRow
G.boxPackStart optionsBox optsRow G.PackNatural 3
--
G.boxPackStart optionsBox (seps !! 1) G.PackNatural 0
G.boxPackStart optionsBox buttonRow G.PackNatural 3
--- into the main packing box
G.boxPackStart mainBox optionsFrame G.PackNatural 4
G.boxPackStart mainBox entryFrame G.PackGrow 4
--
let updateButt b io = G.on b G.toggled io --
-- handle for the combos, etc --
optionRef <- newIORef []
---
G.on nextButt G.buttonActivated $ do
restr <- liftM (maybe [] (\_ -> "1") . listToMaybe . filter (== True)) $ mapM G.toggleButtonGetActive (tail ckButtons)
options <- anal2Strings anl
let mStr = Just . maybe [] (: [])
let messages = ["select from chosen conditions for analysis",
"enter the average treatment length for selection ",
"enter the years between maintenance interventions. ",
"select an option for restraining slopes"]
let missing = [i | (s,i) <- zip (options ++ [restr]) [0..], null s]
let getMissing xs = [messages !! i | i <- xs]
--- validate entries
let getVWay b = if b then (return Nothing) else liftM mStr . G.comboBoxGetActiveText $ cwCmb anl
---(getVWay =<< G.toggleButtonGetActive chkButton )
let invMsg = [" average treatment length "," years between maintenance interventions " ]
let getValid xs = [invMsg !! i | i <- xs]
let invalid = [i | (s,i) <- zip (tail options) [0 ..], (entVal s) < 0]
----
if null missing then do
-- validate the entries
cway <- getVWay =<< G.toggleButtonGetActive analBothCW
case cway of
Just [] ->
runMsgDialog (Just "No carriage way") "Please select an option for carriage way" Error
anyElse ->
if (null invalid) then do
-- check the carriage way
let toAdd = (options !! 0)
let addedMsg = "You have already added "++ toAdd
let [y,d] = map entVal $ tail options
mAdd <- getAnalisiOptions toAdd (liftM head cway) (y,d)
added <- liftM (filter (== mAdd)) $ readIORef nextRef
if length added > 0 then
runMsgDialog (Just "Already Added") addedMsg Info
else do
modifyIORef nextRef (mAdd :)
updateBufferAtEnd tBuff toAdd
mapM_ clearEntry entries
G.comboBoxRemoveText combo 0
--clearAdd optionRef combo (filter (/= toAdd) choices)
--print mAdd
else do
let msg = "The value entered for" ++ (setErrMsg $ getValid invalid) ++ "is not valid."
runMsgDialog (Just "Invalid Entries") msg Error
else do
let msg = " You did not " ++ (setErrMsg $ getMissing missing)
runMsgDialog (Just "Missing Options") msg Error
-- sel <- G.comboBoxGetActiveText rclass
--
--labelOverWidget :: G.WidgetClass widget => String -> widget -> Maybe Int -> IO G.VBox
labelOverWidget "\t Select options for analysis\t " mainBox (Just 400)
---------------------------------------------------------------------------------------------------
-- User interface with options for analysis and viewing interface
---------------------------------------------------------------------------------------------------
makeAnalViewInterface :: [G.TextBuffer] -> Bool -> IO (Maybe MyDialog)
makeAnalViewInterface buffs anal = do
let pathsFile = "./FilePaths/pathsFile.txt"
if anal then do
exists <- doesFileExist pathsFile
if exists then
(return . Just) =<< makeAnalViewInterface_aux buffs anal
else do
return Nothing
else
(return . Just) =<< makeAnalViewInterface_aux buffs anal -- =<< makeAnalViewInterface_aux analysis
-- originally intended for this view to be the same for analysis and visualisation, but
-- changed my mind. Currently only used for analysis
makeAnalViewInterface_aux :: [G.TextBuffer] -> Bool -> IO MyDialog
makeAnalViewInterface_aux buffs analysis = do
let analType = if analysis then "analysize" else "visualize"
let windowMsg = "Select data to "++ analType
----------------------------------------------------------------
mdia <- myDialogNew
G.widgetSetSizeRequest (myDia mdia) 760 (-1)
-- mainframe <- G.frameNew
-- windowResizable
G.set (myDia mdia) [G.windowResizable G.:= True]
selectedRef <- newIORef []
modifyIORef (title mdia) (++ windowMsg)
----------------------------------------------------------------
---- the scanner or scrim radio butttons
conditionTypes_1 <- makeRadioButtonGroupFromStrings ["", "SCANNER","SCRIM"]
let conditionTypes = tail conditionTypes_1
-- [d1,d2] <- mapM (\st -> G.radioButtonNewWithLabel st) ["", ""]mapM (G.radioButtonNewWithLabelFromWidget d2 )
let (conditionsLabels, options) = unzip typesAndDetails
condTypes1 <- makeRadioButtonGroupFromStrings ("" : conditionsLabels) -- mapM (G.radioButtonNewWithLabelFromWidget d1 )
let condTypes = tail condTypes1
analTypes <- makeRadioButtonGroupFromStrings ["Network","Section"] -- "Scheme/Road",
--
G.widgetSetSensitive (analTypes !! 1) analysis
--G.widgetSetSensitive (analTypes !! 1) (not analysis)
G.widgetSetSensitive (analTypes !! 0) analysis
-- getConditions mbutt Nothing SchemAnalysis condTypes
let (rBUuttons, optTypes) = (conditionTypes ++ condTypes ++ analTypes , map (map display) options)
let wrtr str = G.postGUIAsync $ updateBufferAtEnd (buffs !! 0) str
(optionsBox, endLabel) <- analysisTop wrtr Nothing selectedRef rBUuttons optTypes
-- make optionBox wider than the default
--G.widgetSetSizeRequest optionsBox 850 (-1)
myDialogAddWidget mdia optionsBox
buttons <- mapM G.buttonNewWithMnemonic ["_Continue","_Cancel"]
mapM (myDialogAddButton mdia) buttons
-- add the label at the bottom of the dialog
G.boxPackEnd (lower mdia) endLabel G.PackGrow 4
-- handles for buttons
G.on (buttons !! 1) G.buttonActivated $ G.widgetDestroy (myDia mdia)
G.on (buttons !! 0) G.buttonActivated $ do
let flattn = concat . intercalate ","
selected <- liftM (map (\(a,b) -> a ++ flattn b)) $ readIORef selectedRef
if null selected then do
let nullMsg = "You have not entered any condition to analyse. Please select a \
\ condition or click on 'Cancel' to exit this option"
runMsgDialog (Just "Nothing selected") nullMsg Error
else do
G.widgetDestroy (myDia mdia)
isScanner <- G.toggleButtonGetActive (conditionTypes !! 0)
mkAnalysisDia buffs analysis isScanner selected >>= myDialogShow
--
return mdia
----------------------------------------------------------------------------------------------------
--- creating my analysis interface -- IORef [(ResultType,String, [String])]
----------------------------------------------------------------------------------------------------
analysisTop :: (String -> IO()) -> Maybe String -> IORef [(String, [String])] ->
[G.RadioButton] -> [[String]] -> IO (G.HBox, G.Label)
analysisTop logger mScanner selectedRef rButtons options = do
topBox <- G.vBoxNew False 0
-- message label
msgLabel <- G.labelNew Nothing
--
[topLeftBox, leftBox] <- sequence . replicate 2 $ G.vBoxNew False 0
mainBox <- G.hBoxNew False 0
[condFrame, topFrame, bottomFrame, bottomLeftFrame] <- sequence $ replicate 4 G.frameNew
---------------------------------------------------------------------------------------------
-----
scm <- getNewScheme
-- let scannerConds = head $ schemeCombos scm
let scombos = tail $ schemeCombos scm
let [scannerConds,rclass, rname, scode,rcode ] = schemeCombos scm
--
[conditionComboAddRef,rNameRef, rCodeRef, sCodeRef] <- sequence . replicate 4 $ newIORef []
-- ioRef or the condition type
analTypeRef <- newIORef $ Right $ Left scannerConds -- (Either (scannerConds,rclass,scode))
scanrScrimRef <- newIORef True
-- contineue here
let (scnnr, rest) = splitAt 7 (drop 2 rButtons)
-- update the main combo
let updateCombo (rButt, i) = G.on rButt G.toggled $ clearAdd conditionComboAddRef scannerConds (options !! i)
let clearEntry entry = G.entrySetText entry ""
mapM_ updateCombo (zip scnnr [0,1 ..])
--- update the road class
list <- readSchemeRec1 (\_ -> return ())
----
mapM_ (G.comboBoxAppendText rclass) =<< liftM sort getRClasses
---
G.on rclass G.changed $ do
cls <- liftM (maybe "" id) $ G.comboBoxGetActiveText rclass
let validClasses = filter ( (== cls) . scSrg) list
clearAdd rCodeRef rcode ((nub . map clsCode) validClasses)
G.on rcode G.changed $ do
cls <- liftM (maybe "" id) $ G.comboBoxGetActiveText rcode -- liftM (maybe "" (takeWhile isAlpha)) $
let validCodes = filter ((== cls) . clsCode) list -- classsCode
--let writer = G.labelSetText msgLabel
let string = "Updating entries for road code: "++ cls
let updateEntries = do clearAdd rNameRef rname (nub $ map rdName validCodes)
clearAdd sCodeRef scode (map scnrCode validCodes)
G.labelSetText msgLabel ("Finished " ++ string)
G.labelSetText msgLabel (string ++ " ...")
updateEntries
--------------------------------------------------------------------------------------------------
--- clearAdd cRef combo list
-- condFrame
let frameLabels = ["Condition Type", "Select Conditions","Analysis Type"]
mapM_ (uncurry G.frameSetLabel) $ zip [condFrame,topFrame, bottomFrame] frameLabels
G.containerAdd topFrame topBox
---------------------------------------------------------------------------------------
-- activate top frame for when scanner is selected
-- (rButtons !! 0)
let updateScannrScrim b1 butt = do G.on butt G.toggled $ do
isScanner <- G.toggleButtonGetActive b1
G.widgetSetSensitive topFrame isScanner
modifyIORef scanrScrimRef (\_ -> isScanner)
if butt == b1 then
-- need to check if the scanner path exists and, if it does, set the path file to that
checkAndUpdatePath "SCANNER"
else do
checkAndUpdatePath "SCRIM"
mapM_ (updateScannrScrim (rButtons !! 0)) (take 2 rButtons)
--G.containerAdd condFrame =<< tableWithWidgetList False (take 2 rButtons) True 3
--
[a1,a,b,c,d] <- sequence [ts | (butts ,_) <- mkIntervals rButtons [2,3,3,1,2]
, let ts = tableWithWidgetList False butts True 5]
mapM (uncurry G.containerAdd) [(condFrame, a1),(bottomFrame, d)]
--
let str = maybe "SCANNER Conditions: " ((++ " Conditions: ") . id) mScanner
comboRow <- labelLeftofWidgetE str 120 scannerConds False
-- packing the topBox
mapM_ (\(w,n) -> G.boxPackStart topBox w G.PackNatural n) [(a,0),(b,4)]
G.boxPackStart topBox comboRow G.PackNatural 1
G.boxPackStart topBox c G.PackNatural 1
-- packing the mainBox ----------------------------------------------------------
mapM (\w -> G.boxPackStart topLeftBox w G.PackNatural 6) [condFrame, topFrame, bottomFrame]
--G.boxPackEnd mainBox leftPanes G.PackGrow 3
(leftPanes, entries) <- displayPane True
buttons <- mapM G.buttonNewWithLabel [" >>> "," Clear "]
(box, lframe,lvbox) <- schemeAndSection scm buttons
-- desenditive some stuff
G.widgetSetSensitive lvbox False
G.widgetSetSensitive topFrame False
--
forkIO $ showAlreadyAnal (entries !! 0)
--
G.containerAdd bottomLeftFrame box
-- packing the main box
G.boxPackStart leftBox topLeftBox G.PackGrow 3 -- bottomLeftFrame
G.boxPackStart leftBox bottomLeftFrame G.PackGrow 3
---
G.boxPackEnd mainBox leftPanes G.PackGrow 3
G.boxPackStart mainBox leftBox G.PackGrow 3
------------------------------------------------
updateConditionType rest scm lvbox lframe analTypeRef
-- handle for adding buttons
G.on (buttons !! 0) G.buttonActivated $ do
errOrOK <- verfyOptions scm scanrScrimRef analTypeRef
case errOrOK of
Right errorMessage -> do
let error = "You have not entered: " ++ errorMessage
runMsgDialog (Just "Input Error") error Error
Left xs -> do
let (hs, rs) = (head xs, tail xs)
--modifyIORef selectedRef ((hs,rs) :)
--let emptyMessage = "There is nothing to add"
let selectedMsg = "You have already selected "++ hs ++ " " ++ show rs
selected <- liftM (filter (== (hs, rs))) $ readIORef selectedRef
if length selected > 0 then
runMsgDialog (Just "Already Selected") selectedMsg Info
else do
-- Nothing -> runMsgDialog (Just "Empty Options") emptyMessage Info
let flattn = concat . intercalate ","
updateBufferAtEnd (entries !! 1) (hs ++ flattn rs)
mapM_ clearEntry $ schemeEntries scm
modifyIORef selectedRef ((hs,rs) :)
--return ()
--- option for clearing the
G.on (buttons !! 1) G.buttonActivated $ do
mapM_ clearEntry $ schemeEntries scm -- clear the entries
modifyIORef selectedRef (\_ -> []) -- empty the ioref
clearTextBuffer (entries !! 1) -- clear the buffer
------------------------------------------------
return (mainBox,msgLabel)
where -- scheme2Strings :: Scheme -> IO [String]
verfyOptions schm sScrimRef analRef = do -- returns Either (Three [String] [String] [String]) String
isScanner <- readIORef sScrimRef
let typeName = if isScanner then "SCANNER" else "SCRIM"
let strings = [typeName ++ " condition", "Road class","Road Name",typeName ++" section code",
"Road code","Start of subsection","End of subsection"]
let getMissing xs = [strings !! i | i <- xs]
input <- scheme2Strings schm
--
let missing = [i | (s,i) <- zip input [0..], null s]
-- Three a b c = Lft a | Mid b | Rgt c
aref <- readIORef analRef
case aref of
Left _ -> do
if null missing then
return $ Left (("Scheme: "):input)
else if (not isScanner) then
return $ Left ("Scheme: " : "SFC - SCRIM Parameter" : (tail input))
else
return $ Right . setErrMsg $ getMissing missing
Right (Left cm) -> do
let miss = filter (\i -> i == 0 ) missing
if (null miss) then
return $ Left ["Network: ",input !! 0]
else if (not isScanner) then
return $ Left ["Network: " , "SFC - SCRIM Parameter"]
else
return $ Right . setErrMsg $ getMissing miss
Right (Right _ ) -> do
let miss = filter (\i -> i == 0 || i == 1 || i == 3) missing
let smiss = filter (\i -> i == 1 || i == 3) missing
if (null miss) then
return $ Left ["Section: ",input !! 0, input !! 1, input !! 3]
else if (null smiss && not isScanner) then
return $ Left ["Section: ","SFC - SCRIM Parameter", input !! 1, input !! 3]
else
if isScanner then
return $ Right . setErrMsg $ getMissing miss
else
return $ Right . setErrMsg $ getMissing smiss
---
updateConditionType butts scm lvbx frame2 analRef = do
let [b1,b3] = butts
let [network,section] = butts
let combos = schemeCombos scm
let entries = schemeEntries scm
G.on b1 G.toggled $ do
active <- G.toggleButtonGetActive b1--print "section"
if active then do
rnamesExist <- doesFileExist roadNamesPath
if rnamesExist then
return ()
else do
let message = "There is no file from which to extract the roadnames and \
\conditions for the filters. Would you like to update the roadNames path? "
runDecisionMessageWindow (Just "No Roadnames") message $ convertSimFileWindow logger
--runMsgDialog (Just "No Roadnames") nullMsg Error
--- continue regardless
G.widgetSetSensitive lvbx False
modifyIORef analRef (\_ -> Right . Left $ head combos )
else return ()
{-
G.on b2 G.toggled $ do
active <- G.toggleButtonGetActive b2--print "section"
if active then do
G.widgetSetSensitive lvbx True
G.widgetSetSensitive frame2 True
mapM_ (flip G.widgetSetSensitive True) combos
mapM_ (flip G.widgetSetSensitive True) entries
modifyIORef analRef (\_ -> Left scm)
else return ()
--}
G.on section G.toggled $ do
-- check the roadnames path and give a message if any path was loaded
active <- G.toggleButtonGetActive section--print "section"
if active then do
G.widgetSetSensitive lvbx True
let hs = [ (c,i/= 2) | (c,i) <- zip (tail combos) [1 .. ]]
--print $ map snd hs
G.widgetSetSensitive lvbx True
--mapM_ (uncurry G.widgetSetSensitive ) hs
mapM_ (flip G.widgetSetSensitive True) combos
G.widgetSetSensitive frame2 False
modifyIORef analRef (\_ -> Right $ Right (combos !! 0, combos !! 1, combos !! 3) )
else return ()
--
displayPane :: Bool -> IO (G.VPaned , [G.TextBuffer])
displayPane topFrameSensitive = do
(sws, entries) <- liftM unzip . sequence . replicate 2 $ makeScrolledEntryArea BScroll False True
-- (bsw, bottomEntry) <- makeScrolledEntryArea BScroll False True
frames <- sequence $ replicate 2 G.frameNew
mapM_ (uncurry G.containerAdd) $ zip frames sws
let frameLabels = ["Conditions already analysed " ,"Conditions selected for analysis"]
mapM_ (uncurry G.frameSetLabel) $ zip frames frameLabels
G.set (frames !! 0) [G.widgetSensitive G.:= topFrameSensitive ]
vpane <- vPanedArea (frames !! 0) (frames !! 1) 100
return (vpane, entries)
--
schemeAndSection :: Scheme -> [G.Button] -> IO (G.HBox, G.Frame, G.VBox)
schemeAndSection scm buttons = do
let scombos = tail $ schemeCombos scm
let [rclass, rname, scode,rcode ] = scombos
let entries = schemeEntries scm
let entryLabels = ["Start of subsection:", "End of subsection:"]
let comboLabels = ["Road class:", "Road name: ", "Section code:", "Road code:"]
--
comboRows <- mapM (\(s,w) -> labelLeftofWidgetE s 83 w False) (zip comboLabels scombos)
entryRows <- mapM (\(s,w) -> labelLeftofWidgetE' s 100 w False) (zip entryLabels entries)
-- the main HBox
[mainBox,frameBox,optionsRow ] <- sequence . replicate 3 $ G.hBoxNew False 0
[mVBox, leftVBox, rightVBox] <- sequence . replicate 3 $ G.vBoxNew False 0
optionFrames <- sequence $ replicate 2 G.frameNew
--
mapM_ (uncurry G.containerAdd) (zip optionFrames [leftVBox, rightVBox])
--optionsRow <- tableWithWidgetList False optionFrames False 36
G.boxPackStart optionsRow (optionFrames !! 0) G.PackGrow 3
G.boxPackStart optionsRow (optionFrames !! 1) G.PackNatural 3
--
buttonList <- tableWithWidgetList True buttons True 16
-- packing the intermediate boxes
mapM (\w -> G.boxPackStart rightVBox w G.PackNatural 5) entryRows
mapM (\w -> G.boxPackStart leftVBox w G.PackGrow 2) [xs | (xs,i) <- zip comboRows [1 .. ], i /= 2 ]
----------------------------------------------------------------------------------------------------
-- packing the mainVBox
G.boxPackStart mVBox (comboRows !! 1) G.PackNatural 3
G.boxPackStart mVBox optionsRow G.PackNatural 3
-- packing into the main box
G.boxPackEnd mainBox buttonList G.PackNatural 3
G.boxPackEnd mainBox mVBox G.PackGrow 3
return (mainBox, optionFrames !! 1, mVBox)
-- get road classes
getRClasses :: IO [String]
getRClasses = do
--let path = "./roadAttributes/roadClasses.txt"
exists <- doesFileExist roadClasses
if exists then
L.readFile roadClasses >>= return . map L.unpack . L.lines
else
return []
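-- note: given the use of L.lines above, the roadClasses file is expected to be
-- plain text with one road class per line (e.g. "Motorway", "A Road")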
-- show already analysed
showAlreadyAnal :: G.TextBuffer -> IO ()
showAlreadyAnal tbuff = do
let resultsFile = "./SectionResults/results.txt"
exists <- doesFileExist resultsFile
if exists then do
lists <- liftM (map L.unpack . nub . L.lines) $ L.readFile resultsFile
mapM_ (updateBufferAtEnd tbuff) lists
else
return ()
-- loading paths
checkAndUpdatePath :: String -> IO ()
checkAndUpdatePath str
| str == "SCANNER" = do
exists <- doesFileExist (paths !! 0)
if exists then do
L.writeFile (paths !! 1) =<< L.readFile (paths !! 0)
-- analyseSimFileWindow buffers
else do
            -- message that the csv path does not exist and prompt to load a csv path
let string = "SCANNER data not loaded. Would you like to load a SCANNER file?"
runDecisionMessageWindow Nothing string $ convertSimFileWindow logger
-- runMsgDialog Nothing string Error
| str == "SCRIM" = do
exists <- doesFileExist (paths !! 2)
if exists then do
L.writeFile (paths !! 1) =<< L.readFile (paths !! 2)
-- analyseSimFileWindow buffers
else do
            -- message that the csv path does not exist and prompt to load a csv path
let string = "SCRIM data not loaded. Would you like to load a SCRIM file?"
runDecisionMessageWindow Nothing string $ convertSimFileWindow logger
--runMsgDialog Nothing string Error
| otherwise = return ()
where
paths = ["./SimulationFiles/scannerPath.txt"
, "./SimulationFiles/hmdifpath.txt"
, "./SimulationFiles/scrimPath.txt"]
--- convenience function to set a message from a list of strings
setErrMsg :: [String] -> String
setErrMsg [] = []
setErrMsg [a] = a
setErrMsg [a,b] = a ++ " nor " ++ b
setErrMsg (x: xs) = x ++", " ++ setErrMsg xs
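-- worked examples (these follow directly from the equations above):
--   setErrMsg []                          == ""
--   setErrMsg ["SCANNER"]                 == "SCANNER"
--   setErrMsg ["SCANNER", "SCRIM"]        == "SCANNER nor SCRIM"
--   setErrMsg ["CSV", "SCANNER", "SCRIM"] == "CSV, SCANNER nor SCRIM"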
------------------
--- a vbox with a label over the widget
labelOverWidget :: G.WidgetClass widget => String -> widget -> Maybe Int -> IO G.VBox
labelOverWidget string widg mw = do
box <- G.vBoxNew False 0
label <- maybe (G.labelNew (Just string)) (flip newWrapedLabel string) mw
G.boxPackStart box label G.PackNatural 2
G.boxPackStart box widg G.PackNatural 0
return box
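-- illustrative use: labelOverWidget "Start year:" someEntry (Just 100) packs a
-- label (wrapped at 100 px via newWrapedLabel) above someEntry, while passing
-- Nothing for the width uses a plain, unwrapped label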
--
-- a label of a specified width with the text wrapped
newWrapedLabel :: Int -> String -> IO G.Label
newWrapedLabel width string = do
label <- G.labelNew (Just string)
G.widgetSetSizeRequest label width (-1)
G.set label [G.miscXalign G.:= 0.1]
-- G.labelSetJustify label G.JustifyCenter
G.labelSetLineWrap label True
return label
--
-----------------------------------------------------------------------------------------------------
------ load simulation file for conversion to hmdif  -----------------------------------------------
convertSimFileWindow :: (String -> IO()) -> IO ()
convertSimFileWindow wrtr = do
win <- G.windowNew
G.set win [G.windowTitle G.:= "Update Source Files", G.windowResizable G.:= True,
G.windowDefaultWidth G.:= 500, G.windowDefaultHeight G.:= 100]
mainBox <- G.vBoxNew False 3
G.set win [G.containerChild G.:= mainBox]
-- entry with the path of the simulation file
[fileEntry, yrEntry] <- sequence $ replicate 2 G.entryNew
parmBox <- labelLeftofWidget "Enter start year: " yrEntry True
--
mapM_ (\ent -> do
G.entrySetWidthChars ent 6
G.entrySetMaxLength ent 6) [yrEntry]
(combo,cBox) <- comboBoxWithLeftLabel "Choose a condition: "
G.boxPackEnd parmBox cBox G.PackGrow 3
--
{- roadNamesPath
scannerPath = "./SimulationFiles/scannerPath.txt"
scrimPath = "./SimulationFiles/scrimPath.txt"
csvPath = "./SimulationFiles/hmdifpath.txt"
--}
-------------------------------------------------
let paths = ["./SimulationFiles/csvPath.txt" -- hmdifpath
,"./SimulationFiles/scannerPath.txt"
, roadNamesPath --
-- , "./SimulationFiles/SectionAttributePath.txt"
, "./SimulationFiles/scrimPath.txt"]
pathsRef <- newIORef ""
--data PathType = HMDIFF | RoadNames | Simulation | SectionAttribute deriving (Eq , Show)
--disp PathType -> String
--disp HMDIFF = RoadNames , SectionAttribute, Simulation
let ptypes = ["CSV" , "SCANNER HMDIF" , "Road Names" , "SCRIM HMDIF"]
let buttonNames = [" Load file ", " Convert ", " Close "]
[loadButt,convetButt, cancelButt] <- mapM G.buttonNewWithLabel buttonNames
--- options
--let sectAttributes = "./roadAttributes/SECTION_ATTRIBUTES.txt"
-- let roadFile = "./roadAttributes/roads.txt"
dummy <- G.radioButtonNewWithLabel "Load File"
fileButtons <- mapM (G.radioButtonNewWithLabelFromWidget dummy) ptypes
let radios = fileButtons
let [simOpt , scannerOpt,roadAttributsOpt,scrimOpt] = fileButtons
--let buttLs ps rs = [p | (p,r) <- zip ps rs, let tt = unsafePerformIO $ G.toggleButtonGetActive r, tt == True]
let buttLs ps rs = map (\(a,b) -> (a, G.toggleButtonGetActive b)) $ zip ps rs
let updateBrowseButton rbutt = (G.on rbutt G.toggled $ do
active <- G.toggleButtonGetActive rbutt
if active then
case (findIndex ((== rbutt) . snd) $ zip ptypes radios) of
Nothing -> return ()
Just i -> do
modifyIORef pathsRef (\_ -> paths !! i )
--print (paths !! i)
let blabel a = "Load "++ a ++ " File"
G.buttonSetLabel loadButt $ blabel (ptypes !! i)
--print =<< readIORef pathsRef
---
if rbutt == simOpt then do
G.widgetSetSensitive parmBox active
G.buttonSetLabel convetButt " Convert "
else do
G.widgetSetSensitive parmBox False
G.buttonSetLabel convetButt " Ok "--
else return ()
) >> return ()
-- mapM_ (G.radioButtonSetGroup simOpt) (tail fileButtons) -- simOpt
mapM_ updateBrowseButton radios
-------------------------------------------------
mapM_ (G.comboBoxAppendText combo . show) conditions
-----
--- top of the box
G.hBoxNew False 3 >>= \hbox -> do
sep <- G.hSeparatorNew
---
hbox2 <- tableWithWidgetList False (fileButtons) True 4 -- G.hBoxNew False 3
vsep <- G.vSeparatorNew
---
G.boxPackStart hbox loadButt G.PackNatural 1
G.boxPackStart hbox fileEntry G.PackGrow 1
---
G.boxPackStart mainBox hbox2 G.PackNatural 2
G.boxPackStart mainBox sep G.PackNatural 5
G.boxPackStart mainBox hbox G.PackNatural 3
G.on loadButt G.buttonActivated $ do
fileBrowser (Just fileEntry) >>= G.dialogRun >> return ()
-- bottom of the box
pregressPane <- G.textBufferNew Nothing
inputSpace <- G.textViewNewWithBuffer pregressPane
--updateBufferAtEnd :: G.TextBuffer -> String -> IO()
frame <- G.frameNew
G.hBoxNew False 3 >>= \hbox -> do
G.containerAdd frame inputSpace
[leftBox, rightBox] <- sequence $ replicate 2 (G.vBoxNew False 2)
mapM_ (\b -> G.boxPackStart rightBox b G.PackNatural 2) [convetButt, cancelButt]
G.boxPackStart leftBox parmBox G.PackNatural 1
G.boxPackStart leftBox frame G.PackGrow 3
G.boxPackStart hbox leftBox G.PackGrow 2
G.boxPackStart hbox rightBox G.PackNatural 2
G.boxPackStart mainBox hbox G.PackGrow 5
-- handles
G.on convetButt G.buttonActivated $ do
path <- G.entryGetText fileEntry
let rpath = (reverse path)
let fileName = takeWhile (/= '/') rpath
if (null fileName) || (not $ isValid path) then
runMsgDialog Nothing "The path entered is invalid" Error
else do
simulation <- G.toggleButtonGetActive simOpt
if simulation then do
index <- G.comboBoxGetActive combo
if index > 0 then do
year <- G.entryGetText yrEntry
case bs2Int' (L.pack year) of
Nothing ->
runMsgDialog Nothing "The year entered is invalid " Error
Just n ->
if n < 1000 then
runMsgDialog Nothing "The year entered is invalid " Error
else -- do
myTry $ do
let newYear = "12/04/"++year
outPathFile <- readIORef pathsRef
if (not $ null outPathFile) then do
-- print outPathFile
mHmdifOK <- csv2HMDIF_aux path outPathFile (conditions !! index) newYear
case mHmdifOK of
Nothing -> do
let outFile = (takeWhile (/= '.') (reverse fileName)) ++ ".hmd"
let m1 = "HMDIF file successfully created\n"
let m2 = "output written to " ++ outFile
mapM_ (updateBufferAtEnd pregressPane) [m1,m2]
Just str ->
runMsgDialog Nothing str Error
else do
                                                    let str = "You did not select a valid path type to load."
                                                    runMsgDialog (Just "No path selected") str Error
else
runMsgDialog Nothing "Please select a condition for the HMDIF file. " Error
-----
else do
let sel bs = [a | (a,b) <- bs, b]
sel <- liftM sel . mapM liftP $ buttLs ptypes radios
--
if (not $ null sel) then do
let hmtypes = ["SCANNER HMDIF" , "SCRIM HMDIF"]
let selected = (sel !! 0)
--print sel
if selected `elem` (tail ptypes) then do -- , "]
--- mapM G.toggleButtonGetActive
--print selected
let ending = dropWhile (/= '.') (reverse fileName)
--print ending
let end = if selected `elem` hmtypes then ".hmd" else ".txt"
if ending /= end then do
let message = "The path entered is not correct. Select a .hmd file for\
\ HMDIF or a .txt file for Road Names."
runMsgDialog (Just "Incorrect File Selected") message Error
else do
                            -- need to do some stuff to see if the hmdif was a scrim or a scanner file and clear the defects folder
-- and the results for the file selected
let nodefect = L.pack $ filter (not . isSpace) selected
let m1 = selected ++ " file at " ++ path ++ " \nsuccessfully loaded.\n"
outPathFile <- readIORef pathsRef
if (not $ null outPathFile) then do
L.writeFile outPathFile (L.append nodefect (L.cons ',' (L.pack path)))
updateBufferAtEnd pregressPane m1
if end == ".txt" then do
--wrt <- ioLogger
forkIO $ setRoadClasses wrtr
return ()
else
return ()
--removeDirectory defectsDir
else do
                                                let str = "You did not select a path type to load."
                                                runMsgDialog (Just "No path selected") str Error
else
runMsgDialog Nothing "Unknown Error" Error
else do
                                    let str = "You did not select a path type to load."
                                    runMsgDialog (Just "No path selected") str Error
G.on cancelButt G.buttonActivated $ G.widgetDestroy win
G.widgetShowAll win
where
liftP :: Monad m => (a, m b) -> m (a,b)
liftP (a, b) = do
bb <- b
return (a,bb)
---
| rawlep/EQS | sourceCode/ControlFlow.hs | mit | 53,136 | 743 | 18 | 18,459 | 10,718 | 5,919 | 4,799 | 703 | 21 |
{-# LANGUAGE GADTs #-}
module MonotoneFramework where
import Ast
import Data.Set (Set, intersection, difference)
import qualified Data.Set as Set
import Data.Map (Map, findWithDefault, (!))
import qualified Data.Map as Map
import AnalysisTools
import Lattice
data MonotoneFramework a where
MonotoneInstance :: AbstractSet a => {
bottom :: a,
order :: a -> a -> Bool,
leastUpperBound :: a -> a -> a,
transferFunction :: Statement -> a -> a,
flowF :: Set FlowElement,
extreLabE :: Set Label,
iota :: a,
bg :: BlockGraph
} -> MonotoneFramework a
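-- The MonotoneInstance record above bundles the usual ingredients of a monotone
-- framework, as used in solveMFP below: the property lattice ('bottom', the
-- partial order 'order' and the join 'leastUpperBound'), a per-statement
-- 'transferFunction', the flow 'flowF' with extremal labels 'extreLabE' and
-- extremal value 'iota', and the block graph 'bg' mapping labels to statements.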
data MFP a where
MFP :: {
circle :: Map Label a,
dot :: Map Label a
} -> MFP a
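-- In the MFP record above, 'circle' is the fixpoint at block entry (MFP∘) and
-- 'dot' the value after one further application of the transfer function (MFP•);
-- both are filled in by solveMFP below.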
solveMFP :: (AbstractSet a, Eq a) => MonotoneFramework a -> MFP a
solveMFP monotone =
let iterateSolver [] analy = analy
iterateSolver ((Intra l l') : ws) analy =
if not $ new `lessThan` old
then let newWorkList = allFlowStart l' flw ++ ws
newAnalysis = Map.insert l' (new `join` old) analy
in iterateSolver newWorkList newAnalysis
else iterateSolver ws analy
where lStmt = g ! l
new = func lStmt $ findWithDefault bottm l analy
old = findWithDefault bottm l' analy
resultAnalysis = iterateSolver workList initAnalysis
in MFP {
circle = resultAnalysis,
dot = transMany resultAnalysis
}
where flw = flowF monotone
workList = Set.toList flw
--workList = [x | l <- extremalLables ,x <- allFlowStart l flw]
extremalLables = Set.toList $ extreLabE monotone
initAnalysis = Map.fromList $ zip extremalLables $ repeat (iota monotone)
func = transferFunction monotone
g = bg monotone
lessThan = order monotone
bottm = bottom monotone
join = leastUpperBound monotone
transMany = Map.mapWithKey (\k a -> func (g ! k) a)
| fiigii/dataflow | MonotoneFramework.hs | mit | 1,899 | 0 | 16 | 539 | 558 | 306 | 252 | -1 | -1 |
module Main where
import Control.Applicative
import Data.Tree
import Data.Monoid
import Data.Generics
-- | a tree containing integers
exTree1 :: Tree Int
exTree1 = Node (1 :: Int) [Node 2 [], Node 3 [], Node 4 [Node 5 [], Node 6 [Node 7 []]]]
-- | exTree1 converted to a tree of strings
exTree2 :: Tree String
exTree2 = show <$> exTree1
printTree :: Show a => Tree a -> IO ()
printTree t = putStrLn (drawTree (fmap show t))
foldWithDirectChildren :: (a -> a -> a) -> a -> Tree a -> Tree a
foldWithDirectChildren f z (Node a s) = Node (a `f` foldl f z (map rootLabel s)) s
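-- for example, foldWithDirectChildren (+) 0 exTree1 turns the root label 1 into
-- 1 + (2 + 3 + 4) = 10 and leaves the subtrees unchanged; the SYB traversals in
-- main then apply this rewrite at every node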
main :: IO ()
main = do
printTree exTree1
-- drawback: cannot deduce a type
printTree (everywhere (mkT (foldWithDirectChildren (+) 0 :: Tree Int -> Tree Int)) exTree1)
printTree (everywhere' (mkT (foldWithDirectChildren (+) 0 :: Tree Int -> Tree Int)) exTree1)
-- if type mismatches, nothing will happen
-- (this might potentially be bad)
printTree (everywhere' (mkT (foldWithDirectChildren (+) 0 :: Tree Int -> Tree Int)) exTree2)
let plusStr a b
| null a = b
| null b = a
| otherwise = a ++ "|" ++ b
-- the different strategies
printTree (everywhere (mkT (foldWithDirectChildren plusStr "")) exTree2)
printTree (everywhere' (mkT (foldWithDirectChildren plusStr "")) exTree2)
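  -- note: everywhere rewrites bottom-up while everywhere' rewrites top-down,
  -- which is why the two strategies above print different trees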
| Javran/misc | syb-play/src/Main.hs | mit | 1,335 | 0 | 14 | 304 | 517 | 257 | 260 | 25 | 1 |
module Leob00 where
leob fs = xs where xs = fmap ($ xs) fs
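-- this is the classic "loeb" combinator, leob :: Functor f => f (f a -> a) -> f a:
-- every element may refer to the final result, giving spreadsheet-style evaluation,
-- e.g. leob [length, \xs -> head xs + 1] == [2, 3]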
| HaskellForCats/HaskellForCats | leob00.hs | mit | 62 | 0 | 8 | 17 | 28 | 16 | 12 | 2 | 1 |
module Fasta (toFasta, fastaEx) where
data Fasta = Fasta { header :: String, body :: String }
fastaEx :: String
fastaEx = ">Rosalind_1\n\
\GATTACA\n\
\>Rosalind_2\n\
\TAGACCA\n\
\>Rosalind_3\n\
\ATACA>Rosalind_6404\n\
\CCTGCGGAAGATCGGCACTAGAATAGCCAGAACCGTTTCTCTGAGGCTTCCGGCCTTCCC\n\
\TCCCACTAATAATTCTGAGG\n\
\>Rosalind_5959\n\
\CCATCGGTAGCGCATCCTTAGTCCAATTAAGTCCCTATCCAGGCGCTCCGCCGAAGGTCT\n\
\ATATCCATTTGTCAGCAGACACGC\n\
\>Rosalind_0808\n\
\CCACCCTCGTGGTATGGCTAGGCATTCAGGAACCGGAGAACGCTTCAGACCAGCCCGGAC\n\
\TGGGAACCTGCGGGCAGTAGGTGGAAT\n\
\>Rosalind_10\n\
\ATGGTCTACATAGCTGACAAACAGCACGTAGCAATCGGTCGAATCTCGAGAGGCATATGGTCACATGATCGGTCGAGCGTGTTTCAAAGTTTGCGCCTAG\n\
\>Rosalind_12\n\
\ATCGGTCGAA\n\
\>Rosalind_15\n\
\ATCGGTCGAGCGTGT\n\
\>Rosalind_24\n\
\TCAATGCATGCGGGTCTATATGCAT\n\
\>Rosalind_99\n\
\AGCCATGTAGCTAACTCAGGTTACATGGGGATGACCCCGCGACTTGGATTAGAGTCTCTTTTGGAATAAGCCTGAATGATCCGAGTAGCATCTCAG\n\
\>sp|B5ZC00|SYG_UREU1 Glycine--tRNA ligase OS=Ureaplasma urealyticum serovar 10 (strain ATCC 33699 / Western) GN=glyQS PE=3 SV=1\n\
\MKNKFKTQEELVNHLKTVGFVFANSEIYNGLANAWDYGPLGVLLKNNLKNLWWKEFVTKQ\n\
\KDVVGLDSAIILNPLVWKASGHLDNFSDPLIDCKNCKARYRADKLIESFDENIHIAENSS\n\
\NEEFAKVLNDYEISCPTCKQFNWTEIRHFNLMFKTYQGVIEDAKNVVYLRPETAQGIFVN\n\
\FKNVQRSMRLHLPFGIAQIGKSFRNEITPGNFIFRTREFEQMEIEFFLKEESAYDIFDKY\n\
\LNQIENWLVSACGLSLNNLRKHEHPKEELSHYSKKTIDFEYNFLHGFSELYGIAYRTNYD\n\
\LSVHMNLSKKDLTYFDEQTKEKYVPHVIEPSVGVERLLYAILTEATFIEKLENDDERILM\n\
\DLKYDLAPYKIAVMPLVNKLKDKAEEIYGKILDLNISATFDNSGSIGKRYRRQDAIGTIY\n\
\CLTIDFDSLDDQQDPSFTIRERNSMAQKRIKLSELPLYLNQKAHEDFQRQCQK"
toFasta :: String -> [Fasta]
-- a minimal line-based parser (sketch): every line starting with '>' opens a new
-- record whose header is the rest of that line and whose body is the concatenation
-- of the following lines up to the next '>' line
toFasta s = go (lines s)
  where
    go (('>' : name) : rest) =
      let (bodyLines, more) = break isHeader rest
      in  Fasta name (concat bodyLines) : go more
    go (_ : rest) = go rest
    go [] = []
    isHeader l = take 1 l == ">"
module Solidran.Fasta
( parse
) where
import Data.Map (Map)
import Solidran.List (splitBy)
import qualified Data.Map as Map
parseSingle :: String -> (String, String)
parseSingle inp =
let (l:r) = lines inp
in (l, concat r)
parse :: String -> Map String String
parse inp =
let bks = filter (/="") . splitBy (=='>') $ inp
in Map.fromList (map parseSingle bks)
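-- illustrative example (assuming splitBy splits on the matching character and
-- drops it, as the filter (/="") above suggests):
--   parse ">A\nGAT\nTACA\n>B\nCCC\n" == Map.fromList [("A", "GATTACA"), ("B", "CCC")]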
| Jefffrey/Solidran | src/Solidran/Fasta.hs | mit | 389 | 0 | 12 | 91 | 161 | 87 | 74 | 13 | 1 |
import Control.Monad (guard)
import Data.Char (toUpper)
import Data.List (intercalate)
isIncreasingTuple :: (Ord a) => (a,a) -> Bool -- this is our first type signature, for the function isIncreasingTuple. It constrains the tuple argument's element type a to the typeclass Ord (more details on this to come) and says the function returns a boolean value
isIncreasingTuple (x, y) = x < y -- this function compares two values within a tuple and returns a boolean
tupleToList :: (Ord a) => (a,a) -> [a]
tupleToList (x,y) = [x,y] -- this function converts a tuple to a list
convertTuplesToListsWithValuesThatAreIncreasing :: (Ord a) => [(a,a)] -> [[a]]
convertTuplesToListsWithValuesThatAreIncreasing [(x,y), (z,w)] = map tupleToList (filter isIncreasingTuple [(x,y), (z,w)]) -- this is an example that demonstrates nesting function calls
-- SHOULD WE DO AN EXAMPLE WHERE THE ABOVE IS COMPOSED INSTEAD? TRIED PLAYING AROUND WITH THIS RAN INTO SOME ISSUES
add :: Num a => (a, a) -> a
add (x, y) = x + y -- this is an uncurried function, which is a style less preferred by haskell programmers
curriedAdd :: Num a => a -> a -> a
curriedAdd = \x -> \y -> x + y -- this is the exact same function as add, except it represents a curried function that involves closures, more details on these concepts to come
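-- the Prelude functions curry and uncurry convert between the two styles, e.g.
-- curry add 5 8 == add (5,8) and uncurry curriedAdd (5,8) == curriedAdd 5 8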
capitals :: [(String, String)]
capitals =
[ ("CA", "Sacramento")
, ("HI", "Honolulu")
, ("WA", "Olympia")
] -- creates a dictionary of states and capitals
-- NOT SURE IF tellMeAbout IS TOO COMPLEX IN THIS SET OF EXAMPLES
tellMeAbout :: String -> String
tellMeAbout state =
case (lookup state capitals) of -- checks to see if a capital is in the dictionary via lookup, which returns a Maybe String representing a capital if it is there and Nothing if it is not (more on cases and monads to come)
Just city -> "The capital of " ++ state ++ " is " ++ city -- should the monad contain a string, it is unwrapped to a value of type Just String (more on this later) and concatenated to the following message
Nothing -> "I don't know anything about " ++ state -- should the monad contain Nothing, what is returned is the following message concatenated to the state name
data Light = RED | AMBER | GREEN deriving (Eq, Show) -- creates a new algebraic datatype called Light, whose possible values are RED, AMBER, and GREEN, and which derives from the typeclasses Eq and Show
squares :: Floating a => [a] -> [a]
squares range = [x ** 2 | x <- range] -- this is an example of a list comprehension, whose result represents all the squares of values within a range, and these squares belong to the typeclass Floating; [0..10] is known as a range
notDivisibleByThreeAndFive :: Integral a => [a] -> [a]
notDivisibleByThreeAndFive range = [x | x <- range, x `mod` 3 /= 0 && x `mod` 5 /= 0] -- list comprehension whose result represents all values in the range that are divisible by neither 3 nor 5; SHOULD WE GO INTO DIFF BETWEEN DIV AND MOD HERE? OR MAYBE ELSEWHERE OR TELL THE READERS TO GO BACK AND RESEARCH ON THEIR OWN?
twoToThePowerOf :: (Integral b1, Num b2) => [b1] -> [b2]
twoToThePowerOf powers = map (2^) powers -- maps the left section (2^), i.e. the infix operator ^ with its left operand 2 already supplied, over the list, raising 2 to each power in the list
joinPhrases :: String -> [String] -> String
joinPhrases separator phrases = intercalate separator phrases -- intercalates, or joins, all the values of the list with the given separator string (such as " & ")
capitalize :: String -> String
capitalize phrase = map toUpper phrase -- strings in Haskell are just lists of Chars, so map can be applied to it in order to convert it to all-caps
joinAndCapitalizedPhrases :: String -> [String] -> String
joinAndCapitalizedPhrases separator phrases = joinPhrases separator (map capitalize phrases) -- another example of nesting function calls
main = do
guard $ convertTuplesToListsWithValuesThatAreIncreasing [(1,2), (3,5)] == [[1,2],[3,5]]
guard $ convertTuplesToListsWithValuesThatAreIncreasing [(1,2), (2,1)] == [[1,2]]
guard $ curriedAdd 5 8 == add (5,8)
guard $ tellMeAbout "NY" == "I don't know anything about NY"
guard $ tellMeAbout "CA" == "The capital of CA is Sacramento"
  guard $ show RED == "RED"
  guard $ show AMBER == "AMBER"
  guard $ show GREEN == "GREEN"
guard $ RED == RED
guard $ AMBER == AMBER
guard $ GREEN == GREEN
guard $ GREEN /= AMBER
guard $ AMBER /= RED
guard $ RED /= GREEN
guard $ squares [0..10] == [0.0,1.0,4.0,9.0,16.0,25.0,36.0,49.0,64.0,81.0,100.0]
guard $ notDivisibleByThreeAndFive [0..30] == [1,2,4,7,8,11,13,14,16,17,19,22,23,26,28,29]
guard $ twoToThePowerOf [3,5,8,2,1] == [8,32,256,4,2]
guard $ joinPhrases " & " ["swim", "bike", "run"] == "swim & bike & run"
guard $ joinAndCapitalizedPhrases "& " ["swim", "bike", "run"] == "SWIM & BIKE & RUN"
| rtoal/ple | haskell/tuples_functions_algebraictypes.hs | mit | 5,004 | 6 | 11 | 974 | 1,197 | 654 | 543 | 56 | 2 |
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
module DataLoader.CSV (
parseCSV,
parseCSVstruc
) where
import qualified Data.ByteString.Lazy as BL
import qualified Data.Vector as V
import qualified Data.List as L
import qualified Data.Map.Lazy as M
import Maps.DataType
import qualified Data.Csv as C
-- |The polymorphic data class CSVParser enables the direct parsing of CSVs into datasets
class CSVParser a where
-- | decode : Decodes a CSV into a Vector of tuples with a String (code) and the specified polymorphic type
decode :: BL.ByteString -> Either String (V.Vector (String, a))
-- | decoderel : Decodes a CSV into a Vector of triples with two String (codes) and the specified polymorphic type
decoderel :: BL.ByteString -> Either String (V.Vector (String, String, a))
    -- | parseCSV : Parses a CSV into a dataset of the given Contextable type. The second parameter, an integer, defines
-- the length of the set of codes. However, only length one and two are implemented (entities and simple relations).
-- Therefore, if the integer is other than 1 or 2, an EntitySet will be generated as it is the default one.
parseCSV :: (Contextable a) => BL.ByteString -> Int -> DataSet a
parseCSV file 2 = let
v = decoderel file
l = V.toList $ applyData v V.empty
in generateRelationset l
parseCSV file i = let
v = decode file
l = V.toList $ applyData v V.empty
in generateDataset l
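    -- illustrative input rows (inferred from the decode/decoderel instances below):
    -- entity data comes from lines like  "GBR",42.0  while relation data, used by
    -- parseCSV f 2, comes from lines like  "GBR","FRA",42.0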
-- | Instance of CSVParser for StvNominal : implementation of decoderel and decode
instance CSVParser StvNominal where
decoderel file = C.decode C.NoHeader file::Either String (V.Vector (String, String, StvNominal))
decode file = C.decode C.NoHeader file::Either String (V.Vector (String, StvNominal))
-- | Instance of CSVParser for StvOrdinal : implementation of decoderel and decode
instance CSVParser StvOrdinal where
decoderel file = C.decode C.NoHeader file::Either String (V.Vector (String, String, StvOrdinal))
decode file = C.decode C.NoHeader file::Either String (V.Vector (String, StvOrdinal))
-- | Instance of CSVParser for StvRatio : implementation of decoderel and decode
instance CSVParser StvRatio where
decoderel file = C.decode C.NoHeader file::Either String (V.Vector (String, String, StvRatio))
decode file = C.decode C.NoHeader file::Either String (V.Vector (String, StvRatio))
-- | parseCSVstruc : Function to load a structure of regions, as it is provided by the World Bank
parseCSVstruc :: BL.ByteString -> [(String, [String])]
parseCSVstruc file = let
v = C.decode C.NoHeader file::Either String (V.Vector (String, String))
l = V.toList $ applyData v V.empty
grouped = parseMap l
parseMap list = foldl (\map (code, son) -> M.insertWith (mixRecord) code [son] map) (M.empty) list
mixRecord original new = original L.++ new
in M.toList grouped
-- | applyData : Utility function to extract from an Either a b the value placed in the Right position.
-- It returns that value if a Right comes in, and otherwise the default supplied as the second argument.
applyData :: Either b a -> a -> a
applyData (Right x) _ = x
applyData _ v = v
| sc14lga/Maps | src/DataLoader/CSV.hs | gpl-2.0 | 3,105 | 6 | 13 | 542 | 753 | 405 | 348 | 42 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{- |
Module : $Header$
Description : interface to Reduce CAS
Copyright : (c) Dominik Dietrich, DFKI Bremen, 2010
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable (uses type-expression in class instances)
Interface for Reduce CAS system.
-}
module CSL.Reduce_Interface where
import Common.AS_Annotation
import Common.Id
import Common.ProverTools (missingExecutableInPath)
import Common.Utils (getEnvDef)
import Logic.Prover
import CSL.AS_BASIC_CSL
import CSL.ASUtils
import CSL.Parse_AS_Basic
import CSL.Lemma_Export
import Control.Monad (replicateM_)
import Data.Time (midnight)
import Data.Maybe (maybeToList)
import Data.List (intercalate)
import qualified Data.Map as Map
import System.IO
import System.Process
{- ----------------------------------------------------------------------
Connection handling
---------------------------------------------------------------------- -}
-- | A session is a process connection
class Session a where
outp :: a -> Handle
inp :: a -> Handle
err :: a -> Maybe Handle
err = const Nothing
proch :: a -> Maybe ProcessHandle
proch = const Nothing
-- | The simplest session
instance Session (Handle, Handle) where
inp = fst
outp = snd
-- | Better use this session to properly close the connection
instance Session (Handle, Handle, ProcessHandle) where
inp (x, _, _) = x
outp (_, x, _) = x
proch (_, _, x) = Just x
-- | Left String is success, Right String is failure
lookupRedShellCmd :: IO (Either String String)
lookupRedShellCmd = do
reducecmd <- getEnvDef "HETS_REDUCE" "redcsl"
-- check that prog exists
noProg <- missingExecutableInPath reducecmd
let f = if noProg then Right else Left
return $ f reducecmd
-- | connects to the CAS, prepares the streams and sets initial options
connectCAS :: String -> IO (Handle, Handle, Handle, ProcessHandle)
connectCAS reducecmd = do
putStrLn "succeeded"
(inpt, out, errh, pid) <- runInteractiveCommand $ reducecmd ++ " -w"
hSetBuffering out NoBuffering
hSetBuffering inpt LineBuffering
hPutStrLn inpt "off nat;"
hPutStrLn inpt "load redlog;"
hPutStrLn inpt "rlset reals;"
-- read 7 lines
replicateM_ 7 $ hGetLine out
putStrLn "done"
return (inpt, out, errh, pid)
-- | closes the connection to the CAS
disconnectCAS :: Session a => a -> IO ()
disconnectCAS s = do
hPutStrLn (inp s) "quit;"
case proch s of
Nothing -> return ()
{- this is always better, because it closes also the shell-process,
hence use a Session-variant with ProcessHandle! -}
Just ph -> waitForProcess ph >> return ()
putStrLn "CAS disconnected"
return ()
sendToReduce :: Session a => a -> String -> IO ()
sendToReduce sess = hPutStrLn (inp sess)
{- ----------------------------------------------------------------------
Prover specific
---------------------------------------------------------------------- -}
-- | returns the name of the reduce prover
reduceS :: String
reduceS = "Reduce"
{- | returns a basic proof status for conjecture with name n
where [EXPRESSION] represents the proof tree. -}
openReduceProofStatus :: String -> [EXPRESSION] -> ProofStatus [EXPRESSION]
openReduceProofStatus n = openProofStatus n reduceS
closedReduceProofStatus :: Ord pt => String -- ^ name of the goal
-> pt -> ProofStatus pt
closedReduceProofStatus goalname proof_tree =
ProofStatus
{ goalName = goalname
, goalStatus = Proved True
, usedAxioms = []
, usedProver = reduceS
, proofTree = proof_tree
, usedTime = midnight
, tacticScript = TacticScript "" }
{-
For Quantifier Elimination:
off nat; -- pretty-printing switch
load redlog;
rlset reals;
rlqe(exp...);
-}
{- ----------------------------------------------------------------------
Reduce Pretty Printing
---------------------------------------------------------------------- -}
exportExps :: [EXPRESSION] -> String
exportExps l = intercalate "," $ map exportExp l
-- | those operators declared as infix in Reduce
infixOps :: [String]
infixOps = [ "+", "-", "/", "**", "^", "=", "<=", ">=", "<", ">", "*", "and"
, "impl", "or"]
-- | Exports an expression to Reduce format
exportExp :: EXPRESSION -> String
exportExp (Var token) = tokStr token
exportExp (Op s _ exps@[e1, e2] _)
| elem (simpleName s) infixOps =
concat ["(", exportExp e1, simpleName s, exportExp e2, ")"]
| otherwise = concat [simpleName s, "(", exportExps exps, ")"]
exportExp (Op s _ [] _) = simpleName s
exportExp (Op s _ exps _) = concat [simpleName s, "(", exportExps exps, ")"]
exportExp (List exps _) = "{" ++ exportExps exps ++ "}"
exportExp (Int i _) = show i
exportExp (Rat d _) = show d
exportExp (Interval l r _) = concat [ "[", show l, ",", show r, "]" ]
-- exportExp e = error $ "exportExp: expression not supported: " ++ show e
-- | exports command to Reduce Format
exportReduce :: Named CMD -> String
exportReduce namedcmd = case sentence namedcmd of
Cmd "simplify" exps -> exportExp $ head exps
Cmd "ask" exps -> exportExp $ head exps
Cmd cmd exps -> cmd ++ "(" ++ exportExps exps ++ ")"
_ -> error "exportReduce: not implemented for this case" -- TODO: implement
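-- e.g. a sentence  Cmd "solve" [e1, e2]  is rendered as  solve(e1,e2) , while the
-- "simplify" and "ask" commands print only their first argument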
{- ----------------------------------------------------------------------
Reduce Parsing
---------------------------------------------------------------------- -}
-- | removes the leading Reduce line number (e.g. "4: ") and any surrounding whitespace from the beginning of the string
skipReduceLineNr :: String -> String
skipReduceLineNr s = dropWhile (`elem` " \n") $ tail
$ dropWhile (/= ':') s
-- | try to get an EXPRESSION from a Reduce string
redOutputToExpression :: String -> Maybe EXPRESSION
redOutputToExpression = parseExpression () . skipReduceLineNr
{- ----------------------------------------------------------------------
Reduce Commands
---------------------------------------------------------------------- -}
cslReduceDefaultMapping :: [(OPNAME, String)]
cslReduceDefaultMapping =
let idmapping = map (\ x -> (x, show x))
in (OP_pow, "**") :
idmapping (Map.keys $ Map.delete OP_pow operatorInfoNameMap)
{- | reads characters from the specified output handle until the next result is
complete, indicated by a terminating $ when pretty-printing is switched off with off nat; -}
getNextResultOutput :: Handle -> IO String
getNextResultOutput out = do
b <- hIsEOF out
if b then return "" else do
c <- hGetChar out
if c == '$' then return [] else do
r <- getNextResultOutput out
return (c : r)
procCmd :: Session a => a -> Named CMD
-> IO (ProofStatus [EXPRESSION], [(Named CMD, ProofStatus [EXPRESSION])])
procCmd sess cmd = case cmdstring of
"simplify" -> cassimplify sess cmd
"ask" -> casask sess cmd
"divide" -> casremainder sess cmd
"rlqe" -> casqelim sess cmd
"factorize" -> casfactorExp sess cmd
"int" -> casint sess cmd
"solve" -> cassolve sess cmd
_ -> error "Command not supported"
where Cmd cmdstring _ = sentence cmd
-- | sends the given string to the CAS, reads the result and tries to parse it.
evalString :: Session a => a -> String -> IO [EXPRESSION]
evalString sess s = do
putStrLn $ "Send CAS cmd " ++ s
hPutStrLn (inp sess) s
res <- getNextResultOutput (outp sess)
putStrLn $ "Result is " ++ res
putStrLn $ "Parsing of --" ++ skipReduceLineNr res ++ "-- yields "
++ show (redOutputToExpression res)
return $ maybeToList $ redOutputToExpression res
-- | wrap evalString into a ProofStatus
procString :: Session a => a -> String -> String -> IO (ProofStatus [EXPRESSION])
procString h axname s = do
res <- evalString h s
let f = if null res then openReduceProofStatus else closedReduceProofStatus
return $ f axname res
-- | factors a given expression over the reals
casfactorExp :: Session a => a -> Named CMD -> IO (ProofStatus [EXPRESSION],
[(Named CMD, ProofStatus [EXPRESSION])])
casfactorExp sess cmd =
do
proofstatus <- procString sess (senAttr cmd) $ exportReduce cmd ++ ";"
return (proofstatus, [exportLemmaFactor cmd proofstatus])
-- | solves a single equation over the reals
cassolve :: Session a => a -> Named CMD -> IO (ProofStatus [EXPRESSION],
[(Named CMD, ProofStatus [EXPRESSION])])
cassolve sess cmd =
do
proofstatus <- procString sess (senAttr cmd) $ exportReduce cmd ++ ";"
return (proofstatus, [])
-- | simplifies a given expression over the reals
cassimplify :: Session a => a -> Named CMD -> IO (ProofStatus [EXPRESSION],
[(Named CMD, ProofStatus [EXPRESSION])])
cassimplify sess cmd = do
proofstatus <- procString sess (senAttr cmd) $ exportReduce cmd ++ ";"
return (proofstatus, [exportLemmaSimplify cmd proofstatus])
-- | asks value of a given expression
casask :: Session a => a -> Named CMD -> IO (ProofStatus [EXPRESSION],
[(Named CMD, ProofStatus [EXPRESSION])])
casask sess cmd = do
proofstatus <- procString sess (senAttr cmd) $ exportReduce cmd ++ ";"
return (proofstatus, [exportLemmaAsk cmd proofstatus])
-- | computes the remainder of a division
casremainder :: Session a => a -> Named CMD -> IO (ProofStatus [EXPRESSION],
[(Named CMD, ProofStatus [EXPRESSION])])
casremainder sess cmd =
do
proofstatus <- procString sess (senAttr cmd) $ exportReduce
(makeNamed (senAttr cmd) (Cmd "divide" args)) ++ ";"
return (proofstatus, [exportLemmaRemainder cmd proofstatus])
where Cmd _ args = sentence cmd
-- | integrates the given expression
casint :: Session a => a -> Named CMD -> IO (ProofStatus [EXPRESSION],
[(Named CMD, ProofStatus [EXPRESSION])])
casint sess cmd =
do
proofstatus <- procString sess (senAttr cmd) $ exportReduce cmd ++ ";"
return (proofstatus, [exportLemmaInt cmd proofstatus])
-- | performs quantifier elimination of a given expression
casqelim :: Session a => a -> Named CMD -> IO (ProofStatus [EXPRESSION],
[(Named CMD, ProofStatus [EXPRESSION])])
casqelim sess cmd =
do
proofstatus <- procString sess (senAttr cmd) $ exportReduce cmd ++ ";"
return (proofstatus, [exportLemmaQelim cmd proofstatus])
-- | declares an operator, such that it can used infix/prefix in CAS
casDeclareOperators :: Session a => a -> [EXPRESSION] -> IO ()
casDeclareOperators sess varlist = do
hPutStrLn (inp sess) $ "operator " ++ exportExps varlist ++ ";"
hGetLine (outp sess)
return ()
-- | declares an equation x := exp
casDeclareEquation :: Session a => a -> CMD -> IO ()
casDeclareEquation sess (Ass c def) =
do
let e1 = exportExp $ opDeclToOp c
e2 = exportExp def
putStrLn $ e1 ++ ":=" ++ e2
hPutStrLn (inp sess) $ e1 ++ ":=" ++ e2 ++ ";"
res <- getNextResultOutput (outp sess)
putStrLn $ "Declaration Result: " ++ res
return ()
casDeclareEquation _ _ =
error "casDeclareEquation: not implemented for this case" -- TODO: implement
{- ----------------------------------------------------------------------
Reduce Lemma Export
---------------------------------------------------------------------- -}
exportLemmaGeneric :: Named CMD -> ProofStatus [EXPRESSION] ->
(Named CMD, ProofStatus [EXPRESSION])
exportLemmaGeneric namedcmd ps =
(makeNamed lemmaname lemma, closedReduceProofStatus lemmaname [mkOp "Proof" []])
where Cmd _ exps = sentence namedcmd
lemma = Cmd "=" [head exps, head (proofTree ps)]
lemmaname = ganame namedcmd
exportLemmaQelim :: Named CMD -> ProofStatus [EXPRESSION] ->
(Named CMD, ProofStatus [EXPRESSION])
exportLemmaQelim = exportLemmaGeneric
-- | generates the lemma for cmd with result ProofStatus
exportLemmaFactor :: Named CMD -> ProofStatus [EXPRESSION] ->
(Named CMD, ProofStatus [EXPRESSION])
exportLemmaFactor = exportLemmaGeneric
exportLemmaSolve :: Named CMD -> ProofStatus [EXPRESSION] ->
(Named CMD, ProofStatus [EXPRESSION])
exportLemmaSolve = exportLemmaGeneric
exportLemmaSimplify :: Named CMD -> ProofStatus [EXPRESSION] ->
(Named CMD, ProofStatus [EXPRESSION])
exportLemmaSimplify = exportLemmaGeneric
exportLemmaAsk :: Named CMD -> ProofStatus [EXPRESSION] ->
(Named CMD, ProofStatus [EXPRESSION])
exportLemmaAsk = exportLemmaGeneric
exportLemmaRemainder :: Named CMD -> ProofStatus [EXPRESSION] ->
(Named CMD, ProofStatus [EXPRESSION])
exportLemmaRemainder = exportLemmaGeneric
exportLemmaInt :: Named CMD -> ProofStatus [EXPRESSION] ->
(Named CMD, ProofStatus [EXPRESSION])
exportLemmaInt = exportLemmaGeneric
| nevrenato/HetsAlloy | CSL/Reduce_Interface.hs | gpl-2.0 | 12,991 | 0 | 15 | 2,847 | 3,434 | 1,741 | 1,693 | 229 | 8 |
module Flowskell.Lib.Jack where
import qualified Sound.JACK as Jack
import qualified Sound.JACK.Audio as JA
import qualified Sound.JACK.Exception as JackExc
import Sound.JACK (NFrames(NFrames), Process, Client, Port)
import qualified Control.Monad.Exception.Synchronous as Sync
import qualified Control.Monad.Trans.Cont as MC
import qualified Control.Monad.Trans.Class as Trans
import Data.Foldable (forM_, )
import Foreign.Storable (sizeOf, peek, )
import Foreign.Ptr (nullPtr, )
import Foreign.C.Error (eOK, )
import qualified System.IO as IO
import Data.Array.Base (getNumElements, )
import Data.Array.Storable (readArray)
import Control.Concurrent
import Control.Monad hiding (forM_)
import System.Posix.Process
import System.Posix.IO
import System.Posix.Types (Fd)
import Data.Char (chr, ord)
import Control.Applicative (pure)
import Language.Scheme.Types
encode :: Double -> Char
encode dbl = chr (truncate ((dbl + 1.0) / 2.0 * 255.0))
decode :: Char -> Double
decode c = realToFrac ((toRational (ord c) / 255.0 * 2) - 1.0)
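-- e.g. encode 0.0 == chr 127 and decode (chr 127) is about -0.004, so each sample
-- travels over the pipe quantised to 8 bits (a resolution of 1/255)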
getActivation :: (MVar Double) -> [LispVal] -> IO LispVal
getActivation stvar [] = fmap Float (readMVar stvar)
initJack :: IO [(String, [LispVal] -> IO LispVal)]
initJack = do
-- In this function we fork twice.
--
-- The first fork is a real system fork to provide us with two seperate
-- processes. This is needed because the Jack clients run unstable when it
-- runs together with OpenGLs mainLoop.
-- For now, both proccesses communicate with a pipe.
-- TODO: Exit process when pipe is closed/broken
(rfd, wfd) <- createPipe
forkProcess $ capture wfd "Flowskell" ["input"]
-- The following fork just creates a thread which will read from the pipe
-- and store the result in an MVar.
stvar <- newMVar 0.0
forkIO $ forever $ do
(str, cnt) <- fdRead rfd 10
case str of
"" -> return 0.0
_ -> swapMVar stvar (decode $ head str)
-- Finally, return a Scheme function to access the MVar
return [("snd-level", getActivation stvar)]
capture :: Fd -> String -> [String] -> IO ()
capture wfd name portNames =
Jack.handleExceptions $ flip MC.runContT return $ do
client <- MC.ContT $ Jack.withClientDefault name
inputs <- mapM (MC.ContT . Jack.withPort client) portNames
Trans.lift $ setProcess wfd client inputs
Trans.lift $ Jack.withActivation client $ Trans.lift $ do
putStr $ "started " ++ name ++ "..."
Jack.waitForBreak
setProcess ::
(JackExc.ThrowsErrno e) =>
Fd ->
Client ->
[Port JA.Sample Jack.Input] ->
Sync.ExceptionalT e IO ()
setProcess wfd client input =
flip (Jack.setProcess client) nullPtr =<<
(Trans.lift $ Jack.makeProcess $
wrapFun wfd input)
wrapFun ::
Fd ->
[Port JA.Sample Jack.Input] ->
Process a
wrapFun wfd inputs nframes _args = do
let send = fdWrite wfd . pure . encode
inArrs <- mapM (flip JA.getBufferArray nframes) inputs
forM_ inArrs $ \i -> do
samples <- mapM (readArray i) (Jack.nframesIndices nframes)
send $ maximum (map realToFrac samples)
return eOK
| lordi/flowskell | src/Flowskell/Lib/Jack.hs | gpl-2.0 | 3,164 | 0 | 16 | 690 | 945 | 510 | 435 | 70 | 2 |