code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity
stringlengths 5 to 1.03M | stringlengths 5 to 90 | stringlengths 4 to 158 | stringclasses 15 values | int64 5 to 1.03M | int64 0 to 53.9k | int64 2 to 4.17k | int64 0 to 365k | int64 3 to 317k | int64 1 to 171k | int64 1 to 146k | int64 -1 to 37.3k | int64 -1 to 1.31k
---|---|---|---|---|---|---|---|---|---|---|---|---|
-- | Ch04a
module Ch04a where
multThree :: (Num a) => a -> a -> a -> a
multThree x y z = x * y * z
compareWithHundred :: (Num a, Ord a) => a -> Ordering
compareWithHundred = flip compare 100
divideByTen :: (Floating a) => a -> a
divideByTen = (/ 10)
isUpperAlphanum :: Char -> Bool
isUpperAlphanum = (`elem` ['A'..'Z'])
applyTwice :: (a -> a) -> a -> a
applyTwice f x = f (f x)
zipWith' :: (a -> b -> c) -> [a] -> [b] -> [c]
zipWith' _ [] _ = []
zipWith' _ _ [] = []
zipWith' f (x:xs) (y:ys) = f x y : zipWith' f xs ys
flip' :: (a -> b -> c) -> (b -> a -> c)
flip' f x y = f y x
map' :: (a -> b) -> [a] -> [b]
map' _ [] = []
map' f (x:xs) = f x : map' f xs
filter' :: (a -> Bool) -> [a] -> [a]
filter' _ [] = []
filter' p (x:xs)
| p x = x : filter' p xs
| otherwise = filter' p xs
quicksort :: (Ord a) => [a] -> [a]
quicksort [] = []
quicksort (x:xs) =
let smallerSorted = quicksort (filter (<=x) xs)
biggerSorted = quicksort (filter (>x) xs)
in smallerSorted ++ [x] ++ biggerSorted
largestDivisible :: (Integral a) => a
largestDivisible = head (filter' p [10000,9999..])
where
p y = y `mod` 3829 == 0
findSum :: Integer
findSum = sum $ takeWhile (<10000) $ filter odd $ map (^ 2) [1..]
findSum' :: Integer
findSum' = sum $ takeWhile (<10000) [x ^ 2 | x <- [1..], odd (x ^ 2)]
chain :: (Integral a) => a -> [a]
chain 1 = [1]
chain x
| even x = x : chain (x `div` 2)
| otherwise = x : chain (x * 3 + 1)
numLongChains :: Int
numLongChains = sum [1 | x <- [1..100], isLong $ chain x]
where isLong xs = length xs > 15
flip'' :: (a -> b -> c) -> b -> a -> c
flip'' f = \x y -> f y x
sum' :: (Num a) => [a] -> a
sum' = foldl (\a x -> a + x) 0
elem' :: (Eq a) => a -> [a] -> Bool
elem' e = foldl (\a y -> if e == y then True else a) False
map'' :: (a -> b) -> [a] -> [b]
map'' f xs = foldr (\x a -> f x : a) [] xs
maximum' :: (Ord a) => [a] -> a
maximum' = foldr1 (\ x a -> if x > a then x else a)
reverse' :: [a] -> [a]
reverse' = foldl (\a x -> x : a) []
product' :: (Num a) => [a] -> a
product' = foldr1 (*)
filter'1 :: (a -> Bool) -> [a] -> [a]
filter'1 p = foldr (\x a -> if p x then x : a else a) []
appl :: [Double]
appl = map ($ 3) [(4+), (10*), (^2), sqrt]
comp :: (Enum b, Num b) => [b]
comp = map (negate . sum . tail) [[1..5],[3..6],[1..7]]
| codingiam/sandbox-hs | src/Ch04a.hs | bsd-3-clause | 2,303 | 0 | 12 | 598 | 1,423 | 771 | 652 | 66 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
module Main where
#if !(MIN_VERSION_base(4,11,0))
import Data.Monoid ((<>))
#endif
import qualified Graphics.Vty as V
import Brick.Main (App(..), defaultMain, resizeOrQuit, neverShowCursor)
import Brick.Types
( Widget
, Padding(..)
)
import Brick.Widgets.Core
( (<=>)
, (<+>)
, padLeft
)
import Brick.Util (on, fg)
import Brick.Markup (markup, (@?))
import Brick.AttrMap (attrMap, AttrMap)
import Data.Text.Markup ((@@))
ui :: Widget ()
ui = (m1 <=> m2) <+> (padLeft (Pad 1) m3)
where
m1 = markup $ ("Hello" @@ fg V.blue) <> ", " <> ("world!" @@ fg V.red)
m2 = markup $ ("Hello" @? "keyword1") <> ", " <> ("world!" @? "keyword2")
m3 = markup $ ("Hello," @? "keyword1") <> "\n" <> ("world!" @? "keyword2")
theMap :: AttrMap
theMap = attrMap V.defAttr
[ ("keyword1", fg V.magenta)
, ("keyword2", V.white `on` V.blue)
]
app :: App () e ()
app =
App { appDraw = const [ui]
, appHandleEvent = resizeOrQuit
, appAttrMap = const theMap
, appStartEvent = return
, appChooseCursor = neverShowCursor
}
main :: IO ()
main = defaultMain app ()
| sjakobi/brick | programs/MarkupDemo.hs | bsd-3-clause | 1,202 | 0 | 13 | 289 | 422 | 250 | 172 | 35 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
module Protocol.ROC.PointTypes.PointType48 where
import Data.Binary.Get (getByteString,getWord8,Get)
import Data.ByteString (ByteString)
import Data.Word (Word8)
import Prelude (($),
return,
Eq,
Float,
Read,
Show)
import Protocol.ROC.Float (getIeeeFloat32)
import Protocol.ROC.Utils (getTLP)
data PointType48 = PointType48 {
pointType48PointTag :: !PointType48PointTag
,pointType48ControlType :: !PointType48ControlType
,pointType48ActiveLoopStatus :: !PointType48ActiveLoopStatus
,pointType48LoopPeriod :: !PointType48LoopPeriod
,pointType48ActualLoopPeriod :: !PointType48ActualLoopPeriod
,pointType48PrimPVInputPnt :: !PointType48PrimPVInputPnt
,pointType48PrimStpnt :: !PointType48PrimStpnt
,pointType48PrimStpntChangeMax :: !PointType48PrimStpntChangeMax
,pointType48PrimPorpGain :: !PointType48PrimPorpGain
,pointType48PrimResetIntGain :: !PointType48PrimResetIntGain
,pointType48PrimRateDerivGain :: !PointType48PrimRateDerivGain
,pointType48PrimScaleFactor :: !PointType48PrimScaleFactor
,pointType48PrimIntDeadband :: !PointType48PrimIntDeadband
,pointType48PrimProcessVar :: !PointType48PrimProcessVar
,pointType48PrimChangeOutput :: !PointType48PrimChangeOutput
,pointType48OvrdPVInputPnt :: !PointType48OvrdPVInputPnt
,pointType48OvrdStpnt :: !PointType48OvrdStpnt
,pointType48OvrdStpntChangeMax :: !PointType48OvrdStpntChangeMax
,pointType48OvrdPropGain :: !PointType48OvrdPropGain
,pointType48OvrdResetIntGain :: !PointType48OvrdResetIntGain
,pointType48OvrdRateDerivGain :: !PointType48OvrdRateDerivGain
,pointType48OvrdScaleFactor :: !PointType48OvrdScaleFactor
,pointType48OvrdIntDeadband :: !PointType48OvrdIntDeadband
,pointType48OvrdProcessVar :: !PointType48OvrdProcessVar
,pointType48OvrdChangeOutput :: !PointType48OvrdChangeOutput
,pointType48PIDCurrentOutput :: !PointType48PIDCurrentOutput
,pointType48PIDOutputPnt :: !PointType48PIDOutputPnt
,pointType48PID2ndOutput :: !PointType48PID2ndOutput
,pointType48OutputLowLimitValue :: !PointType48OutputLowLimitValue
,pointType48OutputHighLimitValue :: !PointType48OutputHighLimitValue
,pointType48ControlLoopSelecion :: !PointType48ControlLoopSelecion
,pointType48OvrdLoopThreshSwitch :: !PointType48OvrdLoopThreshSwitch
,pointType48PrimLoopPVStpntUnits :: !PointType48PrimLoopPVStpntUnits
,pointType48OvrdPVLoopStpntUnits :: !PointType48OvrdPVLoopStpntUnits
,pointType48PIDOutputUnits :: !PointType48PIDOutputUnits
,pointType48PrimLoopProcessVarLowEu :: !PointType48PrimLoopProcessVarLowEu
,pointType48PrimLoopProcessVarHighEu :: !PointType48PrimLoopProcessVarHighEu
,pointType48OvrdLoopProcessVarLowEu :: !PointType48OvrdLoopProcessVarLowEu
,pointType48OvrdLoopProcessVarHighEu :: !PointType48OvrdLoopProcessVarHighEu
} deriving (Read,Eq, Show)
type PointType48PointTag = ByteString
type PointType48ControlType = Word8
type PointType48ActiveLoopStatus = Word8
type PointType48LoopPeriod = Float
type PointType48ActualLoopPeriod = Float
type PointType48PrimPVInputPnt = [Word8]
type PointType48PrimStpnt = Float
type PointType48PrimStpntChangeMax = Float
type PointType48PrimPorpGain = Float
type PointType48PrimResetIntGain = Float
type PointType48PrimRateDerivGain = Float
type PointType48PrimScaleFactor = Float
type PointType48PrimIntDeadband = Float
type PointType48PrimProcessVar = Float
type PointType48PrimChangeOutput = Float
type PointType48OvrdPVInputPnt = [Word8]
type PointType48OvrdStpnt = Float
type PointType48OvrdStpntChangeMax = Float
type PointType48OvrdPropGain = Float
type PointType48OvrdResetIntGain = Float
type PointType48OvrdRateDerivGain = Float
type PointType48OvrdScaleFactor = Float
type PointType48OvrdIntDeadband = Float
type PointType48OvrdProcessVar = Float
type PointType48OvrdChangeOutput = Float
type PointType48PIDCurrentOutput = Float
type PointType48PIDOutputPnt = [Word8]
type PointType48PID2ndOutput = [Word8]
type PointType48OutputLowLimitValue = Float
type PointType48OutputHighLimitValue = Float
type PointType48ControlLoopSelecion = Word8
type PointType48OvrdLoopThreshSwitch = Float
type PointType48PrimLoopPVStpntUnits = ByteString
type PointType48OvrdPVLoopStpntUnits = ByteString
type PointType48PIDOutputUnits = ByteString
type PointType48PrimLoopProcessVarLowEu = Float
type PointType48PrimLoopProcessVarHighEu = Float
type PointType48OvrdLoopProcessVarLowEu = Float
type PointType48OvrdLoopProcessVarHighEu = Float
pointType48Parser :: Get PointType48
pointType48Parser = do
pointTag <- getByteString 10
controlType <- getWord8
activeLoopStatus <- getWord8
loopPeriod <- getIeeeFloat32
actualLoopPeriod <- getIeeeFloat32
primPVInputPnt <- getTLP
primStpnt <- getIeeeFloat32
primStpntChangeMax <- getIeeeFloat32
primPorpGain <- getIeeeFloat32
primResetIntGain <- getIeeeFloat32
primRateDerivGain <- getIeeeFloat32
primScaleFactor <- getIeeeFloat32
primIntDeadband <- getIeeeFloat32
primProcessVar <- getIeeeFloat32
primChangeOutput <- getIeeeFloat32
ovrdPVInputPnt <- getTLP
ovrdStpnt <- getIeeeFloat32
ovrdStpntChangeMax <- getIeeeFloat32
ovrdPropGain <- getIeeeFloat32
ovrdResetIntGain <- getIeeeFloat32
ovrdRateDerivGain <- getIeeeFloat32
ovrdScaleFactor <- getIeeeFloat32
ovrdIntDeadband <- getIeeeFloat32
ovrdProcessVar <- getIeeeFloat32
ovrdChangeOutput <- getIeeeFloat32
pIDCurrentOutput <- getIeeeFloat32
pIDOutputPnt <- getTLP
pID2ndOutput <- getTLP
outputLowLimitValue <- getIeeeFloat32
outputHighLimitValue <- getIeeeFloat32
controlLoopSelecion <- getWord8
ovrdLoopThreshSwitch <- getIeeeFloat32
primLoopPVStpntUnits <- getByteString 10
ovrdPVLoopStpntUnits <- getByteString 10
pIDOutputUnits <- getByteString 10
primLoopProcessVarLowEu <- getIeeeFloat32
primLoopProcessVarHighEu <- getIeeeFloat32
ovrdLoopProcessVarLowEu <- getIeeeFloat32
ovrdLoopProcessVarHighEu <- getIeeeFloat32
return $ PointType48 pointTag controlType activeLoopStatus loopPeriod actualLoopPeriod primPVInputPnt primStpnt primStpntChangeMax primPorpGain primResetIntGain primRateDerivGain
primScaleFactor primIntDeadband primProcessVar primChangeOutput ovrdPVInputPnt ovrdStpnt ovrdStpntChangeMax ovrdPropGain ovrdResetIntGain ovrdRateDerivGain ovrdScaleFactor
ovrdIntDeadband ovrdProcessVar ovrdChangeOutput pIDCurrentOutput pIDOutputPnt pID2ndOutput outputLowLimitValue outputHighLimitValue controlLoopSelecion ovrdLoopThreshSwitch
primLoopPVStpntUnits ovrdPVLoopStpntUnits pIDOutputUnits primLoopProcessVarLowEu primLoopProcessVarHighEu ovrdLoopProcessVarLowEu ovrdLoopProcessVarHighEu
| plow-technologies/roc-translator | src/Protocol/ROC/PointTypes/PointType48.hs | bsd-3-clause | 13,232 | 0 | 9 | 7,280 | 1,020 | 564 | 456 | 216 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DataKinds #-}
#if __GLASGOW_HASKELL__ < 800
{-# OPTIONS_GHC -fcontext-stack=26 #-}
#else
{-# OPTIONS_GHC -freduction-depth=0 #-}
#endif
--------------------------------------------------------------------------------
-- |
-- Module : Database.EventStore.Internal.Operation.ReadEvent.Message
-- Copyright : (C) 2015 Yorick Laupa
-- License : (see the file LICENSE)
--
-- Maintainer : Yorick Laupa <[email protected]>
-- Stability : provisional
-- Portability : non-portable
--
--------------------------------------------------------------------------------
module Database.EventStore.Internal.Operation.ReadEvent.Message where
--------------------------------------------------------------------------------
import Data.Int
--------------------------------------------------------------------------------
import Data.ProtocolBuffers
--------------------------------------------------------------------------------
import Database.EventStore.Internal.Prelude
import Database.EventStore.Internal.Types
--------------------------------------------------------------------------------
-- | Read event on a regular stream request.
data Request
= Request
{ _streamId :: Required 1 (Value Text)
, _eventNumber :: Required 2 (Value Int64)
, _resolveLinkTos :: Required 3 (Value Bool)
, _requireMaster :: Required 4 (Value Bool)
}
deriving (Generic, Show)
--------------------------------------------------------------------------------
instance Encode Request
--------------------------------------------------------------------------------
-- | 'Request' smart constructor.
newRequest :: Text -> Int64 -> Bool -> Bool -> Request
newRequest stream_id evt_num res_link_tos req_master =
Request
{ _streamId = putField stream_id
, _eventNumber = putField evt_num
, _resolveLinkTos = putField res_link_tos
, _requireMaster = putField req_master
}
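-- A usage sketch (not part of the original module; the stream name is made up):
--
-- > newRequest "inventory-1" 0 True True
--
-- builds a request for event 0 of stream "inventory-1", resolving link events
-- and requiring the master node.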
--------------------------------------------------------------------------------
-- | Enumeration representing the status of a single event read operation.
data Result
= SUCCESS
| NOT_FOUND
| NO_STREAM
| STREAM_DELETED
| ERROR
| ACCESS_DENIED
deriving (Eq, Enum, Show)
--------------------------------------------------------------------------------
-- | Read event on a regular stream response.
data Response
= Response
{ _result :: Required 1 (Enumeration Result)
, _indexedEvent :: Required 2 (Message ResolvedIndexedEvent)
, _error :: Optional 3 (Value Text)
}
deriving (Generic, Show)
--------------------------------------------------------------------------------
instance Decode Response
instance Encode Response
| YoEight/eventstore | Database/EventStore/Internal/Operation/ReadEvent/Message.hs | bsd-3-clause | 2,800 | 0 | 11 | 428 | 355 | 210 | 145 | 40 | 1 |
module Day11 where
import Control.Arrow
import Data.Function
import Data.List
import Data.Maybe
partOne = nextGoodPass input
partTwo = nextGoodPass partOne
nextGoodPass :: String -> String
nextGoodPass s = fromJust $ find goodPass $ filter (/= s) $ iterate nextWord s
nextWord :: String -> String
nextWord = reverse . revNextWord . reverse
revNextWord :: String -> String
revNextWord (x:xs)
| x < 'z' = nextChar x : xs
| otherwise = 'a' : revNextWord xs
nextChar :: Char -> Char
nextChar c = head $ dropWhile (c >=) ['a'..'z']
goodPass :: String -> Bool
goodPass s = stringOfThree s && noLetters "iol" s && twoNonOverlappingPairs s
stringOfThree :: String -> Bool
stringOfThree = any (`isInfixOf` ['a'..'z']) . filter ((==) 3 . length) . map (take 3) . tails
noLetters :: String -> String -> Bool
noLetters s = all (`notElem` s)
twoNonOverlappingPairs :: String -> Bool
twoNonOverlappingPairs = (> 1) . length . nubBy ((==) `on` fst) . filter (\cs -> snd cs > 1) . map (head &&& length) . group
input :: String
input = "hxbxwxba"
| z0isch/advent-of-code | src/Day11.hs | bsd-3-clause | 1,085 | 0 | 12 | 232 | 418 | 222 | 196 | 27 | 1 |
-- | All types.
module Senza.Types
(Senza)
where
import Text.Blaze.Html (Markup)
-- | The type for an HTML value. In case blaze changes its type again.
type Senza = Markup
| chrisdone/senza | src/Senza/Types.hs | bsd-3-clause | 179 | 0 | 5 | 37 | 30 | 20 | 10 | 4 | 0 |
module TestImportFile (tests) where
import Asserts
import Bucket.Import
import Bucket.Types
import Control.Exception
import Fixtures
import Prelude hiding (catch)
import System.FilePath
import Test.Hspec.HUnit()
import Test.Hspec.Monadic
import Test.Hspec.QuickCheck
import Test.HUnit
import Test.QuickCheck
tests = describe "importing a file" $ do
it "copies it inside the bucket" $ withBucket $ \((tmpDir, bucket)) -> do
aSourceFile <- createEmptyFile $ tmpDir </> "a-file.png"
importFile bucket aSourceFile
assertFileExists (bucketPath bucket </> "a-file-1" </> "a-file.png")
it "creates a meta.txt file" $ withBucket $ \((tmpDir, bucket)) -> do
aSourceFile <- createEmptyFile $ tmpDir </> "a-file.png"
importFile bucket aSourceFile
let metaFile = (bucketPath bucket </> "a-file-1" </> "meta.txt")
assertFileContains metaFile "name::a-file.png\n"
it "writes the current modification date to meta" $ withBucket $ \((tmpDir, bucket)) -> do
aSourceFile <- createEmptyFile $ tmpDir </> "a-file.png"
setModificationTime aSourceFile 2012 2 15
importFile bucket aSourceFile
let metaFile = (bucketPath bucket </> "a-file-1" </> "meta.txt")
assertFileContains metaFile "creationdate::2012-02-15\n"
it "copies modification time" $ withBucket $ \((tmpDir, bucket)) -> do
aSourceFile <- createEmptyFile $ tmpDir </> "a-file.png"
setModificationTime aSourceFile 2012 2 15
mtime1 <- getMtime aSourceFile
importFile bucket aSourceFile
mtime2 <- getMtime (bucketPath bucket </> "a-file-1" </> "a-file.png")
assertEqual "" mtime1 mtime2
it "updates the bucket" $ withBucket $ \((tmpDir, bucket)) -> do
file1 <- createEmptyFile $ tmpDir </> "file1.png"
file2 <- createEmptyFile $ tmpDir </> "file2.png"
bucket <- importFile bucket file1
bucket <- importFile bucket file2
bucket `assertHasItems` [bucketPath bucket </> "file1-1", bucketPath bucket </> "file2-1"]
it "does not leave a trace in bucket if importing fails" $ withBucket $ \((tmpDir, bucket)) -> do
let importNonExistingFile = do
importFile bucket (tmpDir </> "nonExistingFile.png")
assertFailure "importing should have thrown IOException"
let assertBucketHasNoTraceOfFile = \e -> do
let _ = (e :: IOException)
assertDirectoryDoesNotExist $ bucketPath bucket </> "nonExistingFile-1"
catch importNonExistingFile assertBucketHasNoTraceOfFile
describe "item name" $ do
prop "is unique" $
forAll ourListOfStrings $ \itemNames ->
let newItemName = createItemName itemNames ("/tmp/" ++ aItem ++ ".png")
aItem = itemPath $ head itemNames
in newItemName `notElem` map itemPath itemNames
it "when it's the only file" $
createItemName [] "/tmp/foo.png" @?= "foo-1"
it "when it's the third file with the same name" $
createItemName [anItemWithName "foo", anItemWithName "foo-1"] "/tmp/foo.png" @?= "foo-2"
it "when one of the existing items with same name has been deleted" $
createItemName [anItemWithName "foo", anItemWithName "foo-2"] "/tmp/foo.png" @?= "foo-1"
| rickardlindberg/orgapp | tests/TestImportFile.hs | bsd-3-clause | 3,329 | 0 | 21 | 804 | 827 | 402 | 425 | -1 | -1 |
module MatchTheTypes where
import Data.List (sort)
i :: Num a => a
i = 1
f :: Fractional a => a
f = 1.0
g :: RealFrac a => a
g = 1.0
freud :: Ord a => a -> a
freud x = x
freud' :: Int -> Int
freud' x = x
myX = 1 :: Int
sigmund :: Int -> Int
sigmund x = myX
jung :: Ord a => [a] -> a
jung xs = head (sort xs)
jung' :: [Int] -> Int
jung' xs = head (sort xs)
young :: [Char] -> Char
young xs = head (sort xs)
young' :: Ord a => [a] -> a
young' xs = head (sort xs)
mysort :: [Char] -> [Char]
mysort = sort
signifier :: [Char] -> Char
signifier xs = head (mysort xs)
mysort' :: [Char] -> [Char]
mysort' = sort
-- won't type check
signifier' :: Ord a => [a] -> a
signifier' xs = head (mysort' xs)
| chengzh2008/hpffp | src/ch06-Typeclasses/matchTheTypes.hs | bsd-3-clause | 714 | 0 | 7 | 184 | 367 | 195 | 172 | 31 | 1 |
module RDSTests.EventSubscriptionTests
( runEventSubscriptionTests
)
where
import Control.Applicative ((<$>))
import Control.Monad.IO.Class (liftIO)
import Data.Text (Text)
import Test.Hspec
import Cloud.AWS.RDS
import Cloud.AWS.RDS.Types (SourceType(..), DBInstance(..))
import Util
import RDSTests.Util
region :: Text
region = "ap-northeast-1"
runEventSubscriptionTests :: IO ()
runEventSubscriptionTests = hspec $ do
describeEventSubscriptionsTest
createAndDeleteSubscriptionTest
modifySubscriptionTest
addSourceIdentifierToSubscriptionTest
describeEventSubscriptionsTest :: Spec
describeEventSubscriptionsTest = do
describe "describeEventSubscriptions doesn't fail" $ do
it "describeEventSubscriptions doesn't throw any exception" $ do
testRDS region (
describeEventSubscriptions Nothing Nothing Nothing
) `miss` anyConnectionException
createAndDeleteSubscriptionTest :: Spec
createAndDeleteSubscriptionTest = do
describe "{create,delete}EventSubscription doesn't fail" $ do
it "{create,delete}EventSubscription doesn't throw any excpetion" $ do
testRDS region (do
name <- liftIO $ getRandomText "hspec-test-subscription-"
withEventSubscription name snsTopicArn $
const $ return ()
) `miss` anyConnectionException
snsTopicArn :: Text
snsTopicArn = "arn:aws:sns:ap-northeast-1:049669284607:hspec-test-topic"
modifySubscriptionTest :: Spec
modifySubscriptionTest = do
describe "modifyEventSubscription doesn't fail" $ do
it "modifyEventSubscription doesn't throw any excpetion" $ do
testRDS region (do
name <- liftIO $ getRandomText "hspec-test-subscription-"
withEventSubscription name snsTopicArn $ \_ -> do
modifyEventSubscription
(Just False)
["creation","deletion"]
(Just snsTopicArn)
(Just SourceTypeDBInstance)
name
) `miss` anyConnectionException
addSourceIdentifierToSubscriptionTest :: Spec
addSourceIdentifierToSubscriptionTest = do
describe "addSourceIdentifierToEventSubscription doesn't fail" $ do
it "addSourceIdentifierToEventSubscription doesn't throw any excpetion" $ do
testRDS region (do
name <- liftIO $ getRandomText "hspec-test-subscription-"
withEventSubscription name snsTopicArn $ \_ -> do
modifyEventSubscription
(Just False)
["creation","deletion"]
(Just snsTopicArn)
(Just SourceTypeDBInstance)
name
dbiid <- dbInstanceIdentifier . head <$>
describeDBInstances Nothing Nothing Nothing
addSourceIdentifierToSubscription
dbiid name
) `miss` anyConnectionException
| worksap-ate/aws-sdk | test/RDSTests/EventSubscriptionTests.hs | bsd-3-clause | 3,090 | 0 | 24 | 928 | 536 | 272 | 264 | 68 | 1 |
module Main where
import AOC2015.Day01
import AOC2015.Day02
import AOC2015.Day03
import AOC2015.Day04
import AOC2015.Day05
import AOC2015.Day06
import AOC2015.Day07
import AOC2015.Day08
import AOC2015.Day09
import AOC2015.Day10
import AOC2015.Day11
import AOC2015.Day12
import AOC2015.Day13
import AOC2015.Day14
import AOC2015.Day15
--import AOC2015.Day16
import AOC2015.Day17
import AOC2015.Day18
import AOC2015.Day19
import AOC2015.Day20
import AOC2015.Day21
import AOC2015.Day22
import AOC2015.Day23
import AOC2015.Day24
import AOC2015.Day25
main :: IO ()
main = do
putStrLn "Advent of Code"
| bitrauser/aoc | src/Main.hs | bsd-3-clause | 599 | 0 | 7 | 68 | 146 | 86 | 60 | 28 | 1 |
module Main where
import ABS
(x:i:ni:num_div:obj:i_divides:f:n:primeb:reminder:res:the_end) = [1..]
main_ :: Method
main_ [] this wb k =
Assign n (Val (I 2500)) $
Assign x (Sync is_prime [n]) $
k
is_prime :: Method
is_prime [pn] this wb k =
Assign i (Val (I 1)) $
Assign ni (Val (I pn)) $
Assign num_div (Val (I 0)) $
While (ILTE (Attr i) (Attr ni)) (\k' ->
Assign obj New $
Assign f (Async obj divides [i,ni]) $
Await f $
Assign i_divides (Get f) $
Assign num_div (Val (Add (Attr num_div) (Attr i_divides))) $
Assign i (Val (Add (Attr i) (I 1))) $
k'
) $
If (IEq (Attr num_div) (I 2))
(\k' -> Assign primeb (Val (I 1)) k')
(\k' -> Assign primeb (Val (I 0)) k') $
Return primeb wb k
divides :: Method
divides [pd, pn] this wb k =
Assign reminder (Val (Mod (I pn) (I pd)) ) $
If (IEq (Attr reminder) (I 0))
(\k' -> Assign res (Val (I 1)) k')
(\k' -> Assign res (Val (I 0)) k' ) $
Return res wb k
main' :: IO ()
main' = run' 1000000 main_ (head the_end)
main :: IO ()
main = printHeap =<< run 1000000 main_ (head the_end)
| abstools/abs-haskell-formal | benchmarks/2_primality_test/progs/2500.hs | bsd-3-clause | 1,111 | 0 | 20 | 295 | 677 | 340 | 337 | 37 | 1 |
module Prosper.Money where
type Money = Double
| WraithM/prosper | src/Prosper/Money.hs | bsd-3-clause | 48 | 0 | 4 | 8 | 12 | 8 | 4 | 2 | 0 |
{-# LANGUAGE MultiParamTypeClasses #-}
module Physics.Falling2d.OrthonormalBasis2d
(
)
where
import Data.Vect.Double.Base
import Physics.Falling.Math.OrthonormalBasis
instance OrthonormalBasis Vec2 Normal2 where
canonicalBasis = [ toNormalUnsafe $ Vec2 1.0 0.0, toNormalUnsafe $ Vec2 0.0 1.0 ]
completeBasis n = let Vec2 x y = fromNormal n in
(n, [ toNormalUnsafe $ Vec2 (-y) x ])
| sebcrozet/falling2d | Physics/Falling2d/OrthonormalBasis2d.hs | bsd-3-clause | 409 | 0 | 13 | 79 | 115 | 63 | 52 | 9 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ExtendedDefaultRules #-}
module Lucid.Foundation.Callouts.Joyride where
import Lucid.Base
import Lucid.Html5
import qualified Data.Text as T
import Data.Monoid
joyride_list_ :: T.Text
joyride_list_ = " joyride-list "
joyride_tip_guide_ :: T.Text
joyride_tip_guide_ = " joyride-tip-guide "
joyride_nub_ :: T.Text
joyride_nub_ = " joyride-nub "
joyride_content_wrapper_ :: T.Text
joyride_content_wrapper_ = " joyride-content-wrapper "
joyride_close_tip_ :: T.Text
joyride_close_tip_ = " joyride-close-tip "
joyride_next_tip_ :: T.Text
joyride_next_tip_ = " joyride-next-tip "
| athanclark/lucid-foundation | src/Lucid/Foundation/Callouts/Joyride.hs | bsd-3-clause | 632 | 0 | 5 | 80 | 105 | 65 | 40 | 19 | 1 |
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE Rank2Types #-}
-----------------------------------------------------------------------------
-- |
-- Module : Indexed.Command
-- Copyright : (C) 2012 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability : non-portable
--
-----------------------------------------------------------------------------
module Indexed.Command
( (>>)(..)
) where
import Indexed.Types
import Indexed.Functor
infixr 8 >>
infixr 1 :&
data (p >> q) r i = p i :& (q ~> r)
instance IFunctor (p >> q) where
imap h (p :& k) = p :& (h . k)
| ekmett/indexed | src/Indexed/Command.hs | bsd-3-clause | 734 | 5 | 9 | 130 | 128 | 78 | 50 | -1 | -1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
-- |
-- Module : Codec.Rot13
-- Description : Fast ROT13 cipher for Haskell.
-- Copyright : (c) Kyle Van Berendonck, 2014
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- This module exposes the API for this package.
module Codec.Rot13
( -- * Typeclass Interfaces
Rot13(..)
, Rot13Bytes(..)
-- * Constraint Interfaces
, rot13enum
, rot13int
-- * Compatibility
, rot13word
, rot13word8
, rot13char
, rot13string
) where
import Data.Char
import Data.Word
import Data.Int
import qualified Data.ByteString as BS
import qualified Data.ByteString.Internal as BS
import qualified Data.Text as Text
import Foreign.Ptr
import Foreign.Storable
import qualified Foreign.C.Types as Foreign
-- | The 'Rot13' typeclass is intended to perform the ROT13 cipher on the provided data, as if it
-- were representing a single ANSI-encoded character. This interface doesn't consider the storage
-- behaviour of the type at all, but is the fastest implementation if you need to integrate the
-- transformation as part of a stream.
class Rot13 a where
rot13 :: a -> a
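-- A quick usage sketch (not part of the original module): ROT13 is an
-- involution, so applying 'rot13' twice round-trips any instance, e.g.
--
-- > rot13 "Hello, World!"
-- "Uryyb, Jbeyq!"
-- > rot13 (rot13 "Hello, World!") == "Hello, World!"
-- True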
-- | The 'Rot13Bytes' typeclass is intended for when you need to perform the ROT13 cipher on some
-- data at the memory level. It stores the given data into a temporary buffer in memory, then runs
-- the cipher over the stored bytes to produce a new buffer. This operation is typically slower
-- than just using 'rot13' as part of a fusion pipeline.
class Rot13Bytes a where
rot13bs :: a -> BS.ByteString
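-- For instance (an illustrative assumption, not from the original module), a
-- single 'Word8' is stored as one byte and then ciphered:
--
-- > rot13bs (65 :: Word8)   -- 65 is 'A', which maps to 'N' (0x4E)
-- "N"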
-- | Perform the ROT13 cipher on the given 'Integral' instance (in the sense of 'Rot13').
rot13int :: Integral a => a -> a
rot13int x
  -- 97 is the code point of 'a'; within the lower-case range, rotate by 13
  -- (x - 84 == (x - 97) + 13, so the +13 shift is folded into the 'rem')
  | (fromIntegral x :: Word) - 97 < 26 = 97 + rem (x - 84) 26
  -- 65 is the code point of 'A'; the same trick for the upper-case range
  | (fromIntegral x :: Word) - 65 < 26 = 65 + rem (x - 52) 26
  | otherwise = x
{-# INLINE rot13int #-}
{-# SPECIALIZE rot13int :: Word -> Word #-}
{-# SPECIALIZE rot13int :: Word8 -> Word8 #-}
{-# SPECIALIZE rot13int :: Word16 -> Word16 #-}
{-# SPECIALIZE rot13int :: Word32 -> Word32 #-}
{-# SPECIALIZE rot13int :: Word64 -> Word64 #-}
{-# SPECIALIZE rot13int :: Int -> Int #-}
{-# SPECIALIZE rot13int :: Int8 -> Int8 #-}
{-# SPECIALIZE rot13int :: Int16 -> Int16 #-}
{-# SPECIALIZE rot13int :: Int32 -> Int32 #-}
{-# SPECIALIZE rot13int :: Int64 -> Int64 #-}
{-# SPECIALIZE rot13int :: Integer -> Integer #-}
{-# SPECIALIZE rot13int :: Foreign.CChar -> Foreign.CChar #-}
{-# SPECIALIZE rot13int :: Foreign.CSChar -> Foreign.CSChar #-}
{-# SPECIALIZE rot13int :: Foreign.CUChar -> Foreign.CUChar #-}
{-# SPECIALIZE rot13int :: Foreign.CShort -> Foreign.CShort #-}
{-# SPECIALIZE rot13int :: Foreign.CUShort -> Foreign.CUShort #-}
{-# SPECIALIZE rot13int :: Foreign.CInt -> Foreign.CInt #-}
{-# SPECIALIZE rot13int :: Foreign.CUInt -> Foreign.CUInt #-}
{-# SPECIALIZE rot13int :: Foreign.CLong -> Foreign.CLong #-}
{-# SPECIALIZE rot13int :: Foreign.CULong -> Foreign.CULong #-}
{-# SPECIALIZE rot13int :: Foreign.CWchar -> Foreign.CWchar #-}
{-# SPECIALIZE rot13int :: Foreign.CLLong -> Foreign.CLLong #-}
{-# SPECIALIZE rot13int :: Foreign.CULLong -> Foreign.CULLong #-}
-- | Perform the ROT13 cipher on the given 'Enum' instance (in the sense of 'Rot13').
{-# INLINE rot13enum #-}
rot13enum :: Enum a => a -> a
rot13enum = toEnum . (rot13int :: Int -> Int) . fromEnum
-- | Perform the ROT13 cipher on the given 'Storable' instance bytes to yield a 'BS.ByteString'.
{-# INLINE rot13stor #-}
rot13stor :: Storable a => a -> BS.ByteString
rot13stor x = rot13bs $! BS.unsafeCreate (sizeOf x) $ \ptr -> poke (castPtr ptr) x
--------------------------------------------------------------------------------------------------
-- Rot13 Instances
instance Rot13 Char where rot13 = rot13enum
instance Rot13 String where rot13 = map rot13
instance Rot13 BS.ByteString where rot13 = BS.map rot13
instance Rot13 Text.Text where rot13 = Text.map rot13
instance Rot13 Word where rot13 = rot13int
instance Rot13 Word8 where rot13 = rot13int
instance Rot13 Word16 where rot13 = rot13int
instance Rot13 Word32 where rot13 = rot13int
instance Rot13 Word64 where rot13 = rot13int
instance Rot13 Int where rot13 = rot13int
instance Rot13 Int8 where rot13 = rot13int
instance Rot13 Int16 where rot13 = rot13int
instance Rot13 Int32 where rot13 = rot13int
instance Rot13 Int64 where rot13 = rot13int
instance Rot13 Integer where rot13 = rot13int
instance Rot13 Foreign.CChar    where rot13 = rot13int
instance Rot13 Foreign.CSChar   where rot13 = rot13int
instance Rot13 Foreign.CUChar   where rot13 = rot13int
instance Rot13 Foreign.CShort   where rot13 = rot13int
instance Rot13 Foreign.CUShort  where rot13 = rot13int
instance Rot13 Foreign.CInt     where rot13 = rot13int
instance Rot13 Foreign.CUInt    where rot13 = rot13int
instance Rot13 Foreign.CLong    where rot13 = rot13int
instance Rot13 Foreign.CULong   where rot13 = rot13int
instance Rot13 Foreign.CWchar   where rot13 = rot13int
instance Rot13 Foreign.CLLong   where rot13 = rot13int
instance Rot13 Foreign.CULLong  where rot13 = rot13int
--------------------------------------------------------------------------------------------------
-- Rot13Bytes Instances
instance {-# OVERLAPPING #-} Rot13Bytes BS.ByteString where rot13bs = rot13
instance {-# OVERLAPPING #-} Storable a => Rot13Bytes a where rot13bs = rot13stor
--------------------------------------------------------------------------------------------------
-- Compatibility
{-# INLINE rot13word #-}
rot13word :: Word -> Word
rot13word = rot13
{-# INLINE rot13word8 #-}
rot13word8 :: Word8 -> Word8
rot13word8 = rot13
{-# INLINE rot13char #-}
rot13char :: Char -> Char
rot13char = rot13
{-# INLINE rot13string #-}
rot13string :: String -> String
rot13string = rot13
| kvanberendonck/codec-rot13 | src/Codec/Rot13.hs | bsd-3-clause | 6,251 | 0 | 11 | 1,397 | 899 | 517 | 382 | 102 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Graphics.Storyboard.Box where
import Data.Monoid
import qualified Data.Set as Set
import Data.Set (Set)
import Data.Text (Text)
import Graphics.Blank as Blank
import qualified Graphics.Blank.Style as Style
import Graphics.Storyboard.Act
import Graphics.Storyboard.Literals
import Graphics.Storyboard.Mosaic
import Graphics.Storyboard.Tile
import Graphics.Storyboard.Types
data TheBoxStyle = TheBoxStyle
{ theBorderWidth :: Double
, theBorderColor :: Color
, theBackground :: Background
, theShadowStyle :: Maybe TheShadowStyle
, theSharedBorders :: Set Side -- ^ a shared border is straight, and perhaps offset
} deriving Show
defaultBoxStyle :: TheBoxStyle
defaultBoxStyle = TheBoxStyle
{ theBorderWidth = 1
, theBorderColor = "black"
, theBackground = bgColor "white"
, theShadowStyle = Just defaultShadowStyle
, theSharedBorders = Set.empty
}
data TheShadowStyle = TheShadowStyle
{ theShadowColor :: Text
, theShadowOffsetX :: Double
, theShadowOffsetY :: Double
, theShadowBlur :: Double
} deriving Show
defaultShadowStyle :: TheShadowStyle
defaultShadowStyle = TheShadowStyle
{ theShadowColor = "#cccccc"
, theShadowOffsetX = 5
, theShadowOffsetY = 5
, theShadowBlur = 5
}
{-
-- Later support LinearGradients
data TheBackgroundStyle
= Background Text
| LinearGradient Text Text
deriving Show
-}
class BoxStyle a where
boxStyle :: (TheBoxStyle -> TheBoxStyle) -> a -> a
instance BoxStyle TheBoxStyle where
boxStyle f a = f a
{-
class ShadowStyle a where
shadowStyle :: (TheShadowStyle -> TheShadowStyle) -> a -> a
instance ShadowStyle TheShadowStyle where
shadowStyle f a = f a
class BackgroundStyle a where
backgroundStyle :: (TheBackgroundStyle -> TheBackgroundStyle) -> a -> a
instance BackgroundStyle TheBackgroundStyle where
backgroundStyle f a = f a
instance BackgroundStyle TheBoxStyle where
backgroundStyle f a = f a
-}
background :: BoxStyle a => Background -> a -> a
background bg = boxStyle $ \ m -> m { theBackground = bg }
shadows :: BoxStyle a => Bool -> a -> a
shadows s = boxStyle $ \ m -> m
{ theShadowStyle =
if s
then Just defaultShadowStyle
else Nothing
}
borderWidth :: BoxStyle a => Double -> a -> a
borderWidth w = boxStyle $ \ m -> m { theBorderWidth = w }
borderColor :: BoxStyle a => Color -> a -> a
borderColor c = boxStyle $ \ m -> m { theBorderColor = c }
box :: TheBoxStyle -> Tile a -> Tile a
box st (Tile (w,h) act) = Tile (w+wd*2,h+wd*2) $ \ (Cavity ps' sz') ->
action (before ps' sz') <>
during ps' sz' <>
action (after ps' sz')
where
wd = theBorderWidth st
before (x,y) (w',h') = saveRestore $ do
translate (x,y)
case theBackground st of
Background bg -> Style.fillStyle bg
-- Our backgrounds are scaled to ((0,0),(1,1))
scale(w',h')
beginPath()
rect(0,0,1,1)
closePath()
-- lineWidth wd
case theShadowStyle st of
Nothing -> return ()
Just s_st -> do
shadowColor (theShadowColor s_st)
shadowOffsetX (theShadowOffsetX s_st)
shadowOffsetY (theShadowOffsetY s_st)
shadowBlur (theShadowBlur s_st)
fill()
during (x,y) (w',h') =
act (Cavity (x+wd,y+wd) (w' - wd * 2,h' - wd * 2))
after (x,y) (w',h')
| wd == 0 = return ()
| otherwise = saveRestore $ do
translate (x,y)
beginPath()
rect(0,0,w',h')
closePath()
lineWidth wd
strokeStyle (theBorderColor st)
stroke()
-- Build a 2D table of boxes
boxes :: TheBoxStyle -> [[(TheBoxStyle -> TheBoxStyle,Tile ())]] -> Tile ()
boxes st tss = pack $ mconcat $
[ anchor top $ pack $ mconcat
[ anchor left $ box (f st) $ t
| (f,t) <- ts
]
| ts <- tss
]
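-- A small usage sketch (not part of the original module; 'someTile' stands in
-- for any 'Tile a' built elsewhere): the style combinators above compose on
-- any 'BoxStyle' instance, so a red, shadow-less box could be written as
--
-- > box (borderWidth 2 . borderColor "red" . shadows False $ defaultBoxStyle) someTile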
| tonymorris/story-board | src/Graphics/Storyboard/Box.hs | bsd-3-clause | 4,086 | 0 | 17 | 1,166 | 1,138 | 607 | 531 | 95 | 2 |
{-# LANGUAGE RankNTypes #-}
module Node.Message.Decoder
( Decoder (..)
, DecoderStep (..)
, ByteOffset
, continueDecoding
, hoistDecoder
, hoistDecoderStep
, pureDone
, pureFail
, purePartial
) where
import qualified Data.ByteString as BS
import Data.Int (Int64)
import qualified Data.Text as T
type ByteOffset = Int64
data DecoderStep m t =
Done !BS.ByteString !ByteOffset !t
| Fail !BS.ByteString !ByteOffset !T.Text
| Partial (Maybe BS.ByteString -> Decoder m t)
newtype Decoder m t = Decoder {
runDecoder :: m (DecoderStep m t)
}
hoistDecoder
:: ( Functor n )
=> (forall a . m a -> n a)
-> Decoder m t
-> Decoder n t
hoistDecoder nat (Decoder m) = Decoder (hoistDecoderStep nat <$> nat m)
hoistDecoderStep
:: ( Functor n )
=> (forall a . m a -> n a)
-> DecoderStep m t
-> DecoderStep n t
hoistDecoderStep nat step = case step of
Done trailing offset t -> Done trailing offset t
Fail trailing offset err -> Fail trailing offset err
Partial k -> Partial $ hoistDecoder nat . k
-- | Feed input through a decoder.
--
continueDecoding
:: ( Monad m )
=> DecoderStep m t
-> BS.ByteString
-> m (DecoderStep m t)
continueDecoding decoderStep bs = case decoderStep of
Done trailing offset t -> pure $ Done (BS.append trailing bs) offset t
Fail trailing offset err -> pure $ Fail (BS.append trailing bs) offset err
Partial k -> runDecoder (k (Just bs))
pureDone :: Monad m => BS.ByteString -> ByteOffset -> t -> Decoder m t
pureDone trailing offset t = Decoder . return $ Done trailing offset t
pureFail :: Monad m => BS.ByteString -> ByteOffset -> T.Text -> Decoder m t
pureFail trailing offset err = Decoder . return $ Fail trailing offset err
purePartial :: Monad m => (Maybe BS.ByteString -> Decoder m t) -> Decoder m t
purePartial k = Decoder . return $ Partial k
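-- A minimal driver sketch (not part of the original module; 'feedChunks' is an
-- illustrative name): feed a list of chunks to a 'Decoder', signalling
-- end-of-input with 'Nothing' once the chunks run out.
feedChunks :: Monad m => Decoder m t -> [BS.ByteString] -> m (Either T.Text t)
feedChunks decoder chunks = do
    step <- runDecoder decoder
    case (step, chunks) of
        (Done _ _ t, _)     -> return (Right t)
        (Fail _ _ err, _)   -> return (Left err)
        (Partial k, c : cs) -> feedChunks (k (Just c)) cs
        (Partial k, [])     -> do
            -- after end-of-input a well-behaved decoder must finish or fail
            final <- runDecoder (k Nothing)
            case final of
                Done _ _ t   -> return (Right t)
                Fail _ _ err -> return (Left err)
                Partial _    -> return (Left (T.pack "decoder demanded input after end of stream"))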
| input-output-hk/pos-haskell-prototype | networking/src/Node/Message/Decoder.hs | mit | 1,954 | 0 | 12 | 507 | 690 | 352 | 338 | 63 | 3 |
module Data.String.Util (
split
) where
-- | /O(n)/ Splits a 'String' into components delimited by separators,
-- where the predicate returns True for a separator element. The
-- resulting components do not contain the separators. Two adjacent
-- separators result in an empty component in the output. eg.
--
-- >>> split (== 'a') "aabbaca"
-- ["","","bb","c",""]
-- >>> split (== 'a') ""
-- [""]
split :: (Char -> Bool) -> String -> [String]
split p = go
where
go xs = case break p xs of
(ys, []) -> [ys]
(ys, _:zs) -> ys : go zs
| beni55/string | src/Data/String/Util.hs | mit | 557 | 0 | 11 | 125 | 111 | 65 | 46 | 7 | 2 |
module TypeConstraint where
{-- snippet OrdStack --}
data (Ord a) => OrdStack a = Bottom
| Item a (OrdStack a)
deriving (Show)
{-- /snippet OrdStack --}
{-- snippet isIncreasing --}
isIncreasing :: (Ord a) => OrdStack a -> Bool
isIncreasing (Item a rest@(Item b _))
| a < b = isIncreasing rest
| otherwise = False
isIncreasing _ = True
{-- /snippet isIncreasing --}
{-- snippet push --}
push :: (Ord a) => a -> OrdStack a -> OrdStack a
push a s = Item a s
{-- /snippet push --}
| binesiyu/ifl | examples/ch10/TypeConstraint.hs | mit | 550 | 0 | 10 | 163 | 168 | 89 | 79 | 11 | 1 |
{- |
Module : ./MMT/Hets2mmt.hs
Description : interface for MMT jar
Copyright :
License :
Maintainer : [email protected]
Stability : experimental
Portability :
-}
module MMT.Hets2mmt (
mmtRes,
callSpec
)
where
import System.Process
import System.IO
import Common.Result
import Common.Id
import Static.DevGraph
import Common.LibName
import Framework.Analysis (addLogic2LogicList)
import System.FilePath
import Common.Utils
-- import MMT.XMLtoPT
{- import System.IO.Unsafe
import Text.ParserCombinators.Parsec -}
jar :: String
jar = "hets-mmt-standalone.jar"
staloneclass :: String
staloneclass = "com.simontuffs.onejar.Boot"
calljar :: FilePath -> IO (String, Maybe String)
calljar fileName = do
(_, Just hout, maybeErr, _) <- createProcess (
proc "java" ["-cp",
jar,
staloneclass,
"-newLogic",
fileName])
{ std_out = CreatePipe }
cont <- hGetContents hout
case maybeErr of
(Just hErr) -> do
err <- hGetContents hErr
putStr err
return (cont, Just err)
Nothing -> return (cont, Nothing)
callSpec :: FilePath -> IO (String, Maybe String)
callSpec fileName = do
putStr "creating process\n"
(_, Just hout, maybeErr, _) <- createProcess (
proc "java" ["-cp",
jar,
staloneclass,
"-readSpec",
fileName])
{ std_out = CreatePipe }
cont <- hGetContents hout
-- putStr cont
case maybeErr of
(Just hErr) -> do
err <- hGetContents hErr
putStr err
return (cont, Just err)
Nothing -> return (cont, Nothing)
callMMT :: FilePath -> IO [Diagnosis]
callMMT fp = do
(out, maybeErr) <- calljar fp
case maybeErr of
(Just err) -> return [Diag Warning out nullRange,
Diag Error err nullRange]
Nothing -> return [Diag Warning out nullRange]
mmtRes :: FilePath -> IO (Result (LibName, LibEnv))
mmtRes fname = do
libDir <- getEnvDef "HETS_LIB" ""
putStr $ "HETS_LIB at " ++ libDir
putStr $ "calling MMT on " ++ libDir ++ fname
dgs <- callMMT (libDir </> fname)
putStr $ show dgs
addLogic2LogicList $ dropExtension fname
return (emptyRes (dropExtension fname) dgs)
emptyRes :: String -> [Diagnosis] -> Result (LibName, LibEnv)
emptyRes lname = (`Result` Just (emptyLibName lname, emptyLibEnv))
| spechub/Hets | MMT/Hets2mmt.hs | gpl-2.0 | 2,619 | 0 | 14 | 837 | 692 | 356 | 336 | 67 | 2 |
module GameLogic.StartLogic
( newGame
) where
import System.Random
import Control.Lens
import GameLogic.Data.Facade
import GameLogic.Util.Shuffle
newGame::IO GameData
newGame = newGame' defSeed
newGame':: Int -> IO GameData
newGame' seed
| 0 == seed
= fmap mkStartGameGen newStdGen
| otherwise
= return $ mkStartGame seed
mkStartGame :: Int -> GameData
mkStartGame seed = mkStartGameGen gen
where gen = mkStdGen seed
mkStartGameGen :: StdGen -> GameData
mkStartGameGen gen = mkGameDef world players gen''
where (world, gen') = mkStartWorld defWorldSize defNumPlayers gen
(players, gen'') = mkPlayers defNumPlayers world gen'
{-# ANN mkStartWorld "HLint: ignore Eta reduce" #-}
mkStartWorld :: RandomGen g => Int ->Int -> g -> (World, g)
mkStartWorld size numPlayers gen = placeWorldPlayers (mkEmptyWorld size) numPlayers gen
placeWorldPlayers :: RandomGen g => World -> Int -> g -> (World, g)
placeWorldPlayers world numPlayers gen =
let players = [1..numPlayers]
positions = [calcStartPos world numPlayers pl | pl <- players]
(positions', gen') = shuffle gen positions
playersPosition = zip positions' players
p w (pos, pl) = w & ix pos .~ mkCell 1 pl
world' = foldl p world playersPosition
in (world', gen')
calcStartPos :: World -> Int -> Int -> WorldPos
calcStartPos world numPlayers num =
let list = playersStartPosXList (getWorldSize world) numPlayers
cols = playersStartPosCols numPlayers
xInd = ((num-1) `mod` cols)
yInd = ((num-1) `div` cols)
in (list !! xInd, list !! yInd)
-- Return the number of columns of player start positions for the given number of players
playersStartPosCols :: Int -> Int
playersStartPosCols 4 = 2
playersStartPosCols 16 = 4
playersStartPosCols _ = error "Wrong world size in GameLogic.StartLogic.playersStartPosCols"
-- Return the list of x-coordinates of start positions for the given world size and number of players
playersStartPosXList :: Int -> Int -> [Int]
playersStartPosXList size numPlayers =
let cols = playersStartPosCols numPlayers
dist = size `div` cols
in fmap (\i -> (i-1)*dist + (dist `div` 2) + 1 ) [1..cols]
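-- A worked example (sizes assumed for illustration, not taken from Facade's
-- defaults): for a world of size 10 with 4 players there are 2 columns and
-- dist = 5, so
--
-- > playersStartPosXList 10 4
-- [3,8]
--
-- and calcStartPos places the four players at (3,3), (8,3), (3,8) and (8,8)
-- before the positions are shuffled.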
| EPashkin/gamenumber-freegame | src_gl/GameLogic/StartLogic.hs | gpl-3.0 | 2,138 | 0 | 14 | 447 | 658 | 347 | 311 | 49 | 1 |
-- Copyright (c) 2014 Contributors as noted in the AUTHORS file
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>.
import Arduino.Uno
main = compileProgram $ do
let buttonPressStream = digitalRead pin12
let greenLed = digitalOutput pin13
let redLed1 = digitalOutput pin11
let redLed2 = digitalOutput pin10
blinkStream <- def $ clock ~> toggle
greenLed =: buttonPressStream
redLed1 =: keepWhen buttonPressStream bitLow blinkStream
redLed2 =: keepWhen buttonPressStream bitLow (invert blinkStream)
| frp-arduino/frp-arduino | examples/Combine.hs | gpl-3.0 | 1,162 | 0 | 11 | 243 | 133 | 67 | 66 | 10 | 1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : Glome.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:42
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qt.Glome (
module Qt.Glome.Glome
, module Qt.Glome.Scene
, module Qt.Glome.Trace
, module Qt.Glome.Spd
, module Qt.Glome.TestScene
)
where
import Qt.Glome.Glome
import Qt.Glome.Scene
import Qt.Glome.Trace
import Qt.Glome.Spd
import Qt.Glome.TestScene
| keera-studios/hsQt | extra-pkgs/Glome/Qt/Glome.hs | bsd-2-clause | 682 | 0 | 5 | 119 | 77 | 54 | 23 | 12 | 0 |
module Main where
import Lib
main :: IO ()
main = someFunc
{-99 Haskell Problems-}
{-| Get the last element of a list-}
myLast :: [a] -> a
myLast [x] = x
myLast (_:xs) = myLast xs
{-| Get the second to last element of a list-}
myButtLast :: [a] -> a
myButtLast [x, _] = x
myButtLast (_:xs) = myButtLast xs
{-| Get the kth element of a list-}
elementAt :: [a] -> Int -> a
elementAt (x:_) 0 = x
elementAt (_:xs) k = elementAt xs (k - 1)
{-| Get the length of a list-}
myLength :: [a] -> Int
myLength [] = 0
myLength (_:xs) = 1 + (myLength xs)
{-| Reverse a list-}
myReverse :: [a] -> [a]
myReverse [] = []
myReverse (x:xs) = (myReverse xs) ++ [x]
{-| Checks if list is a palindrome.-}
myPalindrome :: (Eq a) => [a] -> Bool
myPalindrome x
| x == (reverse x) = True
| otherwise = False
{-| Remove dupes in list-}
compress :: (Eq a) => [a] -> [a]
compress [] = []
compress (x:xs) = [x] ++ compress (clean x xs)
where clean _ [] = []
clean y (x:xs)
| y == x = clean y xs
| otherwise = [x] ++ clean y xs
{-| Put duplicates in sublists-}
pack :: (Eq a) => [a] -> [[a]]
pack [] = []
pack [x] = [[x]]
pack (x:xs) = combine x xs ++ pack (clean x xs)
where
combine _ [] = []
combine x s = [[z | z <- x:s, z == x]]
clean _ [] = []
clean y (x:xs)
| y == x = clean y xs
| otherwise = [x] ++ clean y xs
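-- For example (note that, unlike the classic problem 9, this version groups
-- *all* equal elements, not only consecutive ones):
--
-- > pack "aaabcc"
-- ["aaa","b","cc"]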
{-| Run-length encode a list as (count, element) pairs.-}
encode :: (Eq a) => [a] -> [(Int, a)]
encode [] = []
encode s = map (\(x:xs) -> (length (x:xs), x)) (pack s)
data List a = Single a | Multiple Int a
deriving Show
{-| Run-length encode, wrapping lone elements in 'Single' and runs in 'Multiple'.-}
encodeModified :: (Eq a) => [a] -> [List a]
encodeModified s = map f (encode s)
where f (1, x) = Single x
f (n, x) = Multiple n x
decode :: [List a] -> [a]
decode s = foldr (++) [] (map f s)
where f (Single x) = [x]
f (Multiple n x) = replicate n x
encodeDirect :: (Eq a) => [a] -> [List a]
encodeDirect [] = []
encodeDirect (x:xs) = [toList (count x (x:xs)) x] ++
encodeDirect (filter (x /=) xs)
where count x s = length (filter (x==) s)
toList 1 x = Single x
toList n x = Multiple n x
dupl :: [a] -> [a]
dupl [] = []
dupl (x:xs) = [x,x] ++ dupl xs
repli :: [a] -> Int -> [a]
repli [] _ = []
repli (x:xs) n = replicate n x ++ repli xs n
dropEvery :: [a] -> Int -> [a]
dropEvery [] _ = []
dropEvery s n = foldr (++) [] (map (f n) (zip [1..] s))
where f n (m, x)
| m `mod` n == 0 = []
| otherwise = [x]
spliter :: [a] -> Int -> [[a]]
spliter [] _ = []
spliter s n = [reverse (drop ((length s) - n) (reverse s))] ++ [drop n s]
slice :: [a] -> Int -> Int -> [a]
slice [] _ _ = []
slice s start stop = reverse (drop (((length s)) - stop) (reverse (drop (start - 1) s)))
rotate :: [a] -> Int -> [a]
rotate [] _ = []
rotate s n = slice s ((f n (length s)) + 1) (length s) ++ slice s 1 (f n (length s))
where f n m
| n > m = f (n - m) m
| n < 0 = f (m + n) m
| otherwise = n
removeAt :: [a] -> Int -> (a, [a])
removeAt s n = (elementAt (slice s (n + 1) (n + 2)) 0,
slice s 1 n ++ slice s (n+2) (length s))
insertAt :: [a] -> a -> Int -> [a]
insertAt xs x n = slice xs 1 (n-1) ++ [x] ++ slice xs n (length xs)
range :: Int -> Int -> [Int]
range n1 n2 = [n1..n2]
listEq :: (Eq a) => [a] -> [a] -> Bool
listEq [] [] = True
listEq [] _ = False
listEq _ [] = False
listEq s1 s2 = False `notElem` (map (`elem`s1) s2 ++ map (`elem`s2) s1)
listNeq :: (Eq a) => [a] -> [a] -> Bool
listNeq s1 s2
| listEq s1 s2 = False
| otherwise = True
listRemoveDupes :: (Eq a) => [[a]] -> [[a]]
listRemoveDupes [[]] = [[]]
listRemoveDupes [] = []
listRemoveDupes (x:xs) = [x] ++ listRemoveDupes (filter (listNeq x) xs)
combinations :: (Eq a) => Int -> [a] -> [[a]]
combinations 0 _ = [[]]
combinations _ [] = [[]]
combinations n s = f n 1 s (map (\x -> [x]) s)
where f n1 n2 s1 s2
| n1 == n2 = s2
| otherwise = f n1 (n2 + 1) s1 (listRemoveDupes
[x ++ [y] |
x <- s2,
y <- s1,
y `notElem` x])
{- TODO the second combinatorics problem on the haskell website.-}
isDisjoint :: (Eq a) => [a] -> [a] -> Bool
isDisjoint [] [] = False
isDisjoint [] _ = True
isDisjoint _ [] = True
isDisjoint (x:xs) s2
| x `elem` s2 = False
| otherwise = isDisjoint xs s2
{-| TODO Finish this.-}
{-grouper :: (Eq a) => [Int] -> [a] -> [[[a]]]
grouper n s = g (map (`combinations`s) n)
where f x s = filter (isDisjoint x) s
g (x:y:s)
|y == [] = []
|otherwise = map (\z -> g (f z y) (y:s)) x -}
sortOnLength :: [[a]] -> [[a]]
sortOnLength [] = []
sortOnLength (x:xs) =
sortOnLength [y | y <- xs, (length y) < (length x)]
++ [x]
    ++ sortOnLength [y | y <- xs, (length y) >= (length x)]
sieveEratosthenes :: Int -> [Int]
sieveEratosthenes n = f n [2..n]
where f n [] = []
f n (x:xs) = [x] ++ f n [y | y <- xs,
y `notElem` (map (x*) [2..n])]
isPrime :: Int -> Bool
isPrime n = n `elem` (sieveEratosthenes n)
gcd' :: Int -> Int -> Int
gcd' n1 n2
| n1 == n2 = n1
| n1 > n2 = gcd' (n1 - n2) n2
| otherwise = gcd' (n2 - n1) n1
isCoPrime :: Int -> Int -> Bool
isCoPrime n1 n2
| (gcd' n1 n2) == 1 = True
| otherwise = False
eulerTotient :: Int -> Int
eulerTotient n = length (filter id (map (isCoPrime n) [1..n]))
primeFactors :: Int -> [Int]
primeFactors n
|isPrime n = [n]
|otherwise = [f] ++ primeFactors (n `div` f)
where f = fst (head (filter (\(x,y) ->
y == 0) (map (\x ->
(x, (n `mod` x)))
(sieveEratosthenes n))))
encodePrimeFactors :: Int -> [(Int, Int)]
encodePrimeFactors = encode . primeFactors
eulerTotient' :: Int -> Int
eulerTotient' n = foldr (*) 1
. map (\(x, y) ->
(y-1) * (y^(x - 1)))
. encodePrimeFactors $ n
primesRange :: Int -> Int -> [Int]
primesRange l u = filter (>=l) (sieveEratosthenes u)
combinationsWithDupes :: (Eq a) => Int -> [a] -> [[a]]
combinationsWithDupes 0 _ = [[]]
combinationsWithDupes _ [] = [[]]
combinationsWithDupes n s = f n 1 s (map (\x -> [x]) s)
where f n1 n2 s1 s2
| n1 == n2 = s2
| otherwise = f n1 (n2 + 1) s1 [x ++ [y] |
x <- s2,
y <- s1,
y `notElem` x]
{-| Fix empty list issue-}
goldbach :: Int -> (Int,Int)
goldbach n = snd
. head
. filter (\(x, _) -> x == n)
. map (\[x,y] -> ((x+y),(x,y)))
. combinationsWithDupes 2
. sieveEratosthenes $ n
goldbachList :: Int -> Int -> [(Int,Int)]
goldbachList l u = map goldbach
. dropWhile (<= l) $ [2,4 .. u]
grayC :: Int -> [String]
grayC n = combinationsWithDupes n
$ replicate n '1' ++ replicate n '0'
| MauriceIsAG/HaskellScratch | .stack-work/intero/intero1846heo.hs | bsd-3-clause | 7,113 | 0 | 18 | 2,344 | 3,627 | 1,921 | 1,706 | 187 | 3 |
import Development.Shake
import Development.Shake.System
import System.FilePath
main :: IO ()
main = shake $ do
("subdirectory" </> "foo") *> \x -> do
system' $ ["touch",x]
want ["subdirectory/foo"]
| beni55/openshake | tests/creates-directory-implicitly/Shakefile.hs | bsd-3-clause | 218 | 0 | 13 | 46 | 77 | 41 | 36 | 8 | 1 |
module ShallowVector where
import qualified Data.Vector.Unboxed as V
import Types
import Debug.Trace
{-# RULES "darkenBy.brightenBy" forall im1 n. darkenBy n (brightenBy n im1) = im1 #-}
{-# RULES "brightenBy.darkenBy" forall im1 n. brightenBy n (darkenBy n im1) = im1 #-}
-- TODO: overload + and - to be brightenBy and darkenBy
-- inline the blur functions so that they can be fused when used together,
-- using the fusion optimisations in the vector library.
{-# INLINE blurY #-}
blurY :: VectorImage -> VectorImage
blurY (VectorImage pixels w h) = VectorImage newPixels w h
where
newPixels = V.imap blurPixel pixels
normalise x = round (fromIntegral x / 4.0)
blurPixel i p
-- bottom of a column
| (i+1) `mod` h == 0 =
normalise ((pixels V.! (i-1)) + p*2 + p)
-- top of a column
| i `mod` h == 0 =
normalise (p + p*2 + (pixels V.! (i+1)))
-- somewhere in between
| otherwise =
normalise (pixels V.! (i-1) + p*2 + (pixels V.! (i+1)))
{-# INLINE blurX #-}
blurX :: VectorImage -> VectorImage
blurX (VectorImage pixels w h) = VectorImage newPixels w h
where
newPixels = V.imap blurPixel pixels
normalise x = round (fromIntegral x / 4.0)
blurPixel i p
-- right end of a row
| (i+1) `mod` w == 0 =
normalise ((pixels V.! (i-1)) + p*2 + p)
-- left start to a row
| i `mod` w == 0 =
normalise (p + p*2 + (pixels V.! (i+1)))
-- somewhere in between
| otherwise =
normalise (pixels V.! (i-1) + p*2 + (pixels V.! (i+1)))
-- don't inline, so they can be eliminated with the rewrite rule at the top.
{-# NOINLINE brightenBy #-}
{-# NOINLINE darkenBy #-}
brightenBy,darkenBy :: Int -> VectorImage -> VectorImage
brightenBy i (VectorImage pixels w h) = VectorImage (V.map (\x -> min 255 (x + fromIntegral i)) pixels) w h
darkenBy   i (VectorImage pixels w h) = VectorImage (V.map (\x -> max 0   (x - fromIntegral i)) pixels) w h
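-- A small usage sketch (not part of the original module): composing the stages
-- in one pipeline lets the INLINE'd blurs fuse through the vector library's
-- stream fusion, and a matching darkenBy/brightenBy pair is a candidate for
-- the rewrite rules at the top of the file.
processImage :: VectorImage -> VectorImage
processImage = blurX . blurY . darkenBy 2 . brightenBy 2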
| robstewart57/small-image-processing-dsl-implementations | haskell/small-image-processing-dsl/src/ShallowVector.hs | bsd-3-clause | 2,036 | 0 | 16 | 575 | 662 | 354 | 308 | 35 | 1 |
{-# LANGUAGE PatternGuards #-}
module Main (main) where
import Network.HTTP hiding (password)
import Network.Browser
import Network.URI (URI(..), parseRelativeReference, relativeTo)
import Distribution.Client
import Distribution.Client.Cron (cron, rethrowSignalsAsExceptions,
Signal(..), ReceivedSignal(..))
import Distribution.Package
import Distribution.Text
import Distribution.Verbosity
import Distribution.Simple.Utils hiding (intercalate)
import Distribution.Version (Version(..))
import Data.List
import Data.Maybe
import Data.IORef
import Data.Time
import Control.Exception
import Control.Monad
import Control.Monad.Trans
import qualified Data.ByteString.Lazy as BS
import qualified Data.Set as S
import qualified Codec.Compression.GZip as GZip
import qualified Codec.Archive.Tar as Tar
import System.Environment
import System.Exit(exitFailure, ExitCode(..))
import System.FilePath
import System.Directory
import System.Console.GetOpt
import System.Process
import System.IO
import Paths_hackage_server (version)
import Data.Aeson (eitherDecode)
data Mode = Help [String]
| Init URI [URI]
| Stats
| Build [PackageId]
data BuildOpts = BuildOpts {
bo_verbosity :: Verbosity,
bo_runTime :: Maybe NominalDiffTime,
bo_stateDir :: FilePath,
bo_continuous :: Maybe Int,
bo_keepGoing :: Bool,
bo_dryRun :: Bool,
bo_prune :: Bool,
bo_username :: Maybe String,
bo_password :: Maybe String
}
data BuildConfig = BuildConfig {
bc_srcURI :: URI,
bc_auxURIs :: [URI],
bc_username :: String,
bc_password :: String
}
srcName :: URI -> String
srcName uri = fromMaybe (show uri) (uriHostName uri)
installDirectory :: BuildOpts -> FilePath
installDirectory bo = bo_stateDir bo </> "tmp-install"
resultsDirectory :: BuildOpts -> FilePath
resultsDirectory bo = bo_stateDir bo </> "results"
main :: IO ()
main = topHandler $ do
rethrowSignalsAsExceptions [SIGABRT, SIGINT, SIGQUIT, SIGTERM]
hSetBuffering stdout LineBuffering
args <- getArgs
(mode, opts) <- validateOpts args
case mode of
Help strs ->
do let usageHeader = intercalate "\n" [
"Usage: hackage-build init URL [auxiliary URLs] [options]",
" hackage-build build [packages] [options]",
" hackage-build stats",
"Options:"]
mapM_ putStrLn $ strs
putStrLn $ usageInfo usageHeader buildFlagDescrs
unless (null strs) exitFailure
Init uri auxUris -> initialise opts uri auxUris
Stats ->
do stateDir <- canonicalizePath $ bo_stateDir opts
let opts' = opts {
bo_stateDir = stateDir
}
stats opts'
Build pkgs ->
do stateDir <- canonicalizePath $ bo_stateDir opts
let opts' = opts {
bo_stateDir = stateDir
}
case bo_continuous opts' of
Nothing ->
buildOnce opts' pkgs
Just interval -> do
cron (bo_verbosity opts')
interval
(const (buildOnce opts' pkgs))
()
---------------------------------
-- Initialisation & config file
--
initialise :: BuildOpts -> URI -> [URI] -> IO ()
initialise opts uri auxUris
= do username <- readMissingOpt "Enter hackage username" (bo_username opts)
password <- readMissingOpt "Enter hackage password" (bo_password opts)
let config = BuildConfig {
bc_srcURI = uri,
bc_auxURIs = auxUris,
bc_username = username,
bc_password = password
}
createDirectoryIfMissing False $ bo_stateDir opts
createDirectoryIfMissing False $ resultsDirectory opts
writeConfig opts config
writeCabalConfig opts config
where
readMissingOpt prompt = maybe (putStrLn prompt >> getLine) return
writeConfig :: BuildOpts -> BuildConfig -> IO ()
writeConfig opts BuildConfig {
bc_srcURI = uri,
bc_auxURIs = auxUris,
bc_username = username,
bc_password = password
} =
-- [Note: Show/Read URI]
-- Ideally we'd just be showing a BuildConfig, but URI doesn't
-- have Show/Read, so that doesn't work. So instead, we write
-- out a tuple containing the uri as a string, and parse it
-- each time we read it.
let confStr = show (show uri, map show auxUris, username, password) in
writeFile (configFile opts) confStr
readConfig :: BuildOpts -> IO BuildConfig
readConfig opts = do
xs <- readFile $ configFile opts
case reads xs of
[((uriStr, auxUriStrs, username, password), _)] ->
case mapM validateHackageURI (uriStr : auxUriStrs) of
-- Shouldn't happen: We check that this
-- returns Right when we create the
-- config file. See [Note: Show/Read URI].
Left theError -> die theError
Right (uri : auxUris) ->
return $ BuildConfig {
bc_srcURI = uri,
bc_auxURIs = auxUris,
bc_username = username,
bc_password = password
}
Right _ -> error "The impossible happened"
_ ->
die "Can't parse config file (maybe re-run \"hackage-build init\")"
configFile :: BuildOpts -> FilePath
configFile opts = bo_stateDir opts </> "hackage-build-config"
writeCabalConfig :: BuildOpts -> BuildConfig -> IO ()
writeCabalConfig opts config = do
let tarballsDir = bo_stateDir opts </> "cached-tarballs"
writeFile (bo_stateDir opts </> "cabal-config") . unlines $
[ "remote-repo: " ++ srcName uri ++ ":" ++ show uri
| uri <- bc_srcURI config : bc_auxURIs config ]
++ [ "remote-repo-cache: " ++ tarballsDir ]
createDirectoryIfMissing False tarballsDir
----------------------
-- Displaying status
--
data StatResult = AllVersionsBuiltOk
| AllVersionsAttempted
| NoneBuilt
| SomeBuiltOk
| SomeFailed
deriving Eq
stats :: BuildOpts -> IO ()
stats opts = do
config <- readConfig opts
let verbosity = bo_verbosity opts
notice verbosity "Initialising"
(didFail, _, _) <- mkPackageFailed opts
pkgIdsHaveDocs <- getDocumentationStats verbosity config didFail
infoStats verbosity (Just statsFile) pkgIdsHaveDocs
where
statsFile = bo_stateDir opts </> "stats"
infoStats :: Verbosity -> Maybe FilePath -> [DocInfo] -> IO ()
infoStats verbosity mDetailedStats pkgIdsHaveDocs = do
nfo $ "There are "
++ show (length byPackage)
++ " packages with a total of "
++ show (length pkgIdsHaveDocs)
++ " package versions"
nfo $ "So far we have built or attempted to built "
++ show (length (filter ((/= DocsNotBuilt) . docInfoHasDocs) pkgIdsHaveDocs))
++ " packages; only "
++ show (length (filter ((== DocsNotBuilt) . docInfoHasDocs) pkgIdsHaveDocs))
++ " left!"
nfo "Considering the most recent version only:"
nfo . printTable . indent $ [
      [show (length mostRecentBuilt) , "built successfully"]
, [show (length mostRecentFailed) , "failed to build"]
, [show (length mostRecentNotBuilt), "not yet built"]
]
nfo "Considering all versions:"
nfo . printTable . indent $ [
[count AllVersionsBuiltOk, "all versions built successfully"]
, [count AllVersionsAttempted, "attempted to build all versions, but some failed"]
, [count SomeBuiltOk, "not all versions built yet, but those that did were ok"]
, [count SomeFailed, "not all versions built yet, and some failures"]
, [count NoneBuilt, "no versions built yet"]
]
case mDetailedStats of
Nothing -> return ()
Just statsFile -> do
writeFile statsFile $ printTable (["Package", "Version", "Has docs?"] : formattedStats)
notice verbosity $ "Detailed statistics written to " ++ statsFile
where
-- | We avoid 'info' here because it re-wraps the text
nfo :: String -> IO ()
nfo str = when (verbosity >= verbose) $ putStrLn str
byPackage :: [[DocInfo]]
byPackage = map (sortBy (flip (comparing docInfoPackageVersion)))
$ groupBy (equating docInfoPackageName)
$ sortBy (comparing docInfoPackageName) pkgIdsHaveDocs
mostRecentBuilt, mostRecentFailed, mostRecentNotBuilt :: [[DocInfo]]
mostRecentBuilt = filter ((== HasDocs) . docInfoHasDocs . head) byPackage
mostRecentFailed = filter ((== DocsFailed) . docInfoHasDocs . head) byPackage
mostRecentNotBuilt = filter ((== DocsNotBuilt) . docInfoHasDocs . head) byPackage
categorise :: [DocInfo] -> StatResult
categorise ps
| all (== HasDocs) hd = AllVersionsBuiltOk
| all (/= DocsNotBuilt) hd = AllVersionsAttempted
| all (== DocsNotBuilt) hd = NoneBuilt
| all (/= DocsFailed) hd = SomeBuiltOk
| otherwise = SomeFailed
where
hd = map docInfoHasDocs ps
categorised :: [StatResult]
categorised = map categorise byPackage
count :: StatResult -> String
count c = show (length (filter (c ==) categorised))
formatPkg :: [DocInfo] -> [[String]]
formatPkg = map $ \docInfo -> [
display (docInfoPackageName docInfo)
, display (docInfoPackageVersion docInfo)
, show (docInfoHasDocs docInfo)
]
formattedStats :: [[String]]
formattedStats = concatMap formatPkg byPackage
indent :: [[String]] -> [[String]]
indent = map (" " :)
-- | Formats a 2D table so that everything is nicely aligned
--
-- NOTE: Expects the same number of columns in every row!
printTable :: [[String]] -> String
printTable xss = intercalate "\n"
. map (intercalate " ")
. map padCols
$ xss
where
colWidths :: [[Int]]
colWidths = map (map length) $ xss
maxColWidths :: [Int]
maxColWidths = foldr1 (\xs ys -> map (uncurry max) (zip xs ys)) colWidths
padCols :: [String] -> [String]
padCols cols = map (uncurry padTo) (zip maxColWidths cols)
padTo :: Int -> String -> String
padTo len str = str ++ replicate (len - length str) ' '
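-- A small worked example of 'printTable' (the input rows are made up):
--
-- > printTable [["Package", "Has docs?"], ["acme-foo", "HasDocs"]]
--
-- pads every column to its widest entry and joins columns with two spaces:
--
-- > Package   Has docs?
-- > acme-foo  HasDocs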
data HasDocs = HasDocs | DocsNotBuilt | DocsFailed
deriving (Eq, Show)
data DocInfo = DocInfo {
docInfoPackage :: PackageIdentifier
, docInfoHasDocs :: HasDocs
, docInfoIsCandidate :: Bool
}
docInfoPackageName :: DocInfo -> PackageName
docInfoPackageName = pkgName . docInfoPackage
docInfoPackageVersion :: DocInfo -> Version
docInfoPackageVersion = pkgVersion . docInfoPackage
docInfoBaseURI :: BuildConfig -> DocInfo -> URI
docInfoBaseURI config docInfo =
if not (docInfoIsCandidate docInfo)
then bc_srcURI config <//> "package" </> display (docInfoPackage docInfo)
else bc_srcURI config <//> "package" </> display (docInfoPackage docInfo) </> "candidate"
docInfoDocsURI :: BuildConfig -> DocInfo -> URI
docInfoDocsURI config docInfo = docInfoBaseURI config docInfo <//> "docs"
docInfoTarGzURI :: BuildConfig -> DocInfo -> URI
docInfoTarGzURI config docInfo = docInfoBaseURI config docInfo <//> display (docInfoPackage docInfo) <.> "tar.gz"
docInfoReports :: BuildConfig -> DocInfo -> URI
docInfoReports config docInfo = docInfoBaseURI config docInfo <//> "reports/"
getDocumentationStats :: Verbosity
-> BuildConfig
-> (PackageId -> IO Bool)
-> IO [DocInfo]
getDocumentationStats verbosity config didFail = do
notice verbosity "Downloading documentation index"
httpSession verbosity "hackage-build" version $ do
mPackages <- liftM eitherDecode `liftM` requestGET' packagesUri
mCandidates <- liftM eitherDecode `liftM` requestGET' candidatesUri
case (mPackages, mCandidates) of
-- Download failure
(Nothing, _) -> fail $ "Could not download " ++ show packagesUri
(_, Nothing) -> fail $ "Could not download " ++ show candidatesUri
-- Decoding failure
(Just (Left e), _) -> fail $ "Could not decode " ++ show packagesUri ++ ": " ++ e
(_, Just (Left e)) -> fail $ "Could not decode " ++ show candidatesUri ++ ": " ++ e
-- Success
(Just (Right packages), Just (Right candidates)) -> do
packages' <- liftIO $ mapM checkFailed packages
candidates' <- liftIO $ mapM checkFailed candidates
return $ map (setIsCandidate False) packages'
++ map (setIsCandidate True) candidates'
where
packagesUri = bc_srcURI config <//> "packages" </> "docs.json"
candidatesUri = bc_srcURI config <//> "packages" </> "candidates" </> "docs.json"
checkFailed :: (String, Bool) -> IO (PackageIdentifier, HasDocs)
checkFailed (pkgId, docsBuilt) = do
let pkgId' = fromJust (simpleParse pkgId)
if docsBuilt
then return (pkgId', HasDocs)
else do failed <- didFail pkgId'
if failed then return (pkgId', DocsFailed)
else return (pkgId', DocsNotBuilt)
setIsCandidate :: Bool -> (PackageIdentifier, HasDocs) -> DocInfo
setIsCandidate isCandidate (pId, hasDocs) = DocInfo {
docInfoPackage = pId
, docInfoHasDocs = hasDocs
, docInfoIsCandidate = isCandidate
}
----------------------
-- Building packages
--
buildOnce :: BuildOpts -> [PackageId] -> IO ()
buildOnce opts pkgs = keepGoing $ do
config <- readConfig opts
notice verbosity "Initialising"
(has_failed, mark_as_failed, persist_failed) <- mkPackageFailed opts
flip finally persist_failed $ do
updatePackageIndex
pkgIdsHaveDocs <- getDocumentationStats verbosity config has_failed
infoStats verbosity Nothing pkgIdsHaveDocs
-- First build all of the latest versions of each package
-- Then go back and build all the older versions
-- NOTE: assumes all these lists are non-empty
let latestFirst :: [[DocInfo]] -> [DocInfo]
latestFirst ids = map head ids ++ concatMap tail ids
      -- Find those packages *not* marked as having documentation in our cache
let toBuild :: [DocInfo]
toBuild = filter shouldBuild
. latestFirst
. map (sortBy (flip (comparing docInfoPackageVersion)))
. groupBy (equating docInfoPackageName)
. sortBy (comparing docInfoPackageName)
$ pkgIdsHaveDocs
notice verbosity $ show (length toBuild) ++ " package(s) to build"
-- Try to build each of them, uploading the documentation and
-- build reports along the way. We mark each package as having
-- documentation in the cache even if the build fails because
-- we don't want to keep continually trying to build a failing
-- package!
startTime <- getCurrentTime
let go :: [DocInfo] -> IO ()
go [] = return ()
go (docInfo : toBuild') = do
(mTgz, mRpt, logfile) <- buildPackage verbosity opts config docInfo
case mTgz of
Nothing -> mark_as_failed (docInfoPackage docInfo)
Just _ -> return ()
case mRpt of
Just _ | bo_dryRun opts -> return ()
Just report -> uploadResults verbosity config docInfo
mTgz report logfile
_ -> return ()
-- We don't check the runtime until we've actually tried
-- to build a doc, so as to ensure we make progress.
outOfTime <- case bo_runTime opts of
Nothing -> return False
Just d -> do
currentTime <- getCurrentTime
return $ (currentTime `diffUTCTime` startTime) > d
if outOfTime then return ()
else go toBuild'
go toBuild
where
shouldBuild :: DocInfo -> Bool
shouldBuild docInfo =
case docInfoHasDocs docInfo of
DocsNotBuilt -> null pkgs || any (isSelectedPackage pkgid) pkgs
_ -> False
where
pkgid = docInfoPackage docInfo
-- do versionless matching if no version was given
isSelectedPackage pkgid pkgid'@(PackageIdentifier _ (Version [] _)) =
packageName pkgid == packageName pkgid'
isSelectedPackage pkgid pkgid' =
pkgid == pkgid'
keepGoing :: IO () -> IO ()
keepGoing act
| bo_keepGoing opts = Control.Exception.catch act showExceptionAsWarning
| otherwise = act
showExceptionAsWarning :: SomeException -> IO ()
showExceptionAsWarning e
-- except for signals telling us to really stop
| Just (ReceivedSignal {}) <- fromException e
= throwIO e
| Just UserInterrupt <- fromException e
= throwIO e
| otherwise
= do warn verbosity (show e)
notice verbosity "Abandoning this build attempt."
verbosity = bo_verbosity opts
updatePackageIndex = do
update_ec <- cabal opts "update" [] Nothing
unless (update_ec == ExitSuccess) $
die "Could not 'cabal update' from specified server"
-- Builds a little memoised function that can tell us whether a
-- particular package failed to build its documentation
mkPackageFailed :: BuildOpts
-> IO (PackageId -> IO Bool, PackageId -> IO (), IO ())
mkPackageFailed opts = do
init_failed <- readFailedCache (bo_stateDir opts)
cache_var <- newIORef init_failed
let mark_as_failed pkg_id = atomicModifyIORef cache_var $ \already_failed ->
(S.insert pkg_id already_failed, ())
has_failed pkg_id = liftM (pkg_id `S.member`) $ readIORef cache_var
persist = readIORef cache_var >>= writeFailedCache (bo_stateDir opts)
return (has_failed, mark_as_failed, persist)
where
readFailedCache :: FilePath -> IO (S.Set PackageId)
readFailedCache cache_dir = do
pkgstrs <- handleDoesNotExist [] $ liftM lines $ readFile (cache_dir </> "failed")
case validatePackageIds pkgstrs of
Left theError -> die theError
Right pkgs -> return (S.fromList pkgs)
writeFailedCache :: FilePath -> S.Set PackageId -> IO ()
writeFailedCache cache_dir pkgs =
writeFile (cache_dir </> "failed") $ unlines $ map display $ S.toList pkgs
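-- For reference, the failed-package cache is a plain text file with one
-- displayed package identifier per line, e.g. (hypothetical names):
--
-- > acme-missiles-0.3
-- > broken-docs-1.2.1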
-- | Build documentation and return @(Just tgz)@ for the built tgz file
-- on success, or @Nothing@ otherwise.
buildPackage :: Verbosity -> BuildOpts -> BuildConfig
-> DocInfo
-> IO (Maybe FilePath, Maybe FilePath, FilePath)
buildPackage verbosity opts config docInfo = do
let pkgid = docInfoPackage docInfo
notice verbosity ("Building " ++ display pkgid)
handleDoesNotExist () $
removeDirectoryRecursive $ installDirectory opts
createDirectory $ installDirectory opts
-- Create the local package db
let packageDb = installDirectory opts </> "packages.db"
-- TODO: use Distribution.Simple.Program.HcPkg
ph <- runProcess "ghc-pkg"
["init", packageDb]
Nothing Nothing Nothing Nothing Nothing
init_ec <- waitForProcess ph
unless (init_ec == ExitSuccess) $
die $ "Could not initialise the package db " ++ packageDb
-- The documentation is installed within the stateDir because we
-- set a prefix while installing
let doc_root = installDirectory opts </> "haddocks"
doc_dir_tmpl = doc_root </> "$pkgid-docs"
doc_dir_pkg = doc_root </> display pkgid ++ "-docs"
-- doc_dir_html = doc_dir </> "html"
-- deps_doc_dir = doc_dir </> "deps"
-- temp_doc_dir = doc_dir </> display (docInfoPackage docInfo) ++ "-docs"
pkg_url = "/package" </> "$pkg-$version"
pkg_flags =
["--enable-documentation",
"--htmldir=" ++ doc_dir_tmpl,
-- We only care about docs, so we want to build as
-- quickly as possible, and hence turn
-- optimisation off. Also explicitly pass -O0 as a
-- GHC option, in case it overrides a .cabal
-- setting or anything
"--disable-optimization", "--ghc-option", "-O0",
"--disable-library-for-ghci",
-- We don't want packages installed in the user
-- package.conf to affect things. In particular,
-- we don't want doc building to fail because
-- "packages are likely to be broken by the reinstalls"
"--package-db=clear", "--package-db=global",
"--package-db=" ++ packageDb,
-- Always build the package, even when it's been built
-- before. This lets us regenerate documentation when
-- dependencies are updated.
"--reinstall",
-- We know where this documentation will
-- eventually be hosted, bake that in.
-- The wiki claims we shouldn't include the
-- version in the hyperlinks so we don't have
-- to rehaddock some package when the dependent
-- packages get updated. However, this is NOT
-- what the Hackage v1 did, so ignore that:
"--haddock-html-location=" ++ pkg_url </> "docs",
-- Link "Contents" to the package page:
"--haddock-contents-location=" ++ pkg_url,
-- Link to colourised source code:
"--haddock-hyperlink-source",
"--prefix=" ++ installDirectory opts,
"--build-summary=" ++ installDirectory opts </> "reports" </> "$pkgid.report",
"--report-planning-failure",
-- We want both html documentation and hoogle database generated
"--haddock-html",
"--haddock-hoogle",
-- For candidates we need to use the full URL, because
-- otherwise cabal-install will not find the package.
-- For regular packages however we need to use just the
-- package name, otherwise cabal-install will not
-- generate a report
if docInfoIsCandidate docInfo
then show (docInfoTarGzURI config docInfo)
else display pkgid
]
-- The installDirectory is purely temporary, while the resultsDirectory is
-- more persistent. We will grab various outputs from the tmp dir and stash
-- them for safe keeping (for later upload or manual inspection) in the
-- results dir.
let resultDir = resultsDirectory opts
resultLogFile = resultDir </> display pkgid <.> "log"
resultReportFile = resultDir </> display pkgid <.> "report"
resultDocsTarball = resultDir </> (display pkgid ++ "-docs") <.> "tar.gz"
buildLogHnd <- openFile resultLogFile WriteMode
-- We ignore the result of calling @cabal install@ because
-- @cabal install@ succeeds even if the documentation fails to build.
void $ cabal opts "install" pkg_flags (Just buildLogHnd)
-- Grab the report for the package we want. Stash it for safe keeping.
report <- handleDoesNotExist Nothing $ do
renameFile (installDirectory opts </> "reports"
</> display pkgid <.> "report")
resultReportFile
appendFile resultReportFile "\ndoc-builder: True"
return (Just resultReportFile)
docs_generated <- fmap and $ sequence [
doesDirectoryExist doc_dir_pkg,
doesFileExist (doc_dir_pkg </> "doc-index.html"),
doesFileExist (doc_dir_pkg </> display (docInfoPackageName docInfo) <.> "haddock")]
docs <- if docs_generated
then do
when (bo_prune opts) (pruneHaddockFiles doc_dir_pkg)
BS.writeFile resultDocsTarball =<< tarGzDirectory doc_dir_pkg
return (Just resultDocsTarball)
else return Nothing
notice verbosity $ unlines
[ "Build results for " ++ display pkgid ++ ":"
, fromMaybe "no report" report
, fromMaybe "no docs" docs
, resultLogFile
]
return (docs, report, resultLogFile)
cabal :: BuildOpts -> String -> [String] -> Maybe Handle -> IO ExitCode
cabal opts cmd args moutput = do
let verbosity = bo_verbosity opts
cabalConfigFile = bo_stateDir opts </> "cabal-config"
verbosityArgs = if verbosity == silent
then ["-v0"]
else []
all_args = ("--config-file=" ++ cabalConfigFile)
: cmd
: verbosityArgs
++ args
info verbosity $ unwords ("cabal":all_args)
ph <- runProcess "cabal" all_args Nothing
Nothing Nothing moutput moutput
waitForProcess ph
pruneHaddockFiles :: FilePath -> IO ()
pruneHaddockFiles dir = do
    -- Hackage doesn't support the haddock frames view, so remove it;
    -- this both drops the visible frames link and saves some space.
files <- getDirectoryContents dir
sequence_
[ removeFile (dir </> file)
| file <- files
, unwantedFile file ]
hackJsUtils
where
unwantedFile file
| "frames.html" == file = True
| "mini_" `isPrefixOf` file = True
-- The .haddock file is haddock-version specific
-- so it is not useful to make available for download
| ".haddock" <- takeExtension file = True
| otherwise = False
-- The "Frames" link is added by the JS, just comment it out.
hackJsUtils = do
content <- readFile (dir </> "haddock-util.js")
_ <- evaluate (length content)
writeFile (dir </> "haddock-util.js") (munge content)
where
munge = unlines
. map removeAddMenuItem
. lines
removeAddMenuItem l | (sp, l') <- span (==' ') l
, "addMenuItem" `isPrefixOf` l'
= sp ++ "//" ++ l'
removeAddMenuItem l = l
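-- For instance, a line of haddock-util.js such as
--
-- >     addMenuItem("Frames", toggleFrames);
--
-- is rewritten, keeping its indentation, to
--
-- >     //addMenuItem("Frames", toggleFrames);
--
-- (the JavaScript shown is illustrative, not the exact upstream source).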
tarGzDirectory :: FilePath -> IO BS.ByteString
tarGzDirectory dir = do
res <- liftM (GZip.compress . Tar.write) $
Tar.pack containing_dir [nested_dir]
-- This seq is extremely important! Tar.pack is lazy, scanning
-- directories as entries are demanded.
-- This interacts very badly with the renameDirectory stuff with
-- which tarGzDirectory gets wrapped.
BS.length res `seq` return res
where (containing_dir, nested_dir) = splitFileName dir
uploadResults :: Verbosity -> BuildConfig -> DocInfo
-> Maybe FilePath -> FilePath -> FilePath -> IO ()
uploadResults verbosity config docInfo
mdocsTarballFile buildReportFile buildLogFile =
httpSession verbosity "hackage-build" version $ do
-- Make sure we authenticate to Hackage
setAuthorityGen (provideAuthInfo (bc_srcURI config)
(Just (bc_username config, bc_password config)))
case mdocsTarballFile of
Nothing -> return ()
Just docsTarballFile ->
putDocsTarball config docInfo docsTarballFile
buildId <- postBuildReport config docInfo buildReportFile
putBuildLog buildId buildLogFile
putDocsTarball :: BuildConfig -> DocInfo -> FilePath -> HttpSession ()
putDocsTarball config docInfo docsTarballFile =
requestPUTFile (docInfoDocsURI config docInfo)
"application/x-tar" (Just "gzip") docsTarballFile
type BuildReportId = URI
postBuildReport :: BuildConfig -> DocInfo -> FilePath -> HttpSession BuildReportId
postBuildReport config docInfo reportFile = do
let uri = docInfoReports config docInfo
body <- liftIO $ BS.readFile reportFile
setAllowRedirects False
(_, response) <- request Request {
rqURI = uri,
rqMethod = POST,
rqHeaders = [Header HdrContentType ("text/plain"),
Header HdrContentLength (show (BS.length body)),
Header HdrAccept ("text/plain")],
rqBody = body
}
case rspCode response of
      --TODO: fix server to not give 303; 201 is more appropriate
(3,0,3) | [Just buildId] <- [ do rel <- parseRelativeReference location
return $ relativeTo rel uri
| Header HdrLocation location <- rspHeaders response ]
-> return buildId
_ -> do checkStatus uri response
fail "Unexpected response from server."
putBuildLog :: BuildReportId -> FilePath -> HttpSession ()
putBuildLog reportId buildLogFile = do
body <- liftIO $ BS.readFile buildLogFile
let uri = reportId <//> "log"
setAllowRedirects False
(_, response) <- request Request {
rqURI = uri,
rqMethod = PUT,
rqHeaders = [Header HdrContentType ("text/plain"),
Header HdrContentLength (show (BS.length body)),
Header HdrAccept ("text/plain")],
rqBody = body
}
case rspCode response of
      --TODO: fix server to not give 303; 201 is more appropriate
(3,0,3) -> return ()
_ -> checkStatus uri response
-------------------------
-- Command line handling
-------------------------
data BuildFlags = BuildFlags {
flagCacheDir :: Maybe FilePath,
flagVerbosity :: Verbosity,
flagRunTime :: Maybe NominalDiffTime,
flagHelp :: Bool,
flagForce :: Bool,
flagContinuous :: Bool,
flagKeepGoing :: Bool,
flagDryRun :: Bool,
flagInterval :: Maybe String,
flagPrune :: Bool,
flagUsername :: Maybe String,
flagPassword :: Maybe String
}
emptyBuildFlags :: BuildFlags
emptyBuildFlags = BuildFlags {
flagCacheDir = Nothing
, flagVerbosity = normal
, flagRunTime = Nothing
, flagHelp = False
, flagForce = False
, flagContinuous = False
, flagKeepGoing = False
, flagDryRun = False
, flagInterval = Nothing
, flagPrune = False
, flagUsername = Nothing
, flagPassword = Nothing
}
buildFlagDescrs :: [OptDescr (BuildFlags -> BuildFlags)]
buildFlagDescrs =
[ Option ['h'] ["help"]
(NoArg (\opts -> opts { flagHelp = True }))
"Show this help text"
, Option ['s'] []
(NoArg (\opts -> opts { flagVerbosity = silent }))
"Silent mode"
, Option ['v'] []
(NoArg (\opts -> opts { flagVerbosity = moreVerbose (flagVerbosity opts) }))
"Verbose mode (can be listed multiple times e.g. -vv)"
, Option [] ["run-time"]
(ReqArg (\mins opts -> case reads mins of
[(mins', "")] -> opts { flagRunTime = Just (fromInteger mins' * 60) }
_ -> error "Can't parse minutes") "MINS")
"Limit the running time of the build client"
, Option [] ["cache-dir"]
(ReqArg (\dir opts -> opts { flagCacheDir = Just dir }) "DIR")
"Where to put files during building"
, Option [] ["continuous"]
(NoArg (\opts -> opts { flagContinuous = True }))
"Build continuously rather than just once"
, Option [] ["keep-going"]
(NoArg (\opts -> opts { flagKeepGoing = True }))
"Keep going after errors"
, Option [] ["dry-run"]
(NoArg (\opts -> opts { flagDryRun = True }))
"Don't record results or upload"
, Option [] ["interval"]
(ReqArg (\int opts -> opts { flagInterval = Just int }) "MIN")
"Set the building interval in minutes (default 30)"
, Option [] ["prune-haddock-files"]
(NoArg (\opts -> opts { flagPrune = True }))
"Remove unnecessary haddock files (frames, .haddock file)"
, Option [] ["init-username"]
(ReqArg (\uname opts -> opts { flagUsername = Just uname }) "USERNAME")
"The Hackage user to run the build as (used with init)"
, Option [] ["init-password"]
(ReqArg (\passwd opts -> opts { flagPassword = Just passwd }) "PASSWORD")
"The password of the Hackage user to run the build as (used with init)"
]
validateOpts :: [String] -> IO (Mode, BuildOpts)
validateOpts args = do
let (flags0, args', errs) = getOpt Permute buildFlagDescrs args
flags = accum flags0 emptyBuildFlags
stateDir = fromMaybe "build-cache" (flagCacheDir flags)
opts = BuildOpts {
bo_verbosity = flagVerbosity flags,
bo_runTime = flagRunTime flags,
bo_stateDir = stateDir,
bo_continuous = case (flagContinuous flags, flagInterval flags) of
(True, Just i) -> Just (read i)
(True, Nothing) -> Just 30 -- default interval
(False, _) -> Nothing,
bo_keepGoing = flagKeepGoing flags,
bo_dryRun = flagDryRun flags,
bo_prune = flagPrune flags,
bo_username = flagUsername flags,
bo_password = flagPassword flags
}
mode = case args' of
_ | flagHelp flags -> Help []
| not (null errs) -> Help errs
"init" : uriStr : auxUriStrs ->
-- We don't actually want the URI at this point
-- (see [Note: Show/Read URI])
case mapM validateHackageURI (uriStr : auxUriStrs) of
Left theError -> Help [theError]
Right (uri:auxUris) -> Init uri auxUris
Right _ -> error "impossible"
["stats"] ->
Stats
"stats" : _ ->
Help ["stats takes no arguments"]
"build" : pkgstrs ->
case validatePackageIds pkgstrs of
Left theError -> Help [theError]
Right pkgs -> Build pkgs
cmd : _ -> Help ["Unrecognised command: " ++ show cmd]
[] -> Help []
-- Ensure we store the absolute state_dir, because we might
-- change the CWD later and we don't want the stateDir to be
-- invalidated by such a change
--
-- We have to ensure the directory exists before we do
-- canonicalizePath, or otherwise we get an exception if it
-- does not yet exist
return (mode, opts)
where
accum flags = foldr (flip (.)) id flags
| agrafix/hackage-server | BuildClient.hs | bsd-3-clause | 35,442 | 0 | 25 | 11,191 | 7,902 | 4,077 | 3,825 | 640 | 12 |
-----------------------------------------------------------------------------
--
-- Object-file symbols (called CLabel for hysterical raisins).
--
-- (c) The University of Glasgow 2004-2006
--
-----------------------------------------------------------------------------
module CLabel (
CLabel, -- abstract type
ForeignLabelSource(..),
pprDebugCLabel,
mkClosureLabel,
mkSRTLabel,
mkInfoTableLabel,
mkEntryLabel,
mkSlowEntryLabel,
mkConEntryLabel,
mkStaticConEntryLabel,
mkRednCountsLabel,
mkConInfoTableLabel,
mkStaticInfoTableLabel,
mkLargeSRTLabel,
mkApEntryLabel,
mkApInfoTableLabel,
mkClosureTableLabel,
mkLocalClosureLabel,
mkLocalInfoTableLabel,
mkLocalEntryLabel,
mkLocalConEntryLabel,
mkLocalStaticConEntryLabel,
mkLocalConInfoTableLabel,
mkLocalStaticInfoTableLabel,
mkLocalClosureTableLabel,
mkReturnPtLabel,
mkReturnInfoLabel,
mkAltLabel,
mkDefaultLabel,
mkBitmapLabel,
mkStringLitLabel,
mkAsmTempLabel,
mkPlainModuleInitLabel,
mkSplitMarkerLabel,
mkDirty_MUT_VAR_Label,
mkUpdInfoLabel,
mkBHUpdInfoLabel,
mkIndStaticInfoLabel,
mkMainCapabilityLabel,
mkMAP_FROZEN_infoLabel,
mkMAP_DIRTY_infoLabel,
mkEMPTY_MVAR_infoLabel,
mkTopTickyCtrLabel,
mkCAFBlackHoleInfoTableLabel,
mkCAFBlackHoleEntryLabel,
mkRtsPrimOpLabel,
mkRtsSlowTickyCtrLabel,
mkSelectorInfoLabel,
mkSelectorEntryLabel,
mkCmmInfoLabel,
mkCmmEntryLabel,
mkCmmRetInfoLabel,
mkCmmRetLabel,
mkCmmCodeLabel,
mkCmmDataLabel,
mkCmmGcPtrLabel,
mkRtsApFastLabel,
mkPrimCallLabel,
mkForeignLabel,
addLabelSize,
foreignLabelStdcallInfo,
mkCCLabel, mkCCSLabel,
DynamicLinkerLabelInfo(..),
mkDynamicLinkerLabel,
dynamicLinkerLabelInfo,
mkPicBaseLabel,
mkDeadStripPreventer,
mkHpcTicksLabel,
hasCAF,
needsCDecl, isAsmTemp, maybeAsmTemp, externallyVisibleCLabel,
isMathFun,
isCFunctionLabel, isGcPtrLabel, labelDynamic,
-- * Conversions
toClosureLbl, toSlowEntryLbl, toEntryLbl, toInfoLbl, toRednCountsLbl,
pprCLabel
) where
import IdInfo
import StaticFlags
import BasicTypes
import Packages
import DataCon
import Module
import Name
import Unique
import PrimOp
import Config
import CostCentre
import Outputable
import FastString
import DynFlags
import Platform
import UniqSet
-- -----------------------------------------------------------------------------
-- The CLabel type
{-
| CLabel is an abstract type that supports the following operations:
- Pretty printing
- In a C file, does it need to be declared before use? (i.e. is it
guaranteed to be already in scope in the places we need to refer to it?)
- If it needs to be declared, what type (code or data) should it be
declared to have?
- Is it visible outside this object file or not?
- Is it "dynamic" (see details below)
- Eq and Ord, so that we can make sets of CLabels (currently only
used in outputting C as far as I can tell, to avoid generating
more than one declaration for any given label).
- Converting an info table label into an entry label.
-}
data CLabel
= -- | A label related to the definition of a particular Id or Con in a .hs file.
IdLabel
Name
CafInfo
IdLabelInfo -- encodes the suffix of the label
-- | A label from a .cmm file that is not associated with a .hs level Id.
| CmmLabel
PackageId -- what package the label belongs to.
FastString -- identifier giving the prefix of the label
CmmLabelInfo -- encodes the suffix of the label
-- | A label with a baked-in \/ algorithmically generated name that definitely
-- comes from the RTS. The code for it must compile into libHSrts.a \/ libHSrts.so.
-- If it doesn't have an algorithmically generated name then use a CmmLabel
-- instead and give it an appropriate PackageId argument.
| RtsLabel
RtsLabelInfo
-- | A 'C' (or otherwise foreign) label.
--
| ForeignLabel
FastString -- name of the imported label.
(Maybe Int) -- possible '@n' suffix for stdcall functions
-- When generating C, the '@n' suffix is omitted, but when
-- generating assembler we must add it to the label.
ForeignLabelSource -- what package the foreign label is in.
FunctionOrData
-- | A family of labels related to a particular case expression.
| CaseLabel
{-# UNPACK #-} !Unique -- Unique says which case expression
CaseLabelInfo
| AsmTempLabel
{-# UNPACK #-} !Unique
| StringLitLabel
{-# UNPACK #-} !Unique
| PlainModuleInitLabel -- without the version & way info
Module
| CC_Label CostCentre
| CCS_Label CostCentreStack
-- | These labels are generated and used inside the NCG only.
        -- They are special variants of a label used for dynamic linking;
        -- see module PositionIndependentCode for details.
| DynamicLinkerLabel DynamicLinkerLabelInfo CLabel
-- | This label is generated and used inside the NCG only.
-- It is used as a base for PIC calculations on some platforms.
-- It takes the form of a local numeric assembler label '1'; and
-- is pretty-printed as 1b, referring to the previous definition
-- of 1: in the assembler source file.
| PicBaseLabel
-- | A label before an info table to prevent excessive dead-stripping on darwin
| DeadStripPreventer CLabel
-- | Per-module table of tick locations
| HpcTicksLabel Module
-- | Label of an StgLargeSRT
| LargeSRTLabel
{-# UNPACK #-} !Unique
-- | A bitmap (function or case return)
| LargeBitmapLabel
{-# UNPACK #-} !Unique
deriving (Eq, Ord)
-- | Record where a foreign label is stored.
data ForeignLabelSource
-- | Label is in a named package
= ForeignLabelInPackage PackageId
-- | Label is in some external, system package that doesn't also
-- contain compiled Haskell code, and is not associated with any .hi files.
-- We don't have to worry about Haskell code being inlined from
-- external packages. It is safe to treat the RTS package as "external".
| ForeignLabelInExternalPackage
        -- | Label is in the package currently being compiled.
-- This is only used for creating hacky tmp labels during code generation.
-- Don't use it in any code that might be inlined across a package boundary
-- (ie, core code) else the information will be wrong relative to the
-- destination module.
| ForeignLabelInThisPackage
deriving (Eq, Ord)
-- | For debugging problems with the CLabel representation.
-- We can't make a Show instance for CLabel because lots of its components don't have instances.
-- The regular Outputable instance only shows the label name, and not its other info.
--
pprDebugCLabel :: Platform -> CLabel -> SDoc
pprDebugCLabel platform lbl
= case lbl of
IdLabel{} -> pprPlatform platform lbl <> (parens $ text "IdLabel")
CmmLabel pkg _name _info
-> pprPlatform platform lbl <> (parens $ text "CmmLabel" <+> ppr pkg)
RtsLabel{} -> pprPlatform platform lbl <> (parens $ text "RtsLabel")
ForeignLabel _name mSuffix src funOrData
-> pprPlatform platform lbl <> (parens
$ text "ForeignLabel"
<+> ppr mSuffix
<+> ppr src
<+> ppr funOrData)
        _ -> pprPlatform platform lbl <> (parens $ text "other CLabel")
data IdLabelInfo
= Closure -- ^ Label for closure
| SRT -- ^ Static reference table
| InfoTable -- ^ Info tables for closures; always read-only
| Entry -- ^ Entry point
| Slow -- ^ Slow entry point
| LocalInfoTable -- ^ Like InfoTable but not externally visible
| LocalEntry -- ^ Like Entry but not externally visible
| RednCounts -- ^ Label of place to keep Ticky-ticky info for this Id
| ConEntry -- ^ Constructor entry point
| ConInfoTable -- ^ Corresponding info table
| StaticConEntry -- ^ Static constructor entry point
| StaticInfoTable -- ^ Corresponding info table
| ClosureTable -- ^ Table of closures for Enum tycons
deriving (Eq, Ord)
data CaseLabelInfo
= CaseReturnPt
| CaseReturnInfo
| CaseAlt ConTag
| CaseDefault
deriving (Eq, Ord)
data RtsLabelInfo
= RtsSelectorInfoTable Bool{-updatable-} Int{-offset-} -- ^ Selector thunks
| RtsSelectorEntry Bool{-updatable-} Int{-offset-}
| RtsApInfoTable Bool{-updatable-} Int{-arity-} -- ^ AP thunks
| RtsApEntry Bool{-updatable-} Int{-arity-}
| RtsPrimOp PrimOp
| RtsApFast FastString -- ^ _fast versions of generic apply
| RtsSlowTickyCtr String
deriving (Eq, Ord)
-- NOTE: Eq on LitString compares the pointer only, so this isn't
-- a real equality.
-- | What type of Cmm label we're dealing with.
-- Determines the suffix appended to the name when a CLabel.CmmLabel
-- is pretty printed.
data CmmLabelInfo
  = CmmInfo                     -- ^ misc rts info tables, suffix _info
| CmmEntry -- ^ misc rts entry points, suffix _entry
| CmmRetInfo -- ^ misc rts ret info tables, suffix _info
| CmmRet -- ^ misc rts return points, suffix _ret
| CmmData -- ^ misc rts data bits, eg CHARLIKE_closure
| CmmCode -- ^ misc rts code
| CmmGcPtr -- ^ GcPtrs eg CHARLIKE_closure
| CmmPrimCall -- ^ a prim call to some hand written Cmm code
deriving (Eq, Ord)
data DynamicLinkerLabelInfo
= CodeStub -- MachO: Lfoo$stub, ELF: foo@plt
| SymbolPtr -- MachO: Lfoo$non_lazy_ptr, Windows: __imp_foo
| GotSymbolPtr -- ELF: foo@got
| GotSymbolOffset -- ELF: foo@gotoff
deriving (Eq, Ord)
-- -----------------------------------------------------------------------------
-- Constructing CLabels
-- -----------------------------------------------------------------------------
-- Constructing IdLabels
-- These are always local:
mkSlowEntryLabel :: Name -> CafInfo -> CLabel
mkSlowEntryLabel name c = IdLabel name c Slow
mkSRTLabel :: Name -> CafInfo -> CLabel
mkRednCountsLabel :: Name -> CafInfo -> CLabel
mkSRTLabel name c = IdLabel name c SRT
mkRednCountsLabel name c = IdLabel name c RednCounts
-- These have local & (possibly) external variants:
mkLocalClosureLabel :: Name -> CafInfo -> CLabel
mkLocalInfoTableLabel :: Name -> CafInfo -> CLabel
mkLocalEntryLabel :: Name -> CafInfo -> CLabel
mkLocalClosureTableLabel :: Name -> CafInfo -> CLabel
mkLocalClosureLabel name c = IdLabel name c Closure
mkLocalInfoTableLabel name c = IdLabel name c LocalInfoTable
mkLocalEntryLabel name c = IdLabel name c LocalEntry
mkLocalClosureTableLabel name c = IdLabel name c ClosureTable
mkClosureLabel :: Name -> CafInfo -> CLabel
mkInfoTableLabel :: Name -> CafInfo -> CLabel
mkEntryLabel :: Name -> CafInfo -> CLabel
mkClosureTableLabel :: Name -> CafInfo -> CLabel
mkLocalConInfoTableLabel :: CafInfo -> Name -> CLabel
mkLocalConEntryLabel :: CafInfo -> Name -> CLabel
mkLocalStaticInfoTableLabel :: CafInfo -> Name -> CLabel
mkLocalStaticConEntryLabel :: CafInfo -> Name -> CLabel
mkConInfoTableLabel :: Name -> CafInfo -> CLabel
mkStaticInfoTableLabel :: Name -> CafInfo -> CLabel
mkClosureLabel name c = IdLabel name c Closure
mkInfoTableLabel name c = IdLabel name c InfoTable
mkEntryLabel name c = IdLabel name c Entry
mkClosureTableLabel name c = IdLabel name c ClosureTable
mkLocalConInfoTableLabel c con = IdLabel con c ConInfoTable
mkLocalConEntryLabel c con = IdLabel con c ConEntry
mkLocalStaticInfoTableLabel c con = IdLabel con c StaticInfoTable
mkLocalStaticConEntryLabel c con = IdLabel con c StaticConEntry
mkConInfoTableLabel name c = IdLabel name c ConInfoTable
mkStaticInfoTableLabel name c = IdLabel name c StaticInfoTable
mkConEntryLabel :: Name -> CafInfo -> CLabel
mkStaticConEntryLabel :: Name -> CafInfo -> CLabel
mkConEntryLabel name c = IdLabel name c ConEntry
mkStaticConEntryLabel name c = IdLabel name c StaticConEntry
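-- A rough sketch of how these print (the 'Name' here is hypothetical):
-- for an exported binding @M.foo@,
--
-- > mkClosureLabel fooName MayHaveCafRefs
--
-- renders (roughly) as @M_foo_closure@, and
--
-- > mkInfoTableLabel fooName MayHaveCafRefs
--
-- as @M_foo_info@, following the <Module>_<name>_<type> convention
-- described towards the bottom of this module.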
-- Constructing Cmm Labels
mkSplitMarkerLabel, mkDirty_MUT_VAR_Label, mkUpdInfoLabel,
mkBHUpdInfoLabel, mkIndStaticInfoLabel, mkMainCapabilityLabel,
mkMAP_FROZEN_infoLabel, mkMAP_DIRTY_infoLabel,
mkEMPTY_MVAR_infoLabel, mkTopTickyCtrLabel,
mkCAFBlackHoleInfoTableLabel, mkCAFBlackHoleEntryLabel :: CLabel
mkSplitMarkerLabel = CmmLabel rtsPackageId (fsLit "__stg_split_marker") CmmCode
mkDirty_MUT_VAR_Label = CmmLabel rtsPackageId (fsLit "dirty_MUT_VAR") CmmCode
mkUpdInfoLabel = CmmLabel rtsPackageId (fsLit "stg_upd_frame") CmmInfo
mkBHUpdInfoLabel = CmmLabel rtsPackageId (fsLit "stg_bh_upd_frame" ) CmmInfo
mkIndStaticInfoLabel = CmmLabel rtsPackageId (fsLit "stg_IND_STATIC") CmmInfo
mkMainCapabilityLabel = CmmLabel rtsPackageId (fsLit "MainCapability") CmmData
mkMAP_FROZEN_infoLabel = CmmLabel rtsPackageId (fsLit "stg_MUT_ARR_PTRS_FROZEN0") CmmInfo
mkMAP_DIRTY_infoLabel = CmmLabel rtsPackageId (fsLit "stg_MUT_ARR_PTRS_DIRTY") CmmInfo
mkEMPTY_MVAR_infoLabel = CmmLabel rtsPackageId (fsLit "stg_EMPTY_MVAR") CmmInfo
mkTopTickyCtrLabel = CmmLabel rtsPackageId (fsLit "top_ct") CmmData
mkCAFBlackHoleInfoTableLabel = CmmLabel rtsPackageId (fsLit "stg_CAF_BLACKHOLE") CmmInfo
mkCAFBlackHoleEntryLabel = CmmLabel rtsPackageId (fsLit "stg_CAF_BLACKHOLE") CmmEntry
-----
mkCmmInfoLabel, mkCmmEntryLabel, mkCmmRetInfoLabel, mkCmmRetLabel,
mkCmmCodeLabel, mkCmmDataLabel, mkCmmGcPtrLabel
:: PackageId -> FastString -> CLabel
mkCmmInfoLabel pkg str = CmmLabel pkg str CmmInfo
mkCmmEntryLabel pkg str = CmmLabel pkg str CmmEntry
mkCmmRetInfoLabel pkg str = CmmLabel pkg str CmmRetInfo
mkCmmRetLabel pkg str = CmmLabel pkg str CmmRet
mkCmmCodeLabel pkg str = CmmLabel pkg str CmmCode
mkCmmDataLabel pkg str = CmmLabel pkg str CmmData
mkCmmGcPtrLabel pkg str = CmmLabel pkg str CmmGcPtr
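-- A sketch of the suffixing behaviour (see 'pprCLbl' further down):
--
-- > mkCmmInfoLabel rtsPackageId (fsLit "stg_upd_frame")
--
-- renders as @stg_upd_frame_info@ (cf. 'mkUpdInfoLabel' above), while
-- 'mkCmmCodeLabel' and 'mkCmmDataLabel' leave the given FastString unchanged.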
-- Constructing RtsLabels
mkRtsPrimOpLabel :: PrimOp -> CLabel
mkRtsPrimOpLabel primop = RtsLabel (RtsPrimOp primop)
mkSelectorInfoLabel :: Bool -> Int -> CLabel
mkSelectorEntryLabel :: Bool -> Int -> CLabel
mkSelectorInfoLabel upd off = RtsLabel (RtsSelectorInfoTable upd off)
mkSelectorEntryLabel upd off = RtsLabel (RtsSelectorEntry upd off)
mkApInfoTableLabel :: Bool -> Int -> CLabel
mkApEntryLabel :: Bool -> Int -> CLabel
mkApInfoTableLabel upd off = RtsLabel (RtsApInfoTable upd off)
mkApEntryLabel upd off = RtsLabel (RtsApEntry upd off)
-- A call to some primitive hand written Cmm code
mkPrimCallLabel :: PrimCall -> CLabel
mkPrimCallLabel (PrimCall str pkg)
= CmmLabel pkg str CmmPrimCall
-- Constructing ForeignLabels
-- | Make a foreign label
mkForeignLabel
:: FastString -- name
-> Maybe Int -- size prefix
-> ForeignLabelSource -- what package it's in
-> FunctionOrData
-> CLabel
mkForeignLabel str mb_sz src fod
= ForeignLabel str mb_sz src fod
-- | Update the label size field in a ForeignLabel
addLabelSize :: CLabel -> Int -> CLabel
addLabelSize (ForeignLabel str _ src fod) sz
= ForeignLabel str (Just sz) src fod
addLabelSize label _
= label
-- | Get the label size field from a ForeignLabel
foreignLabelStdcallInfo :: CLabel -> Maybe Int
foreignLabelStdcallInfo (ForeignLabel _ info _ _) = info
foreignLabelStdcallInfo _lbl = Nothing
-- Constructing Large*Labels
mkLargeSRTLabel :: Unique -> CLabel
mkBitmapLabel :: Unique -> CLabel
mkLargeSRTLabel uniq = LargeSRTLabel uniq
mkBitmapLabel uniq = LargeBitmapLabel uniq
-- Constructing CaseLabels
mkReturnPtLabel :: Unique -> CLabel
mkReturnInfoLabel :: Unique -> CLabel
mkAltLabel :: Unique -> ConTag -> CLabel
mkDefaultLabel :: Unique -> CLabel
mkReturnPtLabel uniq = CaseLabel uniq CaseReturnPt
mkReturnInfoLabel uniq = CaseLabel uniq CaseReturnInfo
mkAltLabel uniq tag = CaseLabel uniq (CaseAlt tag)
mkDefaultLabel uniq = CaseLabel uniq CaseDefault
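-- Per 'pprCLbl' further down, these render as @<unique>_ret@,
-- @<unique>_info@, @<unique>_<tag>_alt@ and @<unique>_dflt@ respectively.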
-- Constructing Cost Center Labels
mkCCLabel :: CostCentre -> CLabel
mkCCSLabel :: CostCentreStack -> CLabel
mkCCLabel cc = CC_Label cc
mkCCSLabel ccs = CCS_Label ccs
mkRtsApFastLabel :: FastString -> CLabel
mkRtsApFastLabel str = RtsLabel (RtsApFast str)
mkRtsSlowTickyCtrLabel :: String -> CLabel
mkRtsSlowTickyCtrLabel pat = RtsLabel (RtsSlowTickyCtr pat)
-- Constructing Code Coverage Labels
mkHpcTicksLabel :: Module -> CLabel
mkHpcTicksLabel = HpcTicksLabel
-- Constructing labels used for dynamic linking
mkDynamicLinkerLabel :: DynamicLinkerLabelInfo -> CLabel -> CLabel
mkDynamicLinkerLabel = DynamicLinkerLabel
dynamicLinkerLabelInfo :: CLabel -> Maybe (DynamicLinkerLabelInfo, CLabel)
dynamicLinkerLabelInfo (DynamicLinkerLabel info lbl) = Just (info, lbl)
dynamicLinkerLabelInfo _ = Nothing
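-- 'dynamicLinkerLabelInfo' is the partial inverse of 'mkDynamicLinkerLabel':
--
-- > dynamicLinkerLabelInfo (mkDynamicLinkerLabel CodeStub lbl) == Just (CodeStub, lbl)
--
-- and it returns Nothing for every other form of label.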
mkPicBaseLabel :: CLabel
mkPicBaseLabel = PicBaseLabel
-- Constructing miscellaneous other labels
mkDeadStripPreventer :: CLabel -> CLabel
mkDeadStripPreventer lbl = DeadStripPreventer lbl
mkStringLitLabel :: Unique -> CLabel
mkStringLitLabel = StringLitLabel
mkAsmTempLabel :: Uniquable a => a -> CLabel
mkAsmTempLabel a = AsmTempLabel (getUnique a)
mkPlainModuleInitLabel :: Module -> CLabel
mkPlainModuleInitLabel mod = PlainModuleInitLabel mod
-- -----------------------------------------------------------------------------
-- Convert between different kinds of label
toClosureLbl :: Platform -> CLabel -> CLabel
toClosureLbl _ (IdLabel n c _) = IdLabel n c Closure
toClosureLbl platform l = pprPanic "toClosureLbl" (pprCLabel platform l)
toSlowEntryLbl :: Platform -> CLabel -> CLabel
toSlowEntryLbl _ (IdLabel n c _) = IdLabel n c Slow
toSlowEntryLbl platform l = pprPanic "toSlowEntryLbl" (pprCLabel platform l)
toRednCountsLbl :: Platform -> CLabel -> CLabel
toRednCountsLbl _ (IdLabel n c _) = IdLabel n c RednCounts
toRednCountsLbl platform l = pprPanic "toRednCountsLbl" (pprCLabel platform l)
toEntryLbl :: Platform -> CLabel -> CLabel
toEntryLbl _ (IdLabel n c LocalInfoTable) = IdLabel n c LocalEntry
toEntryLbl _ (IdLabel n c ConInfoTable) = IdLabel n c ConEntry
toEntryLbl _ (IdLabel n c StaticInfoTable) = IdLabel n c StaticConEntry
toEntryLbl _ (IdLabel n c _) = IdLabel n c Entry
toEntryLbl _ (CaseLabel n CaseReturnInfo) = CaseLabel n CaseReturnPt
toEntryLbl _ (CmmLabel m str CmmInfo) = CmmLabel m str CmmEntry
toEntryLbl _ (CmmLabel m str CmmRetInfo) = CmmLabel m str CmmRet
toEntryLbl platform l = pprPanic "toEntryLbl" (pprCLabel platform l)
toInfoLbl :: Platform -> CLabel -> CLabel
toInfoLbl _ (IdLabel n c Entry) = IdLabel n c InfoTable
toInfoLbl _ (IdLabel n c LocalEntry) = IdLabel n c LocalInfoTable
toInfoLbl _ (IdLabel n c ConEntry) = IdLabel n c ConInfoTable
toInfoLbl _ (IdLabel n c StaticConEntry) = IdLabel n c StaticInfoTable
toInfoLbl _ (IdLabel n c _) = IdLabel n c InfoTable
toInfoLbl _ (CaseLabel n CaseReturnPt) = CaseLabel n CaseReturnInfo
toInfoLbl _ (CmmLabel m str CmmEntry) = CmmLabel m str CmmInfo
toInfoLbl _ (CmmLabel m str CmmRet) = CmmLabel m str CmmRetInfo
toInfoLbl platform l = pprPanic "CLabel.toInfoLbl" (pprCLabel platform l)
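-- On the cases they cover, 'toEntryLbl' and 'toInfoLbl' are intended to be
-- mutually inverse; for example, with a hypothetical Name @n@ and CafInfo @c@:
--
-- > toEntryLbl platform (IdLabel n c ConInfoTable) == IdLabel n c ConEntry
-- > toInfoLbl  platform (IdLabel n c ConEntry)     == IdLabel n c ConInfoTable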
-- -----------------------------------------------------------------------------
-- Does a CLabel refer to a CAF?
hasCAF :: CLabel -> Bool
hasCAF (IdLabel _ MayHaveCafRefs _) = True
hasCAF _ = False
-- -----------------------------------------------------------------------------
-- Does a CLabel need declaring before use or not?
--
-- See wiki:Commentary/Compiler/Backends/PprC#Prototypes
needsCDecl :: CLabel -> Bool
-- False <=> it's pre-declared; don't bother
-- don't bother declaring SRT & Bitmap labels, we always make sure
-- they are defined before use.
needsCDecl (IdLabel _ _ SRT) = False
needsCDecl (LargeSRTLabel _) = False
needsCDecl (LargeBitmapLabel _) = False
needsCDecl (IdLabel _ _ _) = True
needsCDecl (CaseLabel _ _) = True
needsCDecl (PlainModuleInitLabel _) = True
needsCDecl (StringLitLabel _) = False
needsCDecl (AsmTempLabel _) = False
needsCDecl (RtsLabel _) = False
needsCDecl (CmmLabel pkgId _ _)
-- Prototypes for labels defined in the runtime system are imported
-- into HC files via includes/Stg.h.
| pkgId == rtsPackageId = False
-- For other labels we inline one into the HC file directly.
| otherwise = True
needsCDecl l@(ForeignLabel{}) = not (isMathFun l)
needsCDecl (CC_Label _) = True
needsCDecl (CCS_Label _) = True
needsCDecl (HpcTicksLabel _) = True
needsCDecl (DynamicLinkerLabel {}) = panic "needsCDecl DynamicLinkerLabel"
needsCDecl PicBaseLabel = panic "needsCDecl PicBaseLabel"
needsCDecl (DeadStripPreventer {}) = panic "needsCDecl DeadStripPreventer"
-- | Check whether a label is a local temporary for native code generation
isAsmTemp :: CLabel -> Bool
isAsmTemp (AsmTempLabel _) = True
isAsmTemp _ = False
-- | If a label is a local temporary used for native code generation
-- then return just its unique, otherwise nothing.
maybeAsmTemp :: CLabel -> Maybe Unique
maybeAsmTemp (AsmTempLabel uq) = Just uq
maybeAsmTemp _ = Nothing
-- | Check whether a label corresponds to a C function that has
-- a prototype in a system header somewhere, or is built-in
-- to the C compiler. For these labels we avoid generating our
-- own C prototypes.
isMathFun :: CLabel -> Bool
isMathFun (ForeignLabel fs _ _ _) = fs `elementOfUniqSet` math_funs
isMathFun _ = False
math_funs :: UniqSet FastString
math_funs = mkUniqSet [
-- _ISOC99_SOURCE
(fsLit "acos"), (fsLit "acosf"), (fsLit "acosh"),
(fsLit "acoshf"), (fsLit "acoshl"), (fsLit "acosl"),
(fsLit "asin"), (fsLit "asinf"), (fsLit "asinl"),
(fsLit "asinh"), (fsLit "asinhf"), (fsLit "asinhl"),
(fsLit "atan"), (fsLit "atanf"), (fsLit "atanl"),
(fsLit "atan2"), (fsLit "atan2f"), (fsLit "atan2l"),
(fsLit "atanh"), (fsLit "atanhf"), (fsLit "atanhl"),
(fsLit "cbrt"), (fsLit "cbrtf"), (fsLit "cbrtl"),
(fsLit "ceil"), (fsLit "ceilf"), (fsLit "ceill"),
(fsLit "copysign"), (fsLit "copysignf"), (fsLit "copysignl"),
(fsLit "cos"), (fsLit "cosf"), (fsLit "cosl"),
(fsLit "cosh"), (fsLit "coshf"), (fsLit "coshl"),
(fsLit "erf"), (fsLit "erff"), (fsLit "erfl"),
(fsLit "erfc"), (fsLit "erfcf"), (fsLit "erfcl"),
(fsLit "exp"), (fsLit "expf"), (fsLit "expl"),
(fsLit "exp2"), (fsLit "exp2f"), (fsLit "exp2l"),
(fsLit "expm1"), (fsLit "expm1f"), (fsLit "expm1l"),
(fsLit "fabs"), (fsLit "fabsf"), (fsLit "fabsl"),
(fsLit "fdim"), (fsLit "fdimf"), (fsLit "fdiml"),
(fsLit "floor"), (fsLit "floorf"), (fsLit "floorl"),
(fsLit "fma"), (fsLit "fmaf"), (fsLit "fmal"),
(fsLit "fmax"), (fsLit "fmaxf"), (fsLit "fmaxl"),
(fsLit "fmin"), (fsLit "fminf"), (fsLit "fminl"),
(fsLit "fmod"), (fsLit "fmodf"), (fsLit "fmodl"),
(fsLit "frexp"), (fsLit "frexpf"), (fsLit "frexpl"),
(fsLit "hypot"), (fsLit "hypotf"), (fsLit "hypotl"),
(fsLit "ilogb"), (fsLit "ilogbf"), (fsLit "ilogbl"),
(fsLit "ldexp"), (fsLit "ldexpf"), (fsLit "ldexpl"),
(fsLit "lgamma"), (fsLit "lgammaf"), (fsLit "lgammal"),
(fsLit "llrint"), (fsLit "llrintf"), (fsLit "llrintl"),
(fsLit "llround"), (fsLit "llroundf"), (fsLit "llroundl"),
(fsLit "log"), (fsLit "logf"), (fsLit "logl"),
(fsLit "log10l"), (fsLit "log10"), (fsLit "log10f"),
(fsLit "log1pl"), (fsLit "log1p"), (fsLit "log1pf"),
(fsLit "log2"), (fsLit "log2f"), (fsLit "log2l"),
(fsLit "logb"), (fsLit "logbf"), (fsLit "logbl"),
(fsLit "lrint"), (fsLit "lrintf"), (fsLit "lrintl"),
(fsLit "lround"), (fsLit "lroundf"), (fsLit "lroundl"),
(fsLit "modf"), (fsLit "modff"), (fsLit "modfl"),
(fsLit "nan"), (fsLit "nanf"), (fsLit "nanl"),
(fsLit "nearbyint"), (fsLit "nearbyintf"), (fsLit "nearbyintl"),
(fsLit "nextafter"), (fsLit "nextafterf"), (fsLit "nextafterl"),
(fsLit "nexttoward"), (fsLit "nexttowardf"), (fsLit "nexttowardl"),
(fsLit "pow"), (fsLit "powf"), (fsLit "powl"),
(fsLit "remainder"), (fsLit "remainderf"), (fsLit "remainderl"),
(fsLit "remquo"), (fsLit "remquof"), (fsLit "remquol"),
(fsLit "rint"), (fsLit "rintf"), (fsLit "rintl"),
(fsLit "round"), (fsLit "roundf"), (fsLit "roundl"),
(fsLit "scalbln"), (fsLit "scalblnf"), (fsLit "scalblnl"),
(fsLit "scalbn"), (fsLit "scalbnf"), (fsLit "scalbnl"),
(fsLit "sin"), (fsLit "sinf"), (fsLit "sinl"),
(fsLit "sinh"), (fsLit "sinhf"), (fsLit "sinhl"),
(fsLit "sqrt"), (fsLit "sqrtf"), (fsLit "sqrtl"),
(fsLit "tan"), (fsLit "tanf"), (fsLit "tanl"),
(fsLit "tanh"), (fsLit "tanhf"), (fsLit "tanhl"),
(fsLit "tgamma"), (fsLit "tgammaf"), (fsLit "tgammal"),
(fsLit "trunc"), (fsLit "truncf"), (fsLit "truncl"),
-- ISO C 99 also defines these function-like macros in math.h:
-- fpclassify, isfinite, isinf, isnormal, signbit, isgreater,
-- isgreaterequal, isless, islessequal, islessgreater, isunordered
-- additional symbols from _BSD_SOURCE
(fsLit "drem"), (fsLit "dremf"), (fsLit "dreml"),
(fsLit "finite"), (fsLit "finitef"), (fsLit "finitel"),
(fsLit "gamma"), (fsLit "gammaf"), (fsLit "gammal"),
(fsLit "isinf"), (fsLit "isinff"), (fsLit "isinfl"),
(fsLit "isnan"), (fsLit "isnanf"), (fsLit "isnanl"),
(fsLit "j0"), (fsLit "j0f"), (fsLit "j0l"),
(fsLit "j1"), (fsLit "j1f"), (fsLit "j1l"),
(fsLit "jn"), (fsLit "jnf"), (fsLit "jnl"),
(fsLit "lgamma_r"), (fsLit "lgammaf_r"), (fsLit "lgammal_r"),
(fsLit "scalb"), (fsLit "scalbf"), (fsLit "scalbl"),
(fsLit "significand"), (fsLit "significandf"), (fsLit "significandl"),
(fsLit "y0"), (fsLit "y0f"), (fsLit "y0l"),
(fsLit "y1"), (fsLit "y1f"), (fsLit "y1l"),
(fsLit "yn"), (fsLit "ynf"), (fsLit "ynl")
]
-- -----------------------------------------------------------------------------
-- | Is a CLabel visible outside this object file or not?
-- From the point of view of the code generator, a name is
-- externally visible if it has to be declared as exported
-- in the .o file's symbol table; that is, made non-static.
externallyVisibleCLabel :: CLabel -> Bool -- not C "static"
externallyVisibleCLabel (CaseLabel _ _) = False
externallyVisibleCLabel (StringLitLabel _) = False
externallyVisibleCLabel (AsmTempLabel _) = False
externallyVisibleCLabel (PlainModuleInitLabel _)= True
externallyVisibleCLabel (RtsLabel _) = True
externallyVisibleCLabel (CmmLabel _ _ _) = True
externallyVisibleCLabel (ForeignLabel{}) = True
externallyVisibleCLabel (IdLabel name _ info) = isExternalName name && externallyVisibleIdLabel info
externallyVisibleCLabel (CC_Label _) = True
externallyVisibleCLabel (CCS_Label _) = True
externallyVisibleCLabel (DynamicLinkerLabel _ _) = False
externallyVisibleCLabel (HpcTicksLabel _) = True
externallyVisibleCLabel (LargeBitmapLabel _) = False
externallyVisibleCLabel (LargeSRTLabel _) = False
externallyVisibleCLabel (PicBaseLabel {}) = panic "externallyVisibleCLabel PicBaseLabel"
externallyVisibleCLabel (DeadStripPreventer {}) = panic "externallyVisibleCLabel DeadStripPreventer"
externallyVisibleIdLabel :: IdLabelInfo -> Bool
externallyVisibleIdLabel SRT = False
externallyVisibleIdLabel LocalInfoTable = False
externallyVisibleIdLabel LocalEntry = False
externallyVisibleIdLabel _ = True
-- -----------------------------------------------------------------------------
-- Finding the "type" of a CLabel
-- For generating correct types in label declarations:
data CLabelType
= CodeLabel -- Address of some executable instructions
| DataLabel -- Address of data, not a GC ptr
| GcPtrLabel -- Address of a (presumably static) GC object
isCFunctionLabel :: CLabel -> Bool
isCFunctionLabel lbl = case labelType lbl of
CodeLabel -> True
_other -> False
isGcPtrLabel :: CLabel -> Bool
isGcPtrLabel lbl = case labelType lbl of
GcPtrLabel -> True
_other -> False
-- | Work out the general type of data at the address of this label
-- whether it be code, data, or static GC object.
labelType :: CLabel -> CLabelType
labelType (CmmLabel _ _ CmmData) = DataLabel
labelType (CmmLabel _ _ CmmGcPtr) = GcPtrLabel
labelType (CmmLabel _ _ CmmCode) = CodeLabel
labelType (CmmLabel _ _ CmmInfo) = DataLabel
labelType (CmmLabel _ _ CmmEntry) = CodeLabel
labelType (CmmLabel _ _ CmmRetInfo) = DataLabel
labelType (CmmLabel _ _ CmmRet) = CodeLabel
labelType (RtsLabel (RtsSelectorInfoTable _ _)) = DataLabel
labelType (RtsLabel (RtsApInfoTable _ _)) = DataLabel
labelType (RtsLabel (RtsApFast _)) = CodeLabel
labelType (CaseLabel _ CaseReturnInfo) = DataLabel
labelType (CaseLabel _ _) = CodeLabel
labelType (PlainModuleInitLabel _) = CodeLabel
labelType (LargeSRTLabel _) = DataLabel
labelType (LargeBitmapLabel _) = DataLabel
labelType (ForeignLabel _ _ _ IsFunction) = CodeLabel
labelType (IdLabel _ _ info) = idInfoLabelType info
labelType _ = DataLabel
idInfoLabelType :: IdLabelInfo -> CLabelType
idInfoLabelType info =
case info of
InfoTable -> DataLabel
LocalInfoTable -> DataLabel
Closure -> GcPtrLabel
ConInfoTable -> DataLabel
StaticInfoTable -> DataLabel
ClosureTable -> DataLabel
RednCounts -> DataLabel
_ -> CodeLabel
-- -----------------------------------------------------------------------------
-- Does a CLabel need dynamic linkage?
-- When referring to data in code, we need to know whether
-- that data resides in a DLL or not. [Win32 only.]
-- @labelDynamic@ returns @True@ if the label is located
-- in a DLL, be it a data reference or not.
labelDynamic :: DynFlags -> PackageId -> CLabel -> Bool
labelDynamic dflags this_pkg lbl =
case lbl of
-- is the RTS in a DLL or not?
RtsLabel _ -> not opt_Static && (this_pkg /= rtsPackageId)
IdLabel n _ _ -> isDllName this_pkg n
-- When compiling in the "dyn" way, each package is to be linked into
-- its own shared library.
CmmLabel pkg _ _
| os == OSMinGW32 ->
not opt_Static && (this_pkg /= pkg)
| otherwise ->
True
ForeignLabel _ _ source _ ->
if os == OSMinGW32
then case source of
-- Foreign label is in some un-named foreign package (or DLL).
ForeignLabelInExternalPackage -> True
-- Foreign label is linked into the same package as the
-- source file currently being compiled.
ForeignLabelInThisPackage -> False
-- Foreign label is in some named package.
-- When compiling in the "dyn" way, each package is to be
-- linked into its own DLL.
ForeignLabelInPackage pkgId ->
(not opt_Static) && (this_pkg /= pkgId)
else -- On Mac OS X and on ELF platforms, false positives are OK,
-- so we claim that all foreign imports come from dynamic
-- libraries
True
PlainModuleInitLabel m -> not opt_Static && this_pkg /= (modulePackageId m)
-- Note that DynamicLinkerLabels do NOT require dynamic linking themselves.
_ -> False
where os = platformOS (targetPlatform dflags)
{-
OLD?: These GRAN functions are needed for spitting out GRAN_FETCH() at the
right places. They are used to detect when the abstractC statement of a
CCodeBlock actually contains the code for a slow entry point. -- HWL
We need at least @Eq@ for @CLabels@, because we want to avoid
duplicate declarations in generating C (see @labelSeenTE@ in
@PprAbsC@).
-}
-----------------------------------------------------------------------------
-- Printing out CLabels.
{-
Convention:
<name>_<type>
where <name> is <Module>_<name> for external names and <unique> for
internal names. <type> is one of the following:
info Info table
srt Static reference table
srtd Static reference table descriptor
entry Entry code (function, closure)
slow Slow entry code (if any)
ret Direct return address
vtbl Vector table
<n>_alt Case alternative (tag n)
dflt Default case alternative
btm Large bitmap vector
closure Static closure
con_entry Dynamic Constructor entry code
con_info Dynamic Constructor info table
static_entry Static Constructor entry code
static_info Static Constructor info table
sel_info Selector info table
sel_entry Selector entry code
cc Cost centre
ccs Cost centre stack
Many of these distinctions are only for documentation reasons. For
example, _ret is only distinguished from _entry to make it easy to
tell whether a code fragment is a return point or a closure/function
entry.
Note [Closure and info labels]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For a function 'foo', we have:
foo_info : Points to the info table describing foo's closure
(and entry code for foo with tables next to code)
foo_closure : Static (no-free-var) closure only:
points to the statically-allocated closure
For a data constructor (such as Just or Nothing), we have:
Just_con_info: Info table for the data constructor itself
the first word of a heap-allocated Just
Just_info: Info table for the *worker function*, an
ordinary Haskell function of arity 1 that
allocates a (Just x) box:
Just = \x -> Just x
Just_closure: The closure for this worker
Nothing_closure: a statically allocated closure for Nothing
Nothing_static_info: info table for Nothing_closure
All these must be exported symbols, EXCEPT Just_info. We don't need to
export this because in other modules we either have
* A reference to 'Just'; use Just_closure
* A saturated call 'Just x'; allocate using Just_con_info
Not exporting these Just_info labels reduces the number of symbols
somewhat.
-}
instance PlatformOutputable CLabel where
pprPlatform = pprCLabel
pprCLabel :: Platform -> CLabel -> SDoc
pprCLabel platform (AsmTempLabel u)
| cGhcWithNativeCodeGen == "YES"
= getPprStyle $ \ sty ->
if asmStyle sty then
ptext (asmTempLabelPrefix platform) <> pprUnique u
else
char '_' <> pprUnique u
pprCLabel platform (DynamicLinkerLabel info lbl)
| cGhcWithNativeCodeGen == "YES"
= pprDynamicLinkerAsmLabel platform info lbl
pprCLabel _ PicBaseLabel
| cGhcWithNativeCodeGen == "YES"
= ptext (sLit "1b")
pprCLabel platform (DeadStripPreventer lbl)
| cGhcWithNativeCodeGen == "YES"
= pprCLabel platform lbl <> ptext (sLit "_dsp")
pprCLabel platform lbl
= getPprStyle $ \ sty ->
if cGhcWithNativeCodeGen == "YES" && asmStyle sty
then maybe_underscore (pprAsmCLbl platform lbl)
else pprCLbl lbl
maybe_underscore :: SDoc -> SDoc
maybe_underscore doc
| underscorePrefix = pp_cSEP <> doc
| otherwise = doc
pprAsmCLbl :: Platform -> CLabel -> SDoc
pprAsmCLbl platform (ForeignLabel fs (Just sz) _ _)
| platformOS platform == OSMinGW32
-- In asm mode, we need to put the suffix on a stdcall ForeignLabel.
-- (The C compiler does this itself).
= ftext fs <> char '@' <> int sz
pprAsmCLbl _ lbl
= pprCLbl lbl
pprCLbl :: CLabel -> SDoc
pprCLbl (StringLitLabel u)
= pprUnique u <> ptext (sLit "_str")
pprCLbl (CaseLabel u CaseReturnPt)
= hcat [pprUnique u, ptext (sLit "_ret")]
pprCLbl (CaseLabel u CaseReturnInfo)
= hcat [pprUnique u, ptext (sLit "_info")]
pprCLbl (CaseLabel u (CaseAlt tag))
= hcat [pprUnique u, pp_cSEP, int tag, ptext (sLit "_alt")]
pprCLbl (CaseLabel u CaseDefault)
= hcat [pprUnique u, ptext (sLit "_dflt")]
pprCLbl (LargeSRTLabel u) = pprUnique u <> pp_cSEP <> ptext (sLit "srtd")
pprCLbl (LargeBitmapLabel u) = text "b" <> pprUnique u <> pp_cSEP <> ptext (sLit "btm")
-- Some bitmaps for tuple constructors have a numeric tag (e.g. '7');
-- until that gets resolved we'll just force them to start
-- with a letter so the label will be legal assembly code.
pprCLbl (CmmLabel _ str CmmCode) = ftext str
pprCLbl (CmmLabel _ str CmmData) = ftext str
pprCLbl (CmmLabel _ str CmmGcPtr) = ftext str
pprCLbl (CmmLabel _ str CmmPrimCall) = ftext str
pprCLbl (RtsLabel (RtsApFast str)) = ftext str <> ptext (sLit "_fast")
pprCLbl (RtsLabel (RtsSelectorInfoTable upd_reqd offset))
= hcat [ptext (sLit "stg_sel_"), text (show offset),
ptext (if upd_reqd
then (sLit "_upd_info")
else (sLit "_noupd_info"))
]
pprCLbl (RtsLabel (RtsSelectorEntry upd_reqd offset))
= hcat [ptext (sLit "stg_sel_"), text (show offset),
ptext (if upd_reqd
then (sLit "_upd_entry")
else (sLit "_noupd_entry"))
]
pprCLbl (RtsLabel (RtsApInfoTable upd_reqd arity))
= hcat [ptext (sLit "stg_ap_"), text (show arity),
ptext (if upd_reqd
then (sLit "_upd_info")
else (sLit "_noupd_info"))
]
pprCLbl (RtsLabel (RtsApEntry upd_reqd arity))
= hcat [ptext (sLit "stg_ap_"), text (show arity),
ptext (if upd_reqd
then (sLit "_upd_entry")
else (sLit "_noupd_entry"))
]
pprCLbl (CmmLabel _ fs CmmInfo)
= ftext fs <> ptext (sLit "_info")
pprCLbl (CmmLabel _ fs CmmEntry)
= ftext fs <> ptext (sLit "_entry")
pprCLbl (CmmLabel _ fs CmmRetInfo)
= ftext fs <> ptext (sLit "_info")
pprCLbl (CmmLabel _ fs CmmRet)
= ftext fs <> ptext (sLit "_ret")
pprCLbl (RtsLabel (RtsPrimOp primop))
= ptext (sLit "stg_") <> ppr primop
pprCLbl (RtsLabel (RtsSlowTickyCtr pat))
= ptext (sLit "SLOW_CALL_") <> text pat <> ptext (sLit "_ctr")
pprCLbl (ForeignLabel str _ _ _)
= ftext str
pprCLbl (IdLabel name _cafs flavor) = ppr name <> ppIdFlavor flavor
pprCLbl (CC_Label cc) = ppr cc
pprCLbl (CCS_Label ccs) = ppr ccs
pprCLbl (PlainModuleInitLabel mod)
= ptext (sLit "__stginit_") <> ppr mod
pprCLbl (HpcTicksLabel mod)
= ptext (sLit "_hpc_tickboxes_") <> ppr mod <> ptext (sLit "_hpc")
pprCLbl (AsmTempLabel {}) = panic "pprCLbl AsmTempLabel"
pprCLbl (DynamicLinkerLabel {}) = panic "pprCLbl DynamicLinkerLabel"
pprCLbl (PicBaseLabel {}) = panic "pprCLbl PicBaseLabel"
pprCLbl (DeadStripPreventer {}) = panic "pprCLbl DeadStripPreventer"
ppIdFlavor :: IdLabelInfo -> SDoc
ppIdFlavor x = pp_cSEP <>
(case x of
Closure -> ptext (sLit "closure")
SRT -> ptext (sLit "srt")
InfoTable -> ptext (sLit "info")
LocalInfoTable -> ptext (sLit "info")
Entry -> ptext (sLit "entry")
LocalEntry -> ptext (sLit "entry")
Slow -> ptext (sLit "slow")
RednCounts -> ptext (sLit "ct")
ConEntry -> ptext (sLit "con_entry")
ConInfoTable -> ptext (sLit "con_info")
StaticConEntry -> ptext (sLit "static_entry")
StaticInfoTable -> ptext (sLit "static_info")
ClosureTable -> ptext (sLit "closure_tbl")
)
pp_cSEP :: SDoc
pp_cSEP = char '_'
instance Outputable ForeignLabelSource where
ppr fs
= case fs of
ForeignLabelInPackage pkgId -> parens $ text "package: " <> ppr pkgId
ForeignLabelInThisPackage -> parens $ text "this package"
ForeignLabelInExternalPackage -> parens $ text "external package"
-- -----------------------------------------------------------------------------
-- Machine-dependent knowledge about labels.
underscorePrefix :: Bool -- leading underscore on assembler labels?
underscorePrefix = (cLeadingUnderscore == "YES")
asmTempLabelPrefix :: Platform -> LitString -- for formatting labels
asmTempLabelPrefix platform =
if platformOS platform == OSDarwin
then sLit "L"
else sLit ".L"
pprDynamicLinkerAsmLabel :: Platform -> DynamicLinkerLabelInfo -> CLabel -> SDoc
pprDynamicLinkerAsmLabel platform dllInfo lbl
= if platformOS platform == OSDarwin
then if platformArch platform == ArchX86_64
then case dllInfo of
CodeStub -> char 'L' <> pprCLabel platform lbl <> text "$stub"
SymbolPtr -> char 'L' <> pprCLabel platform lbl <> text "$non_lazy_ptr"
GotSymbolPtr -> pprCLabel platform lbl <> text "@GOTPCREL"
GotSymbolOffset -> pprCLabel platform lbl
else case dllInfo of
CodeStub -> char 'L' <> pprCLabel platform lbl <> text "$stub"
SymbolPtr -> char 'L' <> pprCLabel platform lbl <> text "$non_lazy_ptr"
_ -> panic "pprDynamicLinkerAsmLabel"
else if osElfTarget (platformOS platform)
then if platformArch platform == ArchPPC
then case dllInfo of
CodeStub -> pprCLabel platform lbl <> text "@plt"
SymbolPtr -> text ".LC_" <> pprCLabel platform lbl
_ -> panic "pprDynamicLinkerAsmLabel"
else if platformArch platform == ArchX86_64
then case dllInfo of
CodeStub -> pprCLabel platform lbl <> text "@plt"
GotSymbolPtr -> pprCLabel platform lbl <> text "@gotpcrel"
GotSymbolOffset -> pprCLabel platform lbl
SymbolPtr -> text ".LC_" <> pprCLabel platform lbl
else case dllInfo of
CodeStub -> pprCLabel platform lbl <> text "@plt"
SymbolPtr -> text ".LC_" <> pprCLabel platform lbl
GotSymbolPtr -> pprCLabel platform lbl <> text "@got"
GotSymbolOffset -> pprCLabel platform lbl <> text "@gotoff"
else if platformOS platform == OSMinGW32
then case dllInfo of
SymbolPtr -> text "__imp_" <> pprCLabel platform lbl
_ -> panic "pprDynamicLinkerAsmLabel"
else panic "pprDynamicLinkerAsmLabel"
| mcmaniac/ghc | compiler/cmm/CLabel.hs | bsd-3-clause | 46,035 | 0 | 14 | 13,148 | 9,229 | 4,860 | 4,369 | 703 | 21 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
TcInstDecls: Typechecking instance declarations
-}
{-# LANGUAGE CPP #-}
module ETA.TypeCheck.TcInstDcls ( tcInstDecls1, tcInstDecls2 ) where
import ETA.HsSyn.HsSyn
import ETA.TypeCheck.TcBinds
import ETA.TypeCheck.TcTyClsDecls
import ETA.TypeCheck.TcClassDcl( tcClassDecl2,
HsSigFun, lookupHsSig, mkHsSigFun,
findMethodBind, instantiateMethod, tcInstanceMethodBody )
import ETA.TypeCheck.TcPat ( addInlinePrags )
import ETA.TypeCheck.TcRnMonad
import ETA.TypeCheck.TcValidity
import ETA.TypeCheck.TcMType
import ETA.TypeCheck.TcType
import ETA.Iface.BuildTyCl
import ETA.TypeCheck.Inst
import ETA.Types.InstEnv
import ETA.TypeCheck.FamInst
import ETA.Types.FamInstEnv
import ETA.TypeCheck.TcDeriv
import ETA.TypeCheck.TcEnv
import ETA.TypeCheck.TcHsType
import ETA.TypeCheck.TcUnify
import ETA.Types.Coercion ( pprCoAxiom )
import ETA.Core.MkCore ( nO_METHOD_BINDING_ERROR_ID )
import ETA.Types.Type
import ETA.TypeCheck.TcEvidence
import ETA.Types.TyCon
import ETA.Types.CoAxiom
import ETA.BasicTypes.DataCon
import ETA.Types.Class
import ETA.BasicTypes.Var
import qualified ETA.BasicTypes.Var as Var
import ETA.BasicTypes.VarEnv
import ETA.BasicTypes.VarSet
import ETA.Prelude.PrelNames ( typeableClassName, genericClassNames )
import ETA.Utils.Bag
import ETA.BasicTypes.BasicTypes
import ETA.Main.DynFlags
import ETA.Main.ErrUtils
import ETA.Utils.FastString
import ETA.Main.HscTypes ( isHsBootOrSig )
import ETA.BasicTypes.Id
import ETA.BasicTypes.MkId
import ETA.BasicTypes.Name
import ETA.BasicTypes.NameSet
import ETA.Utils.Outputable
import qualified ETA.Utils.Outputable as Outputable
import ETA.BasicTypes.SrcLoc
import ETA.Utils.Util
import ETA.Utils.BooleanFormula ( isUnsatisfied, pprBooleanFormulaNice )
import Control.Monad
import ETA.Utils.Maybes ( isNothing, isJust, whenIsJust )
import Data.List ( mapAccumL, partition )
#include "HsVersions.h"
{-
Typechecking instance declarations is done in two passes. The first
pass, made by @tcInstDecls1@, collects information to be used in the
second pass.
This pre-processed info includes the as-yet-unprocessed bindings
inside the instance declaration. These are type-checked in the second
pass, when the class-instance envs and GVE contain all the info from
all the instance and value decls. Indeed that's the reason we need
two passes over the instance decls.
Note [How instance declarations are translated]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Here is how we translate instance declarations into Core
Running example:
class C a where
op1, op2 :: Ix b => a -> b -> b
op2 = <dm-rhs>
instance C a => C [a]
{-# INLINE [2] op1 #-}
op1 = <rhs>
===>
-- Method selectors
op1,op2 :: forall a. C a => forall b. Ix b => a -> b -> b
op1 = ...
op2 = ...
-- Default methods get the 'self' dictionary as argument
-- so they can call other methods at the same type
-- Default methods get the same type as their method selector
$dmop2 :: forall a. C a => forall b. Ix b => a -> b -> b
$dmop2 = /\a. \(d:C a). /\b. \(d2: Ix b). <dm-rhs>
-- NB: type variables 'a' and 'b' are *both* in scope in <dm-rhs>
-- Note [Tricky type variable scoping]
-- A top-level definition for each instance method
-- Here op1_i, op2_i are the "instance method Ids"
-- The INLINE pragma comes from the user pragma
{-# INLINE [2] op1_i #-} -- From the instance decl bindings
op1_i, op2_i :: forall a. C a => forall b. Ix b => [a] -> b -> b
op1_i = /\a. \(d:C a).
let this :: C [a]
this = df_i a d
-- Note [Subtle interaction of recursion and overlap]
local_op1 :: forall b. Ix b => [a] -> b -> b
local_op1 = <rhs>
-- Source code; run the type checker on this
-- NB: Type variable 'a' (but not 'b') is in scope in <rhs>
-- Note [Tricky type variable scoping]
in local_op1 a d
op2_i = /\a \d:C a. $dmop2 [a] (df_i a d)
-- The dictionary function itself
{-# NOINLINE CONLIKE df_i #-} -- Never inline dictionary functions
df_i :: forall a. C a -> C [a]
df_i = /\a. \d:C a. MkC (op1_i a d) (op2_i a d)
-- But see Note [Default methods in instances]
-- We can't apply the type checker to the default-method call
-- Use a RULE to short-circuit applications of the class ops
{-# RULE "op1@C[a]" forall a, d:C a.
op1 [a] (df_i d) = op1_i a d #-}
Note [Instances and loop breakers]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* Note that df_i may be mutually recursive with both op1_i and op2_i.
It's crucial that df_i is not chosen as the loop breaker, even
though op1_i has a (user-specified) INLINE pragma.
* Instead the idea is to inline df_i into op1_i, which may then select
methods from the MkC record, and thereby break the recursion with
  df_i, leaving a *self*-recursive op1_i. (If op1_i doesn't call op at
the same type, it won't mention df_i, so there won't be recursion in
the first place.)
* If op1_i is marked INLINE by the user there's a danger that we won't
inline df_i in it, and that in turn means that (since it'll be a
loop-breaker because df_i isn't), op1_i will ironically never be
inlined. But this is OK: the recursion breaking happens by way of
a RULE (the magic ClassOp rule above), and RULES work inside InlineRule
unfoldings. See Note [RULEs enabled in SimplGently] in SimplUtils
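  Schematically (this just restates the ClassOp rule shown above, using the
  same running example): if the right-hand side of op1_i contains a call
      op1 [a] (df_i a d)
  the rule rewrites it to
      op1_i a d
  so the recursion goes through op1_i itself, and df_i does not need to be
  chosen as a loop breaker.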
Note [ClassOp/DFun selection]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
One thing we see a lot is stuff like
op2 (df d1 d2)
where 'op2' is a ClassOp and 'df' is DFun. Now, we could inline *both*
'op2' and 'df' to get
case (MkD ($cop1 d1 d2) ($cop2 d1 d2) ... of
MkD _ op2 _ _ _ -> op2
And that will reduce to ($cop2 d1 d2) which is what we wanted.
But it's tricky to make this work in practice, because it requires us to
inline both 'op2' and 'df'. But neither is keen to inline without having
seen the other's result; and it's very easy to get code bloat (from the
big intermediate) if you inline a bit too much.
Instead we use a cunning trick.
* We arrange that 'df' and 'op2' NEVER inline.
* We arrange that 'df' is ALWAYS defined in the stylised form
df d1 d2 = MkD ($cop1 d1 d2) ($cop2 d1 d2) ...
* We give 'df' a magical unfolding (DFunUnfolding [$cop1, $cop2, ..])
that lists its methods.
* We make CoreUnfold.exprIsConApp_maybe spot a DFunUnfolding and return
a suitable constructor application -- inlining df "on the fly" as it
were.
* ClassOp rules: We give the ClassOp 'op2' a BuiltinRule that
extracts the right piece iff its argument satisfies
exprIsConApp_maybe. This is done in MkId mkDictSelId
* We make 'df' CONLIKE, so that shared uses still match; eg
let d = df d1 d2
in ...(op2 d)...(op1 d)...
Note [Single-method classes]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the class has just one method (or, more accurately, just one element
of {superclasses + methods}), then we use a different strategy.
class C a where op :: a -> a
instance C a => C [a] where op = <blah>
We translate the class decl into a newtype, which just gives a
top-level axiom. The "constructor" MkC expands to a cast, as does the
class-op selector.
axiom Co:C a :: C a ~ (a->a)
op :: forall a. C a -> (a -> a)
op a d = d |> (Co:C a)
MkC :: forall a. (a->a) -> C a
MkC = /\a.\op. op |> (sym Co:C a)
The clever RULE stuff doesn't work now, because ($df a d) isn't
a constructor application, so exprIsConApp_maybe won't return
Just <blah>.
Instead, we simply rely on the fact that casts are cheap:
$df :: forall a. C a => C [a]
{-# INLINE df #-} -- NB: INLINE this
$df = /\a. \d. MkC [a] ($cop_list a d)
= $cop_list |> forall a. C a -> (sym (Co:C [a]))
$cop_list :: forall a. C a => [a] -> [a]
$cop_list = <blah>
So if we see
(op ($df a d))
we'll inline 'op' and '$df', since both are simply casts, and
good things happen.
Why do we use this different strategy? Because otherwise we
end up with non-inlined dictionaries that look like
$df = $cop |> blah
which adds an extra indirection to every use, which seems stupid. See
Trac #4138 for an example (although the regression reported there
wasn't due to the indirection).
There is an awkward wrinkle though: we want to be very
careful when we have
instance C a => C [a] where
{-# INLINE op #-}
op = ...
then we'll get an INLINE pragma on $cop_list but it's important that
$cop_list only inlines when it's applied to *two* arguments (the
dictionary and the list argument). So we must not eta-expand $df
above. We ensure that this doesn't happen by putting an INLINE
pragma on the dfun itself; after all, it ends up being just a cast.
There is one more dark corner to the INLINE story, even more deeply
buried. Consider this (Trac #3772):
class DeepSeq a => C a where
gen :: Int -> a
instance C a => C [a] where
gen n = ...
class DeepSeq a where
deepSeq :: a -> b -> b
instance DeepSeq a => DeepSeq [a] where
{-# INLINE deepSeq #-}
deepSeq xs b = foldr deepSeq b xs
That gives rise to these defns:
$cdeepSeq :: DeepSeq a -> [a] -> b -> b
-- User INLINE( 3 args )!
$cdeepSeq a (d:DS a) b (x:[a]) (y:b) = ...
$fDeepSeq[] :: DeepSeq a -> DeepSeq [a]
-- DFun (with auto INLINE pragma)
$fDeepSeq[] a d = $cdeepSeq a d |> blah
   $cp1 a d :: C a => DeepSeq [a]
-- We don't want to eta-expand this, lest
-- $cdeepSeq gets inlined in it!
   $cp1 a d = $fDeepSeq[] a (scsel a d)
$fC[] :: C a => C [a]
-- Ordinary DFun
$fC[] a d = MkC ($cp1 a d) ($cgen a d)
Here $cp1 is the code that generates the superclass for C [a]. The
issue is this: we must not eta-expand $cp1 either, or else $fDeepSeq[]
and then $cdeepSeq will inline there, which is definitely wrong. Like
on the dfun, we solve this by adding an INLINE pragma to $cp1.
Note [Subtle interaction of recursion and overlap]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
class C a where { op1,op2 :: a -> a }
instance C a => C [a] where
op1 x = op2 x ++ op2 x
op2 x = ...
instance C [Int] where
...
When type-checking the C [a] instance, we need a C [a] dictionary (for
the call of op2). If we look up in the instance environment, we find
an overlap. And in *general* the right thing is to complain (see Note
[Overlapping instances] in InstEnv). But in *this* case it's wrong to
complain, because we just want to delegate to the op2 of this same
instance.
Why is this justified? Because we generate a (C [a]) constraint in
a context in which 'a' cannot be instantiated to anything that matches
other overlapping instances, or else we would not be executing this
version of op1 in the first place.
It might even be a bit disguised:
nullFail :: C [a] => [a] -> [a]
nullFail x = op2 x ++ op2 x
instance C a => C [a] where
op1 x = nullFail x
Precisely this is used in package 'regex-base', module Context.hs.
See the overlapping instances for RegexContext, and the fact that they
call 'nullFail' just like the example above. The DoCon package also
does the same thing; it shows up in module Fraction.hs.
Conclusion: when typechecking the methods in a C [a] instance, we want to
treat the 'a' as an *existential* type variable, in the sense described
by Note [Binding when looking up instances]. That is why isOverlappableTyVar
responds True to an InstSkol, which is the kind of skolem we use in
tcInstDecl2.
Note [Tricky type variable scoping]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In our example
class C a where
op1, op2 :: Ix b => a -> b -> b
op2 = <dm-rhs>
instance C a => C [a]
{-# INLINE [2] op1 #-}
op1 = <rhs>
note that 'a' and 'b' are *both* in scope in <dm-rhs>, but only 'a' is
in scope in <rhs>. In particular, we must make sure that 'b' is in
scope when typechecking <dm-rhs>. This is achieved by subFunTys,
which brings appropriate tyvars into scope. This happens for both
<dm-rhs> and for <rhs>, but that doesn't matter: the *renamer* will have
complained if 'b' is mentioned in <rhs>.
************************************************************************
* *
\subsection{Extracting instance decls}
* *
************************************************************************
Gather up the instance declarations from their various sources
-}
tcInstDecls1 -- Deal with both source-code and imported instance decls
:: [LTyClDecl Name] -- For deriving stuff
-> [LInstDecl Name] -- Source code instance decls
-> [LDerivDecl Name] -- Source code stand-alone deriving decls
-> TcM (TcGblEnv, -- The full inst env
[InstInfo Name], -- Source-code instance decls to process;
-- contains all dfuns for this module
HsValBinds Name) -- Supporting bindings for derived instances
tcInstDecls1 tycl_decls inst_decls deriv_decls
= checkNoErrs $
do { -- Stop if addInstInfos etc discovers any errors
-- (they recover, so that we get more than one error each
-- round)
-- Do class and family instance declarations
; stuff <- mapAndRecoverM tcLocalInstDecl inst_decls
; let (local_infos_s, fam_insts_s) = unzip stuff
fam_insts = concat fam_insts_s
local_infos' = concat local_infos_s
-- Handwritten instances of the poly-kinded Typeable class are
-- forbidden, so we handle those separately
(typeable_instances, local_infos)
= partition bad_typeable_instance local_infos'
; addClsInsts local_infos $
addFamInsts fam_insts $
do { -- Compute instances from "deriving" clauses;
-- This stuff computes a context for the derived instance
-- decl, so it needs to know about all the instances possible
-- NB: class instance declarations can contain derivings as
-- part of associated data type declarations
failIfErrsM -- If the addInsts stuff gave any errors, don't
-- try the deriving stuff, because that may give
-- more errors still
; traceTc "tcDeriving" Outputable.empty
; th_stage <- getStage -- See Note [Deriving inside TH brackets ]
; (gbl_env, deriv_inst_info, deriv_binds)
<- if isBrackStage th_stage
then do { gbl_env <- getGblEnv
; return (gbl_env, emptyBag, emptyValBindsOut) }
else tcDeriving tycl_decls inst_decls deriv_decls
-- Fail if there are any handwritten instance of poly-kinded Typeable
; mapM_ typeable_err typeable_instances
-- Check that if the module is compiled with -XSafe, there are no
-- hand written instances of old Typeable as then unsafe casts could be
-- performed. Derived instances are OK.
; dflags <- getDynFlags
; when (safeLanguageOn dflags) $ forM_ local_infos $ \x -> case x of
_ | genInstCheck x -> addErrAt (getSrcSpan $ iSpec x) (genInstErr x)
_ -> return ()
-- As above but for Safe Inference mode.
; when (safeInferOn dflags) $ forM_ local_infos $ \x -> case x of
_ | genInstCheck x -> recordUnsafeInfer
_ | overlapCheck x -> recordUnsafeInfer
_ -> return ()
; return ( gbl_env
, bagToList deriv_inst_info ++ local_infos
, deriv_binds)
}}
where
-- Separate the Typeable instances from the rest
bad_typeable_instance i
= typeableClassName == is_cls_nm (iSpec i)
overlapCheck ty = case overlapMode (is_flag $ iSpec ty) of
NoOverlap _ -> False
_ -> True
genInstCheck ty = is_cls_nm (iSpec ty) `elem` genericClassNames
genInstErr i = hang (ptext (sLit $ "Generic instances can only be "
++ "derived in Safe Haskell.") $+$
ptext (sLit "Replace the following instance:"))
2 (pprInstanceHdr (iSpec i))
    -- Report an error or a warning for `Typeable` instances.
    -- If we are working on an .hs-boot file, we just report a warning
    -- and ignore the instance. We do this to give users a chance to fix
-- their code.
typeable_err i =
setSrcSpan (getSrcSpan (iSpec i)) $
do env <- getGblEnv
if isHsBootOrSig (tcg_src env)
then
do warn <- woptM Opt_WarnDerivingTypeable
when warn $ addWarnTc $ vcat
[ ptext (sLit "`Typeable` instances in .hs-boot files are ignored.")
, ptext (sLit "This warning will become an error in future versions of the compiler.")
]
else addErrTc $ ptext (sLit "Class `Typeable` does not support user-specified instances.")
addClsInsts :: [InstInfo Name] -> TcM a -> TcM a
addClsInsts infos thing_inside
= tcExtendLocalInstEnv (map iSpec infos) thing_inside
addFamInsts :: [FamInst] -> TcM a -> TcM a
-- Extend (a) the family instance envt
-- (b) the type envt with stuff from data type decls
addFamInsts fam_insts thing_inside
= tcExtendLocalFamInstEnv fam_insts $
tcExtendGlobalEnv things $
do { traceTc "addFamInsts" (pprFamInsts fam_insts)
; tcg_env <- tcAddImplicits things
; setGblEnv tcg_env thing_inside }
where
axioms = map (toBranchedAxiom . famInstAxiom) fam_insts
tycons = famInstsRepTyCons fam_insts
things = map ATyCon tycons ++ map ACoAxiom axioms
{-
Note [Deriving inside TH brackets]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Given a declaration bracket
[d| data T = A | B deriving( Show ) |]
there is really no point in generating the derived code for deriving(
Show) and then type-checking it. This will happen at the call site
anyway, and the type check should never fail! Moreover (Trac #6005)
the scoping of the generated code inside the bracket does not seem to
work out.
The easy solution is simply not to generate the derived instances at
all. (A less brutal solution would be to generate them with no
bindings.) This will become moot when we shift to the new TH plan, so
the brutal solution will do.
-}
tcLocalInstDecl :: LInstDecl Name
-> TcM ([InstInfo Name], [FamInst])
-- A source-file instance declaration
-- Type-check all the stuff before the "where"
--
-- We check for a respectable instance type and context
tcLocalInstDecl (L loc (TyFamInstD { tfid_inst = decl }))
= do { fam_inst <- tcTyFamInstDecl Nothing (L loc decl)
; return ([], [fam_inst]) }
tcLocalInstDecl (L loc (DataFamInstD { dfid_inst = decl }))
= do { fam_inst <- tcDataFamInstDecl Nothing (L loc decl)
; return ([], [fam_inst]) }
tcLocalInstDecl (L loc (ClsInstD { cid_inst = decl }))
= do { (insts, fam_insts) <- tcClsInstDecl (L loc decl)
; return (insts, fam_insts) }
tcClsInstDecl :: LClsInstDecl Name -> TcM ([InstInfo Name], [FamInst])
tcClsInstDecl (L loc (ClsInstDecl { cid_poly_ty = poly_ty, cid_binds = binds
, cid_sigs = uprags, cid_tyfam_insts = ats
, cid_overlap_mode = overlap_mode
, cid_datafam_insts = adts }))
= setSrcSpan loc $
addErrCtxt (instDeclCtxt1 poly_ty) $
do { is_boot <- tcIsHsBootOrSig
; checkTc (not is_boot || (isEmptyLHsBinds binds && null uprags))
badBootDeclErr
; (tyvars, theta, clas, inst_tys) <- tcHsInstHead InstDeclCtxt poly_ty
; let mini_env = mkVarEnv (classTyVars clas `zip` inst_tys)
mini_subst = mkTvSubst (mkInScopeSet (mkVarSet tyvars)) mini_env
mb_info = Just (clas, mini_env)
-- Next, process any associated types.
; traceTc "tcLocalInstDecl" (ppr poly_ty)
; tyfam_insts0 <- tcExtendTyVarEnv tyvars $
mapAndRecoverM (tcTyFamInstDecl mb_info) ats
; datafam_insts <- tcExtendTyVarEnv tyvars $
mapAndRecoverM (tcDataFamInstDecl mb_info) adts
-- Check for missing associated types and build them
-- from their defaults (if available)
; let defined_ats = mkNameSet (map (tyFamInstDeclName . unLoc) ats)
`unionNameSet`
mkNameSet (map (unLoc . dfid_tycon . unLoc) adts)
; tyfam_insts1 <- mapM (tcATDefault mini_subst defined_ats)
(classATItems clas)
-- Finally, construct the Core representation of the instance.
-- (This no longer includes the associated types.)
; dfun_name <- newDFunName clas inst_tys (getLoc poly_ty)
-- Dfun location is that of instance *header*
; ispec <- newClsInst (fmap unLoc overlap_mode) dfun_name tyvars theta
clas inst_tys
; let inst_info = InstInfo { iSpec = ispec
, iBinds = InstBindings
{ ib_binds = binds
, ib_tyvars = map Var.varName tyvars -- Scope over bindings
, ib_pragmas = uprags
, ib_extensions = []
, ib_derived = False } }
; return ( [inst_info], tyfam_insts0 ++ concat tyfam_insts1 ++ datafam_insts) }
tcATDefault :: TvSubst -> NameSet -> ClassATItem -> TcM [FamInst]
-- ^ Construct default instances for any associated types that
-- aren't given a user definition
-- Returns [] or singleton
tcATDefault inst_subst defined_ats (ATI fam_tc defs)
-- User supplied instances ==> everything is OK
| tyConName fam_tc `elemNameSet` defined_ats
= return []
  -- No user instance, have defaults ==> instantiate them
-- Example: class C a where { type F a b :: *; type F a b = () }
-- instance C [x]
-- Then we want to generate the decl: type F [x] b = ()
| Just (rhs_ty, _loc) <- defs
= do { let (subst', pat_tys') = mapAccumL subst_tv inst_subst
(tyConTyVars fam_tc)
rhs' = substTy subst' rhs_ty
tv_set' = tyVarsOfTypes pat_tys'
tvs' = varSetElemsKvsFirst tv_set'
; rep_tc_name <- newFamInstTyConName (noLoc (tyConName fam_tc)) pat_tys'
; let axiom = mkSingleCoAxiom rep_tc_name tvs' fam_tc pat_tys' rhs'
; traceTc "mk_deflt_at_instance" (vcat [ ppr fam_tc, ppr rhs_ty
, pprCoAxiom axiom ])
; fam_inst <- ASSERT( tyVarsOfType rhs' `subVarSet` tv_set' )
newFamInst SynFamilyInst axiom
; return [fam_inst] }
-- No defaults ==> generate a warning
| otherwise -- defs = Nothing
= do { warnMissingMethodOrAT "associated type" (tyConName fam_tc)
; return [] }
where
subst_tv subst tc_tv
| Just ty <- lookupVarEnv (getTvSubstEnv subst) tc_tv
= (subst, ty)
| otherwise
= (extendTvSubst subst tc_tv ty', ty')
where
ty' = mkTyVarTy (updateTyVarKind (substTy subst) tc_tv)
{-
************************************************************************
* *
Type checking family instances
* *
************************************************************************
Family instances are somewhat of a hybrid. They are processed together with
class instance heads, but can contain data constructors and hence they share a
lot of kinding and type checking code with ordinary algebraic data types (and
GADTs).
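As a schematic example (these declarations are illustrative and not part of
this module), the two kinds of instance handled below are
    type family   F a
    type instance F Int = Bool          -- handled by tcTyFamInstDecl
and
    data family   D a
    data instance D Int = MkDInt Char   -- handled by tcDataFamInstDecl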
-}
tcFamInstDeclCombined :: Maybe (Class, VarEnv Type) -- the class & mini_env if applicable
-> Located Name -> TcM TyCon
tcFamInstDeclCombined mb_clsinfo fam_tc_lname
= do { -- Type family instances require -XTypeFamilies
-- and can't (currently) be in an hs-boot file
; traceTc "tcFamInstDecl" (ppr fam_tc_lname)
; type_families <- xoptM Opt_TypeFamilies
; is_boot <- tcIsHsBootOrSig -- Are we compiling an hs-boot file?
; checkTc type_families $ badFamInstDecl fam_tc_lname
; checkTc (not is_boot) $ badBootFamInstDeclErr
       -- Look up the family TyCon and check for validity, including
       -- a check that top-level type instances are not for associated types.
; fam_tc <- tcLookupLocatedTyCon fam_tc_lname
; when (isNothing mb_clsinfo && -- Not in a class decl
isTyConAssoc fam_tc) -- but an associated type
(addErr $ assocInClassErr fam_tc_lname)
; return fam_tc }
tcTyFamInstDecl :: Maybe (Class, VarEnv Type) -- the class & mini_env if applicable
-> LTyFamInstDecl Name -> TcM FamInst
-- "type instance"
tcTyFamInstDecl mb_clsinfo (L loc decl@(TyFamInstDecl { tfid_eqn = eqn }))
= setSrcSpan loc $
tcAddTyFamInstCtxt decl $
do { let fam_lname = tfe_tycon (unLoc eqn)
; fam_tc <- tcFamInstDeclCombined mb_clsinfo fam_lname
-- (0) Check it's an open type family
; checkTc (isFamilyTyCon fam_tc) (notFamily fam_tc)
; checkTc (isTypeFamilyTyCon fam_tc) (wrongKindOfFamily fam_tc)
; checkTc (isOpenTypeFamilyTyCon fam_tc) (notOpenFamily fam_tc)
-- (1) do the work of verifying the synonym group
; co_ax_branch <- tcTyFamInstEqn (famTyConShape fam_tc) eqn
-- (2) check for validity
; checkValidTyFamInst mb_clsinfo fam_tc co_ax_branch
-- (3) construct coercion axiom
; rep_tc_name <- newFamInstAxiomName loc (unLoc fam_lname)
[co_ax_branch]
; let axiom = mkUnbranchedCoAxiom rep_tc_name fam_tc co_ax_branch
; newFamInst SynFamilyInst axiom }
tcDataFamInstDecl :: Maybe (Class, VarEnv Type)
-> LDataFamInstDecl Name -> TcM FamInst
-- "newtype instance" and "data instance"
tcDataFamInstDecl mb_clsinfo
(L loc decl@(DataFamInstDecl
{ dfid_pats = pats
, dfid_tycon = fam_tc_name
, dfid_defn = defn@HsDataDefn { dd_ND = new_or_data, dd_cType = cType
, dd_ctxt = ctxt, dd_cons = cons } }))
= setSrcSpan loc $
tcAddDataFamInstCtxt decl $
do { fam_tc <- tcFamInstDeclCombined mb_clsinfo fam_tc_name
-- Check that the family declaration is for the right kind
; checkTc (isFamilyTyCon fam_tc) (notFamily fam_tc)
; checkTc (isAlgTyCon fam_tc) (wrongKindOfFamily fam_tc)
-- Kind check type patterns
; tcFamTyPats (famTyConShape fam_tc) pats
(kcDataDefn defn) $
\tvs' pats' res_kind -> do
{ -- Check that left-hand side contains no type family applications
-- (vanilla synonyms are fine, though, and we checked for
-- foralls earlier)
checkValidFamPats fam_tc tvs' pats'
-- Check that type patterns match class instance head, if any
; checkConsistentFamInst mb_clsinfo fam_tc tvs' pats'
-- Result kind must be '*' (otherwise, we have too few patterns)
; checkTc (isLiftedTypeKind res_kind) $ tooFewParmsErr (tyConArity fam_tc)
; stupid_theta <- tcHsContext ctxt
; gadt_syntax <- dataDeclChecks (tyConName fam_tc) new_or_data stupid_theta cons
-- Construct representation tycon
; rep_tc_name <- newFamInstTyConName fam_tc_name pats'
; axiom_name <- newImplicitBinder rep_tc_name mkInstTyCoOcc
; let orig_res_ty = mkTyConApp fam_tc pats'
; (rep_tc, fam_inst) <- fixM $ \ ~(rec_rep_tc, _) ->
do { data_cons <- tcConDecls new_or_data rec_rep_tc
(tvs', orig_res_ty) cons
; tc_rhs <- case new_or_data of
DataType -> return (mkDataTyConRhs data_cons)
NewType -> ASSERT( not (null data_cons) )
mkNewTyConRhs rep_tc_name rec_rep_tc (head data_cons)
-- freshen tyvars
; let (eta_tvs, eta_pats) = eta_reduce tvs' pats'
axiom = mkSingleCoAxiom axiom_name eta_tvs fam_tc eta_pats
(mkTyConApp rep_tc (mkTyVarTys eta_tvs))
parent = FamInstTyCon axiom fam_tc pats'
roles = map (const Nominal) tvs'
rep_tc = buildAlgTyCon rep_tc_name tvs' roles
(fmap unLoc cType) stupid_theta
tc_rhs
Recursive
                                         False      -- Not promotable to the kind level
gadt_syntax parent
-- We always assume that indexed types are recursive. Why?
-- (1) Due to their open nature, we can never be sure that a
-- further instance might not introduce a new recursive
-- dependency. (2) They are always valid loop breakers as
-- they involve a coercion.
; fam_inst <- newFamInst (DataFamilyInst rep_tc) axiom
; return (rep_tc, fam_inst) }
-- Remember to check validity; no recursion to worry about here
; checkValidTyCon rep_tc
; return fam_inst } }
where
-- See Note [Eta reduction for data family axioms]
-- [a,b,c,d].T [a] c Int c d ==> [a,b,c]. T [a] c Int c
eta_reduce tvs pats = go (reverse tvs) (reverse pats)
go (tv:tvs) (pat:pats)
| Just tv' <- getTyVar_maybe pat
, tv == tv'
, not (tv `elemVarSet` tyVarsOfTypes pats)
= go tvs pats
go tvs pats = (reverse tvs, reverse pats)
{-
Note [Eta reduction for data family axioms]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
data family T a b :: *
newtype instance T Int a = MkT (IO a) deriving( Monad )
We'd like this to work. From the 'newtype instance' you might
think we'd get:
newtype TInt a = MkT (IO a)
axiom ax1 a :: T Int a ~ TInt a -- The type-instance part
axiom ax2 a :: TInt a ~ IO a -- The newtype part
But now what can we do? We have this problem
Given: d :: Monad IO
Wanted: d' :: Monad (T Int) = d |> ????
What coercion can we use for the ???
Solution: eta-reduce both axioms, thus:
axiom ax1 :: T Int ~ TInt
axiom ax2 :: TInt ~ IO
Now
d' = d |> Monad (sym (ax2 ; ax1))
This eta reduction happens both for data instances and newtype instances.
See Note [Newtype eta] in TyCon.
************************************************************************
* *
Type-checking instance declarations, pass 2
* *
************************************************************************
-}
tcInstDecls2 :: [LTyClDecl Name] -> [InstInfo Name]
-> TcM (LHsBinds Id)
-- (a) From each class declaration,
-- generate any default-method bindings
-- (b) From each instance decl
-- generate the dfun binding
tcInstDecls2 tycl_decls inst_decls
= do { -- (a) Default methods from class decls
let class_decls = filter (isClassDecl . unLoc) tycl_decls
; dm_binds_s <- mapM tcClassDecl2 class_decls
; let dm_binds = unionManyBags dm_binds_s
-- (b) instance declarations
; let dm_ids = collectHsBindsBinders dm_binds
-- Add the default method Ids (again)
-- See Note [Default methods and instances]
; inst_binds_s <- tcExtendLetEnv TopLevel TopLevel dm_ids $
mapM tcInstDecl2 inst_decls
-- Done
; return (dm_binds `unionBags` unionManyBags inst_binds_s) }
{-
See Note [Default methods and instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The default method Ids are already in the type environment (see Note
[Default method Ids and Template Haskell] in TcTyClsDcls), BUT they
don't have their InlinePragmas yet. Usually that would not matter,
because the simplifier propagates information from binding site to
use. But, unusually, when compiling instance decls we *copy* the
INLINE pragma from the default method to the method for that
particular operation (see Note [INLINE and default methods] below).
So right here in tcInstDecls2 we must re-extend the type envt with
the default method Ids replete with their INLINE pragmas. Urk.
-}
tcInstDecl2 :: InstInfo Name -> TcM (LHsBinds Id)
-- Returns a binding for the dfun
tcInstDecl2 (InstInfo { iSpec = ispec, iBinds = ibinds })
= recoverM (return emptyLHsBinds) $
setSrcSpan loc $
addErrCtxt (instDeclCtxt2 (idType dfun_id)) $
do { -- Instantiate the instance decl with skolem constants
; (inst_tyvars, dfun_theta, inst_head) <- tcSkolDFunType (idType dfun_id)
-- We instantiate the dfun_id with superSkolems.
-- See Note [Subtle interaction of recursion and overlap]
-- and Note [Binding when looking up instances]
; let (clas, inst_tys) = tcSplitDFunHead inst_head
(class_tyvars, sc_theta, _, op_items) = classBigSig clas
sc_theta' = substTheta (zipOpenTvSubst class_tyvars inst_tys) sc_theta
; dfun_ev_vars <- newEvVars dfun_theta
; sc_ev_vars <- tcSuperClasses dfun_id inst_tyvars dfun_ev_vars sc_theta'
-- Deal with 'SPECIALISE instance' pragmas
-- See Note [SPECIALISE instance pragmas]
; spec_inst_info@(spec_inst_prags,_) <- tcSpecInstPrags dfun_id ibinds
-- Typecheck the methods
; (meth_ids, meth_binds)
<- tcInstanceMethods dfun_id clas inst_tyvars dfun_ev_vars
inst_tys spec_inst_info
op_items ibinds
-- Create the result bindings
; self_dict <- newDict clas inst_tys
; let class_tc = classTyCon clas
[dict_constr] = tyConDataCons class_tc
dict_bind = mkVarBind self_dict (L loc con_app_args)
-- We don't produce a binding for the dict_constr; instead we
-- rely on the simplifier to unfold this saturated application
-- We do this rather than generate an HsCon directly, because
-- it means that the special cases (e.g. dictionary with only one
-- member) are dealt with by the common MkId.mkDataConWrapId
-- code rather than needing to be repeated here.
-- con_app_tys = MkD ty1 ty2
-- con_app_scs = MkD ty1 ty2 sc1 sc2
-- con_app_args = MkD ty1 ty2 sc1 sc2 op1 op2
con_app_tys = wrapId (mkWpTyApps inst_tys)
(dataConWrapId dict_constr)
con_app_scs = mkHsWrap (mkWpEvApps (map EvId sc_ev_vars)) con_app_tys
con_app_args = foldl app_to_meth con_app_scs meth_ids
app_to_meth :: HsExpr Id -> Id -> HsExpr Id
app_to_meth fun meth_id = L loc fun `HsApp` L loc (wrapId arg_wrapper meth_id)
inst_tv_tys = mkTyVarTys inst_tyvars
arg_wrapper = mkWpEvVarApps dfun_ev_vars <.> mkWpTyApps inst_tv_tys
             -- Do not inline the dfun; instead give it a magic DFunUnfolding
dfun_spec_prags
| isNewTyCon class_tc = SpecPrags []
-- Newtype dfuns just inline unconditionally,
-- so don't attempt to specialise them
| otherwise
= SpecPrags spec_inst_prags
export = ABE { abe_wrap = idHsWrapper, abe_poly = dfun_id
, abe_mono = self_dict, abe_prags = dfun_spec_prags }
-- NB: see Note [SPECIALISE instance pragmas]
main_bind = AbsBinds { abs_tvs = inst_tyvars
, abs_ev_vars = dfun_ev_vars
, abs_exports = [export]
, abs_ev_binds = emptyTcEvBinds
, abs_binds = unitBag dict_bind }
; return (unitBag (L loc main_bind) `unionBags`
listToBag meth_binds)
}
where
dfun_id = instanceDFunId ispec
loc = getSrcSpan dfun_id
------------------------------
tcSuperClasses :: DFunId -> [TcTyVar] -> [EvVar] -> TcThetaType
-> TcM [EvVar]
-- See Note [Silent superclass arguments]
tcSuperClasses dfun_id inst_tyvars dfun_ev_vars sc_theta
| null inst_tyvars && null dfun_ev_vars
= emitWanteds ScOrigin sc_theta
| otherwise
= do { -- Check that all superclasses can be deduced from
-- the originally-specified dfun arguments
; _ <- checkConstraints InstSkol inst_tyvars orig_ev_vars $
emitWanteds ScOrigin sc_theta
; return (map (find dfun_ev_vars) sc_theta) }
where
n_silent = dfunNSilent dfun_id
orig_ev_vars = drop n_silent dfun_ev_vars
find [] pred
= pprPanic "tcInstDecl2" (ppr dfun_id $$ ppr (idType dfun_id) $$ ppr pred)
find (ev:evs) pred
| pred `eqPred` evVarPred ev = ev
| otherwise = find evs pred
----------------------
mkMethIds :: HsSigFun -> Class -> [TcTyVar] -> [EvVar]
-> [TcType] -> Id -> TcM (TcId, TcSigInfo, HsWrapper)
mkMethIds sig_fn clas tyvars dfun_ev_vars inst_tys sel_id
= do { poly_meth_name <- newName (mkClassOpAuxOcc sel_occ)
; local_meth_name <- newName sel_occ
-- Base the local_meth_name on the selector name, because
-- type errors from tcInstanceMethodBody come from here
; let poly_meth_id = mkLocalId poly_meth_name poly_meth_ty
local_meth_id = mkLocalId local_meth_name local_meth_ty
; case lookupHsSig sig_fn sel_name of
Just lhs_ty -- There is a signature in the instance declaration
-- See Note [Instance method signatures]
-> setSrcSpan (getLoc lhs_ty) $
do { inst_sigs <- xoptM Opt_InstanceSigs
; checkTc inst_sigs (misplacedInstSig sel_name lhs_ty)
; sig_ty <- tcHsSigType (FunSigCtxt sel_name) lhs_ty
; let poly_sig_ty = mkSigmaTy tyvars theta sig_ty
; tc_sig <- instTcTySig lhs_ty sig_ty Nothing [] local_meth_name
; hs_wrap <- addErrCtxtM (methSigCtxt sel_name poly_sig_ty poly_meth_ty) $
tcSubType (FunSigCtxt sel_name) poly_sig_ty poly_meth_ty
; return (poly_meth_id, tc_sig, hs_wrap) }
Nothing -- No type signature
-> do { tc_sig <- instTcTySigFromId local_meth_id
; return (poly_meth_id, tc_sig, idHsWrapper) } }
-- Absent a type sig, there are no new scoped type variables here
-- Only the ones from the instance decl itself, which are already
-- in scope. Example:
-- class C a where { op :: forall b. Eq b => ... }
-- instance C [c] where { op = <rhs> }
   -- In <rhs>, 'c' is in scope but 'b' is not!
where
sel_name = idName sel_id
sel_occ = nameOccName sel_name
local_meth_ty = instantiateMethod clas sel_id inst_tys
poly_meth_ty = mkSigmaTy tyvars theta local_meth_ty
theta = map idType dfun_ev_vars
methSigCtxt :: Name -> TcType -> TcType -> TidyEnv -> TcM (TidyEnv, MsgDoc)
methSigCtxt sel_name sig_ty meth_ty env0
= do { (env1, sig_ty) <- zonkTidyTcType env0 sig_ty
; (env2, meth_ty) <- zonkTidyTcType env1 meth_ty
; let msg = hang (ptext (sLit "When checking that instance signature for") <+> quotes (ppr sel_name))
2 (vcat [ ptext (sLit "is more general than its signature in the class")
, ptext (sLit "Instance sig:") <+> ppr sig_ty
, ptext (sLit " Class sig:") <+> ppr meth_ty ])
; return (env2, msg) }
misplacedInstSig :: Name -> LHsType Name -> SDoc
misplacedInstSig name hs_ty
= vcat [ hang (ptext (sLit "Illegal type signature in instance declaration:"))
2 (hang (pprPrefixName name)
2 (dcolon <+> ppr hs_ty))
, ptext (sLit "(Use InstanceSigs to allow this)") ]
------------------------------
tcSpecInstPrags :: DFunId -> InstBindings Name
-> TcM ([Located TcSpecPrag], PragFun)
tcSpecInstPrags dfun_id (InstBindings { ib_binds = binds, ib_pragmas = uprags })
= do { spec_inst_prags <- mapM (wrapLocM (tcSpecInst dfun_id)) $
filter isSpecInstLSig uprags
-- The filter removes the pragmas for methods
; return (spec_inst_prags, mkPragFun uprags binds) }
{-
Note [Instance method signatures]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
With -XInstanceSigs we allow the user to supply a signature for the
method in an instance declaration. Here is an artificial example:
data Age = MkAge Int
instance Ord Age where
         compare :: a -> a -> Ordering
compare = error "You can't compare Ages"
We achieve this by building a TcSigInfo for the method, whether or not
there is an instance method signature, and using that to typecheck
the declaration (in tcInstanceMethodBody). That means, conveniently,
that the type variables bound in the signature will scope over the body.
What about the check that the instance method signature is more
polymorphic than the instantiated class method type? We just do a
tcSubType call in mkMethIds, and use the HsWrapper thus generated in
the method AbsBinds. It's very like the tcSubType impedance-matching
call in mkExport. We have to pass the HsWrapper into
tcInstanceMethodBody.
Note [Silent superclass arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See Trac #3731, #4809, #5751, #5913, #6117, which all
describe somewhat more complicated situations, but ones
encountered in practice.
THE PROBLEM
The problem is that it is all too easy to create a class whose
superclass is bottom when it should not be.
Consider the following (extreme) situation:
class C a => D a where ...
instance D [a] => D [a] where ... (dfunD)
instance C [a] => C [a] where ... (dfunC)
Although this looks wrong (assume D [a] to prove D [a]), it is only a
more extreme case of what happens with recursive dictionaries, and it
can, just about, make sense because the methods do some work before
recursing.
To implement the dfunD we must generate code for the superclass C [a],
which we had better not get by superclass selection from the supplied
argument:
dfunD :: forall a. D [a] -> D [a]
dfunD = \d::D [a] -> MkD (scsel d) ..
Otherwise if we later encounter a situation where
we have a [Wanted] dw::D [a] we might solve it thus:
dw := dfunD dw
Which is all fine except that now ** the superclass C is bottom **!
The instance we want is:
dfunD :: forall a. D [a] -> D [a]
dfunD = \d::D [a] -> MkD (dfunC (scsel d)) ...
THE SOLUTION
Our solution to this problem "silent superclass arguments". We pass
to each dfun some "silent superclass arguments", which are the
immediate superclasses of the dictionary we are trying to
construct. In our example:
dfun :: forall a. C [a] -> D [a] -> D [a]
  dfun = \(dc::C [a]) (dd::D [a]) -> MkD dc ...
Notice the extra (dc :: C [a]) argument compared to the previous version.
This gives us:
-----------------------------------------------------------
DFun Superclass Invariant
~~~~~~~~~~~~~~~~~~~~~~~~
In the body of a DFun, every superclass argument to the
returned dictionary is
either * one of the arguments of the DFun,
or * constant, bound at top level
-----------------------------------------------------------
The net effect is that it is safe to treat a dfun application as
wrapping a dictionary constructor around its arguments (in particular,
a dfun never picks superclasses from the arguments under the
dictionary constructor). No superclass is hidden inside a dfun
application.
The extra arguments required to satisfy the DFun Superclass Invariant
always come first, and are called the "silent" arguments. You can
find out how many silent arguments there are using Id.dfunNSilent;
and then you can just drop that number of arguments to see the ones
that were in the original instance declaration.
DFun types are built (only) by MkId.mkDictFunId, so that is where we
decide what silent arguments are to be added.
In our example, if we had [Wanted] dw :: D [a] we would get via the instance:
dw := dfun d1 d2
[Wanted] (d1 :: C [a])
[Wanted] (d2 :: D [a])
And now, though we *can* solve:
d2 := dw
That's fine; and we solve d1:C[a] separately.
Test case SCLoop tests this fix.
Note [SPECIALISE instance pragmas]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
instance (Ix a, Ix b) => Ix (a,b) where
{-# SPECIALISE instance Ix (Int,Int) #-}
range (x,y) = ...
We make a specialised version of the dictionary function, AND
specialised versions of each *method*. Thus we should generate
something like this:
$dfIxPair :: (Ix a, Ix b) => Ix (a,b)
{-# DFUN [$crangePair, ...] #-}
{-# SPECIALISE $dfIxPair :: Ix (Int,Int) #-}
$dfIxPair da db = Ix ($crangePair da db) (...other methods...)
   $crangePair :: (Ix a, Ix b) -> ((a,b),(a,b)) -> [(a,b)]
   {-# SPECIALISE $crangePair :: ((Int,Int),(Int,Int)) -> [(Int,Int)] #-}
   $crangePair da db = <blah>
The SPECIALISE pragmas are acted upon by the desugarer, which generate
dii :: Ix Int
dii = ...
   $s$dfIxPair :: Ix (Int,Int)
   {-# DFUN [$crangePair dii dii, ...] #-}
   $s$dfIxPair = Ix ($crangePair dii dii) (...)
{-# RULE forall (d1,d2:Ix Int). $dfIxPair Int Int d1 d2 = $s$dfIxPair #-}
$s$crangePair :: ((Int,Int),(Int,Int)) -> [(Int,Int)]
   $s$crangePair = ...specialised RHS of $crangePair...
{-# RULE forall (d1,d2:Ix Int). $crangePair Int Int d1 d2 = $s$crangePair #-}
Note that
* The specialised dictionary $s$dfIxPair is very much needed, in case we
call a function that takes a dictionary, but in a context where the
specialised dictionary can be used. See Trac #7797.
* The ClassOp rule for 'range' works equally well on $s$dfIxPair, because
it still has a DFunUnfolding. See Note [ClassOp/DFun selection]
* A call (range ($dfIxPair Int Int d1 d2)) might simplify two ways:
--> {ClassOp rule for range} $crangePair Int Int d1 d2
--> {SPEC rule for $crangePair} $s$crangePair
or thus:
--> {SPEC rule for $dfIxPair} range $s$dfIxPair
--> {ClassOpRule for range} $s$crangePair
It doesn't matter which way.
* We want to specialise the RHS of both $dfIxPair and $crangePair,
but the SAME HsWrapper will do for both! We can call tcSpecPrag
just once, and pass the result (in spec_inst_info) to tcInstanceMethods.
-}
tcSpecInst :: Id -> Sig Name -> TcM TcSpecPrag
tcSpecInst dfun_id prag@(SpecInstSig _src hs_ty)
= addErrCtxt (spec_ctxt prag) $
do { (tyvars, theta, clas, tys) <- tcHsInstHead SpecInstCtxt hs_ty
; let (_, spec_dfun_ty) = mkDictFunTy tyvars theta clas tys
; co_fn <- tcSubType SpecInstCtxt (idType dfun_id) spec_dfun_ty
; return (SpecPrag dfun_id co_fn defaultInlinePragma) }
where
spec_ctxt prag = hang (ptext (sLit "In the SPECIALISE pragma")) 2 (ppr prag)
tcSpecInst _ _ = panic "tcSpecInst"
{-
************************************************************************
* *
Type-checking an instance method
* *
************************************************************************
tcInstanceMethod
- Make the method bindings, as a [(NonRec, HsBinds)], one per method
- Remembering to use fresh Name (the instance method Name) as the binder
- Bring the instance method Ids into scope, for the benefit of tcInstSig
- Use sig_fn mapping instance method Name -> instance tyvars
- Ditto prag_fn
- Use tcValBinds to do the checking
-}
tcInstanceMethods :: DFunId -> Class -> [TcTyVar]
-> [EvVar]
-> [TcType]
-> ([Located TcSpecPrag], PragFun)
-> [(Id, DefMeth)]
-> InstBindings Name
-> TcM ([Id], [LHsBind Id])
-- The returned inst_meth_ids all have types starting
-- forall tvs. theta => ...
tcInstanceMethods dfun_id clas tyvars dfun_ev_vars inst_tys
(spec_inst_prags, prag_fn)
op_items (InstBindings { ib_binds = binds
, ib_tyvars = lexical_tvs
, ib_pragmas = sigs
, ib_extensions = exts
, ib_derived = is_derived })
= tcExtendTyVarEnv2 (lexical_tvs `zip` tyvars) $
-- The lexical_tvs scope over the 'where' part
do { traceTc "tcInstMeth" (ppr sigs $$ ppr binds)
; let hs_sig_fn = mkHsSigFun sigs
; checkMinimalDefinition
; set_exts exts $ mapAndUnzipM (tc_item hs_sig_fn) op_items }
where
set_exts :: [ExtensionFlag] -> TcM a -> TcM a
set_exts es thing = foldr setXOptM thing es
----------------------
tc_item :: HsSigFun -> (Id, DefMeth) -> TcM (Id, LHsBind Id)
tc_item sig_fn (sel_id, dm_info)
= case findMethodBind (idName sel_id) binds of
Just (user_bind, bndr_loc)
-> tc_body sig_fn sel_id user_bind bndr_loc
Nothing -> do { traceTc "tc_def" (ppr sel_id)
; tc_default sig_fn sel_id dm_info }
----------------------
tc_body :: HsSigFun -> Id -> LHsBind Name
-> SrcSpan -> TcM (TcId, LHsBind Id)
tc_body sig_fn sel_id rn_bind bndr_loc
= add_meth_ctxt sel_id rn_bind $
do { traceTc "tc_item" (ppr sel_id <+> ppr (idType sel_id))
; (meth_id, local_meth_sig, hs_wrap)
<- setSrcSpan bndr_loc $
mkMethIds sig_fn clas tyvars dfun_ev_vars
inst_tys sel_id
; let prags = prag_fn (idName sel_id)
; meth_id1 <- addInlinePrags meth_id prags
; spec_prags <- tcSpecPrags meth_id1 prags
; bind <- tcInstanceMethodBody InstSkol
tyvars dfun_ev_vars
meth_id1 local_meth_sig hs_wrap
(mk_meth_spec_prags meth_id1 spec_prags)
rn_bind
; return (meth_id1, bind) }
----------------------
tc_default :: HsSigFun -> Id -> DefMeth -> TcM (TcId, LHsBind Id)
tc_default sig_fn sel_id (GenDefMeth dm_name)
= do { meth_bind <- mkGenericDefMethBind clas inst_tys sel_id dm_name
; tc_body sig_fn sel_id meth_bind inst_loc }
tc_default sig_fn sel_id NoDefMeth -- No default method at all
= do { traceTc "tc_def: warn" (ppr sel_id)
; (meth_id, _, _) <- mkMethIds sig_fn clas tyvars dfun_ev_vars
inst_tys sel_id
; dflags <- getDynFlags
; return (meth_id,
mkVarBind meth_id $
mkLHsWrap lam_wrapper (error_rhs dflags)) }
where
error_rhs dflags = L inst_loc $ HsApp error_fun (error_msg dflags)
error_fun = L inst_loc $ wrapId (WpTyApp meth_tau) nO_METHOD_BINDING_ERROR_ID
error_msg dflags = L inst_loc (HsLit (HsStringPrim ""
(unsafeMkByteString (error_string dflags))))
meth_tau = funResultTy (applyTys (idType sel_id) inst_tys)
error_string dflags = showSDoc dflags (hcat [ppr inst_loc, text "|", ppr sel_id ])
lam_wrapper = mkWpTyLams tyvars <.> mkWpLams dfun_ev_vars
tc_default sig_fn sel_id (DefMeth dm_name) -- A polymorphic default method
= do { -- Build the typechecked version directly,
-- without calling typecheck_method;
-- see Note [Default methods in instances]
-- Generate /\as.\ds. let self = df as ds
-- in $dm inst_tys self
-- The 'let' is necessary only because HsSyn doesn't allow
-- you to apply a function to a dictionary *expression*.
; self_dict <- newDict clas inst_tys
; let self_ev_bind = EvBind self_dict
(EvDFunApp dfun_id (mkTyVarTys tyvars) (map EvId dfun_ev_vars))
; (meth_id, local_meth_sig, hs_wrap)
<- mkMethIds sig_fn clas tyvars dfun_ev_vars inst_tys sel_id
; dm_id <- tcLookupId dm_name
; let dm_inline_prag = idInlinePragma dm_id
rhs = HsWrap (mkWpEvVarApps [self_dict] <.> mkWpTyApps inst_tys) $
HsVar dm_id
local_meth_id = sig_id local_meth_sig
meth_bind = mkVarBind local_meth_id (L inst_loc rhs)
meth_id1 = meth_id `setInlinePragma` dm_inline_prag
-- Copy the inline pragma (if any) from the default
-- method to this version. Note [INLINE and default methods]
export = ABE { abe_wrap = hs_wrap, abe_poly = meth_id1
, abe_mono = local_meth_id
, abe_prags = mk_meth_spec_prags meth_id1 [] }
bind = AbsBinds { abs_tvs = tyvars, abs_ev_vars = dfun_ev_vars
, abs_exports = [export]
, abs_ev_binds = EvBinds (unitBag self_ev_bind)
, abs_binds = unitBag meth_bind }
-- Default methods in an instance declaration can't have their own
-- INLINE or SPECIALISE pragmas. It'd be possible to allow them, but
-- currently they are rejected with
-- "INLINE pragma lacks an accompanying binding"
; return (meth_id1, L inst_loc bind) }
----------------------
mk_meth_spec_prags :: Id -> [LTcSpecPrag] -> TcSpecPrags
-- Adapt the 'SPECIALISE instance' pragmas to work for this method Id
-- There are two sources:
-- * spec_prags_for_me: {-# SPECIALISE op :: <blah> #-}
-- * spec_prags_from_inst: derived from {-# SPECIALISE instance :: <blah> #-}
-- These ones have the dfun inside, but [perhaps surprisingly]
-- the correct wrapper.
mk_meth_spec_prags meth_id spec_prags_for_me
= SpecPrags (spec_prags_for_me ++ spec_prags_from_inst)
where
spec_prags_from_inst
| isInlinePragma (idInlinePragma meth_id)
= [] -- Do not inherit SPECIALISE from the instance if the
-- method is marked INLINE, because then it'll be inlined
-- and the specialisation would do nothing. (Indeed it'll provoke
             --     a warning from the desugarer.)
| otherwise
= [ L inst_loc (SpecPrag meth_id wrap inl)
| L inst_loc (SpecPrag _ wrap inl) <- spec_inst_prags]
inst_loc = getSrcSpan dfun_id
-- For instance decls that come from deriving clauses
-- we want to print out the full source code if there's an error
-- because otherwise the user won't see the code at all
add_meth_ctxt sel_id rn_bind thing
| is_derived = addLandmarkErrCtxt (derivBindCtxt sel_id clas inst_tys rn_bind) thing
| otherwise = thing
----------------------
-- check if one of the minimal complete definitions is satisfied
checkMinimalDefinition
= whenIsJust (isUnsatisfied methodExists (classMinimalDef clas)) $
warnUnsatisifiedMinimalDefinition
where
methodExists meth = isJust (findMethodBind meth binds)
mkGenericDefMethBind :: Class -> [Type] -> Id -> Name -> TcM (LHsBind Name)
mkGenericDefMethBind clas inst_tys sel_id dm_name
= -- A generic default method
-- If the method is defined generically, we only have to call the
-- dm_name.
do { dflags <- getDynFlags
; liftIO (dumpIfSet_dyn dflags Opt_D_dump_deriv "Filling in method body"
(vcat [ppr clas <+> ppr inst_tys,
nest 2 (ppr sel_id <+> equals <+> ppr rhs)]))
; return (noLoc $ mkTopFunBind Generated (noLoc (idName sel_id))
[mkSimpleMatch [] rhs]) }
where
rhs = nlHsVar dm_name
----------------------
wrapId :: HsWrapper -> id -> HsExpr id
wrapId wrapper id = mkHsWrap wrapper (HsVar id)
derivBindCtxt :: Id -> Class -> [Type ] -> LHsBind Name -> SDoc
derivBindCtxt sel_id clas tys _bind
= vcat [ ptext (sLit "When typechecking the code for ") <+> quotes (ppr sel_id)
, nest 2 (ptext (sLit "in a derived instance for")
<+> quotes (pprClassPred clas tys) <> colon)
, nest 2 $ ptext (sLit "To see the code I am typechecking, use -ddump-deriv") ]
warnMissingMethodOrAT :: String -> Name -> TcM ()
warnMissingMethodOrAT what name
= do { warn <- woptM Opt_WarnMissingMethods
; traceTc "warn" (ppr name <+> ppr warn <+> ppr (not (startsWithUnderscore (getOccName name))))
; warnTc (warn -- Warn only if -fwarn-missing-methods
&& not (startsWithUnderscore (getOccName name)))
-- Don't warn about _foo methods
(ptext (sLit "No explicit") <+> text what <+> ptext (sLit "or default declaration for")
<+> quotes (ppr name)) }
warnUnsatisifiedMinimalDefinition :: ClassMinimalDef -> TcM ()
warnUnsatisifiedMinimalDefinition mindef
= do { warn <- woptM Opt_WarnMissingMethods
; warnTc warn message
}
where
message = vcat [ptext (sLit "No explicit implementation for")
,nest 2 $ pprBooleanFormulaNice mindef
]
{-
Note [Export helper functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We arrange to export the "helper functions" of an instance declaration,
so that they are not subject to preInlineUnconditionally, even if their
RHS is trivial. Reason: they are mentioned in the DFunUnfolding of
the dict fun as Ids, not as CoreExprs, so we can't substitute a
non-variable for them.
We could change this by making DFunUnfoldings have CoreExprs, but it
seems a bit simpler this way.
Note [Default methods in instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
class Baz v x where
foo :: x -> x
foo y = <blah>
instance Baz Int Int
From the class decl we get
$dmfoo :: forall v x. Baz v x => x -> x
$dmfoo y = <blah>
Notice that the type is ambiguous. That's fine, though. The instance
decl generates
$dBazIntInt = MkBaz fooIntInt
fooIntInt = $dmfoo Int Int $dBazIntInt
BUT this does mean we must generate the dictionary translation of
fooIntInt directly, rather than generating source-code and
type-checking it. That was the bug in Trac #1061. In any case it's
less work to generate the translated version!
Note [INLINE and default methods]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Default methods need special treatment. They are supposed to behave rather like
macros. For example
class Foo a where
op1, op2 :: Bool -> a -> a
{-# INLINE op1 #-}
op1 b x = op2 (not b) x
instance Foo Int where
-- op1 via default method
op2 b x = <blah>
The instance declaration should behave
just as if 'op1' had been defined with the
code, and INLINE pragma, from its original
definition.
That is, just as if you'd written
instance Foo Int where
op2 b x = <blah>
{-# INLINE op1 #-}
op1 b x = op2 (not b) x
So for the above example we generate:
{-# INLINE $dmop1 #-}
-- $dmop1 has an InlineCompulsory unfolding
$dmop1 d b x = op2 d (not b) x
$fFooInt = MkD $cop1 $cop2
{-# INLINE $cop1 #-}
$cop1 = $dmop1 $fFooInt
$cop2 = <blah>
Note carefully:
* We *copy* any INLINE pragma from the default method $dmop1 to the
instance $cop1. Otherwise we'll just inline the former in the
latter and stop, which isn't what the user expected
* Regardless of its pragma, we give the default method an
unfolding with an InlineCompulsory source. That means
that it'll be inlined at every use site, notably in
each instance declaration, such as $cop1. This inlining
must happen even though
a) $dmop1 is not saturated in $cop1
b) $cop1 itself has an INLINE pragma
It's vital that $dmop1 *is* inlined in this way, to allow the mutual
recursion between $fooInt and $cop1 to be broken
* To communicate the need for an InlineCompulsory to the desugarer
(which makes the Unfoldings), we use the IsDefaultMethod constructor
in TcSpecPrags.
************************************************************************
* *
\subsection{Error messages}
* *
************************************************************************
-}
instDeclCtxt1 :: LHsType Name -> SDoc
instDeclCtxt1 hs_inst_ty
= inst_decl_ctxt (case unLoc hs_inst_ty of
HsForAllTy _ _ _ _ (L _ ty') -> ppr ty'
_ -> ppr hs_inst_ty) -- Don't expect this
instDeclCtxt2 :: Type -> SDoc
instDeclCtxt2 dfun_ty
= inst_decl_ctxt (ppr (mkClassPred cls tys))
where
(_,_,cls,tys) = tcSplitDFunTy dfun_ty
inst_decl_ctxt :: SDoc -> SDoc
inst_decl_ctxt doc = hang (ptext (sLit "In the instance declaration for"))
2 (quotes doc)
badBootFamInstDeclErr :: SDoc
badBootFamInstDeclErr
= ptext (sLit "Illegal family instance in hs-boot file")
notFamily :: TyCon -> SDoc
notFamily tycon
= vcat [ ptext (sLit "Illegal family instance for") <+> quotes (ppr tycon)
, nest 2 $ parens (ppr tycon <+> ptext (sLit "is not an indexed type family"))]
tooFewParmsErr :: Arity -> SDoc
tooFewParmsErr arity
= ptext (sLit "Family instance has too few parameters; expected") <+>
ppr arity
assocInClassErr :: Located Name -> SDoc
assocInClassErr name
= ptext (sLit "Associated type") <+> quotes (ppr name) <+>
ptext (sLit "must be inside a class instance")
badFamInstDecl :: Located Name -> SDoc
badFamInstDecl tc_name
= vcat [ ptext (sLit "Illegal family instance for") <+>
quotes (ppr tc_name)
, nest 2 (parens $ ptext (sLit "Use TypeFamilies to allow indexed type families")) ]
notOpenFamily :: TyCon -> SDoc
notOpenFamily tc
= ptext (sLit "Illegal instance for closed family") <+> quotes (ppr tc)
| pparkkin/eta | compiler/ETA/TypeCheck/TcInstDcls.hs | bsd-3-clause | 64,101 | 6 | 26 | 18,840 | 8,168 | 4,246 | 3,922 | 578 | 7 |
-- | This module defines 'TriggerEvent', which describes actions that may create
-- new 'Event's that can be triggered from 'IO'.
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE UndecidableInstances #-}
module Reflex.TriggerEvent.Class
( TriggerEvent (..)
) where
import Reflex.Class
import Control.Monad.Reader
import Control.Monad.State
import qualified Control.Monad.State.Strict as Strict
import Control.Monad.Trans.Maybe (MaybeT)
--TODO: Shouldn't have IO hard-coded
-- | 'TriggerEvent' represents actions that can create 'Event's that can be
-- triggered by 'IO' actions.
class Monad m => TriggerEvent t m | m -> t where
-- | Create a triggerable 'Event'. Whenever the resulting function is called,
-- the resulting 'Event' will fire at some point in the future. Note that
-- this may not be synchronous.
newTriggerEvent :: m (Event t a, a -> IO ())
-- | Like 'newTriggerEvent', but the callback itself takes another callback,
-- to be invoked once the requested 'Event' occurrence has finished firing.
-- This allows synchronous operation.
newTriggerEventWithOnComplete :: m (Event t a, a -> IO () -> IO ()) --TODO: This and newTriggerEvent should be unified somehow
-- | Like 'newTriggerEventWithOnComplete', but with setup and teardown. This
-- relatively complex type signature allows any external listeners to be
-- subscribed lazily and then removed whenever the returned 'Event' is no
-- longer being listened to. Note that the setup/teardown may happen multiple
-- times, and there is no guarantee that the teardown will be executed
-- promptly, or even at all, in the case of program termination.
newEventWithLazyTriggerWithOnComplete :: ((a -> IO () -> IO ()) -> IO (IO ())) -> m (Event t a)
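-- A brief usage sketch (illustrative only; the 'liftIO' call and the Int
-- payload are assumptions, not part of this module):
--
-- > do (e, fire) <- newTriggerEvent
-- >    liftIO $ fire (42 :: Int)  -- 'e' will fire with 42 at some later frame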
instance TriggerEvent t m => TriggerEvent t (ReaderT r m) where
newTriggerEvent = lift newTriggerEvent
newTriggerEventWithOnComplete = lift newTriggerEventWithOnComplete
newEventWithLazyTriggerWithOnComplete = lift . newEventWithLazyTriggerWithOnComplete
instance TriggerEvent t m => TriggerEvent t (StateT s m) where
newTriggerEvent = lift newTriggerEvent
newTriggerEventWithOnComplete = lift newTriggerEventWithOnComplete
newEventWithLazyTriggerWithOnComplete = lift . newEventWithLazyTriggerWithOnComplete
instance TriggerEvent t m => TriggerEvent t (Strict.StateT s m) where
newTriggerEvent = lift newTriggerEvent
newTriggerEventWithOnComplete = lift newTriggerEventWithOnComplete
newEventWithLazyTriggerWithOnComplete = lift . newEventWithLazyTriggerWithOnComplete
instance TriggerEvent t m => TriggerEvent t (MaybeT m) where
newTriggerEvent = lift newTriggerEvent
newTriggerEventWithOnComplete = lift newTriggerEventWithOnComplete
newEventWithLazyTriggerWithOnComplete = lift . newEventWithLazyTriggerWithOnComplete
| ryantrinkle/reflex | src/Reflex/TriggerEvent/Class.hs | bsd-3-clause | 2,839 | 0 | 14 | 431 | 432 | 236 | 196 | -1 | -1 |
module Dwarf.Types
( -- * Dwarf information
DwarfInfo(..)
, pprDwarfInfo
, pprAbbrevDecls
-- * Dwarf frame
, DwarfFrame(..), DwarfFrameProc(..), DwarfFrameBlock(..)
, pprDwarfFrame
-- * Utilities
, pprByte
, pprData4'
, pprDwWord
, pprWord
, pprLEBWord
, pprLEBInt
, wordAlign
, sectionOffset
)
where
import Debug
import CLabel
import CmmExpr ( GlobalReg(..) )
import Encoding
import FastString
import Outputable
import Platform
import Reg
import Dwarf.Constants
import Data.Bits
import Data.List ( mapAccumL )
import qualified Data.Map as Map
import Data.Word
import Data.Char
import CodeGen.Platform
-- | Individual dwarf records. Each one will be encoded as an entry in
-- the .debug_info section.
data DwarfInfo
= DwarfCompileUnit { dwChildren :: [DwarfInfo]
, dwName :: String
, dwProducer :: String
, dwCompDir :: String
, dwLineLabel :: LitString }
| DwarfSubprogram { dwChildren :: [DwarfInfo]
, dwName :: String
, dwLabel :: CLabel }
| DwarfBlock { dwChildren :: [DwarfInfo]
, dwLabel :: CLabel
, dwMarker :: CLabel }
-- | Abbreviation codes used for encoding above records in the
-- .debug_info section.
data DwarfAbbrev
= DwAbbrNull -- ^ Pseudo, used for marking the end of lists
| DwAbbrCompileUnit
| DwAbbrSubprogram
| DwAbbrBlock
deriving (Eq, Enum)
-- | Generate assembly for the given abbreviation code
pprAbbrev :: DwarfAbbrev -> SDoc
pprAbbrev = pprLEBWord . fromIntegral . fromEnum
-- | Abbreviation declaration. This explains the binary encoding we
-- use for representing @DwarfInfo@.
pprAbbrevDecls :: Bool -> SDoc
pprAbbrevDecls haveDebugLine =
let mkAbbrev abbr tag chld flds =
let fld (tag, form) = pprLEBWord tag $$ pprLEBWord form
in pprAbbrev abbr $$ pprLEBWord tag $$ pprByte chld $$
vcat (map fld flds) $$ pprByte 0 $$ pprByte 0
in dwarfAbbrevSection $$
ptext dwarfAbbrevLabel <> colon $$
mkAbbrev DwAbbrCompileUnit dW_TAG_compile_unit dW_CHILDREN_yes
([ (dW_AT_name, dW_FORM_string)
, (dW_AT_producer, dW_FORM_string)
, (dW_AT_language, dW_FORM_data4)
, (dW_AT_comp_dir, dW_FORM_string)
, (dW_AT_use_UTF8, dW_FORM_flag)
] ++
(if haveDebugLine
then [ (dW_AT_stmt_list, dW_FORM_data4) ]
else [])) $$
mkAbbrev DwAbbrSubprogram dW_TAG_subprogram dW_CHILDREN_yes
[ (dW_AT_name, dW_FORM_string)
, (dW_AT_MIPS_linkage_name, dW_FORM_string)
, (dW_AT_external, dW_FORM_flag)
, (dW_AT_low_pc, dW_FORM_addr)
, (dW_AT_high_pc, dW_FORM_addr)
, (dW_AT_frame_base, dW_FORM_block1)
] $$
mkAbbrev DwAbbrBlock dW_TAG_lexical_block dW_CHILDREN_yes
[ (dW_AT_name, dW_FORM_string)
, (dW_AT_low_pc, dW_FORM_addr)
, (dW_AT_high_pc, dW_FORM_addr)
] $$
pprByte 0
-- | Generate assembly for DWARF data
pprDwarfInfo :: Bool -> DwarfInfo -> SDoc
pprDwarfInfo haveSrc d
= pprDwarfInfoOpen haveSrc d $$
vcat (map (pprDwarfInfo haveSrc) (dwChildren d)) $$
pprDwarfInfoClose
-- | Prints assembler data corresponding to DWARF info records. Note
-- that the binary format of this is parameterized in @abbrevDecls@ and
-- has to be kept in sync.
pprDwarfInfoOpen :: Bool -> DwarfInfo -> SDoc
pprDwarfInfoOpen haveSrc (DwarfCompileUnit _ name producer compDir lineLbl) =
pprAbbrev DwAbbrCompileUnit
$$ pprString name
$$ pprString producer
$$ pprData4 dW_LANG_Haskell
$$ pprString compDir
$$ pprFlag True -- use UTF8
$$ if haveSrc
then sectionOffset lineLbl dwarfLineLabel
else empty
pprDwarfInfoOpen _ (DwarfSubprogram _ name label) = sdocWithDynFlags $ \df ->
pprAbbrev DwAbbrSubprogram
$$ pprString name
$$ pprString (renderWithStyle df (ppr label) (mkCodeStyle CStyle))
$$ pprFlag (externallyVisibleCLabel label)
$$ pprWord (ppr label)
$$ pprWord (ppr $ mkAsmTempEndLabel label)
$$ pprByte 1
$$ pprByte dW_OP_call_frame_cfa
pprDwarfInfoOpen _ (DwarfBlock _ label marker) = sdocWithDynFlags $ \df ->
pprAbbrev DwAbbrBlock
$$ pprString (renderWithStyle df (ppr label) (mkCodeStyle CStyle))
$$ pprWord (ppr marker)
$$ pprWord (ppr $ mkAsmTempEndLabel marker)
-- | Close a DWARF info record with children
pprDwarfInfoClose :: SDoc
pprDwarfInfoClose = pprAbbrev DwAbbrNull
-- | Information about unwind instructions for a procedure. This
-- corresponds to a "Common Information Entry" (CIE) in DWARF.
data DwarfFrame
= DwarfFrame
{ dwCieLabel :: CLabel
, dwCieInit :: UnwindTable
, dwCieProcs :: [DwarfFrameProc]
}
-- | Unwind instructions for an individual procedure. Corresponds to a
-- "Frame Description Entry" (FDE) in DWARF.
data DwarfFrameProc
= DwarfFrameProc
{ dwFdeProc :: CLabel
, dwFdeHasInfo :: Bool
, dwFdeBlocks :: [DwarfFrameBlock]
-- ^ List of blocks. Order must match asm!
}
-- | Unwind instructions for a block. Will become part of the
-- containing FDE.
data DwarfFrameBlock
= DwarfFrameBlock
{ dwFdeBlock :: CLabel
, dwFdeBlkHasInfo :: Bool
, dwFdeUnwind :: UnwindTable
}
-- | Header for the .debug_frame section. Here we emit the "Common
-- Information Entry" record that establishes general call frame
-- parameters and the default stack layout.
pprDwarfFrame :: DwarfFrame -> SDoc
pprDwarfFrame DwarfFrame{dwCieLabel=cieLabel,dwCieInit=cieInit,dwCieProcs=procs}
= sdocWithPlatform $ \plat ->
let cieStartLabel= mkAsmTempDerivedLabel cieLabel (fsLit "_start")
cieEndLabel = mkAsmTempEndLabel cieLabel
length = ppr cieEndLabel <> char '-' <> ppr cieStartLabel
spReg = dwarfGlobalRegNo plat Sp
retReg = dwarfReturnRegNo plat
wordSize = platformWordSize plat
pprInit (g, uw) = pprSetUnwind plat g (Nothing, uw)
in vcat [ ppr cieLabel <> colon
, pprData4' length -- Length of CIE
, ppr cieStartLabel <> colon
, pprData4' (ptext (sLit "-1"))
-- Common Information Entry marker (-1 = 0xf..f)
, pprByte 3 -- CIE version (we require DWARF 3)
, pprByte 0 -- Augmentation (none)
, pprByte 1 -- Code offset multiplicator
, pprByte (128-fromIntegral wordSize)
-- Data offset multiplicator
-- (stacks grow down => "-w" in signed LEB128)
, pprByte retReg -- virtual register holding return address
] $$
-- Initial unwind table
vcat (map pprInit $ Map.toList cieInit) $$
vcat [ -- RET = *CFA
pprByte (dW_CFA_offset+retReg)
, pprByte 0
-- Sp' = CFA
-- (we need to set this manually as our Sp register is
-- often not the architecture's default stack register)
, pprByte dW_CFA_val_offset
, pprLEBWord (fromIntegral spReg)
, pprLEBWord 0
] $$
wordAlign $$
ppr cieEndLabel <> colon $$
-- Procedure unwind tables
vcat (map (pprFrameProc cieLabel cieInit) procs)
-- | Writes a "Frame Description Entry" for a procedure. This consists
-- mainly of referencing the CIE and writing state machine
-- instructions to describe how the frame base (CFA) changes.
pprFrameProc :: CLabel -> UnwindTable -> DwarfFrameProc -> SDoc
pprFrameProc frameLbl initUw (DwarfFrameProc procLbl hasInfo blocks)
= let fdeLabel = mkAsmTempDerivedLabel procLbl (fsLit "_fde")
fdeEndLabel = mkAsmTempDerivedLabel procLbl (fsLit "_fde_end")
procEnd = mkAsmTempEndLabel procLbl
ifInfo str = if hasInfo then text str else empty
-- see [Note: Info Offset]
in vcat [ pprData4' (ppr fdeEndLabel <> char '-' <> ppr fdeLabel)
, ppr fdeLabel <> colon
, pprData4' (ppr frameLbl <> char '-' <>
ptext dwarfFrameLabel) -- Reference to CIE
, pprWord (ppr procLbl <> ifInfo "-1") -- Code pointer
, pprWord (ppr procEnd <> char '-' <>
ppr procLbl <> ifInfo "+1") -- Block byte length
] $$
vcat (snd $ mapAccumL pprFrameBlock initUw blocks) $$
wordAlign $$
ppr fdeEndLabel <> colon
-- | Generates unwind information for a block. We only generate
-- instructions where unwind information actually changes. This small
-- optimisation saves a lot of space, as subsequent blocks often have
-- the same unwind information.
pprFrameBlock :: UnwindTable -> DwarfFrameBlock -> (UnwindTable, SDoc)
pprFrameBlock oldUws (DwarfFrameBlock blockLbl hasInfo uws)
| uws == oldUws
= (oldUws, empty)
| otherwise
= (,) uws $ sdocWithPlatform $ \plat ->
let lbl = ppr blockLbl <> if hasInfo then text "-1" else empty
-- see [Note: Info Offset]
isChanged g v | old == Just v = Nothing
| otherwise = Just (old, v)
where old = Map.lookup g oldUws
changed = Map.toList $ Map.mapMaybeWithKey isChanged uws
died = Map.toList $ Map.difference oldUws uws
in pprByte dW_CFA_set_loc $$ pprWord lbl $$
vcat (map (uncurry $ pprSetUnwind plat) changed) $$
vcat (map (pprUndefUnwind plat . fst) died)
-- [Note: Info Offset]
--
-- GDB was pretty much written with C-like programs in mind, and as a
-- result it assumes that once you have a return address, it is a
-- good idea to look at (PC-1) to unwind further - as that's where the
-- "call" instruction is supposed to be.
--
-- Now on one hand, code generated by GHC looks nothing like what GDB
-- expects, and in fact going up from a return pointer is guaranteed
-- to land us inside an info table! On the other hand, that actually
-- gives us some wiggle room, as we expect IP to never *actually* end
-- up inside the info table, so we can "cheat" by putting whatever GDB
-- expects to see there. This is probably pretty safe, as GDB cannot
-- assume (PC-1) to be a valid code pointer in the first place - and I
-- have seen no code trying to correct this.
--
-- Note that this will not prevent GDB from failing to look-up the
-- correct function name for the frame, as that uses the symbol table,
-- which we can not manipulate as easily.
-- | Get DWARF register ID for a given GlobalReg
dwarfGlobalRegNo :: Platform -> GlobalReg -> Word8
dwarfGlobalRegNo p = maybe 0 (dwarfRegNo p . RegReal) . globalRegMaybe p
-- | Generate code for setting the unwind information for a register,
-- optimized using its known old value in the table. Note that "Sp" is
-- special: We see it as synonym for the CFA.
pprSetUnwind :: Platform -> GlobalReg -> (Maybe UnwindExpr, UnwindExpr) -> SDoc
pprSetUnwind _ Sp (Just (UwReg s _), UwReg s' o') | s == s'
= if o' >= 0
then pprByte dW_CFA_def_cfa_offset $$ pprLEBWord (fromIntegral o')
else pprByte dW_CFA_def_cfa_offset_sf $$ pprLEBInt o'
pprSetUnwind plat Sp (_, UwReg s' o')
= if o' >= 0
then pprByte dW_CFA_def_cfa $$
pprLEBWord (fromIntegral $ dwarfGlobalRegNo plat s') $$
pprLEBWord (fromIntegral o')
else pprByte dW_CFA_def_cfa_sf $$
pprLEBWord (fromIntegral $ dwarfGlobalRegNo plat s') $$
pprLEBInt o'
pprSetUnwind _ Sp (_, uw)
= pprByte dW_CFA_def_cfa_expression $$ pprUnwindExpr False uw
pprSetUnwind plat g (_, UwDeref (UwReg Sp o))
| o < 0 && ((-o) `mod` platformWordSize plat) == 0 -- expected case
= pprByte (dW_CFA_offset + dwarfGlobalRegNo plat g) $$
pprLEBWord (fromIntegral ((-o) `div` platformWordSize plat))
| otherwise
= pprByte dW_CFA_offset_extended_sf $$
pprLEBWord (fromIntegral (dwarfGlobalRegNo plat g)) $$
pprLEBInt o
pprSetUnwind plat g (_, UwDeref uw)
= pprByte dW_CFA_expression $$
pprLEBWord (fromIntegral (dwarfGlobalRegNo plat g)) $$
pprUnwindExpr True uw
pprSetUnwind plat g (_, uw)
= pprByte dW_CFA_val_expression $$
pprLEBWord (fromIntegral (dwarfGlobalRegNo plat g)) $$
pprUnwindExpr True uw
-- | Generates a DWARF expression for the given unwind expression. If
-- @spIsCFA@ is true, we see @Sp@ as the frame base CFA where it gets
-- mentioned.
pprUnwindExpr :: Bool -> UnwindExpr -> SDoc
pprUnwindExpr spIsCFA expr
= sdocWithPlatform $ \plat ->
let ppr (UwConst i)
| i >= 0 && i < 32 = pprByte (dW_OP_lit0 + fromIntegral i)
| otherwise = pprByte dW_OP_consts $$ pprLEBInt i -- lazy...
ppr (UwReg Sp i) | spIsCFA
= if i == 0
then pprByte dW_OP_call_frame_cfa
else ppr (UwPlus (UwReg Sp 0) (UwConst i))
ppr (UwReg g i) = pprByte (dW_OP_breg0+dwarfGlobalRegNo plat g) $$
pprLEBInt i
ppr (UwDeref u) = ppr u $$ pprByte dW_OP_deref
ppr (UwPlus u1 u2) = ppr u1 $$ ppr u2 $$ pprByte dW_OP_plus
ppr (UwMinus u1 u2) = ppr u1 $$ ppr u2 $$ pprByte dW_OP_minus
ppr (UwTimes u1 u2) = ppr u1 $$ ppr u2 $$ pprByte dW_OP_mul
in ptext (sLit "\t.byte 1f-.-1") $$
ppr expr $$
ptext (sLit "1:")
-- | Generate code for re-setting the unwind information for a
-- register to "undefined"
pprUndefUnwind :: Platform -> GlobalReg -> SDoc
pprUndefUnwind _ Sp = panic "pprUndefUnwind Sp" -- should never happen
pprUndefUnwind plat g = pprByte dW_CFA_undefined $$
pprLEBWord (fromIntegral $ dwarfGlobalRegNo plat g)
-- | Align assembly at (machine) word boundary
wordAlign :: SDoc
wordAlign = sdocWithPlatform $ \plat ->
ptext (sLit "\t.align ") <> case platformOS plat of
OSDarwin -> case platformWordSize plat of
8 -> text "3"
4 -> text "2"
_other -> error "wordAlign: Unsupported word size!"
_other -> ppr (platformWordSize plat)
-- | Assembly for a single byte of constant DWARF data
pprByte :: Word8 -> SDoc
pprByte x = ptext (sLit "\t.byte ") <> ppr (fromIntegral x :: Word)
-- | Assembly for a constant DWARF flag
pprFlag :: Bool -> SDoc
pprFlag f = pprByte (if f then 0xff else 0x00)
-- | Assembly for 4 bytes of dynamic DWARF data
pprData4' :: SDoc -> SDoc
pprData4' x = ptext (sLit "\t.long ") <> x
-- | Assembly for 4 bytes of constant DWARF data
pprData4 :: Word -> SDoc
pprData4 = pprData4' . ppr
-- | Assembly for a DWARF word of dynamic data. This means 32 bit, as
-- we are generating 32 bit DWARF.
pprDwWord :: SDoc -> SDoc
pprDwWord = pprData4'
-- | Assembly for a machine word of dynamic data. Depends on the
-- architecture we are currently generating code for.
pprWord :: SDoc -> SDoc
pprWord s = (<> s) . sdocWithPlatform $ \plat ->
case platformWordSize plat of
4 -> ptext (sLit "\t.long ")
8 -> ptext (sLit "\t.quad ")
n -> panic $ "pprWord: Unsupported target platform word length " ++
show n ++ "!"
-- | Prints a number in "little endian base 128" format. The idea is
-- to optimize for small numbers by stopping once all further bytes
-- would be 0. The highest bit in every byte signals whether there
-- are further bytes to read.
pprLEBWord :: Word -> SDoc
pprLEBWord x | x < 128 = pprByte (fromIntegral x)
| otherwise = pprByte (fromIntegral $ 128 .|. (x .&. 127)) $$
pprLEBWord (x `shiftR` 7)
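-- For example (illustrative): 300 (0x12C) is emitted as the two bytes
-- 0xAC and 0x02: the low 7 bits (0x2C) with the continuation bit set,
-- then the remaining bits (0x02) with the continuation bit clear.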
-- | Same as @pprLEBWord@, but for a signed number
pprLEBInt :: Int -> SDoc
pprLEBInt x | x >= -64 && x < 64
= pprByte (fromIntegral (x .&. 127))
| otherwise = pprByte (fromIntegral $ 128 .|. (x .&. 127)) $$
pprLEBInt (x `shiftR` 7)
-- | Generates a dynamic null-terminated string. If required the
-- caller needs to make sure that the string is escaped properly.
pprString' :: SDoc -> SDoc
pprString' str = ptext (sLit "\t.asciz \"") <> str <> char '"'
-- | Generate a string constant. We take care to escape the string.
pprString :: String -> SDoc
pprString str
= pprString' $ hcat $ map escapeChar $
if utf8EncodedLength str == length str
then str
else map (chr . fromIntegral) $ bytesFS $ mkFastString str
-- | Escape a single non-unicode character
escapeChar :: Char -> SDoc
escapeChar '\\' = ptext (sLit "\\\\")
escapeChar '\"' = ptext (sLit "\\\"")
escapeChar '\n' = ptext (sLit "\\n")
escapeChar c
| isAscii c && isPrint c && c /= '?' -- prevents trigraph warnings
= char c
| otherwise
= char '\\' <> char (intToDigit (ch `div` 64)) <>
char (intToDigit ((ch `div` 8) `mod` 8)) <>
char (intToDigit (ch `mod` 8))
where ch = ord c
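-- For example (illustrative): escapeChar '\t' produces the octal escape
-- \011, and escapeChar '?' produces \077 (see the trigraph note above).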
-- | Generate an offset into another section. This is tricky because
-- this is handled differently depending on platform: Mac Os expects
-- us to calculate the offset using assembler arithmetic. Linux expects
-- us to just reference the target directly, and will figure out on
-- their own that we actually need an offset. Finally, Windows has
-- a special directive to refer to relative offsets. Fun.
sectionOffset :: LitString -> LitString -> SDoc
sectionOffset target section = sdocWithPlatform $ \plat ->
case platformOS plat of
OSDarwin -> pprDwWord (ptext target <> char '-' <> ptext section)
OSMinGW32 -> text "\t.secrel32 " <> ptext target
_other -> pprDwWord (ptext target)
| urbanslug/ghc | compiler/nativeGen/Dwarf/Types.hs | bsd-3-clause | 17,462 | 0 | 21 | 4,499 | 3,905 | 2,043 | 1,862 | 308 | 8 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.Keymap.Vim.Ex.Commands.Quit
-- License : GPL-2
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Implements quit commands.
module Yi.Keymap.Vim.Ex.Commands.Quit (parse) where
import Control.Applicative (Alternative ((<|>)), (<$>))
import Control.Lens (use, uses)
import Control.Monad (void, when)
import Data.Foldable (find)
import qualified Data.List.PointedList.Circular as PL (length)
import Data.Monoid ((<>))
import qualified Data.Text as T (append)
import qualified Text.ParserCombinators.Parsec as P (char, choice, many, string, try)
import Yi.Buffer (bkey, file)
import Yi.Core (closeWindow, errorEditor, quitEditor)
import Yi.Editor
import Yi.File (deservesSave, fwriteAllY, viWrite)
import Yi.Keymap (Action (YiA), YiM, readEditor)
import Yi.Keymap.Vim.Common (EventString)
import qualified Yi.Keymap.Vim.Ex.Commands.Common as Common (impureExCommand, needsSaving, parse)
import Yi.Keymap.Vim.Ex.Types (ExCommand (cmdAction, cmdShow))
import Yi.Monad (gets)
import Yi.String (showT)
import Yi.Window (bufkey)
parse :: EventString -> Maybe ExCommand
parse = Common.parse $ P.choice
[ do
void $ P.try ( P.string "xit") <|> P.string "x"
bangs <- P.many (P.char '!')
return (quit True (not $ null bangs) False)
, do
ws <- P.many (P.char 'w')
void $ P.try ( P.string "quit") <|> P.string "q"
as <- P.many (P.try ( P.string "all") <|> P.string "a")
bangs <- P.many (P.char '!')
return $! quit (not $ null ws) (not $ null bangs) (not $ null as)
]
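-- Some accepted forms (illustrative): "x" or "xit" writes and closes the
-- window, "q!" closes it discarding changes, "wq" writes then closes, and
-- "qa" or "quitall" quits all windows (append '!' to force).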
quit :: Bool -> Bool -> Bool -> ExCommand
quit w f a = Common.impureExCommand {
cmdShow = (if w then "w" else "")
`T.append` "quit"
`T.append` (if a then "all" else "")
`T.append` (if f then "!" else "")
, cmdAction = YiA $ action w f a
}
action :: Bool -> Bool -> Bool -> YiM ()
action False False False = quitWindowE
action False False True = quitAllE
action True False False = viWrite >> closeWindow
action True False True = saveAndQuitAllE
action False True False = closeWindow
action False True True = quitEditor
action True True False = viWrite >> closeWindow
action True True True = saveAndQuitAllE
quitWindowE :: YiM ()
quitWindowE = do
nw <- gets currentBuffer >>= Common.needsSaving
ws <- withEditor $ use currentWindowA >>= windowsOnBufferE . bufkey
if length ws == 1 && nw
then errorEditor "No write since last change (add ! to override)"
else do
winCount <- withEditor $ uses windowsA PL.length
tabCount <- withEditor $ uses tabsA PL.length
if winCount == 1 && tabCount == 1
-- if its the last window, quitting will quit the editor
then quitAllE
else closeWindow
quitAllE :: YiM ()
quitAllE = do
let needsWindow b = (b,) <$> deservesSave b
bs <- readEditor bufferSet >>= mapM needsWindow
-- Vim only shows the first modified buffer in the error.
case find snd bs of
Nothing -> quitEditor
Just (b, _) -> do
bufferName <- withEditor $ withGivenBuffer (bkey b) $ gets file
errorEditor $ "No write since last change for buffer "
<> showT bufferName
<> " (add ! to override)"
saveAndQuitAllE :: YiM ()
saveAndQuitAllE = do
succeed <- fwriteAllY
when succeed quitEditor
| TOSPIO/yi | src/library/Yi/Keymap/Vim/Ex/Commands/Quit.hs | gpl-2.0 | 4,052 | 0 | 17 | 1,316 | 1,104 | 600 | 504 | 79 | 4 |
{-
Physics Engine Simulation
*Main> plotSpeed simuldata1
*Main> plotD
plotDispl plotDots
*Main> plotDispl simuldata1
*Main>
*Main> plotDispl simuldata2
*Main> plotSpeed simuldata2
-}
import Graphics.Gnuplot.Simple
type Time = Double
type Displ = Double
type Speed = Double
type Accel = Double
type State = (Time, Displ, Speed)
type AccelFunction = State -> Accel
s2time :: State -> Time
s2time (t, x, v) = t
s2speed :: State -> Speed
s2speed (t, x, v) = v
s2displ :: State -> Displ
s2displ (t, x, v) = x
--stateStep :: Time -> (State -> Accel) -> State -> State
stateStep :: Time -> AccelFunction -> State -> State
stateStep dt a (t, x, v) = (t', x', v')
where
t' = t + dt
x' = x + v*dt
v' = v + (a (t, x, v))*dt
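-- This is one step of the explicit (forward) Euler method:
--   t' = t + dt,   x' = x + v*dt,   v' = v + a(t, x, v)*dt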
--state_step = stateStep 0.1 accel
motion :: Time -> AccelFunction -> (State -> Bool) -> State -> [State]
motion dt accelFunc constraintFunc start_state = states
where
state_generator = stateStep dt accelFunc
states = takeWhile constraintFunc $ iterate state_generator start_state
plotDispl :: [State] -> IO ()
plotDispl simuldata =
plotList [] ( zip (map s2time simuldata ) (map s2displ simuldata))
plotSpeed :: [State] -> IO ()
plotSpeed simuldata =
plotList [] ( zip (map s2time simuldata ) (map s2speed simuldata))
--states = takeWhile (\s -> s2displ s >= 0) $ iterate state_step (0, 0, 20)
until_time :: Time -> State -> Bool
until_time time state = s2time state <= time
-- Keeps the simulation running until the particle hits the ground
until_fall :: State -> Bool
until_fall state = s2displ state >= 0
{-
A rock is launched vertically from the ground x = 0 with a velocity of 20 m/s.
Taking the acceleration of gravity as 9.81 m/s^2, simulate the motion and
generate all data points until the rock is back to earth.
-}
accel :: AccelFunction
accel (t, x, v) = -9.81 -- m/s2
simuldata1 = motion 0.1 accel until_fall (0.0, 0.0, 20.0)
{--
A pendulum with a 1 meter string is released
from 90 degrees from the resting position with zero velocity.
Take the gravitational acceleration to be 9.81 m/s^2.
--}
pendulumAccel :: Double -> State -> Accel
pendulumAccel rod_length (t, θ, dθ) = -9.81/rod_length*sin(θ)
-- Pendulum Acceleration for 1 meter lenght string
accpend = pendulumAccel 1.0
simuldata2 = motion 0.01 accpend (until_time 3.0) (0, pi/2, 0)
angle_vector = map s2displ simuldata2
pendulum_xy = zip (map sin angle_vector) (map (\θ -> 1-cos(θ)) angle_vector)
{-----------------------------------------------------------------------
Spring Mass Damping
You can see that the original 2nd-order ODE becomes a system of 1st-order equations, which is called the state-space method. One more thing: let's define the initial conditions so that everything is prepared for the coding part:
x0 = 1 m
x0' = 1 m/s
And the following parameters:
Mass m=50kg
Spring stiffness k=5000N/m
Damping coefficient c=100Ns/m
External sinusoidal force: u(t)=500cos(5t)
The total time is 10s, and time step dt=0.001s
Now that everything is prepared, let's do some coding.
http://nbviewer.ipython.org/github/Zhiweix/assignment-bank/blob/fb8a5e0aaec2af2fd263b38288738d88b1fa1091/Final%20projects/Final%20Project-Zhiwei%20Zhang/Spring-mass-damper%20system%20-%20Zhiwei%20Zhang.ipynb
-}
{-
k - Spring stiffness N/m
c - Damping coefficient
-}
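{-
The 2nd-order ODE  m*x'' + c*x' + k*x = u(t)  rewritten in state-space form is
  x' = v
  v' = u(t)/m - (k/m)*x - (c/m)*v
which is exactly what accelSpring computes below.
-}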
accelSpring :: Double -> Double -> Double -> (State -> Double) -> State -> Accel
accelSpring m k c extForce (t, x, v) =
(extForce (t, x, v))/m - k/m*x - c/m*v
-- Spring Acceleration as function of external force function
springAc_force = accelSpring 50.0 5000.0 100.0
force1 :: State -> Double
force1 (t, x, v) = 500*cos(5*t)
force2 :: State -> Double
force2 (t, x, v) = 100*t^2
force3 :: State -> Double
force3 (t, x, v) = 0.000
-- Attach Spring Mass-Damping system to force1
--springAccel = springAc_force force1
simulateSpring force =
motion 0.001 (springAc_force force) (until_time 10.0) (0.0, 1.0, 1.0)
simuldata3 = simulateSpring force1
simuldata4 = simulateSpring force2
simuldata5 = simulateSpring force3
--plotSpeed = undefined
--plotDispl simulation
| Khady/Functional-Programming | codes/newton.hs | unlicense | 4,190 | 1 | 14 | 842 | 963 | 526 | 437 | 56 | 1 |
{-|
Module : Idris.Elab.Type
Description : Code to elaborate types.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE PatternGuards #-}
module Idris.Elab.Type (
buildType, elabType, elabType'
, elabPostulate, elabExtern
) where
import Idris.AbsSyntax
import Idris.ASTUtils
import Idris.DSL
import Idris.Error
import Idris.Delaborate
import Idris.Imports
import Idris.Elab.Term
import Idris.Coverage
import Idris.DataOpts
import Idris.Providers
import Idris.Primitives
import Idris.Inliner
import Idris.PartialEval
import Idris.DeepSeq
import Idris.Output (iputStrLn, pshow, iWarn, sendHighlighting)
import IRTS.Lang
import Idris.Elab.Utils
import Idris.Elab.Value
import Idris.Core.TT
import Idris.Core.Elaborate hiding (Tactic(..))
import Idris.Core.Evaluate
import Idris.Core.Execute
import Idris.Core.Typecheck
import Idris.Core.CaseTree
import Idris.Docstrings (Docstring)
import Prelude hiding (id, (.))
import Control.Category
import Control.Applicative hiding (Const)
import Control.DeepSeq
import Control.Monad
import Control.Monad.State.Strict as State
import Data.List
import Data.Maybe
import Debug.Trace
import qualified Data.Traversable as Traversable
import qualified Data.Map as Map
import qualified Data.Set as S
import qualified Data.Text as T
import Data.Char(isLetter, toLower)
import Data.List.Split (splitOn)
import Util.Pretty(pretty, text)
buildType :: ElabInfo
-> SyntaxInfo
-> FC
-> FnOpts
-> Name
-> PTerm
-> Idris (Type, Type, PTerm, [(Int, Name)])
buildType info syn fc opts n ty' = do
ctxt <- getContext
i <- getIState
logElab 2 $ show n ++ " pre-type " ++ showTmImpls ty' ++ show (no_imp syn)
ty' <- addUsingConstraints syn fc ty'
ty' <- addUsingImpls syn n fc ty'
let ty = addImpl (imp_methods syn) i ty'
logElab 5 $ show n ++ " type pre-addimpl " ++ showTmImpls ty'
logElab 5 $ "with methods " ++ show (imp_methods syn)
logElab 2 $ show n ++ " type " ++ show (using syn) ++ "\n" ++ showTmImpls ty
((ElabResult tyT' defer is ctxt' newDecls highlights newGName, est), log) <-
tclift $ elaborate (constraintNS info) ctxt (idris_datatypes i) (idris_name i) n (TType (UVal 0)) initEState
(errAt "type of " n Nothing
(erunAux fc (build i info ETyDecl [] n ty)))
displayWarnings est
setContext ctxt'
processTacticDecls info newDecls
sendHighlighting highlights
updateIState $ \i -> i { idris_name = newGName }
let tyT = patToImp tyT'
logElab 3 $ show ty ++ "\nElaborated: " ++ show tyT'
ds <- checkAddDef True False info fc iderr True defer
-- if the type is not complete, note that we'll need to infer
-- things later (for solving metavariables)
when (length ds > length is) -- more deferred than case blocks
$ addTyInferred n
mapM_ (elabCaseBlock info opts) is
ctxt <- getContext
logElab 5 "Rechecking"
logElab 6 (show tyT)
logElab 10 $ "Elaborated to " ++ showEnvDbg [] tyT
(cty, ckind) <- recheckC (constraintNS info) fc id [] tyT
-- record the implicit and inaccessible arguments
i <- getIState
let (inaccData, impls) = unzip $ getUnboundImplicits i cty ty
let inacc = inaccessibleImps 0 cty inaccData
logElab 3 $ show n ++ ": inaccessible arguments: " ++ show inacc ++
" from " ++ show cty ++ "\n" ++ showTmImpls ty
putIState $ i { idris_implicits = addDef n impls (idris_implicits i) }
logElab 3 ("Implicit " ++ show n ++ " " ++ show impls)
addIBC (IBCImp n)
-- Add the names referenced to the call graph, and check we're not
-- referring to anything less visible
-- In particular, a public/export type can not refer to anything
-- private, but can refer to any public/export
let refs = freeNames cty
nvis <- getFromHideList n
case nvis of
Nothing -> return ()
Just acc -> mapM_ (checkVisibility fc n (max Frozen acc) acc) refs
addCalls n refs
addIBC (IBCCG n)
when (Constructor `notElem` opts) $ do
let pnames = getParamsInType i [] impls cty
let fninfo = FnInfo (param_pos 0 pnames cty)
setFnInfo n fninfo
addIBC (IBCFnInfo n fninfo)
return (cty, ckind, ty, inacc)
where
patToImp (Bind n (PVar t) sc) = Bind n (Pi Nothing t (TType (UVar [] 0))) (patToImp sc)
patToImp (Bind n b sc) = Bind n b (patToImp sc)
patToImp t = t
param_pos i ns (Bind n (Pi _ t _) sc)
| n `elem` ns = i : param_pos (i + 1) ns sc
| otherwise = param_pos (i + 1) ns sc
param_pos i ns t = []
-- | Elaborate a top-level type declaration - for example, "foo : Int -> Int".
elabType :: ElabInfo
-> SyntaxInfo
-> Docstring (Either Err PTerm)
-> [(Name, Docstring (Either Err PTerm))]
-> FC
-> FnOpts
-> Name
-> FC -- ^ The precise location of the name
-> PTerm
-> Idris Type
elabType = elabType' False
elabType' :: Bool -- normalise it
-> ElabInfo
-> SyntaxInfo
-> Docstring (Either Err PTerm)
-> [(Name, Docstring (Either Err PTerm))]
-> FC
-> FnOpts
-> Name
-> FC
-> PTerm
-> Idris Type
elabType' norm info syn doc argDocs fc opts n nfc ty' = {- let ty' = piBind (params info) ty_in
n = liftname info n_in in -}
do checkUndefined fc n
(cty, _, ty, inacc) <- buildType info syn fc opts n ty'
addStatics n cty ty
let nty = cty -- normalise ctxt [] cty
-- if the return type is something coinductive, freeze the definition
ctxt <- getContext
logElab 2 $ "Rechecked to " ++ show nty
let nty' = normalise ctxt [] nty
logElab 2 $ "Rechecked to " ++ show nty'
-- Add function name to internals (used for making :addclause know
-- the name unambiguously)
i <- getIState
rep <- useREPL
when rep $ do
addInternalApp (fc_fname fc) (fst . fc_start $ fc) (PTyped (PRef fc [] n) ty') -- (mergeTy ty' (delab i nty')) -- TODO: Should use span instead of line and filename?
addIBC (IBCLineApp (fc_fname fc) (fst . fc_start $ fc) (PTyped (PRef fc [] n) ty')) -- (mergeTy ty' (delab i nty')))
let (fam, _) = unApply (getRetTy nty')
let corec = case fam of
P _ rcty _ -> case lookupCtxt rcty (idris_datatypes i) of
[TI _ True _ _ _] -> True
_ -> False
_ -> False
-- Productivity checking now via checking for guarded 'Delay'
let opts' = opts -- if corec then (Coinductive : opts) else opts
let usety = if norm then nty' else nty
ds <- checkDef info fc iderr True [(n, (-1, Nothing, usety, []))]
addIBC (IBCDef n)
addDefinedName n
let ds' = map (\(n, (i, top, fam, ns)) -> (n, (i, top, fam, ns, True, True))) ds
addDeferred ds'
setFlags n opts'
checkDocs fc argDocs ty
doc' <- elabDocTerms info doc
argDocs' <- mapM (\(n, d) -> do d' <- elabDocTerms info d
return (n, d')) argDocs
addDocStr n doc' argDocs'
addIBC (IBCDoc n)
addIBC (IBCFlags n opts')
fputState (opt_inaccessible . ist_optimisation n) inacc
addIBC (IBCOpt n)
when (Implicit `elem` opts') $ do addCoercion n
addIBC (IBCCoercion n)
when (AutoHint `elem` opts') $
case fam of
P _ tyn _ -> do addAutoHint tyn n
addIBC (IBCAutoHint tyn n)
t -> ifail "Hints must return a data or record type"
-- If the function is declared as an error handler and the language
-- extension is enabled, then add it to the list of error handlers.
errorReflection <- fmap (elem ErrorReflection . idris_language_extensions) getIState
when (ErrorHandler `elem` opts) $ do
if errorReflection
then
-- Check that the declared type is the correct type for an error handler:
-- handler : List (TTName, TT) -> Err -> ErrorReport - for now no ctxt
if tyIsHandler nty'
then do i <- getIState
putIState $ i { idris_errorhandlers = idris_errorhandlers i ++ [n] }
addIBC (IBCErrorHandler n)
else ifail $ "The type " ++ show nty' ++ " is invalid for an error handler"
else ifail "Error handlers can only be defined when the ErrorReflection language extension is enabled."
-- Send highlighting information about the name being declared
sendHighlighting [(nfc, AnnName n Nothing Nothing Nothing)]
-- if it's an export list type, make a note of it
case (unApply usety) of
(P _ ut _, _)
| ut == ffiexport -> do addIBC (IBCExport n)
addExport n
_ -> return ()
return usety
where
-- for making an internalapp, we only want the explicit ones, and don't
-- want the parameters, so just take the arguments which correspond to the
-- user declared explicit ones
mergeTy (PPi e n fc ty sc) (PPi e' n' _ _ sc')
| e == e' = PPi e n fc ty (mergeTy sc sc')
| otherwise = mergeTy sc sc'
mergeTy _ sc = sc
ffiexport = sNS (sUN "FFI_Export") ["FFI_Export"]
err = txt "Err"
maybe = txt "Maybe"
lst = txt "List"
errrep = txt "ErrorReportPart"
tyIsHandler (Bind _ (Pi _ (P _ (NS (UN e) ns1) _) _)
(App _ (P _ (NS (UN m) ns2) _)
(App _ (P _ (NS (UN l) ns3) _)
(P _ (NS (UN r) ns4) _))))
| e == err && m == maybe && l == lst && r == errrep
, ns1 == map txt ["Errors","Reflection","Language"]
, ns2 == map txt ["Maybe", "Prelude"]
, ns3 == map txt ["List", "Prelude"]
, ns4 == map txt ["Reflection","Language"] = True
tyIsHandler _ = False
elabPostulate :: ElabInfo -> SyntaxInfo -> Docstring (Either Err PTerm) ->
FC -> FC -> FnOpts -> Name -> PTerm -> Idris ()
elabPostulate info syn doc fc nfc opts n ty = do
elabType info syn doc [] fc opts n NoFC ty
putIState . (\ist -> ist{ idris_postulates = S.insert n (idris_postulates ist) }) =<< getIState
addIBC (IBCPostulate n)
sendHighlighting [(nfc, AnnName n (Just PostulateOutput) Nothing Nothing)]
-- remove it from the deferred definitions list
solveDeferred fc n
elabExtern :: ElabInfo -> SyntaxInfo -> Docstring (Either Err PTerm) ->
FC -> FC -> FnOpts -> Name -> PTerm -> Idris ()
elabExtern info syn doc fc nfc opts n ty = do
cty <- elabType info syn doc [] fc opts n NoFC ty
ist <- getIState
let arity = length (getArgTys (normalise (tt_ctxt ist) [] cty))
putIState . (\ist -> ist{ idris_externs = S.insert (n, arity) (idris_externs ist) }) =<< getIState
addIBC (IBCExtern (n, arity))
-- remove it from the deferred definitions list
solveDeferred fc n
| ozgurakgun/Idris-dev | src/Idris/Elab/Type.hs | bsd-3-clause | 11,785 | 0 | 19 | 3,919 | 3,506 | 1,751 | 1,755 | 228 | 10 |
module A5 where
import D5
main = sumFun [1..4] + (sum ( map (f f_gen) [1..7]))
| kmate/HaRe | old/testing/generaliseDef/A5_TokOut.hs | bsd-3-clause | 93 | 0 | 11 | 30 | 49 | 27 | 22 | 3 | 1 |
{-# OPTIONS_JHC -fno-prelude -fffi #-}
module System.C.Stdio where
import Jhc.Basics
import Jhc.Type.C
import Jhc.Type.Ptr
type FILE = Ptr CFile
foreign import ccall "stdio.h fopen" c_fopen :: Ptr CChar -> Ptr CChar -> IO FILE
foreign import ccall "stdio.h popen" c_popen :: Ptr CChar -> Ptr CChar -> IO FILE
foreign import ccall "stdio.h fclose" c_fclose :: FILE -> IO CInt
foreign import ccall "stdio.h pclose" c_pclose :: FILE -> IO CInt
foreign import ccall "stdio.h jhc_utf8_putchar" c_putwchar :: Int -> IO ()
foreign import ccall "wchar.h jhc_utf8_getc" c_fgetwc :: FILE -> IO Int
foreign import ccall "wchar.h jhc_utf8_getchar" c_getwchar :: IO Int
foreign import ccall "wchar.h jhc_utf8_putc" c_fputwc :: Int -> FILE -> IO Int
foreign import ccall "stdio.h fwrite_unlocked" c_fwrite :: Ptr a -> CSize -> CSize -> FILE -> IO CSize
foreign import ccall "stdio.h fread_unlocked" c_fread :: Ptr a -> CSize -> CSize -> FILE -> IO CSize
foreign import ccall "stdio.h fflush" c_fflush :: FILE -> IO ()
foreign import ccall "stdio.h feof" c_feof :: FILE -> IO Int
foreign import ccall "stdio.h ftell" c_ftell :: FILE -> IO IntMax
foreign import ccall "stdio.h fseek" c_fseek :: FILE -> IntMax -> CInt -> IO Int
foreign import ccall "stdio.h fileno" c_fileno :: FILE -> IO Int
foreign import primitive "const.SEEK_SET" c_SEEK_SET :: CInt
foreign import primitive "const.SEEK_CUR" c_SEEK_CUR :: CInt
foreign import primitive "const.SEEK_END" c_SEEK_END :: CInt
foreign import primitive "const._IOFBF" c__IOFBF :: CInt
foreign import primitive "const._IOLBF" c__IOLBF :: CInt
foreign import primitive "const._IONBF" c__IONBF :: CInt
| m-alvarez/jhc | lib/jhc/System/C/Stdio.hs | mit | 1,772 | 7 | 11 | 393 | 461 | 242 | 219 | -1 | -1 |
{-# LANGUAGE CPP, DeriveDataTypeable, GeneralizedNewtypeDeriving #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Server.Packages.PackageIndex
-- Copyright : (c) David Himmelstrup 2005,
-- Bjorn Bringert 2007,
-- Duncan Coutts 2008
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- An index of packages.
--
module Distribution.Server.Packages.PackageIndex (
-- * Package index data type
PackageIndex,
-- * Creating an index
fromList,
-- * Updates
merge,
insert,
insertWith,
deletePackageName,
deletePackageId,
-- * Queries
indexSize,
numPackageVersions,
packageNames,
-- ** Precise lookups
lookupPackageName,
lookupPackageId,
lookupPackageForId,
lookupDependency,
-- ** Case-insensitive searches
searchByName,
SearchResult(..),
searchByNameSubstring,
-- ** Bulk queries
allPackages,
allPackagesByName
) where
import Distribution.Server.Framework.MemSize
import Distribution.Server.Util.Merge
import Prelude hiding (lookup)
import Control.Exception (assert)
import qualified Data.Map.Strict as Map
import Data.Map.Strict (Map)
import qualified Data.Foldable as Foldable
import Data.List (groupBy, sortBy, find, isInfixOf)
import Data.Monoid (Monoid(..))
import Data.Maybe (fromMaybe)
import Data.SafeCopy
import Data.Typeable
import Distribution.Package
( PackageName(..), PackageIdentifier(..)
, Package(..), packageName, packageVersion
, Dependency(Dependency) )
import Distribution.Version ( withinRange )
import Distribution.Simple.Utils (lowercase, comparing)
-- | The collection of information about packages from one or more 'PackageDB's.
--
-- It can be searched efficiently by package name and version.
--
newtype PackageIndex pkg = PackageIndex
-- A mapping from package names to a non-empty list of versions of that
-- package, in ascending order (most recent package last)
-- TODO: Wouldn't it make more sense to store the most recent package first?
--
-- This allows us to find all versions satisfying a dependency.
-- Most queries are a map lookup followed by a linear scan of the bucket.
--
(Map PackageName [pkg])
deriving (Show, Read, Typeable, MemSize)
instance Eq pkg => Eq (PackageIndex pkg) where
PackageIndex m1 == PackageIndex m2 = flip Foldable.all (mergeMaps m1 m2) $ \mr -> case mr of
InBoth pkgs1 pkgs2 -> bagsEq pkgs1 pkgs2
OnlyInLeft _ -> False
OnlyInRight _ -> False
where
bagsEq [] [] = True
bagsEq [] _ = False
bagsEq (x:xs) ys = case suitable_ys of
[] -> False
(_y:suitable_ys') -> bagsEq xs (unsuitable_ys ++ suitable_ys')
where (unsuitable_ys, suitable_ys) = break (==x) ys
instance Package pkg => Monoid (PackageIndex pkg) where
mempty = PackageIndex (Map.empty)
mappend = merge
--save one mappend with empty in the common case:
mconcat [] = mempty
mconcat xs = foldr1 mappend xs
invariant :: Package pkg => PackageIndex pkg -> Bool
invariant (PackageIndex m) = all (uncurry goodBucket) (Map.toList m)
where
goodBucket _ [] = False
goodBucket name (pkg0:pkgs0) = check (packageId pkg0) pkgs0
where
check pkgid [] = packageName pkgid == name
check pkgid (pkg':pkgs) = packageName pkgid == name
&& pkgid < pkgid'
&& check pkgid' pkgs
where pkgid' = packageId pkg'
--
-- * Internal helpers
--
mkPackageIndex :: Package pkg => Map PackageName [pkg] -> PackageIndex pkg
mkPackageIndex index = assert (invariant (PackageIndex index)) (PackageIndex index)
internalError :: String -> a
internalError name = error ("PackageIndex." ++ name ++ ": internal error")
-- | Lookup a name in the index to get all packages that match that name
-- case-sensitively.
--
lookup :: Package pkg => PackageIndex pkg -> PackageName -> [pkg]
lookup (PackageIndex m) name = fromMaybe [] $ Map.lookup name m
--
-- * Construction
--
-- | Build an index out of a bunch of packages.
--
-- If there are duplicates, later ones mask earlier ones.
--
fromList :: Package pkg => [pkg] -> PackageIndex pkg
fromList pkgs = mkPackageIndex
. Map.map fixBucket
. Map.fromListWith (++)
$ [ (packageName pkg, [pkg])
| pkg <- pkgs ]
where
fixBucket = -- out of groups of duplicates, later ones mask earlier ones
-- but Map.fromListWith (++) constructs groups in reverse order
map head
-- Eq instance for PackageIdentifier is wrong, so use Ord:
. groupBy (\a b -> EQ == comparing packageId a b)
-- relies on sortBy being a stable sort so we
-- can pick consistently among duplicates
. sortBy (comparing packageId)
--
-- * Updates
--
-- | Merge two indexes.
--
-- Packages from the second mask packages of the same exact name
-- (case-sensitively) from the first.
--
merge :: Package pkg => PackageIndex pkg -> PackageIndex pkg -> PackageIndex pkg
merge i1@(PackageIndex m1) i2@(PackageIndex m2) =
assert (invariant i1 && invariant i2) $
mkPackageIndex (Map.unionWith mergeBuckets m1 m2)
-- | Elements in the second list mask those in the first.
mergeBuckets :: Package pkg => [pkg] -> [pkg] -> [pkg]
mergeBuckets [] ys = ys
mergeBuckets xs [] = xs
mergeBuckets xs@(x:xs') ys@(y:ys') =
case packageId x `compare` packageId y of
GT -> y : mergeBuckets xs ys'
EQ -> y : mergeBuckets xs' ys'
LT -> x : mergeBuckets xs' ys
-- | Inserts a single package into the index.
--
-- This is equivalent to (but slightly quicker than) using 'mappend' or
-- 'merge' with a singleton index.
--
insert :: Package pkg => pkg -> PackageIndex pkg -> PackageIndex pkg
insert pkg (PackageIndex index) = mkPackageIndex $ -- or insertWith const
Map.insertWith (\_ -> insertNoDup) (packageName pkg) [pkg] index
where
pkgid = packageId pkg
insertNoDup [] = [pkg]
insertNoDup pkgs@(pkg':pkgs') = case compare pkgid (packageId pkg') of
LT -> pkg : pkgs
EQ -> pkg : pkgs' -- this replaces the package
GT -> pkg' : insertNoDup pkgs'
-- | Inserts a single package into the index, combining an old and new value with a function.
-- This isn't in cabal's version of PackageIndex.
--
-- The merge function is called as (f newPkg oldPkg). Ensure that the result has the same
-- package id as the two arguments; otherwise newPkg is used.
--
insertWith :: Package pkg => (pkg -> pkg -> pkg) -> pkg -> PackageIndex pkg -> PackageIndex pkg
insertWith mergeFunc pkg (PackageIndex index) = mkPackageIndex $
Map.insertWith (\_ -> insertMerge) (packageName pkg) [pkg] index
where
pkgid = packageId pkg
insertMerge [] = [pkg]
insertMerge pkgs@(pkg':pkgs') = case compare pkgid (packageId pkg') of
LT -> pkg : pkgs
EQ -> let merged = mergeFunc pkg pkg' in
if packageId merged == pkgid then merged : pkgs'
else pkg : pkgs'
GT -> pkg' : insertMerge pkgs'
-- | Internal delete helper.
--
delete :: Package pkg => PackageName -> (pkg -> Bool) -> PackageIndex pkg -> PackageIndex pkg
delete name p (PackageIndex index) = mkPackageIndex $
Map.update filterBucket name index
where
filterBucket = deleteEmptyBucket
. filter (not . p)
deleteEmptyBucket [] = Nothing
deleteEmptyBucket remaining = Just remaining
-- | Removes a single package from the index.
--
deletePackageId :: Package pkg => PackageIdentifier -> PackageIndex pkg -> PackageIndex pkg
deletePackageId pkgid =
delete (packageName pkgid) (\pkg -> packageId pkg == pkgid)
-- | Removes all packages with this (case-sensitive) name from the index.
--
deletePackageName :: Package pkg => PackageName -> PackageIndex pkg -> PackageIndex pkg
deletePackageName name =
delete name (\pkg -> packageName pkg == name)
--
-- * Bulk queries
--
-- | Get all the packages from the index.
--
allPackages :: Package pkg => PackageIndex pkg -> [pkg]
allPackages (PackageIndex m) = concat (Map.elems m)
-- | Get all the packages from the index.
--
-- They are grouped by package name, case-sensitively.
--
allPackagesByName :: Package pkg => PackageIndex pkg -> [[pkg]]
allPackagesByName (PackageIndex m) = Map.elems m
--
-- * Lookups
--
-- | Does a lookup by package id (name & version).
--
-- Since multiple package DBs mask each other case-sensitively by package name,
-- we get back at most one package.
--
lookupPackageId :: Package pkg => PackageIndex pkg -> PackageIdentifier -> Maybe pkg
lookupPackageId index pkgid =
case [ pkg | pkg <- lookup index (packageName pkgid)
, packageId pkg == pkgid ] of
[] -> Nothing
[pkg] -> Just pkg
_ -> internalError "lookupPackageIdentifier"
-- | Does a case-sensitive search by package name.
-- The returned list should be ordered (strictly ascending) by version number.
--
lookupPackageName :: Package pkg => PackageIndex pkg -> PackageName -> [pkg]
lookupPackageName index name = lookup index name
-- | Search by name of a package identifier, and further select a version if possible.
--
lookupPackageForId :: Package pkg => PackageIndex pkg -> PackageIdentifier -> ([pkg], Maybe pkg)
lookupPackageForId index pkgid =
let pkgs = lookupPackageName index (packageName pkgid)
in (,) pkgs $ find ((==pkgid) . packageId) pkgs
-- | Does a case-sensitive search by package name and a range of versions.
--
-- We get back any number of versions of the specified package name, all
-- satisfying the version range constraint.
--
lookupDependency :: Package pkg => PackageIndex pkg -> Dependency -> [pkg]
lookupDependency index (Dependency name versionRange) =
[ pkg | pkg <- lookup index name
, packageName pkg == name
, packageVersion pkg `withinRange` versionRange ]
--
-- * Case insensitive name lookups
--
-- | Does a case-insensitive search by package name.
--
-- If there is only one package that compares case-insensitively to this name
-- then the search is unambiguous and we get back all versions of that package.
-- If several match case-insensitively but one matches exactly then it is also
-- unambiguous.
--
-- If however several match case-insensitively and none match exactly then we
-- have an ambiguous result, and we get back all the versions of all the
-- packages. The list of ambiguous results is split by exact package name. So
-- it is a non-empty list of non-empty lists.
--
searchByName :: Package pkg => PackageIndex pkg -> String -> SearchResult [pkg]
searchByName (PackageIndex m) name =
case [ pkgs | pkgs@(PackageName name',_) <- Map.toList m
, lowercase name' == lname ] of
[] -> None
[(_,pkgs)] -> Unambiguous pkgs
pkgss -> case find ((PackageName name==) . fst) pkgss of
Just (_,pkgs) -> Unambiguous pkgs
Nothing -> Ambiguous (map snd pkgss)
where lname = lowercase name
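-- For example (illustrative): with packages named "Foo" and "foo" in the
-- index, searching for "FOO" is 'Ambiguous', searching for "foo" is
-- 'Unambiguous' (the exact match wins), and searching for a name not in
-- the index at all gives 'None'.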
data SearchResult a = None | Unambiguous a | Ambiguous [a] deriving (Show)
-- | Does a case-insensitive substring search by package name.
--
-- That is, all packages that contain the given string in their name.
--
searchByNameSubstring :: Package pkg => PackageIndex pkg -> String -> [pkg]
searchByNameSubstring (PackageIndex m) searchterm =
[ pkg
| (PackageName name, pkgs) <- Map.toList m
, lsearchterm `isInfixOf` lowercase name
, pkg <- pkgs ]
where lsearchterm = lowercase searchterm
-- | Gets the number of packages in the index (number of names).
indexSize :: Package pkg => PackageIndex pkg -> Int
indexSize (PackageIndex m) = Map.size m
-- | The number of package versions
-- (i.e., we should have @length . allPackages == numPackageVersions@)
numPackageVersions :: PackageIndex pkg -> Int
numPackageVersions (PackageIndex m) = sum . map (length . snd) $ Map.toList m
-- | Get an ascending list of package names in the index.
packageNames :: Package pkg => PackageIndex pkg -> [PackageName]
packageNames (PackageIndex m) = Map.keys m
---------------------------------- State for PackageIndex
instance (Package pkg, SafeCopy pkg) => SafeCopy (PackageIndex pkg) where
putCopy index = contain $ do
safePut $ allPackages index
getCopy = contain $ do
packages <- safeGet
return $ fromList packages
| ocharles/hackage-server | Distribution/Server/Packages/PackageIndex.hs | bsd-3-clause | 12,567 | 0 | 14 | 2,872 | 2,894 | 1,548 | 1,346 | 180 | 5 |
module Aws.Sqs.Commands (
module Aws.Sqs.Commands.Message,
module Aws.Sqs.Commands.Permission,
module Aws.Sqs.Commands.Queue,
module Aws.Sqs.Commands.QueueAttributes
) where
import Aws.Sqs.Commands.Message
import Aws.Sqs.Commands.Permission
import Aws.Sqs.Commands.Queue
import Aws.Sqs.Commands.QueueAttributes
| RayRacine/aws | Aws/Sqs/Commands.hs | bsd-3-clause | 320 | 0 | 5 | 30 | 69 | 50 | 19 | 9 | 0 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="az-AZ">
<title>Getting started Guide</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>İndeks</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Axtar</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | ccgreen13/zap-extensions | src/org/zaproxy/zap/extension/gettingStarted/resources/help_az_AZ/helpset_az_AZ.hs | apache-2.0 | 968 | 79 | 66 | 158 | 411 | 208 | 203 | -1 | -1 |
{-# OPTIONS_GHC -fglasgow-exts -O -ddump-simpl -fno-method-sharing #-}
module Roman where
import Control.Monad.ST
newtype T s a = T { unT :: Int -> ST s a }
instance Monad (T s) where
return = T . const . return
T p >>= f = T $ \i -> do { x <- p i
; unT (f x) i }
myIndex :: T s Int
{-# INLINE myIndex #-}
myIndex = T return
foo :: T s Int
foo = do { x <- myIndex
; return (x + 1) }
{- At one stage we got code looking like this:
U.a3 =
\ (@ s_a8E) (i_shA :: GHC.Base.Int) (eta_shB :: GHC.Prim.State# s_a8E) ->
case ((((U.myIndex @ s_a8E)
`cast` ...)
i_shA)
`cast` ...)
eta_shB
of wild_si5 { (# new_s_shF, r_shG #) -> ...
U.foo :: forall s_a5S. U.T s_a5S GHC.Base.Int
U.foo = U.a3 `cast` ...
The point is that myIndex should be inlined, else code is bad -}
| ezyang/ghc | testsuite/tests/eyeball/inline1.hs | bsd-3-clause | 916 | 0 | 12 | 317 | 170 | 92 | 78 | 14 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
-- | This holds the certificate authority (without the private key)
module Types.CA where
import Control.Lens
import Data.Data (Data, Typeable)
import Data.SafeCopy
import Data.Text (Text)
import Types.Common
data CertificateAuthority = CA {
_caSerialNumber :: SerialNumber -- ^ Current CA serial number. Updated with every signing
, _caCertificate :: Text -- ^ The CA certificate without the private key.
} deriving (Data, Typeable, Show)
makeLenses ''CertificateAuthority
$(deriveSafeCopy 0 'base ''CertificateAuthority)
| tazjin/herbert | src/Types/CA.hs | mit | 733 | 0 | 8 | 178 | 111 | 65 | 46 | 15 | 0 |
module Spear.Math.Ray
(
Ray(..)
, raylr
, rayfb
)
where
import Spear.Math.Utils
import Spear.Math.Vector
data Ray = Ray
{ origin :: {-# UNPACK #-} !Vector2
, dir :: {-# UNPACK #-} !Vector2
}
-- | Classify the given point's position with respect to the given ray. Left/Right test.
raylr :: Ray -> Vector2 -> Side
raylr (Ray o d) p
| orientation2d o (o+d) p < 0 = R
| otherwise = L
-- | Classify the given point's position with respect to the given ray. Front/Back test.
rayfb :: Ray -> Vector2 -> Face
rayfb (Ray o d) p
| orientation2d o (perp d) p > 0 = F
| otherwise = B
| jeannekamikaze/Spear | Spear/Math/Ray.hs | mit | 650 | 0 | 11 | 192 | 188 | 101 | 87 | 18 | 1 |
module Numeric.LinearCombination
( LinearCombination(..)
, Term(..)
, multiplyUsing
, operationUsing
, basisChangeUsing
, Numeric.LinearCombination.filter
, find
, elementsToList
, scalarMult
, zero
, showUsing
, simplify
) where
import GHC.Exts (sortWith)
import Data.AEq
import Numeric (showGFloat)
import qualified Data.List as L
import Prelude hiding (filter)
newtype LinearCombination coefficient element
= LinComb [Term coefficient element]
deriving (Eq)
data Term coefficient element = coefficient :* element
deriving (Eq, Show)
instance (Num a) => Monad (Term a) where
(c :* e) >>= f = (c * c') :* e'
where (c' :* e') = f e
return x = 1 :* x
instance (Num a, AEq a, Eq b, Ord b) => Num (LinearCombination a b) where
(LinComb terms) + (LinComb terms') = LinComb $ foldr addTerm short long
where [short, long] = sortWith length [terms, terms']
negate = mapTerms (\(c :* e) -> (-c) :* e)
(*) = error "Use the function `multiplyUsing` instead."
abs = error "Absolute value not defined for LinearCombination."
signum = error "Signum not defined for LinearCombination."
fromInteger 0 = zero -- to allow sum
fromInteger _ = error "Can't construct LinearCombination from integer."
mapTerms :: (Num a, Eq b, Ord b)
=> (Term a b -> Term a b)
-> LinearCombination a b -> LinearCombination a b
mapTerms f (LinComb terms) = LinComb $ map f terms
-- | Multiplies two 'LinearCombination's given a rule to multiply two elements.
multiplyUsing :: (Num a, AEq a, Eq b, Ord b)
=> (b -> b -> Term a b)
-> LinearCombination a b -> LinearCombination a b
-> LinearCombination a b
multiplyUsing op (LinComb xs) (LinComb ys) = simplify $ LinComb products
where products = [(c1 * c2 * factor) :* e
| (c1 :* e1) <- xs, (c2 :* e2) <- ys,
let factor :* e = e1 `op` e2,
factor /= 0]
operationUsing :: (Num a, AEq a, Eq b, Ord b)
=> (b -> Term a b) -> LinearCombination a b
-> LinearCombination a b
operationUsing op = simplify . (mapTerms (>>= op))
basisChangeUsing :: (Num a, AEq a, Eq b, Ord b)
=> (b -> LinearCombination a b) -> LinearCombination a b
-> LinearCombination a b
basisChangeUsing op (LinComb terms) = sum $ map transform terms
where transform (c :* e) = c `scalarMult` op e
filter :: (Num a, AEq a, Eq b, Ord b)
=> (b -> Bool) -> LinearCombination a b
-> LinearCombination a b
filter p (LinComb terms) = LinComb $ L.filter (p . element) terms
find :: (Num a, AEq a, Eq b, Ord b)
=> (b -> Bool) -> LinearCombination a b
-> Maybe (Term a b)
find p (LinComb terms) = L.find (p . element) terms
elementsToList :: LinearCombination a b -> [b]
elementsToList (LinComb terms) = map element terms
element :: Term a b -> b
element (c :* e) = e
-- | Adds a Term to a list of Terms.
addTerm :: (Num a, AEq a, Eq b, Ord b)
=> Term a b -> [Term a b]
-> [Term a b]
addTerm y [] = [y]
addTerm y@(c1 :* e1) (x@(c2 :* e2):xs)
| e1 == e2 = if c ~== 0 then xs else (c :* e1) : xs
| e1 < e2 = y : x : xs
| otherwise = x : addTerm y xs
where c = c1 + c2
-- | Simplifies a LinearCombination so that each element only appears once by
-- adding the coefficients of like elements.
simplify :: (Num a, AEq a, Eq b, Ord b)
=> LinearCombination a b
-> LinearCombination a b
simplify (LinComb xs) = LinComb $ foldr addTerm [] xs
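-- Illustrative example (not part of the original module): with Double
-- coefficients and String elements,
--   simplify (LinComb [2 :* "x", 3 :* "x"]) == LinComb [5 :* "x"]
-- since addTerm collects the coefficients of like elements.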
-- | Multiplication by a scalar
scalarMult :: (Num a, Eq b, Ord b)
=> a -> LinearCombination a b
-> LinearCombination a b
scalarMult x = mapTerms (\(c :* e) -> (x * c) :* e)
zero :: LinearCombination a b
zero = LinComb []
-- * Printing
instance (Show a, Num a, AEq a, Ord a, Show b)
=> Show (LinearCombination a b) where
show = showUsing show
showUsing :: (Show a, Num a, AEq a, Ord a)
=> (b -> String) -> LinearCombination a b
-> String
showUsing _ (LinComb []) = "0"
showUsing showElement (LinComb terms) = concat $ first : rest
where first = showTerm True (head terms)
rest = map (showTerm False) (tail terms)
showTerm = showTermUsing showElement
showTermUsing :: (Show a, Num a, AEq a, Ord a)
=> (b -> String) -> Bool -> Term a b
-> String
showTermUsing showElement first (c :* e) = sign ++ number ++ eString
where sign | first = if c < 0 then "-" else ""
| otherwise = if c < 0 then " - " else " + "
number = if absc ~== 1 && eString /= "" then "" else absString
        absString = show absc -- showGFloat (Just 3) absc ""
absc = abs c
eString = showElement e
instance (Num a, AEq a, Eq b) => AEq (LinearCombination a b) where
(===) = (==)
(LinComb terms) ~== (LinComb terms') = and $ zipWith (~==) terms terms'
instance (Num a, AEq a, Eq b) => AEq (Term a b) where
(===) = (==)
(c :* e) ~== (c' :* e') = (c ~== c') && (e == e') | pnutus/geometric-algebra | src/Numeric/LinearCombination.hs | mit | 5,080 | 0 | 12 | 1,448 | 2,053 | 1,076 | 977 | 116 | 4 |
-- 1
take 1 $ map (+1) [undefined, 2, 3] -- ⊥
-- 2
take 1 $ map (+1) [1, undefined, 3] -- [2]
-- 3
take 2 $ map (+1) [1, undefined, 3] -- ⊥
-- 4
-- itIsMystery takes a string and returns a list of Bools where True corresponds to a vowel and False to a consonant in the input string
itIsMystery :: String -> [Bool]
itIsMystery xs = map (\x -> elem x "aeiou") xs
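-- Illustrative: itIsMystery "day" == [False,True,False]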
-- 5
map (^2) [1..10] -- [1,4,9,16,25,36,49,64,81,100]
-- 6
import Data.Bool
map (\x -> bool (x) (-x) (x == 3)) [1..10]
| ashnikel/haskellbook | ch09/ch09.9_ex.hs | mit | 473 | 0 | 10 | 95 | 199 | 111 | 88 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
module Helpers.Database where
import Database.Groundhog
import Database.Groundhog.Core
import Database.Groundhog.Sqlite
import Data.Proxy
db :: Proxy Sqlite
db = undefined
intToKey :: (PrimitivePersistField (Key a b)) => Int -> Key a b
intToKey p = integralToKey p
integralToKey :: (PrimitivePersistField i, PrimitivePersistField (Key a b)) => i -> Key a b
integralToKey = fromPrimitivePersistValue db . toPrimitivePersistValue db
| archaeron/chatty-server | src/Helpers/Database.hs | mit | 471 | 0 | 8 | 66 | 136 | 73 | 63 | 12 | 1 |
{-# LANGUAGE QuasiQuotes #-}
module AltParsing where
import Control.Applicative
import Text.Trifecta
import Text.RawString.QQ
type NumberOrString = Either Integer String
parseNos :: Parser NumberOrString
parseNos =
skipMany (oneOf "\n")
>>
(Left <$> integer)
<|> (Right <$> some letter)
eitherOr :: String
eitherOr = [r|
123
abc
456
def
|]
| NickAger/LearningHaskell | HaskellProgrammingFromFirstPrinciples/Chapter24/AltParsing/src/AltParsing.hs | mit | 365 | 0 | 9 | 73 | 93 | 54 | 39 | 14 | 1 |
module Lab6 where
import System.CPUTime
import System.Random
import Control.Monad
import Lecture6
-- Exercise 1
-- Catches some edge cases and then uses exMsq to calculate modular exponentiation by squaring.
exM :: Integer -> Integer -> Integer -> Integer
exM b e m
| e < 0 = 0
| e == 0 = 1 `mod` m
| otherwise = exMsq b e m 1
-- Squaring
-- Usage: exMsq b e m 1
exMsq :: Integer -> Integer -> Integer -> Integer -> Integer
exMsq _ 0 _ r = r
exMsq b e m r
| odd e = exMsq b (e-1) m (mod (r*b) m)
| otherwise = exMsq (mod (b*b) m) (div e 2) m r
-- Memory-efficient method
exMmem :: Integer -> Integer -> Integer -> Integer
exMmem b 1 m = mod b m
exMmem b e m = mod (b* exMmem b (e-1) m) m
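-- Illustrative check (not part of the original exercise): both versions agree
-- with plain modular exponentiation, e.g. exM 2 10 1000 == 24 and
-- exMmem 2 10 1000 == 24, since 2^10 = 1024 and 1024 mod 1000 = 24.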
-- Exercise 2
-- Usage: testEx2 minRange maxRange
testEx2 :: Integer -> Integer -> IO()
testEx2 = randomFaster
-- Generates a random test which compares both methods' performance.
randomFaster:: Integer -> Integer -> IO()
randomFaster x y = do
g <- newStdGen
let (b, newGen) = randomR (x,y) g
let (e, newGen') = randomR (x,y) newGen
let (m, _) = randomR (x,y) newGen'
faster b e m
-- Given a base, exponent and modulus, it compares the performance of both methods.
-- Time is in picoseconds.
faster:: Integer -> Integer -> Integer -> IO ()
faster b e m = do
print ("Comparison between the Ex1 method and lecture's method"::String)
print ("Results are expressed in pico seconds."::String)
x <- getDiffMsq b e m
y <- getDiffDefault b e m
r <- compareDiff x y
if r then print ("--> Squaring method is faster"::String)
else print ("--> Lecture's method is faster"::String)
compareDiff:: Integer -> Integer -> IO Bool
compareDiff x y = return (x < y)
-- Calculates the execution time using the square method from exercise 1.
getDiffMsq:: Integer -> Integer -> Integer -> IO Integer
getDiffMsq b e m = do
start <- getCPUTime
print ("Square method result: " ++ show (exMsq b e m 1))
end <- getCPUTime
let diff = fromIntegral (end - start)
print ("- Execution time: " ++ show diff)
return diff
-- Calculates the execution time using the default method from the lectures.
getDiffDefault:: Integer -> Integer -> Integer -> IO Integer
getDiffDefault b e m = do
start <- getCPUTime
print ("Lecture method result: " ++ show (expM b e m))
end <- getCPUTime
let diff = fromIntegral (end - start)
print ("- Execution time: " ++ show diff)
return diff
-- Exercise 3
-- The approach taken is fairly easy. Starting from the first known non-prime
-- number, we generate a list by filtering on not being prime, using the
-- function isPrime from the lectures. That function takes a long time when a
-- number is prime, but when it isn't it gives a result really fast.
composites :: [Integer]
composites = 4 : filter (not . isPrime) [5..]
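-- Illustrative: take 5 composites == [4,6,8,9,10]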
-- Exercise 4
-- Usage: testEx4 k
-- Lowest found values for testEx4 k
-- (k = 1) lowest found: 4
-- (k = 2) lowest found: 4
-- (k = 3) lowest found: 15
-- (k = 4) lowest found: 4
--
-- If you increase k the probability of fooling the test becomes smaller due to
-- a larger number of random samples. However, for low numbers the number of
-- distinct possible samples is quite small; for example, for 4 there are only
-- 3 samples, namely 1^3 mod 4, 2^3 mod 4 and 3^3 mod 4. One of these returns 1,
-- which means that with k = 4 the chance of fooling the test is (1/3)^4 == 1.23%.
testEx4 :: Int -> IO()
testEx4 k = do
f <- foolFermat k
print ("(k = " ++ show k ++ ") lowest found: " ++ show f)
foolFermat :: Int -> IO Integer
foolFermat k = lowestFermatFooler k composites
lowestFermatFooler :: Int -> [Integer] -> IO Integer
lowestFermatFooler _ [] = return 0
lowestFermatFooler k (x:xs) = do
result <- prime_tests_F k x
if result then return x else lowestFermatFooler k xs
-- Exercise 5
-- Usage: testEx5 k
-- We are going to test Fermat's primality test using Carmichael numbers.
-- The first thing we do is to define a function to generate Carmichael numbers.
-- In this case, it is given in the description of the exercise.
carmichael :: [Integer]
carmichael = [ (6*k+1)*(12*k+1)*(18*k+1) |
k <- [2..],
isPrime (6*k+1),
isPrime (12*k+1),
isPrime (18*k+1) ]
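-- Illustrative: the first value produced (at k = 6) is 37 * 73 * 109 == 294409.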
-- Test the first k Carmichael numbers using Fermat's primality test. We take
-- the function prime_test_F from the lectures as our implementation of Fermat's
-- primality test.
testFermatC :: Int -> IO [Bool]
testFermatC k = mapM prime_test_F (take k carmichael)
-- We also define a test using the Sieve of Eratosthenes to check if the output
-- of Fermat's test is correct.
testIsPrime :: Int -> [Bool]
testIsPrime k = map isPrime (take k carmichael)
-- Finally, we output the solution. Carmichael numbers are quite big so the
-- primality check takes a long time to compute.
testEx5 :: Int -> IO()
testEx5 k = do
let num = take k carmichael
print ("Test for the first " ++ show k ++ " Carmichael's numbers.")
print ("Displays the result as a triple following the structure:"::String)
print ("- (Carmichael number, (Fermat's test, Sieve of Erastothenes))"::String)
ferm <- testFermatC k
let eras = testIsPrime k
let comb = zip num (zip ferm eras)
print comb
-- The output of the test deserves to be discussed. Carmichael numbers are
-- derived from Fermat's primality test. Fermat's primality test states that if
-- p is prime, then a random number a not divisible by p fulfills
-- a^(p-1) = 1 mod p. Carmichael numbers are composite numbers (as we can see in
-- the function used to generate them) that pass Fermat's test even though they
-- are not prime. Indeed, those numbers p are not prime but coprime with the
-- chosen base a. This turns Fermat's test into a necessary but not sufficient
-- condition: if a number is prime it fulfills Fermat's theorem, but a number
-- that fulfills Fermat's theorem may not be prime, as Carmichael numbers
-- demonstrate. The formula used to calculate these numbers was proved by
-- J. Chernick in 1939 and it produces Carmichael numbers as long as its 3
-- components are prime. As the numbers are Carmichael numbers, they should pass
-- Fermat's test but fail the Eratosthenes sieve.
-- Exercise 6
-- Miller-Rabin
testMR :: Int -> [Integer] -> IO ()
testMR _ [] = return ()
testMR k (p:ps) = do
  r <- primeMR k p
  when r $ print (show p ++ ", Miller-Rabin: " ++ show r)
  testMR k ps
-- Test: testMR 1 carmichael, will take forever
-- Test: testMR 1 (take 1000 carmichael) found 118901521, Miller-Rabin: True, and
-- probably many more, but my processor gives up.
-- Conclusion: testMR takes an iteration count k and a list of Carmichael
-- numbers to test. Our test isn't consistent enough to draw a solid conclusion,
-- but foolers are hard to find, which suggests that the MR test is more
-- difficult to fool using Carmichael numbers.
-- Mersenne
mersnPrimes :: Integer -> IO ()
mersnPrimes p = do
print(show p)
let p1 = (2^p - 1) in
do
r <- primeMR 5 p1
when r $ mersnPrimes p1
--Test : mersnPrimes 5
--Test : mersnPrimes m3 "2147483647" (= m8: 2^31-1)
--Conclusion: mersnPrimes takes a prime p, checks 2^p - 1 with primeMR and
-- recurses while the check succeeds. According to
-- https://en.wikipedia.org/wiki/Mersenne_prime not all the numbers that pass
-- the check are genuine Mersenne primes.
-- Exercise 7
| Gurrt/software-testing | week-6/Lab6.hs | mit | 7,815 | 0 | 13 | 2,049 | 1,603 | 818 | 785 | 99 | 2 |
{-# LANGUAGE OverloadedStrings, QuasiQuotes #-}
module Y2018.M04.D04.Exercise where
{--
So you have the data from the last two day's exercises, let's start storing
those data into a PostgreSQL database. Today's exercise is to store just
the authors.
But there's a catch: you have to consider you're doing this as a daily upload.
So: are there authors already stored? If so we don't store them, if not, we
DO store them and get back the unique ID associated with those authors (for
eventual storage in an article_author join table).
First, fetch all the authors already stored (with, of course their unique ids)
--}
import Control.Monad.State
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.FromRow
import Database.PostgreSQL.Simple.SqlQQ
import Database.PostgreSQL.Simple.ToField
import Database.PostgreSQL.Simple.ToRow
-- we will use previous techniques for memoizing tables:
-- below imports available via 1HaskellADay git repository
import Data.LookupTable
import Data.MemoizingTable
import Store.SQL.Connection
import Store.SQL.Util.Indexed
import Store.SQL.Util.LookupTable
import Y2018.M04.D02.Exercise (readJSON, arts)
import Y2018.M04.D03.Exercise (Author, authors)
-- 1. fetch the authors into a LookupTable then convert that into a memoizing
-- table state
authorTableName :: String
authorTableName = "author"
lookupAuthors :: Connection -> IO LookupTable
lookupAuthors conn = undefined
type MemoizedAuthors m = MemoizingS m Integer Author ()
lk2MS :: Monad m => LookupTable -> MemoizedAuthors m
lk2MS table = undefined
-- 2. from yesterday's exercise, triage the authors into the memoizing table
addNewAuthors :: [Author] -> MemoizedAuthors m
addNewAuthors authors = undefined
-- 3. store the new memoizing table values into the author table
authorStmt :: Query
authorStmt = [sql|INSERT INTO author (author) VALUES (?) returning id|]
insertAuthors :: Connection -> MemoizedAuthors IO
insertAuthors conn = undefined
-- and there we go for today! Have at it!
| geophf/1HaskellADay | exercises/HAD/Y2018/M04/D04/Exercise.hs | mit | 2,012 | 0 | 7 | 296 | 249 | 154 | 95 | 28 | 1 |
{-# LANGUAGE OverloadedStrings #-}
--
-- | Test SQL queries using SQLite interpreter
--
module Codex.Tester.Sql (
sqliteTester
) where
import Codex.Tester
import Control.Applicative ((<|>))
import Data.Maybe (fromMaybe)
import qualified Data.Text as T
import Data.List (sort)
import Control.Exception (throwIO, catch)
import System.FilePath (takeFileName)
sqliteTester :: Tester Result
sqliteTester = queryTester <|> updateTester
--
-- | Tester for queries
--
queryTester :: Tester Result
queryTester = tester "sqlite-query" $ do
Code lang query <- testCode
guard (lang == "sql")
---
limits <- configLimits "language.sqlite.limits"
sqlite <- configured "language.sqlite.command" >>= parseArgs
answer <- fromMaybe "" <$> metadata "answer"
assert (pure $ answer /= "") "missing SQL query answer in metadata"
dir <- takeDirectory <$> testFilePath
inputs <- globPatterns dir =<< metadataWithDefault "databases" []
assert (pure $ not $ null inputs) "missing SQL databases metadata"
ordering <- metadataWithDefault "ignore-order" False
let normalize = if ordering then T.unlines . sort . T.lines else T.strip
liftIO (runQueries limits sqlite answer query normalize inputs
`catch` return)
runQueries _ [] _ _ _ _ =
throwIO $ userError "no SQLite command in config file"
runQueries limits (sqlcmd:sqlargs) answer query normalize inputs =
loop 1 inputs
where
total = length inputs
runQuery db sql = do
(exitCode, stdout, stderr) <-
safeExec limits sqlcmd Nothing (sqlargs++["-init", db]) sql
case exitCode of
ExitSuccess ->
-- NB: SQLite can exit with zero code in many errors,
-- so we need to check stderr
if match "Error" stderr then
throwIO $ runtimeError stderr
else return stdout
ExitFailure _ -> throwIO $ runtimeError stderr
---
loop _ []
= return $ accepted $ "Passed " <> T.pack (show total) <> " tests"
loop n (db : rest) = do
obtained <- runQuery db query
expected <- runQuery db answer
if normalize obtained == normalize expected then
loop (n+1) rest
else
return $ wrongAnswer $
T.unlines [ "Test " <> T.pack (show n) <> " / " <>
T.pack (show total) <>
" using database " <>
T.pack (takeFileName db)
, ""
, "EXPECTED:"
, expected
, ""
, "OBTAINED:"
, obtained
]
--
-- | Tester for updates
--
updateTester :: Tester Result
updateTester = tester "sqlite-update" $ do
Code lang update <- testCode
guard (lang == "sql")
---
limits <- configLimits "language.sqlite.limits"
sqlite <- configured "language.sqlite.command" >>= parseArgs
sqldiff<- configured "language.sqlite.diff" >>= parseArgs
answer <- metadataWithDefault "answer" ""
assert (pure $ answer /= "") "missing SQL query answer in metadata"
dir <- takeDirectory <$> testFilePath
inputs <- globPatterns dir =<< metadataWithDefault "databases" []
assert (pure $ not $ null inputs) "missing SQL databases in metadata"
liftIO (runUpdates limits sqlite sqldiff answer update inputs
`catch` return)
runUpdates _ [] _ _ _ _ =
throwIO $ userError "no SQLite command in config file"
runUpdates _ _ [] _ _ _ =
throwIO $ userError "no SQLite diff command in config file"
runUpdates limits (sqlite:args) (sqldiff:args') answer update inputs =
loop 1 inputs
where
total = length inputs
runDiff db1 db2 = do
(exitCode, stdout, stderr) <-
safeExec limits sqldiff Nothing (args' ++ [db1, db2]) ""
case exitCode of
ExitSuccess -> if match "Error" stderr
then throwIO $ runtimeError stderr
else return stdout
ExitFailure _ -> throwIO $ runtimeError stderr
runUpdate db sql file = do
(exitCode, _, stderr) <-
safeExec limits sqlite Nothing (args++["-init", db, file]) sql
case exitCode of
ExitSuccess ->
-- NB: SQLite can exit with zero code in many errors,
-- so we need to check stderr
when (match "Error" stderr) $
throwIO $ runtimeError ("runUpdate: " <> stderr)
ExitFailure _ -> throwIO $ runtimeError ("runUpdate: "<> stderr)
---
loop _ []
= return $ accepted $ "Passed " <> T.pack (show total) <> " tests"
loop n (db : rest) = do
stdout <- withTemp "expected.db" "" $ \expectf ->
withTemp "observed.db" "" $ \observef -> do
chmod (readable . writeable) expectf
chmod (readable . writeable) observef
runUpdate db answer expectf
runUpdate db update observef
runDiff observef expectf
if T.null stdout then loop (n+1) rest
else throwIO $ wrongAnswer stdout
| pbv/codex | src/Codex/Tester/Sql.hs | mit | 5,242 | 0 | 20 | 1,728 | 1,409 | 701 | 708 | 107 | 6 |
module Triplet (isPythagorean, mkTriplet, pythagoreanTriplets) where
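-- The check below relies on the identity that, with m = maximum [a, b, c],
-- a^2 + b^2 + c^2 == 2 * m^2 holds exactly when the two smaller squares sum
-- to m^2, i.e. when the triple is Pythagorean.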
isPythagorean :: (Int, Int, Int) -> Bool
isPythagorean (a, b, c) = a * a + b * b + c * c == 2 * m * m
where m = maximum [a, b, c]
mkTriplet :: Int -> Int -> Int -> (Int, Int, Int)
mkTriplet a b c = (a, b, c)
pythagoreanTriplets :: Int -> Int -> [(Int, Int, Int)]
pythagoreanTriplets minFactor maxFactor = filter isPythagorean [(a, b, c) | a <- [minFactor..maxFactor], b <- [a..maxFactor], c <- [b..maxFactor]]
| c19/Exercism-Haskell | pythagorean-triplet/src/Triplet.hs | mit | 484 | 0 | 12 | 94 | 239 | 136 | 103 | 8 | 1 |
import Test.Hspec
import Control.Exception (evaluate)
import qualified Data.Map.Strict as Map
import Go
import Go.UI.Color
main :: IO ()
main = hspec $ do
describe "Go.addMove" $ do
it "adds a black stone to an empty board" $ do
let point = Point (3, 4)
Right game = addMove newGame point in
(boardAt game point) `shouldBe` (Just Black)
it "cannot add a stone to a place already taken" $ do
let point = Point (3, 4)
Right game = addMove newGame point in
(addMove game point) `shouldBe` (Left "Invalid move")
it "cannot add a stone to coordinates outside the board" $ do
let point = Point (20, 4) in
(addMove newGame point) `shouldBe` (Left "Invalid coordinates")
it "removes a single dead stone" $ do
let moves = ["d4", "d3", "pass", "d5", "pass", "c4", "pass", "e4"]
game = addMoves moves newGame in
boardAt game (Point (4, 4)) `shouldBe` Just Ko
it "does not allow to set to a Ko point" $ do
let moves = ["d4", "d3", "pass", "d5", "pass", "c4", "pass", "e4"]
game = addMoves moves newGame in
addMove game (Point (4, 4)) `shouldBe` Left "Invalid move"
it "clears Ko after one move" $ do
let moves = ["d4", "d3", "pass", "d5", "pass", "c4", "pass", "e4", "q16"]
game = addMoves moves newGame in
boardAt game (Point (4, 4)) `shouldBe` Nothing
it "ends the game after second consecutive pass" $ do
let moves = ["pass", "pass"]
game = addMoves moves newGame in
addMove game (Point (4, 4)) `shouldBe` Left "Game over"
describe "Go.pass" $ do
it "lets the player pass" $ do
let Right (Game { moves = moves }) = pass newGame in
moves `shouldBe` [Nothing]
it "clears Ko after one pass" $ do
let moves = ["d4", "d3", "pass", "d5", "pass", "c4", "pass", "e4", "pass"]
game = addMoves moves newGame in
boardAt game (Point (4, 4)) `shouldBe` Nothing
it "ends the game after second consecutive pass" $ do
let moves = ["pass", "pass"]
game = addMoves moves newGame in
pass game `shouldBe` Left "Game over"
addMoves :: [String] -> Game -> Game
addMoves [] game = game
addMoves ("pass":moves) game = addMoves moves game'
where Right game' = pass game
addMoves ((x:y):moves) game = addMoves moves game'
where y' = read y::Int
Just x' = Map.lookup x coordLetters
point = Point (x', y')
Right game' = addMove game point
| tsujigiri/gosh | spec/Spec.hs | mit | 2,748 | 0 | 22 | 935 | 916 | 470 | 446 | 56 | 1 |
module RandomForest where
import Data.List
import Numeric.LinearAlgebra
import Predictor
import Utils
import CART
type RandomForest = EnsembleLearner CART_Tree
newtype EnsembleLearner learner = EL [learner]
instance (Predictor l) => Predictor (EnsembleLearner l) where
predict v (EL ls) = (sum' (map (predict v) ls))/(fromIntegral $ length ls)
instance (Learner l) => Learner (EnsembleLearner l) where
learn x y (EL ls) = fmap EL $ mapM (learn x y) ls
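-- | Build an ensemble by running the monadic learner constructor f a total of n times.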
new_ensemble f n = fmap EL $ mapM (const f) [1..n]
| jvictor0/OnlineRandomForest | src/RandomForest.hs | mit | 518 | 0 | 12 | 93 | 211 | 111 | 100 | 13 | 1 |
-- Copyright 2015 Mitchell Kember. Subject to the MIT License.
-- Project Euler: Problem 11
-- Largest product in a grid
module Problem11 where
import Data.Maybe (catMaybes)
type Coord = (Int, Int)
grid :: [[Int]]
grid =
[ [08, 02, 22, 97, 38, 15, 00, 40, 00, 75, 04, 05, 07, 78, 52, 12, 50, 77, 91, 08]
, [49, 49, 99, 40, 17, 81, 18, 57, 60, 87, 17, 40, 98, 43, 69, 48, 04, 56, 62, 00]
, [81, 49, 31, 73, 55, 79, 14, 29, 93, 71, 40, 67, 53, 88, 30, 03, 49, 13, 36, 65]
, [52, 70, 95, 23, 04, 60, 11, 42, 69, 24, 68, 56, 01, 32, 56, 71, 37, 02, 36, 91]
, [22, 31, 16, 71, 51, 67, 63, 89, 41, 92, 36, 54, 22, 40, 40, 28, 66, 33, 13, 80]
, [24, 47, 32, 60, 99, 03, 45, 02, 44, 75, 33, 53, 78, 36, 84, 20, 35, 17, 12, 50]
, [32, 98, 81, 28, 64, 23, 67, 10, 26, 38, 40, 67, 59, 54, 70, 66, 18, 38, 64, 70]
, [67, 26, 20, 68, 02, 62, 12, 20, 95, 63, 94, 39, 63, 08, 40, 91, 66, 49, 94, 21]
, [24, 55, 58, 05, 66, 73, 99, 26, 97, 17, 78, 78, 96, 83, 14, 88, 34, 89, 63, 72]
, [21, 36, 23, 09, 75, 00, 76, 44, 20, 45, 35, 14, 00, 61, 33, 97, 34, 31, 33, 95]
, [78, 17, 53, 28, 22, 75, 31, 67, 15, 94, 03, 80, 04, 62, 16, 14, 09, 53, 56, 92]
, [16, 39, 05, 42, 96, 35, 31, 47, 55, 58, 88, 24, 00, 17, 54, 24, 36, 29, 85, 57]
, [86, 56, 00, 48, 35, 71, 89, 07, 05, 44, 44, 37, 44, 60, 21, 58, 51, 54, 17, 58]
, [19, 80, 81, 68, 05, 94, 47, 69, 28, 73, 92, 13, 86, 52, 17, 77, 04, 89, 55, 40]
, [04, 52, 08, 83, 97, 35, 99, 16, 07, 97, 57, 32, 16, 26, 26, 79, 33, 27, 98, 66]
, [88, 36, 68, 87, 57, 62, 20, 72, 03, 46, 33, 67, 46, 55, 12, 32, 63, 93, 53, 69]
, [04, 42, 16, 73, 38, 25, 39, 11, 24, 94, 72, 18, 08, 46, 29, 32, 40, 62, 76, 36]
, [20, 69, 36, 41, 72, 30, 23, 88, 34, 62, 99, 69, 82, 67, 59, 85, 74, 04, 36, 16]
, [20, 73, 35, 29, 78, 31, 90, 01, 74, 31, 49, 71, 48, 86, 81, 16, 23, 57, 05, 54]
, [01, 70, 54, 71, 83, 51, 54, 69, 16, 92, 33, 48, 61, 43, 52, 01, 89, 19, 67, 48]
]
cell :: Coord -> Maybe Int
cell (x, y)
| inBounds = Just $ row !! x
| otherwise = Nothing
where
row = grid !! y
inBounds = x >= 0 && y >= 0 && y < length grid && x < length row
move :: Coord -> Coord -> Coord
move (x, y) (dx, dy) = (x + dx, y + dy)
productsForCell :: Int -> Coord -> [Int]
productsForCell n coord = catMaybes prods
where
slide delta = take n . iterate (move delta) $ coord
multCells = fmap product . sequence . map cell
directions = [(1, 0), (0, 1), (1, 1), (-1, 1)]
prods = map (multCells . slide) directions
solve :: Int
solve = maximum . concatMap (productsForCell size) $ coords
where
size = 4
lastIdx = length grid - 1
coords = [(x, y) | x <- [0..lastIdx], y <- [0..lastIdx]]
| mk12/euler | haskell/Problem11.hs | mit | 2,717 | 0 | 13 | 747 | 1,677 | 1,069 | 608 | 44 | 1 |
module Note.Character where
import Control.Arrow
import Control.Applicative hiding ( (<|>) )
import Control.Name
import Data.List
import Data.String.Utils
import Data.Utils
import Note
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.TagWiki
import Text.Pin ( fromName )
import qualified Control.Modifier as Mods
import qualified Data.Set as Set
newtype Character = Character { base :: Basic } deriving (Eq, Ord, Show)
instance Note Character where
basic = base
-- | Adds prefixes and suffixes to tags.
-- | Character names will be split on spaces, that they may be referenced
-- | by either first or last or full names.
-- |
-- | If the character has multi-part names (e.g. "Van Halen"), escape the
-- | whitespace (e.g. "Van\ Halen").
-- |
-- | Only the first name is so split; all following names will not be touched.
names c = alter (names $ basic c) where
(ps, ss) = (prefixes &&& suffixes) c
expand = addSuffixes ss . applyPrefixes ps . splitIntoNames
alter (Name pri n:ns) = nub $ [Name pri x | x <- expand n] ++ ns
alter [] = []
-- | The primary name is the prefixed and suffixed full name
primaryName c = doHead "" expand $ map namePart $ names $ basic c where
expand n = prefixString (prefixes c) ++ n ++ suffixString (suffixes c)
-- | Updates a character to add 'nicknames' to the qualifiers.
-- | Thus, if you have the following character:
-- |
-- | Fredward Sharpe, Freddie
-- |
-- | He may be referenced as (for example):
-- |
-- | |Fredward (Freddie) Sharpe|
qualifiers c = qualifiers (basic c) `Set.union` extras where
extras = Set.fromList $ map fromName $ drop 1 ns
ns = names $ basic c
-- | Of a character's tags, the first is the "full name" and the rest
-- | are pseudonyms. Therefore, the first of a character's tags
-- | will have the prefixes and suffixes applied, the rest will not.
tags c = Set.fromList $ expand $ map namePart $ names $ basic c where
expanded n = prefixString (prefixes c) ++ n ++ suffixString (suffixes c)
expand (n:ns) = expanded n : ns
expand [] = []
parseCharacter :: Int -> GenParser Char st Character
parseCharacter i = Character <$> parseBasic i Mods.anyMod
-- | Add all suffixes to each name.
-- | Will be separated by spaces (unless the suffix starts with a comma)
-- | For example, given the following character:
-- |
-- | Shane Cantlyn
-- | $Jr. $, M.D.
-- |
-- | Yields the following names (in addition to the un-suffixed names)
-- |
-- | Shane Jr., M.D.
-- | Cantlyn Jr., M.D.
-- | Shane Cantlyn Jr., M.D.
addSuffixes :: [String] -> [String] -> [String]
addSuffixes [] xs = xs
addSuffixes ss xs = xs ++ [x ++ suffixString ss | x <- xs]
-- | Applies each prefix in turn to each string in turn
-- | If a character has multiple names, the prefixes will not be applied
-- | to the first (assumed informal) name.
-- |
-- | Only one prefix is applied at a time. So the following character:
-- |
-- | Shane Cantlyn
-- | ^Dr. ^Fr.
-- |
-- | Yields the following names (in addition to the un-prefixed ones)
-- |
-- | Dr. Cantlyn, Fr. Cantlyn, Dr. Shane Cantlyn, Fr. Shane Cantlyn
applyPrefixes :: [String] -> [String] -> [String]
applyPrefixes _ [] = []
applyPrefixes ps [x] = x : [unwords [p, x] | p <- ps]
applyPrefixes ps (x:ys) = x : ys ++ [unwords [p, y] | p <- ps, y <- ys]
-- | Splits a name into the names that can be tagged, which include:
-- |
-- | First name, Last name, First & Last Name, All names
splitIntoNames :: String -> [String]
splitIntoNames s = let str = strip s in case parseNames str of
[] -> [str]
[x] -> [x]
[x, y] -> [x, y, unwords [x, y]]
xs -> [x, z, unwords [x, z], unwords xs] where
x = head xs
z = last xs
-- | Combines a list of suffixes into one suffix, separating by space
-- | except when the suffix starts with a comma
suffixString :: [String] -> String
suffixString = concatMap (prep . strip) where
prep [] = []
prep trail@(',':_) = trail
prep suffix = ' ':suffix
-- | Like 'suffixString', for prefixes
prefixString :: [String] -> String
prefixString = concatMap (prep . strip) where
prep prefix = if null prefix then "" else prefix ++ " "
-- | Parse a name into names, respecting escaped whitespace
-- | Return the original name on failure
parseNames :: String -> [String]
parseNames str = case parse namesParser str str of
Left _ -> [str]
Right ns -> ns
namesParser :: GenParser Char st [String]
namesParser = many1 (whitespace *> nameParser <* whitespace)
nameParser :: GenParser Char st String
nameParser = except " \t\""
| Soares/tagwiki | src/Note/Character.hs | mit | 4,677 | 0 | 13 | 1,078 | 1,131 | 622 | 509 | 63 | 4 |
-- Take a number of items from the beginning of a list.
module Take where
import Prelude hiding (take)
take :: Integer -> [t] -> [t]
take _ [] = []
take numberOfItems list
| numberOfItems < 0 = error "Negative number."
| otherwise = take' numberOfItems list
  where
    take' 0 _ = []
    take' _ [] = []
    take' numberOfItems (listItem : remainingListItems)
      = listItem : (take' (numberOfItems - 1) remainingListItems)
{- GHCi>
take 1 []
take 0 [1]
take 1 [1]
take (-1) [1]
take 1 [1, 2]
take 2 [1, 2]
take 2 [1, 2, 3]
-}
-- []
-- []
-- [1]
-- *** Exception: Negative number.
-- [1]
-- [1, 2]
-- [1, 2]
| pascal-knodel/haskell-craft | Examples/· Recursion/· Primitive Recursion/Lists/Take.hs | mit | 630 | 0 | 11 | 169 | 142 | 78 | 64 | 10 | 2 |
module ScoreToLilypond.UtilsTest where
import Data.ByteString.Builder
import Data.Either.Combinators (fromRight')
import Data.Ratio
import qualified Data.Set as Set
import Music.RealSimpleMusic.Music.Data
import Music.RealSimpleMusic.Music.Utils
import Music.RealSimpleMusic.ScoreToLilypond.Utils
import Test.HUnit
testAccentNames :: Assertion
testAccentNames =
fromEnum (maxBound::Accent) @=? length accentValues
testRenderPitchOctaves :: Assertion
testRenderPitchOctaves =
map (toLazyByteString . stringEncoding) ["c'", "c''", "c'''", "c", "c,", "c,,"] @=? map (toLazyByteString . renderPitch) [Pitch C (Octave octave) | octave <- [0, 1, 2, -1, -2, -3]]
testRenderPitchAccidentals :: Assertion
testRenderPitchAccidentals =
map (toLazyByteString . stringEncoding) ["c", "ces", "ceses", "cis", "cisis"] @=? map (toLazyByteString . renderPitch) [Pitch pc (Octave (-1)) | pc <- [C, Cf, Cff, Cs, Css]]
testRenderRhythmBase :: Assertion
testRenderRhythmBase =
(map . map) (toLazyByteString . stringEncoding) [["1"], ["2"], ["4"], ["8"], ["16"], ["32"], ["64"]] @=? map (map toLazyByteString . renderRhythm . fromRight' . mkRhythm) [1%1, 1%2, 1%4, 1%8, 1%16, 1%32, 1%64]
testRenderRhythmDots :: Assertion
testRenderRhythmDots =
(map . map) (toLazyByteString . stringEncoding) [["1."], ["2."], ["4."], ["8."], ["16."], ["32."], ["64."]] @=? map (map toLazyByteString . renderRhythm . fromRight' . mkRhythm) [3%2, 3%4, 3%8, 3%16, 3%32, 3%64, 3%128]
testRenderRhythmTies :: Assertion
testRenderRhythmTies =
(map . map) (toLazyByteString . stringEncoding) [["1", "4"], ["1", "2."], ["1", "1", "4"], ["2", "8"], ["2."]] @=? map (map toLazyByteString . renderRhythm . fromRight' . mkRhythm) [5%4, 7%4, 9%4, 5%8, 6%8]
renderNote' :: Note -> Builder
renderNote' note = renderedNote where (_, _, renderedNote) = renderNote (False, False) note
testRenderNote :: Assertion
testRenderNote =
(toLazyByteString . stringEncoding) "c'64" @=? (toLazyByteString . renderNote') (Note (Pitch C (Octave 0)) ((fromRight' . mkRhythm) (1%64)) Set.empty)
testRenderAccentedNote :: Assertion
testRenderAccentedNote =
(toLazyByteString . stringEncoding) "d'32\\verysoft" @=? (toLazyByteString . renderNote') (Note (Pitch D (Octave 0)) ((fromRight'. mkRhythm) (1%32)) (Set.singleton (AccentControl VerySoft)))
testRenderRest :: Assertion
testRenderRest =
(toLazyByteString . stringEncoding) "r64" @=? (toLazyByteString . renderNote') (Rest (fromRight' (mkRhythm (1%64))) Set.empty)
testRenderPercussionNote :: Assertion
testRenderPercussionNote =
(toLazyByteString . stringEncoding) "c64" @=? (toLazyByteString . renderNote') (PercussionNote (fromRight' (mkRhythm (1%64))) Set.empty)
testRenderAccentedPercussionNote :: Assertion
testRenderAccentedPercussionNote =
(toLazyByteString . stringEncoding) "c32\\hard" @=? (toLazyByteString . renderNote') (PercussionNote (fromRight' (mkRhythm (1%32))) (Set.singleton (AccentControl Hard)))
testRenderTiedNote :: Assertion
testRenderTiedNote =
(toLazyByteString . stringEncoding) "c'1~ c'4" @=? (toLazyByteString . renderNote') (Note (Pitch C (Octave 0)) (fromRight' (mkRhythm (5%4))) Set.empty)
testRenderNotes :: Assertion
testRenderNotes =
(toLazyByteString . stringEncoding) "c8 d16 e32 f64 g128 a64 b32" @=? (toLazyByteString . renderNotes) (zipWith (\pc dur -> Note (Pitch pc (Octave (-1))) (fromRight' (mkRhythm dur)) Set.empty) (ascendingScale (fromRight' (majorScale C))) [1%8, 1%16, 1%32, 1%64, 1%128, 1%64, 1%32])
-- Fractional Dynamic Voice. Mix of
-- a) tied whole notes of varying durations with fractional dynamics that include crescendo and decrescendo
-- b) quarter note with discrete dyanmic
-- c) tied whole notes of varying durations with fractional dynamics that consist of a series of discrete dynamics
-- d) instrument should be a sustained instrument
-- TBD: when units add up to 3 things go badly wrong, e.g. "s3" in Lilypond, where the duration has to be a power of 2.
d1, d2, d3, d4 :: Dynamic
d1 = FractionalDynamic [(Crescendo,1),(Fortissimo,0),(Decrescendo,1),(Piano,0)] -- must not be first dynamic, needs dynamic to start crescendo
d2 = FractionalDynamic [(Fortissimo,1),(Piano,1),(MezzoForte,1),(MezzoPiano,1)]
d3 = DiscreteDynamic MezzoPiano
d4 = DiscreteDynamic MezzoForte
p1, p2, p3, p4 :: Pitch
p1 = Pitch C (Octave 0)
p2 = Pitch A (Octave 0)
p3 = Pitch D (Octave (-1))
p4 = Pitch G (Octave (-2))
rhythm :: Rhythm
rhythm = fromRight' (mkRhythm (4%1))
np1d1, np1d3, np1d2, np2d1, np2d2, np2d3, np2d4, np3d3, np3d2, np3d4, np4d1, np4d2, np4d4 :: Note
np1d1 = Note p1 rhythm (Set.singleton (DynamicControl d1))
np1d3 = Note p1 rhythm (Set.singleton (DynamicControl d3))
np1d2 = Note p1 rhythm (Set.singleton (DynamicControl d2))
np2d1 = Note p2 rhythm (Set.singleton (DynamicControl d1))
np2d2 = Note p2 rhythm (Set.singleton (DynamicControl d2))
np2d3 = Note p2 rhythm (Set.singleton (DynamicControl d3))
np2d4 = Note p2 rhythm (Set.singleton (DynamicControl d4))
np3d3 = Note p3 rhythm (Set.singleton (DynamicControl d3))
np3d4 = Note p3 rhythm (Set.singleton (DynamicControl d4))
np3d2 = Note p3 rhythm (Set.singleton (DynamicControl d2))
np4d4 = Note p4 rhythm (Set.singleton (DynamicControl d4))
np4d1 = Note p4 rhythm (Set.singleton (DynamicControl d1))
np4d2 = Note p4 rhythm (Set.singleton (DynamicControl d2))
-- Fractional Dynamics: validate by inspection of rendered results.
-- For Midi, open in editor and check alignment of dynamic events.
-- For Lilypond view score and verify alignment of dynamic events.
genFractDyn :: Score
genFractDyn =
Score "GenFractDynTest" "Test" controls voices
where
controls = ScoreControls (KeySignature 0) (TimeSignature 4 4) [(Tempo (TempoVal Quarter 120), fromRight' (mkRhythm (0%4)))]
voices = [
Voice (Instrument "Trombone") [np1d3,np1d1,np1d2]
, Voice (Instrument "Trombone") [np2d3,np2d2,np3d4]
, Voice (Instrument "Trombone") [np3d3,np3d2,np4d2]
, Voice (Instrument "Trombone") [np4d4,np4d1,np2d4]]
| tomtitchener/RealSimpleMusic | tests/ScoreToLilypond/UtilsTest.hs | cc0-1.0 | 6,069 | 0 | 17 | 916 | 2,156 | 1,202 | 954 | 85 | 1 |
{-# OPTIONS_GHC -Wall #-}
import Control.Monad (void)
import Test.HUnit
import CreditCard
digitsTest1, digitsTest2, digitsTest3, digitsTest4 :: Test
digitsTest1 = TestCase $ assertEqual "in digits decomposition"
[1,2,3,4] (toDigits 1234)
digitsTest2 = TestCase $ assertEqual "in reverse digits decomposition"
[4,3,2,1] (toDigitsRev 1234)
digitsTest3 = TestCase $ assertEqual "in zero decomposition"
[] (toDigits 0)
digitsTest4 = TestCase $ assertEqual "in negative number decomposition"
[] (toDigits (-2014))
doublingTest1, doublingTest2 :: Test
doublingTest1 = TestCase $ assertEqual "in every-other doubling of even-length list"
[16,7,12,5] (doubleEveryOther [8,7,6,5])
doublingTest2 = TestCase $ assertEqual "in every-other doubling of odd-length list"
[1,4,3] (doubleEveryOther [1,2,3])
sumTest :: Test
sumTest = TestCase $ assertEqual "in sum of digits"
22 (sumDigits [16,7,12,5])
validationTest1, validationTest2 :: Test
validationTest1 = TestCase $ assertBool "in validation of valid input"
(validate 4012888888881881)
validationTest2 = TestCase $ assertBool "in validation of invalid input"
(not (validate 4012888888881882))
tests :: Test
tests = TestList [ TestLabel "number to digits decomposition" $
TestList [ digitsTest1
, digitsTest2
, digitsTest3
, digitsTest4 ]
, TestLabel "doubling routine" $
TestList [ doublingTest1
, doublingTest2 ]
, TestLabel "sum of digits" $ sumTest
, TestLabel "number validation" $
TestList [ validationTest1
, validationTest2 ] ]
main :: IO ()
main = void $ runTestTT tests
| mgrabovsky/upenn-cis194 | hw01/testCreditCard.hs | cc0-1.0 | 1,819 | 0 | 10 | 502 | 445 | 248 | 197 | 41 | 1 |
{-# LANGUAGE Arrows #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module NLP.Grabber.Duden where
import Data.Text (Text)
import qualified Data.Text as T
import Text.XML.HXT.Core hiding (when)
import NLP.Grabber.Download
import Data.List
import Data.Monoid
import NLP.Types
import Control.Monad.Reader
import Control.Monad.Logger
import NLP.Grabber.Wordlist
import NLP.Types.Monad
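-- | Crawl the Duden definition index two levels deep, collect each page's word
-- list, then fetch and insert every word found.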
getAndInsertWords :: NLP ()
getAndInsertWords = do
urls1 <- liftIO $ getDefinitionList "http://www.duden.de/definition"
void $ mapConcurrentlyNLP handleStep1 urls1
where
handleStep1, handleStep2 :: Text -> NLP ()
handleStep1 url = do
urls2 <- liftIO $ getDefinitionList url
void $ mapM handleStep2 urls2
handleStep2 url = do
wordUrls <- getWordList url
void $ mapM handleWord wordUrls
globalizeUrl :: T.Text -> T.Text
globalizeUrl = ("http://www.duden.de" <>)
handleWord :: Text -> NLP ()
handleWord url = do
w <- getWord url
case w of
Just word -> insertWord word
Nothing -> return ()
getWordList :: Text -> NLP [Text]
getWordList url = do
$(logInfo) ("Getting Wordlist from " <> url)
doc <- liftIO $ download True url
liftIO $ runX $ doc >>> getLinks
where
getLinks = proc x -> do
nurl <- deep $ hasName "a" >>> hasAttrValue "href" ("/rechtschreibung/" `isPrefixOf`) >>> getAttrValue "href" -< x
returnA -< globalizeUrl $ T.pack nurl
getDefinitionList :: Text -> IO [Text]
getDefinitionList url = do
doc <- download True url
runX $ doc >>> getLinks
where
getLinks = proc x -> do
content <- deep $ hasName "ul" -< x
nurl <- deep $ hasName "a" >>> hasAttrValue "href" ("/definition/" `isPrefixOf`) >>> getAttrValue "href" -< content
returnA -< globalizeUrl $ T.pack nurl
getWord :: T.Text -> NLP (Maybe (Word, [Word]))
getWord url = do
$(logInfo) ("Getting Word from " <> url)
doc <- liftIO $ download True url
word <- liftIO $ runX $ doc >>> findWord
synonyms <- liftIO $ runX $ doc >>> findSynonyms
case length word of
0 -> return Nothing
_ -> return $ Just (head word,synonyms)
where
findWord = proc x -> do
word <- deepest $ hasName "div" >>> hasAttrValue "class" (=="breadcrumb") /> getText -< x
returnA -< Word $ T.pack word
findSynonyms = proc x -> do
synonym <- deepest $ hasName "a" >>> hasAttrValue "meta-topic" (=="Synonym") /> getText -< x
returnA -< Word $ T.pack synonym
| azapps/nlp.hs | src/NLP/Grabber/Duden.hs | gpl-3.0 | 2,469 | 4 | 15 | 539 | 845 | 421 | 424 | 65 | 2 |
module Kevin.Types (
Kevin(Kevin),
KevinIO,
KevinS,
Privclass,
Chatroom,
User(..),
Title,
PrivclassStore,
UserStore,
TitleStore,
kevin,
use_,
get_,
gets_,
put_,
modify_,
-- lenses
users, privclasses, titles, joining, loggedIn,
-- other accessors
damn, irc, dChan, iChan, settings, logger
) where
import qualified Data.Text as T
import qualified Data.Map as M
import System.IO
import Control.Concurrent
import Control.Concurrent.STM.TVar
import Control.Monad.Trans
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.STM (STM, atomically)
import Kevin.Settings
import Control.Lens
type Chatroom = T.Text
data User = User { username :: T.Text
, privclass :: T.Text
, privclassLevel :: Int
, symbol :: T.Text
, realname :: T.Text
, typename :: T.Text
, gpc :: T.Text
} deriving (Eq, Show)
type UserStore = M.Map Chatroom [User]
type Privclasses = M.Map T.Text Int
type PrivclassStore = M.Map Chatroom Privclasses
type Privclass = (T.Text, Int)
type Title = T.Text
type TitleStore = M.Map Chatroom Title
data Kevin = Kevin { damn :: Handle
, irc :: Handle
, dChan :: Chan T.Text
, iChan :: Chan T.Text
, _kevinSettings :: Settings
, _users :: UserStore
, _privclasses :: PrivclassStore
, _titles :: TitleStore
, _joining :: [T.Text]
, _loggedIn :: Bool
, logger :: Chan String
}
makeLenses ''Kevin
instance HasSettings Kevin where
settings = kevinSettings
type KevinS = StateT Kevin STM
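-- | Run a state transaction atomically: read the shared TVar, run the StateT
-- computation in a single STM transaction, and write the updated state back.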
kevin :: KevinS a -> KevinIO a
kevin m = ask >>= \v -> liftIO $ atomically $ do
s <- readTVar v
(a, t) <- runStateT m s
writeTVar v t
return a
type KevinIO = ReaderT (TVar Kevin) IO
use_ :: Getting a Kevin t a b -> KevinIO a
use_ = gets_ . view
get_ :: KevinIO Kevin
get_ = ask >>= liftIO . readTVarIO
put_ :: Kevin -> KevinIO ()
put_ k = ask >>= io . atomically . flip writeTVar k
gets_ :: (Kevin -> a) -> KevinIO a
gets_ = flip liftM get_
modify_ :: (Kevin -> Kevin) -> KevinIO ()
modify_ f = do
var <- ask
io . atomically $ modifyTVar var f
io :: MonadIO m => IO a -> m a
io = liftIO
| ekmett/kevin | Kevin/Types.hs | gpl-3.0 | 2,440 | 0 | 11 | 787 | 759 | 431 | 328 | -1 | -1 |
module Scheme.Internal (
LispVal (..)
, LispError (..)
, ThrowsError, IOThrowsError
, throwError, catchError, throwParserError
, liftIO, liftThrows, runErrorT
, showValPretty
, Env, Frame, Cell
) where
import Control.Monad.Error
import System.IO (Handle)
import Text.PrettyPrint.ANSI.Leijen (Doc)
import Data.IORef (IORef)
data LispVal = Number Integer
| Bool Bool
| Character Char
| String String
| Symbol String
| List [LispVal]
| DottedList [LispVal] LispVal
| Port Handle
| PrimitiveFunc ([LispVal] -> ThrowsError LispVal)
| IOFunc ([LispVal] -> IOThrowsError LispVal)
| BuiltInFunc (Env -> [LispVal] -> IOThrowsError LispVal)
| Func {params :: [String], vararg :: (Maybe String), body :: [LispVal], closure :: Env}
| Macro {params :: [String], vararg :: (Maybe String), body :: [LispVal], closure :: Env}
| Undefined
instance Eq LispVal where (==) = eqVal
instance Show LispVal where show = showVal
data LispError = NumArgs Integer [LispVal]
| TypeMismatch String LispVal
| Parser Doc
| BadSpecialFrom String LispVal
| NotFunction String String
| UnboundVar String String
| Default String
instance Show LispError where show = showError
instance Eq LispError where _ == _ = False
instance Error LispError where
  noMsg = Default "An error has occurred"
strMsg = Default
type ThrowsError = Either LispError
type IOThrowsError = ErrorT LispError IO
type Cell = IORef LispVal
type Frame = IORef [(String, Cell)]
type Env = IORef [Frame]
liftThrows :: ThrowsError a -> IOThrowsError a
liftThrows (Left err) = throwError err
liftThrows (Right val) = return val
eqVal :: LispVal -> LispVal -> Bool
eqVal (Number x) (Number y) = x == y
eqVal (Bool x) (Bool y) = x == y
eqVal (String x) (String y) = x == y
eqVal (Character x) (Character y) = x == y
eqVal (Symbol x) (Symbol y) = x == y
eqVal (List xs) (List ys) = xs == ys
eqVal (DottedList xs x) (DottedList ys y) = (xs == ys) && (x == y)
eqVal _ _ = False
showVal :: LispVal -> String
showVal (Number contents) = show contents
showVal (Bool True) = "#t"
showVal (Bool False) = "#f"
showVal (Character contents) = "#\\" ++ case contents of ' ' -> "space"; '\n' -> "newline"; c -> [c]
showVal (String contents) = show contents
showVal (Symbol name) = name
showVal (List contents) = "(" ++ unwordsList contents ++ ")"
showVal (DottedList _init _last) = "(" ++ unwordsList _init ++ " . " ++ show _last ++ ")"
showVal (Port _) = "<IO port>"
showVal (PrimitiveFunc _) = "<primitive>"
showVal (IOFunc _) = "<IO primitive>"
showVal (BuiltInFunc _) = "<subr>"
showVal (Func params vararg body closure) = "<closure>"
showVal (Macro params vararg body closure) = "<macro>"
showVal Undefined = ""
unwordsList = unwords . map showVal
showValPretty :: LispVal -> String
showValPretty val@(Number _) = showVal val
showValPretty val@(Bool _) = showVal val
showValPretty val@(Character _) = showVal val
showValPretty (String contents) = contents
showValPretty val@(Symbol _) = showVal val
showValPretty (List contents) = "(" ++ unwordsListPretry contents ++ ")"
showValPretty (DottedList _init _last) = "(" ++ unwordsListPretry _init ++ " . " ++ showValPretty _last ++ ")"
showValPretty (Port h) = "<IO port: " ++ extractPath (show h) ++ ">"
where extractPath str = init . drop 9 $ str
showValPretty val@(PrimitiveFunc _) = showVal val
showValPretty val@(IOFunc _) = showVal val
showValPretty val@(BuiltInFunc _) = showVal val
showValPretty (Func params vararg body closure) =
"<closure: (lambda (" ++ unwords params ++ (case vararg of Nothing -> ""; Just arg -> " . " ++ arg) ++ ") " ++ unwordsList body ++ ")>"
showValPretty (Macro params vararg body closure) =
"<macro: (define-macro (_______" ++ unwords params ++ (case vararg of Nothing -> ""; Just arg -> " . " ++ arg) ++ ") " ++ unwordsList body ++ ")>"
showValPretty Undefined = "<undefined>"
unwordsListPretry = unwords . map showValPretty
showError :: LispError -> String
showError (UnboundVar message varname) = message ++ ":" ++ varname
showError (BadSpecialFrom message form) = message ++ ":" ++ show form
showError (NumArgs expected found) = "Expected " ++ show expected ++ " args; found values " ++ unwordsList found
showError (TypeMismatch expected found) = "Invalid type: expected " ++ expected ++ ", found " ++ show found
showError (Parser doc) = "Parse error \n" ++ show doc
showError (NotFunction message found) = message ++ "; found " ++ found
showError (Default message) = message
throwParserError s = throwError $ Parser s -- for Parser.hs
| Altech/haScm | src/Scheme/Internal.hs | gpl-3.0 | 4,789 | 0 | 13 | 1,075 | 1,708 | 894 | 814 | 102 | 3 |
-- | The Sprites component of the Lamdu Theme
{-# LANGUAGE TemplateHaskell, DerivingVia #-}
module Lamdu.Config.Theme.Sprites where
import qualified Control.Lens as Lens
import qualified Data.Aeson.TH.Extended as JsonTH
import GUI.Momentu.Animation.Id (ElemIds)
import Lamdu.Prelude
data Sprites a = Sprites
{ _earthGlobe :: a
, _pencilLine :: a
, _theme :: a
}
deriving stock (Eq, Generic, Generic1, Show, Functor, Foldable, Traversable)
deriving Applicative via (Generically1 Sprites)
deriving anyclass (ElemIds)
JsonTH.derivePrefixed "_" ''Sprites
Lens.makeLenses ''Sprites
| Peaker/lamdu | src/Lamdu/Config/Theme/Sprites.hs | gpl-3.0 | 628 | 0 | 8 | 123 | 150 | 89 | 61 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Logging.Projects.Sinks.Create
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a sink that exports specified log entries to a destination. The
-- export of newly-ingested log entries begins immediately, unless the
-- sink\'s writer_identity is not permitted to write to the destination. A
-- sink can export log entries only from the resource owning the sink.
--
-- /See:/ <https://cloud.google.com/logging/docs/ Cloud Logging API Reference> for @logging.projects.sinks.create@.
module Network.Google.Resource.Logging.Projects.Sinks.Create
(
-- * REST Resource
ProjectsSinksCreateResource
-- * Creating a Request
, projectsSinksCreate
, ProjectsSinksCreate
-- * Request Lenses
, pscParent
, pscXgafv
, pscUniqueWriterIdentity
, pscUploadProtocol
, pscAccessToken
, pscUploadType
, pscPayload
, pscCallback
) where
import Network.Google.Logging.Types
import Network.Google.Prelude
-- | A resource alias for @logging.projects.sinks.create@ method which the
-- 'ProjectsSinksCreate' request conforms to.
type ProjectsSinksCreateResource =
"v2" :>
Capture "parent" Text :>
"sinks" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "uniqueWriterIdentity" Bool :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] LogSink :> Post '[JSON] LogSink
-- | Creates a sink that exports specified log entries to a destination. The
-- export of newly-ingested log entries begins immediately, unless the
-- sink\'s writer_identity is not permitted to write to the destination. A
-- sink can export log entries only from the resource owning the sink.
--
-- /See:/ 'projectsSinksCreate' smart constructor.
data ProjectsSinksCreate =
ProjectsSinksCreate'
{ _pscParent :: !Text
, _pscXgafv :: !(Maybe Xgafv)
, _pscUniqueWriterIdentity :: !(Maybe Bool)
, _pscUploadProtocol :: !(Maybe Text)
, _pscAccessToken :: !(Maybe Text)
, _pscUploadType :: !(Maybe Text)
, _pscPayload :: !LogSink
, _pscCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsSinksCreate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pscParent'
--
-- * 'pscXgafv'
--
-- * 'pscUniqueWriterIdentity'
--
-- * 'pscUploadProtocol'
--
-- * 'pscAccessToken'
--
-- * 'pscUploadType'
--
-- * 'pscPayload'
--
-- * 'pscCallback'
projectsSinksCreate
:: Text -- ^ 'pscParent'
-> LogSink -- ^ 'pscPayload'
-> ProjectsSinksCreate
projectsSinksCreate pPscParent_ pPscPayload_ =
ProjectsSinksCreate'
{ _pscParent = pPscParent_
, _pscXgafv = Nothing
, _pscUniqueWriterIdentity = Nothing
, _pscUploadProtocol = Nothing
, _pscAccessToken = Nothing
, _pscUploadType = Nothing
, _pscPayload = pPscPayload_
, _pscCallback = Nothing
}
-- | Required. The resource in which to create the sink:
-- \"projects\/[PROJECT_ID]\" \"organizations\/[ORGANIZATION_ID]\"
-- \"billingAccounts\/[BILLING_ACCOUNT_ID]\" \"folders\/[FOLDER_ID]\"
-- Examples: \"projects\/my-logging-project\",
-- \"organizations\/123456789\".
pscParent :: Lens' ProjectsSinksCreate Text
pscParent
= lens _pscParent (\ s a -> s{_pscParent = a})
-- | V1 error format.
pscXgafv :: Lens' ProjectsSinksCreate (Maybe Xgafv)
pscXgafv = lens _pscXgafv (\ s a -> s{_pscXgafv = a})
-- | Optional. Determines the kind of IAM identity returned as
-- writer_identity in the new sink. If this value is omitted or set to
-- false, and if the sink\'s parent is a project, then the value returned
-- as writer_identity is the same group or service account used by Logging
-- before the addition of writer identities to this API. The sink\'s
-- destination must be in the same project as the sink itself.If this field
-- is set to true, or if the sink is owned by a non-project resource such
-- as an organization, then the value of writer_identity will be a unique
-- service account used only for exports from the new sink. For more
-- information, see writer_identity in LogSink.
pscUniqueWriterIdentity :: Lens' ProjectsSinksCreate (Maybe Bool)
pscUniqueWriterIdentity
= lens _pscUniqueWriterIdentity
(\ s a -> s{_pscUniqueWriterIdentity = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pscUploadProtocol :: Lens' ProjectsSinksCreate (Maybe Text)
pscUploadProtocol
= lens _pscUploadProtocol
(\ s a -> s{_pscUploadProtocol = a})
-- | OAuth access token.
pscAccessToken :: Lens' ProjectsSinksCreate (Maybe Text)
pscAccessToken
= lens _pscAccessToken
(\ s a -> s{_pscAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pscUploadType :: Lens' ProjectsSinksCreate (Maybe Text)
pscUploadType
= lens _pscUploadType
(\ s a -> s{_pscUploadType = a})
-- | Multipart request metadata.
pscPayload :: Lens' ProjectsSinksCreate LogSink
pscPayload
= lens _pscPayload (\ s a -> s{_pscPayload = a})
-- | JSONP
pscCallback :: Lens' ProjectsSinksCreate (Maybe Text)
pscCallback
= lens _pscCallback (\ s a -> s{_pscCallback = a})
instance GoogleRequest ProjectsSinksCreate where
type Rs ProjectsSinksCreate = LogSink
type Scopes ProjectsSinksCreate =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/logging.admin"]
requestClient ProjectsSinksCreate'{..}
= go _pscParent _pscXgafv _pscUniqueWriterIdentity
_pscUploadProtocol
_pscAccessToken
_pscUploadType
_pscCallback
(Just AltJSON)
_pscPayload
loggingService
where go
= buildClient
(Proxy :: Proxy ProjectsSinksCreateResource)
mempty
| brendanhay/gogol | gogol-logging/gen/Network/Google/Resource/Logging/Projects/Sinks/Create.hs | mpl-2.0 | 6,812 | 0 | 18 | 1,486 | 881 | 520 | 361 | 125 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Classroom.Courses.Aliases.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes an alias of a course. This method returns the following error
-- codes: * \`PERMISSION_DENIED\` if the requesting user is not permitted
-- to remove the alias or for access errors. * \`NOT_FOUND\` if the alias
-- does not exist.
--
-- /See:/ <https://developers.google.com/classroom/ Google Classroom API Reference> for @classroom.courses.aliases.delete@.
module Network.Google.Resource.Classroom.Courses.Aliases.Delete
(
-- * REST Resource
CoursesAliasesDeleteResource
-- * Creating a Request
, coursesAliasesDelete
, CoursesAliasesDelete
-- * Request Lenses
, cadXgafv
, cadUploadProtocol
, cadPp
, cadCourseId
, cadAccessToken
, cadUploadType
, cadAlias
, cadBearerToken
, cadCallback
) where
import Network.Google.Classroom.Types
import Network.Google.Prelude
-- | A resource alias for @classroom.courses.aliases.delete@ method which the
-- 'CoursesAliasesDelete' request conforms to.
type CoursesAliasesDeleteResource =
"v1" :>
"courses" :>
Capture "courseId" Text :>
"aliases" :>
Capture "alias" Text :>
QueryParam "$.xgafv" Text :>
QueryParam "upload_protocol" Text :>
QueryParam "pp" Bool :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "bearer_token" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] Empty
-- | Deletes an alias of a course. This method returns the following error
-- codes: * \`PERMISSION_DENIED\` if the requesting user is not permitted
-- to remove the alias or for access errors. * \`NOT_FOUND\` if the alias
-- does not exist.
--
-- /See:/ 'coursesAliasesDelete' smart constructor.
data CoursesAliasesDelete = CoursesAliasesDelete'
{ _cadXgafv :: !(Maybe Text)
, _cadUploadProtocol :: !(Maybe Text)
, _cadPp :: !Bool
, _cadCourseId :: !Text
, _cadAccessToken :: !(Maybe Text)
, _cadUploadType :: !(Maybe Text)
, _cadAlias :: !Text
, _cadBearerToken :: !(Maybe Text)
, _cadCallback :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'CoursesAliasesDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cadXgafv'
--
-- * 'cadUploadProtocol'
--
-- * 'cadPp'
--
-- * 'cadCourseId'
--
-- * 'cadAccessToken'
--
-- * 'cadUploadType'
--
-- * 'cadAlias'
--
-- * 'cadBearerToken'
--
-- * 'cadCallback'
coursesAliasesDelete
:: Text -- ^ 'cadCourseId'
-> Text -- ^ 'cadAlias'
-> CoursesAliasesDelete
coursesAliasesDelete pCadCourseId_ pCadAlias_ =
CoursesAliasesDelete'
{ _cadXgafv = Nothing
, _cadUploadProtocol = Nothing
, _cadPp = True
, _cadCourseId = pCadCourseId_
, _cadAccessToken = Nothing
, _cadUploadType = Nothing
, _cadAlias = pCadAlias_
, _cadBearerToken = Nothing
, _cadCallback = Nothing
}
-- | V1 error format.
cadXgafv :: Lens' CoursesAliasesDelete (Maybe Text)
cadXgafv = lens _cadXgafv (\ s a -> s{_cadXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
cadUploadProtocol :: Lens' CoursesAliasesDelete (Maybe Text)
cadUploadProtocol
= lens _cadUploadProtocol
(\ s a -> s{_cadUploadProtocol = a})
-- | Pretty-print response.
cadPp :: Lens' CoursesAliasesDelete Bool
cadPp = lens _cadPp (\ s a -> s{_cadPp = a})
-- | Identifier of the course whose alias should be deleted. This identifier
-- can be either the Classroom-assigned identifier or an alias.
cadCourseId :: Lens' CoursesAliasesDelete Text
cadCourseId
= lens _cadCourseId (\ s a -> s{_cadCourseId = a})
-- | OAuth access token.
cadAccessToken :: Lens' CoursesAliasesDelete (Maybe Text)
cadAccessToken
= lens _cadAccessToken
(\ s a -> s{_cadAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
cadUploadType :: Lens' CoursesAliasesDelete (Maybe Text)
cadUploadType
= lens _cadUploadType
(\ s a -> s{_cadUploadType = a})
-- | Alias to delete. This may not be the Classroom-assigned identifier.
cadAlias :: Lens' CoursesAliasesDelete Text
cadAlias = lens _cadAlias (\ s a -> s{_cadAlias = a})
-- | OAuth bearer token.
cadBearerToken :: Lens' CoursesAliasesDelete (Maybe Text)
cadBearerToken
= lens _cadBearerToken
(\ s a -> s{_cadBearerToken = a})
-- | JSONP
cadCallback :: Lens' CoursesAliasesDelete (Maybe Text)
cadCallback
= lens _cadCallback (\ s a -> s{_cadCallback = a})
instance GoogleRequest CoursesAliasesDelete where
type Rs CoursesAliasesDelete = Empty
type Scopes CoursesAliasesDelete =
'["https://www.googleapis.com/auth/classroom.courses"]
requestClient CoursesAliasesDelete'{..}
= go _cadCourseId _cadAlias _cadXgafv
_cadUploadProtocol
(Just _cadPp)
_cadAccessToken
_cadUploadType
_cadBearerToken
_cadCallback
(Just AltJSON)
classroomService
where go
= buildClient
(Proxy :: Proxy CoursesAliasesDeleteResource)
mempty
| rueshyna/gogol | gogol-classroom/gen/Network/Google/Resource/Classroom/Courses/Aliases/Delete.hs | mpl-2.0 | 6,199 | 0 | 20 | 1,532 | 939 | 547 | 392 | 132 | 1 |
-- brittany { lconfig_columnAlignMode: { tag: ColumnAlignModeDisabled }, lconfig_indentPolicy: IndentPolicyLeft }
func = x + x
| lspitzner/brittany | data/Test424.hs | agpl-3.0 | 127 | 0 | 5 | 16 | 11 | 6 | 5 | 1 | 1 |
module HaskellPU.Algorithms.QR where
import qualified Prelude
import Prelude (($), (-), (==), Int, Float, undefined)
import Data.Word
import HaskellPU.HighDataTypes
import HaskellPU.Data.Matrix
import Data.Functor
unsplit :: Word -> Word -> HighMatrix (Matrix Float) -> Matrix Float
unsplit _ _ _ = undefined
qr_lu f11 f1k fk1 fkk i a = unsplit i i bs
where
as = split i i a
(a11,ak1,a1k,akk) = triangularSplit as
b11 = f11 a11
bk1 = fmap (fk1 b11) ak1
b1k = fmap (f1k b11) a1k
tmpkk = zipWith fkk akk (cross bk1 b1k)
bkk = if (hwidth tmpkk == 0) then tmpkk else split (i-1) (i-1) $ qr_lu f11 f1k fk1 fkk i $ unsplit (i-1) (i-1) tmpkk
bs = fromTriangularSplit b11 bk1 b1k bkk
triangularSplit :: HighMatrix a -> (a, HighVector a, HighVector a, HighMatrix a)
triangularSplit m = (m11,mk1,m1k,mkk)
where
m11 = head (column 1 m)
mk1 = tail (column 1 m)
m1k = tail (row 1 m)
mkk = dropRows 1 (dropColumns 1 m)
fromTriangularSplit :: a -> HighVector a -> HighVector a -> HighMatrix a -> HighMatrix a
fromTriangularSplit m11 mk1 m1k mkk = fromColumns $ cons left right
where
left = cons m11 mk1
right = columns $ fromRows $ cons m1k (rows mkk)
| hsyl20/HaskellPU | lib/HaskellPU/Algorithms/QR.hs | lgpl-3.0 | 1,204 | 0 | 12 | 270 | 531 | 280 | 251 | 28 | 2 |
module PoemLines where
myWords :: String -> [String]
myWords [] = []
myWords (s:str)
| s == ' ' = myWords str
| otherwise = takeWhile (/= ' ') (s:str) : myWords (dropWhile (/= ' ') (s:str))
firstSen :: String
firstSen = "Tyger Tyger, burning bright\n"
secondSen :: String
secondSen = "In the forests of the night\n"
thirdSen :: String
thirdSen = "What immortal hand or eye\n"
fourthSen :: String
fourthSen = "Could frame thy fearful symmetry?"
sentences :: String
sentences = firstSen ++ secondSen ++ thirdSen ++ fourthSen
myLines :: String -> [String]
myLines [] = []
myLines (word:sentence)
| word == '\n' = myLines sentence
| otherwise =
takeWhile (/= '\n') (word:sentence) : myLines (dropWhile (/= '\n') (word:sentence))
shouldEqual :: [String]
shouldEqual =
["Tyger Tyger, burning bright"
,"In the forests of the night"
, "What immortal hand or eye"
, "Could frame thy fearful symmetry?"
]
mySplitOn :: String -> Char -> [String]
mySplitOn [] _ = []
mySplitOn (s:str) delim
| s == delim = mySplitOn str delim
| otherwise =
takeWhile (/= delim) (s:str) : mySplitOn (dropWhile (/= delim) (s:str)) delim
main :: IO ()
main =
print $ "Are they equal? " ++ show (myLines sentences == shouldEqual)
| thewoolleyman/haskellbook | 09/06/eric/PoemLines.hs | unlicense | 1,237 | 0 | 11 | 243 | 448 | 238 | 210 | 37 | 1 |
module Git.Command.ReadTree (run) where
run :: [String] -> IO ()
run args = return () | wereHamster/yag | Git/Command/ReadTree.hs | unlicense | 86 | 0 | 7 | 15 | 42 | 23 | 19 | 3 | 1 |
{-# LANGUAGE FlexibleInstances #-}
-- | The RLP module provides a framework within which serializers can be built, described in the Ethereum Yellowpaper (<http://gavwood.com/paper.pdf>).
--
-- The 'RLPObject' is an intermediate data container, whose serialization rules are well defined. By creating code that converts from a
-- given type to an 'RLPObject', full serialization will be specified. The 'RLPSerializable' class provides functions to do this conversion.
module Blockchain.Data.RLP
( RLPObject(..)
, formatRLPObject
, RLPSerializable(..)
, rlpSplit
, rlpSerialize
, rlpDeserialize
) where
import Data.Bits
import qualified Data.ByteString as B
import qualified Data.ByteString.Base16 as B16
import qualified Data.ByteString.Char8 as BC
import Data.ByteString.Internal
import Data.Word
import Numeric
import Text.PrettyPrint.ANSI.Leijen hiding ((<$>))
import Blockchain.Data.Util
-- | An internal representation of generic data, with no type information.
--
-- End users will not need to directly create objects of this type (an 'RLPObject' can be created using 'rlpEncode'),
-- however the designer of a new type will need to create conversion code by making their type an instance
-- of the RLPSerializable class.
data RLPObject
= RLPScalar Word8
| RLPString B.ByteString
| RLPArray [RLPObject]
deriving (Show, Eq, Ord)
-- | Converts objects to and from 'RLPObject's.
class RLPSerializable a where
rlpDecode :: RLPObject -> a
rlpEncode :: a -> RLPObject
instance Pretty RLPObject where
pretty (RLPArray objects) =
encloseSep (text "[") (text "]") (text ", ") $ pretty <$> objects
pretty (RLPScalar n) = text $ "0x" ++ showHex n ""
pretty (RLPString s) = text $ "0x" ++ BC.unpack (B16.encode s)
formatRLPObject :: RLPObject -> String
formatRLPObject = show . pretty
splitAtWithError :: Int -> B.ByteString -> (B.ByteString, B.ByteString)
splitAtWithError i s
| i > B.length s = error "splitAtWithError called with n > length arr"
splitAtWithError i s = B.splitAt i s
getLength :: Int -> B.ByteString -> (Integer, B.ByteString)
getLength sizeOfLength bytes =
( bytes2Integer $ B.unpack $ B.take sizeOfLength bytes
, B.drop sizeOfLength bytes)
rlpSplit :: B.ByteString -> (RLPObject, B.ByteString)
rlpSplit input =
case B.head input of
x
| x >= 192 && x <= 192 + 55 ->
let (arrayData, nextRest) =
splitAtWithError (fromIntegral x - 192) $ B.tail input
in (RLPArray $ getRLPObjects arrayData, nextRest)
x
| x >= 0xF8 && x <= 0xFF ->
let (arrLength, restAfterLen) =
getLength (fromIntegral x - 0xF7) $ B.tail input
(arrayData, nextRest) =
splitAtWithError (fromIntegral arrLength) restAfterLen
in (RLPArray $ getRLPObjects arrayData, nextRest)
x
| x >= 128 && x <= 128 + 55 ->
let (strList, nextRest) =
splitAtWithError (fromIntegral $ x - 128) $ B.tail input
in (RLPString strList, nextRest)
x
| x >= 0xB8 && x <= 0xBF ->
let (strLength, restAfterLen) =
getLength (fromIntegral x - 0xB7) $ B.tail input
(strList, nextRest) =
splitAtWithError (fromIntegral strLength) restAfterLen
in (RLPString strList, nextRest)
x
| x < 128 -> (RLPScalar x, B.tail input)
x -> error ("Missing case in rlpSplit: " ++ show x)
getRLPObjects :: ByteString -> [RLPObject]
getRLPObjects x
| B.null x = []
getRLPObjects theData = obj : getRLPObjects rest
where
(obj, rest) = rlpSplit theData
int2Bytes :: Int -> [Word8]
int2Bytes val
| val < 0x100 = map (fromIntegral . (val `shiftR`)) [0]
int2Bytes val
| val < 0x10000 = map (fromIntegral . (val `shiftR`)) [8, 0]
int2Bytes val
| val < 0x1000000 = map (fromIntegral . (val `shiftR`)) [16, 8, 0]
int2Bytes val
| val < 0x100000000 = map (fromIntegral . (val `shiftR`)) [24,16 .. 0]
int2Bytes val
| val < 0x10000000000 = map (fromIntegral . (val `shiftR`)) [32,24 .. 0]
int2Bytes _ = error "int2Bytes not defined for val >= 0x10000000000."
rlp2Bytes :: RLPObject -> [Word8]
rlp2Bytes (RLPScalar val) = [fromIntegral val]
rlp2Bytes (RLPString s)
| B.length s <= 55 = 0x80 + fromIntegral (B.length s) : B.unpack s
rlp2Bytes (RLPString s) =
[0xB7 + fromIntegral (length lengthAsBytes)] ++ lengthAsBytes ++ B.unpack s
where
lengthAsBytes = int2Bytes $ B.length s
rlp2Bytes (RLPArray innerObjects) =
if length innerBytes <= 55
then 0xC0 + fromIntegral (length innerBytes) : innerBytes
else let lenBytes = int2Bytes $ length innerBytes
in [0xF7 + fromIntegral (length lenBytes)] ++ lenBytes ++ innerBytes
where
innerBytes = concat $ rlp2Bytes <$> innerObjects
--TODO- Probably should just use Data.Binary's 'Binary' class for this
-- | Converts bytes to 'RLPObject's.
--
-- Full deserialization of an object can be obtained using @rlpDecode . rlpDeserialize@.
rlpDeserialize :: B.ByteString -> RLPObject
rlpDeserialize s =
case rlpSplit s of
(o, x)
| B.null x -> o
_ ->
error
("parse error converting ByteString to an RLP Object: " ++
show (B.unpack s))
-- | Converts 'RLPObject's to bytes.
--
-- Full serialization of an object can be obtained using @rlpSerialize . rlpEncode@.
rlpSerialize :: RLPObject -> B.ByteString
rlpSerialize o = B.pack $ rlp2Bytes o
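-- A small, self-contained usage sketch (not part of the original module).
-- It shows the workflow described above: give a type an 'RLPSerializable'
-- instance and full (de)serialization follows from @rlpSerialize . rlpEncode@
-- and @rlpDecode . rlpDeserialize@.  'ExamplePoint' and
-- 'examplePointRoundTrip' are purely illustrative names.
data ExamplePoint = ExamplePoint Integer Integer
  deriving (Show, Eq)

instance RLPSerializable ExamplePoint where
  rlpEncode (ExamplePoint x y) = RLPArray [rlpEncode x, rlpEncode y]
  rlpDecode (RLPArray [x, y]) = ExamplePoint (rlpDecode x) (rlpDecode y)
  rlpDecode o = error ("rlpDecode for ExamplePoint: unexpected " ++ show o)

-- Round trip through the wire format: @examplePointRoundTrip p@ gives back @p@.
examplePointRoundTrip :: ExamplePoint -> ExamplePoint
examplePointRoundTrip = rlpDecode . rlpDeserialize . rlpSerialize . rlpEncode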
instance RLPSerializable Integer where
rlpEncode 0 = RLPString B.empty
rlpEncode x
| x < 128 = RLPScalar $ fromIntegral x
rlpEncode x = RLPString $ B.pack $ integer2Bytes x
rlpDecode (RLPScalar x) = fromIntegral x
rlpDecode (RLPString s) = byteString2Integer s
rlpDecode (RLPArray _) = error "rlpDecode called for Integer for array"
instance RLPSerializable String where
rlpEncode s = rlpEncode $ BC.pack s
rlpDecode (RLPString s) = BC.unpack s
rlpDecode (RLPScalar n) = [w2c $ fromIntegral n]
rlpDecode (RLPArray x) =
error $
"Malformed RLP in call to rlpDecode for String: RLPObject is an array: " ++
show (pretty x)
instance RLPSerializable B.ByteString where
rlpEncode x
| B.length x == 1 && B.head x < 128 = RLPScalar $ B.head x
rlpEncode s = RLPString s
rlpDecode (RLPScalar x) = B.singleton x
rlpDecode (RLPString s) = s
rlpDecode x = error ("rlpDecode for ByteString not defined for: " ++ show x)
| zchn/ethereum-analyzer | ethereum-analyzer-deps/src/Blockchain/Data/RLP.hs | apache-2.0 | 6,324 | 0 | 17 | 1,360 | 1,885 | 968 | 917 | 134 | 6 |
{-# LANGUAGE ScopedTypeVariables #-}
{-
HSStructMain.hs
Copyright 2014 Sebastien Soudan
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
module Main where
import qualified AVLTree
import qualified BSTree
import qualified BatchedQueue
import qualified BatchedDequeue
import qualified Dequeue
import qualified Queue
import Data.Digest.Murmur32
import Microbench
import Graph
import RDG
buildBatchedQueue :: Int -> BatchedQueue.BatchedQueue Integer
buildBatchedQueue n = let rs = [ (toInteger . asWord32 . hash32) m | m <- [1..n] ]
in BatchedQueue.buildBatchedQueue rs
buildBatchedDequeue :: Int -> BatchedDequeue.BatchedDequeue Integer
buildBatchedDequeue n = let rs = [ (toInteger . asWord32 . hash32) m | m <- [1..n] ]
in BatchedDequeue.buildBatchedDequeue rs
buildAVLTree :: Int -> AVLTree.AVLTree Integer
buildAVLTree n = let rs = [ (toInteger . asWord32 . hash32) m | m <- [1..n] ]
in AVLTree.buildTree rs
searchAVLTree :: AVLTree.AVLTree Integer -> Int -> Integer
searchAVLTree tree n = let rs = [ (toInteger . asWord32 . hash32) m | m <- [500000..n] ]
(a,b) = span (`AVLTree.elemTree` tree) rs
in sum a + sum b
buildBSTree :: Int -> BSTree.BSTree Integer
buildBSTree n = let rs = [ (toInteger . asWord32 . hash32) m | m <- [1..n] ]
in BSTree.buildTree rs
searchBSTree :: BSTree.BSTree Integer -> Int -> Integer
searchBSTree tree n = let rs = [ (toInteger . asWord32 . hash32) m | m <- [500000..n] ]
(a,b) = span (`BSTree.elemTree` tree) rs
in sum a + sum b
connectedCompF = let g = buildGraph ["A", "B", "C"] [("A", "B", "AB"), ("B", "C", "BC")]
cc = connectedComp g
v = Prelude.map vertexData $ vertices cc
in v
main :: IO ()
main = do
putStrLn "Testing performance of 'buildTree'"
microbench "buildAVLTree " buildAVLTree
  microbench "buildBSTree       " buildBSTree
putStrLn "Testing performance of 'elemTree' in pre-built trees"
let rs = [ (toInteger . asWord32 . hash32) m | (m :: Integer) <- [1..1000000] ]
avltree = AVLTree.buildTree rs
bstree = BSTree.buildTree rs
putStrLn $ "AVLTree size: " ++ show (AVLTree.size avltree)
putStrLn $ "BSTree size: " ++ show (BSTree.size bstree)
microbench "searchAVLTree " $ searchAVLTree avltree
microbench "searchBSTree " $ searchBSTree bstree
----
putStrLn "Testing performance of 'buildQueue'"
microbench "buildBatchedQueue " buildBatchedQueue
putStrLn "Testing performance of 'head' in pre-built queue"
let rs = [ (toInteger . asWord32 . hash32) m | (m :: Integer) <- [1..1000000] ]
batchedQueue = BatchedQueue.buildBatchedQueue rs
putStrLn $ "BatchedQueue size: " ++ show (Queue.size batchedQueue)
--microbench "BatchedQueue.head " $ Queue.head batchedQueue
----
putStrLn "Testing performance of 'buildDequeue'"
microbench "buildBatchedDequeue " buildBatchedDequeue
putStrLn "Testing performance of 'head' in pre-built dequeue"
let rs = [ (toInteger . asWord32 . hash32) m | (m :: Integer) <- [1..1000000] ]
batchedDequeue = BatchedDequeue.buildBatchedDequeue rs
putStrLn $ "BatchedDequeue size: " ++ show (Dequeue.size batchedDequeue)
--microbench "BatchedDequeue.head " $ Dequeue.head batchedDequeue
putStrLn $ "connected comp: " ++ show (connectedCompF)
| ssoudan/hsStruct | src/HSStructMain.hs | apache-2.0 | 4,013 | 0 | 14 | 968 | 1,000 | 507 | 493 | 62 | 1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QTextCursor.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:35
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Enums.Gui.QTextCursor (
MoveMode, eMoveAnchor, eKeepAnchor
, MoveOperation, eNoMove, eStart, eStartOfLine, eStartOfBlock, eStartOfWord, ePreviousBlock, ePreviousCharacter, ePreviousWord, eWordLeft, eEndOfLine, eEndOfWord, eEndOfBlock, eNextBlock, eNextCharacter, eNextWord, eWordRight
, SelectionType, eWordUnderCursor, eLineUnderCursor, eBlockUnderCursor, eDocument
)
where
import Qtc.Classes.Base
import Qtc.ClassTypes.Core (QObject, TQObject, qObjectFromPtr)
import Qtc.Core.Base (Qcs, connectSlot, qtc_connectSlot_int, wrapSlotHandler_int)
import Qtc.Enums.Base
import Qtc.Enums.Classes.Core
data CMoveMode a = CMoveMode a
type MoveMode = QEnum(CMoveMode Int)
ieMoveMode :: Int -> MoveMode
ieMoveMode x = QEnum (CMoveMode x)
instance QEnumC (CMoveMode Int) where
qEnum_toInt (QEnum (CMoveMode x)) = x
qEnum_fromInt x = QEnum (CMoveMode x)
withQEnumResult x
= do
ti <- x
return $ qEnum_fromInt $ fromIntegral ti
withQEnumListResult x
= do
til <- x
return $ map qEnum_fromInt til
instance Qcs (QObject c -> MoveMode -> IO ()) where
connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
= do
funptr <- wrapSlotHandler_int slotHandlerWrapper_int
stptr <- newStablePtr (Wrap _handler)
withObjectPtr _qsig_obj $ \cobj_sig ->
withCWString _qsig_nam $ \cstr_sig ->
withObjectPtr _qslt_obj $ \cobj_slt ->
withCWString _qslt_nam $ \cstr_slt ->
qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
return ()
where
slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
slotHandlerWrapper_int funptr stptr qobjptr cint
= do qobj <- qObjectFromPtr qobjptr
let hint = fromCInt cint
if (objectIsNull qobj)
then do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
else _handler qobj (qEnum_fromInt hint)
return ()
eMoveAnchor :: MoveMode
eMoveAnchor
= ieMoveMode $ 0
eKeepAnchor :: MoveMode
eKeepAnchor
= ieMoveMode $ 1
data CMoveOperation a = CMoveOperation a
type MoveOperation = QEnum(CMoveOperation Int)
ieMoveOperation :: Int -> MoveOperation
ieMoveOperation x = QEnum (CMoveOperation x)
instance QEnumC (CMoveOperation Int) where
qEnum_toInt (QEnum (CMoveOperation x)) = x
qEnum_fromInt x = QEnum (CMoveOperation x)
withQEnumResult x
= do
ti <- x
return $ qEnum_fromInt $ fromIntegral ti
withQEnumListResult x
= do
til <- x
return $ map qEnum_fromInt til
instance Qcs (QObject c -> MoveOperation -> IO ()) where
connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
= do
funptr <- wrapSlotHandler_int slotHandlerWrapper_int
stptr <- newStablePtr (Wrap _handler)
withObjectPtr _qsig_obj $ \cobj_sig ->
withCWString _qsig_nam $ \cstr_sig ->
withObjectPtr _qslt_obj $ \cobj_slt ->
withCWString _qslt_nam $ \cstr_slt ->
qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
return ()
where
slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
slotHandlerWrapper_int funptr stptr qobjptr cint
= do qobj <- qObjectFromPtr qobjptr
let hint = fromCInt cint
if (objectIsNull qobj)
then do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
else _handler qobj (qEnum_fromInt hint)
return ()
eNoMove :: MoveOperation
eNoMove
= ieMoveOperation $ 0
eStart :: MoveOperation
eStart
= ieMoveOperation $ 1
instance QeUp MoveOperation where
eUp
= ieMoveOperation $ 2
eStartOfLine :: MoveOperation
eStartOfLine
= ieMoveOperation $ 3
eStartOfBlock :: MoveOperation
eStartOfBlock
= ieMoveOperation $ 4
eStartOfWord :: MoveOperation
eStartOfWord
= ieMoveOperation $ 5
ePreviousBlock :: MoveOperation
ePreviousBlock
= ieMoveOperation $ 6
ePreviousCharacter :: MoveOperation
ePreviousCharacter
= ieMoveOperation $ 7
ePreviousWord :: MoveOperation
ePreviousWord
= ieMoveOperation $ 8
instance QeLeft MoveOperation where
eLeft
= ieMoveOperation $ 9
eWordLeft :: MoveOperation
eWordLeft
= ieMoveOperation $ 10
instance QeEnd MoveOperation where
eEnd
= ieMoveOperation $ 11
instance QeDown MoveOperation where
eDown
= ieMoveOperation $ 12
eEndOfLine :: MoveOperation
eEndOfLine
= ieMoveOperation $ 13
eEndOfWord :: MoveOperation
eEndOfWord
= ieMoveOperation $ 14
eEndOfBlock :: MoveOperation
eEndOfBlock
= ieMoveOperation $ 15
eNextBlock :: MoveOperation
eNextBlock
= ieMoveOperation $ 16
eNextCharacter :: MoveOperation
eNextCharacter
= ieMoveOperation $ 17
eNextWord :: MoveOperation
eNextWord
= ieMoveOperation $ 18
instance QeRight MoveOperation where
eRight
= ieMoveOperation $ 19
eWordRight :: MoveOperation
eWordRight
= ieMoveOperation $ 20
data CSelectionType a = CSelectionType a
type SelectionType = QEnum(CSelectionType Int)
ieSelectionType :: Int -> SelectionType
ieSelectionType x = QEnum (CSelectionType x)
instance QEnumC (CSelectionType Int) where
qEnum_toInt (QEnum (CSelectionType x)) = x
qEnum_fromInt x = QEnum (CSelectionType x)
withQEnumResult x
= do
ti <- x
return $ qEnum_fromInt $ fromIntegral ti
withQEnumListResult x
= do
til <- x
return $ map qEnum_fromInt til
instance Qcs (QObject c -> SelectionType -> IO ()) where
connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
= do
funptr <- wrapSlotHandler_int slotHandlerWrapper_int
stptr <- newStablePtr (Wrap _handler)
withObjectPtr _qsig_obj $ \cobj_sig ->
withCWString _qsig_nam $ \cstr_sig ->
withObjectPtr _qslt_obj $ \cobj_slt ->
withCWString _qslt_nam $ \cstr_slt ->
qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
return ()
where
slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
slotHandlerWrapper_int funptr stptr qobjptr cint
= do qobj <- qObjectFromPtr qobjptr
let hint = fromCInt cint
if (objectIsNull qobj)
then do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
else _handler qobj (qEnum_fromInt hint)
return ()
eWordUnderCursor :: SelectionType
eWordUnderCursor
= ieSelectionType $ 0
eLineUnderCursor :: SelectionType
eLineUnderCursor
= ieSelectionType $ 1
eBlockUnderCursor :: SelectionType
eBlockUnderCursor
= ieSelectionType $ 2
eDocument :: SelectionType
eDocument
= ieSelectionType $ 3
| uduki/hsQt | Qtc/Enums/Gui/QTextCursor.hs | bsd-2-clause | 7,402 | 0 | 18 | 1,595 | 1,984 | 1,006 | 978 | 202 | 1 |
--
-- Copyright 2014, NICTA
--
-- This software may be distributed and modified according to the terms of
-- the BSD 2-Clause license. Note that NO WARRANTY is provided.
-- See "LICENSE_BSD2.txt" for details.
--
-- @TAG(NICTA_BSD)
--
-- Printer for C source format to be consumed by the CapDL initialiser.
-- Note: corresponds to the -c/--code argument.
module CapDL.PrintC where
import CapDL.Model
import Control.Exception (assert)
import Data.List
import Data.List.Ordered
import Data.List.Utils
import Data.Maybe (fromJust, fromMaybe)
import Data.Word
import Data.Map as Map
import Data.Set as Set
import Data.String.Utils (rstrip)
import Data.Bits
import Numeric (showHex)
import Text.PrettyPrint
(∈) = Set.member
indent :: String -> String
indent s = rstrip $ unlines $ Prelude.map (\a -> " " ++ a) $ lines s
(+++) :: String -> String -> String
s1 +++ s2 = s1 ++ "\n" ++ s2
hex :: Word -> String
hex x = "0x" ++ showHex x ""
maxObjects :: Int -> String
maxObjects count = "#define MAX_OBJECTS " ++ show count
memberArch :: Arch -> String
memberArch IA32 = ".arch = CDL_Arch_IA32,"
memberArch ARM11 = ".arch = CDL_Arch_ARM,"
memberNum :: Int -> String
memberNum n = ".num = " ++ show n ++ ","
showObjID :: Map ObjID Int -> ObjID -> String
showObjID xs id = (case Map.lookup id xs of
Just w -> show w
_ -> "INVALID_SLOT") ++ " /* " ++ fst id ++ " */"
showRights :: CapRights -> String
showRights rights =
prefix ++ r ++ w ++ g
where
prefix = if Set.null rights then "0" else "CDL_"
r = if Read ∈ rights then "R" else ""
w = if Write ∈ rights then "W" else ""
g = if Grant ∈ rights then "G" else ""
showPorts :: Set Word -> String
showPorts ports =
show ((.|.) (shift start 16) end)
where
start = Set.findMin ports
end = Set.findMax ports
showPCI :: Word -> (Word, Word, Word) -> String
showPCI domainID (pciBus, pciDev, pciFun) =
hex $ shift domainID 16 .|. shift pciBus 8 .|. shift pciDev 3 .|. pciFun
-- Lookup-by-value on a dictionary. I feel like I need a shower.
lookupByValue :: Ord k => (a -> Bool) -> Map k a -> k
lookupByValue f m = head $ keys $ Map.filter f m
showCap :: Map ObjID Int -> Cap -> IRQMap -> String -> ObjMap Word -> String
showCap _ NullCap _ _ _ = "{.type = CDL_NullCap}"
showCap objs (UntypedCap id) _ _ _ =
"{.type = CDL_UntypedCap, .obj_id = " ++ showObjID objs id ++ "}"
showCap objs (EndpointCap id badge rights) _ is_orig _ =
"{.type = CDL_EPCap, .obj_id = " ++ showObjID objs id ++
", .is_orig = " ++ is_orig ++
", .rights = " ++ showRights rights ++
", .data = { .tag = seL4_CapData_Badge, .badge = " ++ show badge ++ "}}"
showCap objs (AsyncEndpointCap id badge rights) _ is_orig _ =
"{.type = CDL_AEPCap, .obj_id = " ++ showObjID objs id ++
", .is_orig = " ++ is_orig ++
", .rights = " ++ showRights rights ++
", .data = { .tag = seL4_CapData_Badge, .badge = " ++ show badge ++
"}}"
showCap objs (ReplyCap id) _ _ _ =
"{.type = CDL_ReplyCap, .obj_id = " ++ showObjID objs id ++ "}"
-- XXX: Does it even make sense to give out a reply cap? How does init fake this?
showCap objs (MasterReplyCap id) _ _ _ =
"{.type = CDL_MasterReplyCap, .obj_id = " ++ showObjID objs id ++ "}"
-- XXX: As above.
showCap objs (CNodeCap id guard guard_size) _ is_orig _ =
"{.type = CDL_CNodeCap, .obj_id = " ++ showObjID objs id ++
", .is_orig = " ++ is_orig ++
", .rights = CDL_RWG, .data = CDL_CapData_MakeGuard(" ++
show guard_size ++ ", " ++ show guard ++ ")}"
showCap objs (TCBCap id) _ is_orig _ =
"{.type = CDL_TCBCap, .obj_id = " ++ showObjID objs id ++
", .is_orig = " ++ is_orig ++
", .rights = CDL_RWG}"
showCap _ IRQControlCap _ _ _ = "{.type = CDL_IRQControlCap}"
showCap objs (IRQHandlerCap id) irqNode is_orig _ =
"{.type = CDL_IRQHandlerCap, .obj_id = INVALID_OBJ_ID" ++
", .is_orig = " ++ is_orig ++
", .irq = " ++ show (lookupByValue (\x -> x == id) irqNode) ++ "}"
-- Caps have obj_ids, or IRQs, but not both.
showCap objs (FrameCap id rights _ cached) _ is_orig _ =
"{.type = CDL_FrameCap, .obj_id = " ++ showObjID objs id ++
", .is_orig = " ++ is_orig ++
", .rights = " ++ showRights rights ++
", .vm_attribs = " ++ (if cached then "seL4_Default_VMAttributes" else "CDL_VM_CacheDisabled") ++ "}"
-- FIXME: I feel like I should be doing something with the ASID data here...
showCap objs (PTCap id _) _ is_orig _ =
"{.type = CDL_PTCap, .obj_id = " ++ showObjID objs id ++
", .is_orig = " ++ is_orig ++ "}"
showCap objs (PDCap id _) _ is_orig _ =
"{.type = CDL_PDCap, .obj_id = " ++ showObjID objs id ++
", .is_orig = " ++ is_orig ++ "}"
showCap _ ASIDControlCap _ _ _ =
"{.type = CDL_ASIDControlCap}"
showCap objs (ASIDPoolCap id _) _ is_orig _ =
"{.type = CDL_ASIDPoolCap, .obj_id = " ++ showObjID objs id ++ "}"
showCap objs (IOPortsCap id ports) _ is_orig _ =
"{.type = CDL_IOPortsCap, .obj_id = " ++ showObjID objs id ++
", .is_orig = " ++ is_orig ++
", .data = { .tag = CDL_CapData_Raw, .data = " ++ showPorts ports ++ "}}"
showCap objs (IOSpaceCap id) _ is_orig ms =
"{.type = CDL_IOSpaceCap, .obj_id = " ++ showObjID objs id ++
", .is_orig = " ++ is_orig ++
", .data = { .tag = CDL_CapData_Raw, .data = " ++ showPCI dom pci ++ "}}"
where pci = pciDevice $ fromJust $ Map.lookup id ms
dom = domainID $ fromJust $ Map.lookup id ms
showCap objs (VCPUCap id) _ _ _ = "{.type = CDL_VCPUCap, .obj_id = " ++ showObjID objs id ++ "}"
showCap _ SchedControlCap _ _ _ =
"{.type = CDL_SchedControlCap}"
showCap objs (SCCap id) _ is_orig _ =
"{.type = CDL_SCCap, .obj_id = " ++ showObjID objs id ++
", .is_orig = " ++ is_orig ++ "}"
showCap _ x _ _ _ = assert False $
"UNSUPPORTED CAP TYPE: " ++ show x
-- These are not supported by the initialiser itself.
showSlots :: Map ObjID Int -> ObjID -> [(Word, Cap)] -> IRQMap -> CDT -> ObjMap Word -> String
showSlots _ _ [] _ _ _ = ""
showSlots objs obj_id (x:xs) irqNode cdt ms =
"{" ++ show index ++ ", " ++ slot ++ "}," +++
showSlots objs obj_id xs irqNode cdt ms
where
index = fst x
slot = showCap objs (snd x) irqNode is_orig ms
is_orig = if (Map.notMember (obj_id, index) cdt) then "true" else "false"
memberSlots :: Map ObjID Int -> ObjID -> CapMap Word -> IRQMap -> CDT -> ObjMap Word -> String
memberSlots objs obj_id slots irqNode cdt ms =
".slots.num = " ++ show slot_count ++ "," +++
".slots.slot = (CDL_CapSlot[]) {" +++
indent (showSlots objs obj_id (Map.toList slots) irqNode cdt ms) +++
"},"
where
slot_count = Map.size slots
printInit :: [Word] -> String
printInit argv =
"{" ++ Data.List.Utils.join ", " (Data.List.map show argv) ++ "}"
showObjectFields :: Map ObjID Int -> ObjID -> KernelObject Word -> IRQMap -> CDT -> ObjMap Word -> String
showObjectFields _ _ Endpoint _ _ _ = ".type = CDL_Endpoint,"
showObjectFields _ _ AsyncEndpoint _ _ _ = ".type = CDL_AsyncEndpoint,"
showObjectFields objs obj_id (TCB slots info domain argv) _ _ _ =
".type = CDL_TCB," +++
".tcb_extra = {" +++
indent
(".ipcbuffer_addr = " ++ show ipcbuffer_addr ++ "," +++
".driverinfo = " ++ show driverinfo ++ "," +++
".priority = " ++ show priority ++ "," +++
".max_priority = " ++ show max_priority ++ "," +++
".criticality = " ++ show criticality ++ "," +++
".max_criticality = " ++ show max_criticality ++ "," +++
".pc = " ++ show pc ++ "," +++
".sp = " ++ show stack ++ "," +++
".elf_name = " ++ show elf_name ++ "," +++
".init = (const seL4_Word[])" ++ printInit argv ++ "," +++
".init_sz = " ++ show (length argv) ++ "," +++
".domain = " ++ show domain ++ ",") +++
"}," +++
memberSlots objs obj_id slots Map.empty Map.empty Map.empty -- IRQ, cdt and obj map not required
where
ipcbuffer_addr = case info of {Just i -> ipcBufferAddr i; _ -> 0}
driverinfo = 0 -- TODO: Not currently in CapDL
priority = case info of {Just i -> case prio i of {Just p -> p; _ -> 125}; _ -> 125}
max_priority = case info of {Just i -> case max_prio i of {Just p -> p; _ -> 125}; _ -> 125}
criticality = case info of {Just i -> case crit i of {Just p -> p; _ -> 125}; _ -> 125}
max_criticality = case info of {Just i -> case max_crit i of {Just p -> p; _ -> 125}; _ -> 125}
pc = case info of {Just i -> case ip i of {Just v -> v; _ -> 0}; _ -> 0}
stack = case info of {Just i -> case sp i of {Just v -> v; _ -> 0}; _ -> 0}
elf_name = case info of {Just i -> case elf i of {Just e -> e; _ -> ""}; _ -> ""}
showObjectFields objs obj_id (CNode slots sizeBits) irqNode cdt ms =
".type = " ++ t ++ "," +++
".size_bits = " ++ show sizeBits ++ "," +++
memberSlots objs obj_id slots irqNode cdt ms
where
-- IRQs are represented in CapDL as 0-sized CNodes. This is fine for
-- the model, but the initialiser needs to know what objects represent
-- interrupts to avoid trying to create them at runtime. It's a bit of
-- a hack to assume that any 0-sized CNode is an interrupt, but this is
-- an illegal size for a valid CNode so everything should work out.
t = if sizeBits == 0 then "CDL_Interrupt" else "CDL_CNode"
showObjectFields _ _ (Untyped size_bits) _ _ _ =
".type = CDL_Untyped," +++
".size_bits = " ++ show sizeBits ++ ","
where
sizeBits = case size_bits of {Just s -> s; _ -> -1}
showObjectFields objs obj_id (PT slots) _ _ _ =
".type = CDL_PT," +++
memberSlots objs obj_id slots Map.empty Map.empty Map.empty -- IRQ, cdt and obj map not required
showObjectFields objs obj_id (PD slots) _ _ _ =
".type = CDL_PD," +++
memberSlots objs obj_id slots Map.empty Map.empty Map.empty -- IRQ, cdt and obj map not required
showObjectFields _ _ (Frame size paddr) _ _ _ =
".type = CDL_Frame," +++
".size_bits = " ++ show (logBase 2 $ fromIntegral size) ++ "," +++
".paddr = (void*)" ++ hex (fromMaybe 0 paddr) ++ ","
showObjectFields _ _ (IOPorts size) _ _ _ =
".type = CDL_IOPorts," +++
".size_bits = " ++ show size ++ "," -- FIXME: This doesn't seem right.
showObjectFields objs obj_id (ASIDPool slots) _ _ _ =
".type = CDL_ASIDPool," +++
memberSlots objs obj_id slots Map.empty Map.empty Map.empty -- IRQ, cdt and obj map not required
showObjectFields objs _ (IODevice slots domainID (bus, dev, fun)) _ _ _ =
".type = CDL_IODevice,"
showObjectFields _ _ VCPU _ _ _ = ".type = CDL_VCPU,"
showObjectFields objs obj_id (SC info) _ _ _ =
".type = CDL_SchedContext," +++
".sc_extra = {" +++
indent
(".period = " ++ show period_ ++ "," +++
".deadline = " ++ show deadline_ ++ "," +++
".exec_req = " ++ show exec_req_ ++ "," +++
".flags = {{" ++ show flags_ ++ "}},") +++
"},"
where
period_ = case info of {Just i -> case period i of {Just p -> p; _ -> 0}; _ -> 0}
deadline_ = case info of {Just i -> case deadline i of {Just p -> p; _ -> 0}; _ -> 0}
exec_req_ = case info of {Just i -> case exec_req i of {Just p -> p; _ -> 0}; _ -> 0}
flags_ = case info of {Just i -> case flags i of {Just p -> p; _ -> 0}; _ -> 0}
showObjectFields _ _ x _ _ _ = assert False $
"UNSUPPORTED OBJECT TYPE: " ++ show x
showObject :: Map ObjID Int -> (ObjID, KernelObject Word) -> IRQMap -> CDT -> ObjMap Word -> String
showObject objs obj irqNode cdt ms =
"{" +++
indent
(".name = \"" ++ name ++ "\"," +++
showObjectFields objs id (snd obj) irqNode cdt ms) +++
"}"
where
id = fst obj
name = fst id ++ (case snd id of
Just index -> "[" ++ show index ++ "]"
_ -> "")
showObjects :: Map ObjID Int -> Int -> [(ObjID, KernelObject Word)] -> IRQMap -> CDT -> ObjMap Word -> String
showObjects _ _ [] _ _ _ = ""
showObjects objs counter (x:xs) irqNode cdt ms =
"[" ++ show counter ++ "] = " ++ showObject objs x irqNode cdt ms ++ "," +++
showObjects objs (counter + 1) xs irqNode cdt ms
sizeOf :: Arch -> KernelObject Word -> Word
sizeOf _ (Frame vmSz _) = vmSz
sizeOf _ (Untyped (Just bSz)) = 2 ^ bSz
sizeOf _ (CNode _ bSz) = 16 * 2 ^ bSz
sizeOf _ Endpoint = 16
sizeOf _ AsyncEndpoint = 16
sizeOf _ ASIDPool {} = 4 * 2^10
sizeOf _ IOPT {} = 4 * 2^10
sizeOf _ IODevice {} = 1
sizeOf _ SC {} = 16 -- FIXME size of SC ??
sizeOf IA32 TCB {} = 2^10
sizeOf IA32 PD {} = 4 * 2^10
sizeOf IA32 PT {} = 4 * 2^10
sizeOf ARM11 TCB {} = 512
sizeOf ARM11 PD {} = 16 * 2^10
sizeOf ARM11 PT {} = 2^10
sizeOf _ _ = 0
memberObjects :: Map ObjID Int -> Arch -> [(ObjID, KernelObject Word)] -> IRQMap -> CDT ->
ObjMap Word -> String
memberObjects obj_ids arch objs irqNode cdt objs' =
".objects = (CDL_Object[]) {" +++
(indent $ showObjects obj_ids 0 objs irqNode cdt objs') +++
"},"
-- Emit an array where each entry represents a given interrupt. Each is -1 if
-- that interrupt has no handler or else the object ID of the interrupt
-- (0-sized CNode).
memberIRQs :: Map ObjID Int -> IRQMap -> Arch -> String
memberIRQs objs irqNode arch =
".irqs = {" +++
(indent $ join ", " $ Data.List.map (\k -> show $ case Map.lookup k irqNode of
Just i -> fromJust $ Map.lookup i objs
_ -> -1) [0..(CONFIG_CAPDL_LOADER_MAX_IRQS - 1)]) +++
"},"
printC :: Model Word -> Idents CapName -> CopyMap -> Doc
printC (Model arch objs irqNode cdt untypedCovers) ids copies =
text $
"/* Generated file. Your changes will be overwritten. */" +++
"" +++
"#include <capdl.h>" +++
"" +++
"#ifndef INVALID_SLOT" +++
indent "#define INVALID_SLOT (-1)" +++
"#endif" +++
"" +++
maxObjects objs_sz +++ -- FIXME: I suspect this is not the right list to measure.
"" +++
"CDL_Model capdl_spec = {" +++
indent
(memberArch arch +++
memberNum objs_sz +++
memberIRQs obj_ids irqNode arch +++
memberObjects obj_ids arch objs' irqNode cdt objs) +++
"};"
where
objs_sz = length $ Map.toList objs
objs' = reverse $ sortOn (sizeOf arch . snd) $ Map.toList objs
obj_ids = Map.fromList $ flip zip [0..] $ Prelude.map fst objs'
| smaccm/capDL-tool | CapDL/PrintC.hs | bsd-2-clause | 14,280 | 0 | 45 | 3,567 | 4,635 | 2,357 | 2,278 | 275 | 26 |
{-# LANGUAGE CPP, OverloadedStrings #-}
-- Requires the network-bytestring library.
--
-- Start server and run
-- httperf --server=localhost --port=5002 --uri=/ --num-conns=10000
-- or
-- ab -n 10000 -c 100 http://localhost:5002/
import Args (ljust, parseArgs, positive, theLast)
import Control.Concurrent (forkIO, runInUnboundThread)
import Data.ByteString.Char8 ()
import Data.Function (on)
import Data.Monoid (Monoid(..), Last(..))
import Network.Socket hiding (accept, recv)
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as C ()
#ifdef USE_GHC_IO_MANAGER
import Network.Socket (accept)
import Network.Socket.ByteString (recv, sendAll)
#else
import EventSocket (accept, recv, sendAll)
import System.Event.Thread (ensureIOManagerIsRunning)
#endif
import System.Console.GetOpt (ArgDescr(ReqArg), OptDescr(..))
import System.Environment (getArgs)
import System.Posix.Resource (ResourceLimit(..), ResourceLimits(..),
Resource(..), setResourceLimit)
main = do
(cfg, _) <- parseArgs defaultConfig defaultOptions =<< getArgs
let listenBacklog = theLast cfgListenBacklog cfg
port = theLast cfgPort cfg
lim = ResourceLimit . fromIntegral . theLast cfgMaxFds $ cfg
myHints = defaultHints { addrFlags = [AI_PASSIVE]
, addrSocketType = Stream }
#ifndef USE_GHC_IO_MANAGER
ensureIOManagerIsRunning
#endif
setResourceLimit ResourceOpenFiles
ResourceLimits { softLimit = lim, hardLimit = lim }
(ai:_) <- getAddrInfo (Just myHints) Nothing (Just port)
sock <- socket (addrFamily ai) (addrSocketType ai) (addrProtocol ai)
setSocketOption sock ReuseAddr 1
bindSocket sock (addrAddress ai)
listen sock listenBacklog
runInUnboundThread $ acceptConnections sock
acceptConnections :: Socket -> IO ()
acceptConnections sock = loop
where
loop = do
(c,_) <- accept sock
forkIO $ client c
loop
client :: Socket -> IO ()
client sock = do
recvRequest ""
sendAll sock msg
sClose sock
where
msg = "HTTP/1.0 200 OK\r\nConnection: Close\r\nContent-Length: 5\r\n\r\nPong!"
recvRequest r = do
s <- recv sock 4096
let t = S.append r s
if S.null s || "\r\n\r\n" `S.isInfixOf` t
then return ()
else recvRequest t
------------------------------------------------------------------------
-- Configuration
data Config = Config {
cfgListenBacklog :: Last Int
, cfgMaxFds :: Last Int
, cfgPort :: Last String
}
defaultConfig :: Config
defaultConfig = Config {
cfgListenBacklog = ljust 1024
, cfgMaxFds = ljust 256
, cfgPort = ljust "5002"
}
instance Monoid Config where
mempty = Config {
cfgListenBacklog = mempty
, cfgMaxFds = mempty
, cfgPort = mempty
}
mappend a b = Config {
cfgListenBacklog = app cfgListenBacklog a b
, cfgMaxFds = app cfgMaxFds a b
, cfgPort = app cfgPort a b
}
where app :: (Monoid b) => (a -> b) -> a -> a -> b
app = on mappend
defaultOptions :: [OptDescr (IO Config)]
defaultOptions = [
Option ['p'] ["port"]
(ReqArg (\s -> return mempty { cfgPort = ljust s }) "N")
"server port"
, Option ['m'] ["max-fds"]
(ReqArg (positive "maximum number of file descriptors" $ \n ->
mempty { cfgMaxFds = n }) "N")
"maximum number of file descriptors"
, Option [] ["listen-backlog"]
(ReqArg (positive "maximum number of pending connections" $ \n ->
mempty { cfgListenBacklog = n }) "N")
"maximum number of pending connections"
]
| tibbe/event | benchmarks/PongServer.hs | bsd-2-clause | 3,719 | 0 | 14 | 941 | 980 | 538 | 442 | 82 | 2 |
module Application.Scaffold.Generate.Darcs where
import System.Process
import System.Exit
darcsInit :: IO ExitCode
darcsInit = system "darcs init"
darcsFile :: FilePath -> IO ExitCode
darcsFile fp = do
putStrLn $ "add " ++ fp
system ("darcs add " ++ fp)
darcsRecord :: String -> IO ExitCode
darcsRecord patchname =
system ("darcs record --all -m \"" ++ patchname ++ "\"")
| wavewave/scaffold | lib/Application/Scaffold/Generate/Darcs.hs | bsd-2-clause | 387 | 0 | 9 | 72 | 112 | 58 | 54 | 12 | 1 |
module Animation where
import Graphics.UI.SDL
import Graphics.UI.SDL.Image
import Control.Monad
type Animation = [Surface]
loadAnimation :: FilePath -> Int -> IO Animation
loadAnimation folder size = do
frames <- forM [0..size-1] $ \i -> do
load $ folder ++ show i ++ ".png"
return $ cycle frames | alexisVallet/hachitai-haskell-shmup | Animation.hs | bsd-2-clause | 307 | 0 | 14 | 56 | 113 | 60 | 53 | 10 | 1 |
{-# LANGUAGE PatternGuards #-}
module Idris.PartialEval(partial_eval, getSpecApps, specType,
mkPE_TyDecl, mkPE_TermDecl, PEArgType(..),
pe_app, pe_def, pe_clauses, pe_simple) where
import Idris.AbsSyntax
import Idris.Delaborate
import Idris.Core.TT
import Idris.Core.Evaluate
import Control.Monad.State
import Control.Applicative
import Data.Maybe
import Debug.Trace
-- | Data type representing binding-time annotations for partial evaluation of arguments
data PEArgType = ImplicitS -- ^ Implicit static argument
| ImplicitD -- ^ Implicit dynamic argument
| ExplicitS -- ^ Explicit static argument
| ExplicitD -- ^ Explicit dynamic argument
| UnifiedD -- ^ Erasable dynamic argument (found under unification)
deriving (Eq, Show)
-- | A partially evaluated function. pe_app captures the lhs of the
-- new definition, pe_def captures the rhs, and pe_clauses is the
-- specialised implementation.
-- pe_simple is set if the result is always reducible, because in such a
-- case we'll also need to reduce the static argument
data PEDecl = PEDecl { pe_app :: PTerm, -- new application
pe_def :: PTerm, -- old application
pe_clauses :: [(PTerm, PTerm)], -- clauses of new application
pe_simple :: Bool -- if just one reducible clause
}
-- | Partially evaluates given terms under the given context.
-- It is an error if partial evaluation fails to make any progress.
-- Making progress is defined as: all of the names given with explicit
-- reduction limits (in practice, the function being specialised)
-- must have reduced at least once.
-- If we don't do this, we might end up making an infinite function after
-- applying the transformation.
partial_eval :: Context ->
[(Name, Maybe Int)] ->
[Either Term (Term, Term)] ->
Maybe [Either Term (Term, Term)]
partial_eval ctxt ns_in tms = mapM peClause tms where
ns = squash ns_in
squash ((n, Just x) : ns)
| Just (Just y) <- lookup n ns
= squash ((n, Just (x + y)) : drop n ns)
| otherwise = (n, Just x) : squash ns
squash (n : ns) = n : squash ns
squash [] = []
drop n ((m, _) : ns) | n == m = ns
drop n (x : ns) = x : drop n ns
drop n [] = []
-- If the term is not a clause, it is simply kept as is
peClause (Left t) = Just $ Left t
-- If the term is a clause, specialise the right hand side
peClause (Right (lhs, rhs))
= let (rhs', reductions) = specialise ctxt [] (map toLimit ns) rhs in
do when (length tms == 1) $ checkProgress ns reductions
return (Right (lhs, rhs'))
-- TMP HACK until I do PE by WHNF rather than using main evaluator
toLimit (n, Nothing) | isTCDict n ctxt = (n, 2)
toLimit (n, Nothing) = (n, 65536) -- somewhat arbitrary reduction limit
toLimit (n, Just l) = (n, l)
checkProgress ns [] = return ()
checkProgress ns ((n, r) : rs)
| Just (Just start) <- lookup n ns
= if start <= 1 || r < start then checkProgress ns rs else Nothing
| otherwise = checkProgress ns rs
-- | Specialises the type of a partially evaluated TT function returning
-- a pair of the specialised type and the types of expected arguments.
specType :: [(PEArgType, Term)] -> Type -> (Type, [(PEArgType, Term)])
specType args ty = let (t, args') = runState (unifyEq args ty) [] in
(st (map fst args') t, map fst args')
where
-- Specialise static argument in type by let-binding provided value instead
-- of expecting it as a function argument
st ((ExplicitS, v) : xs) (Bind n (Pi _ t _) sc)
= Bind n (Let t v) (st xs sc)
st ((ImplicitS, v) : xs) (Bind n (Pi _ t _) sc)
= Bind n (Let t v) (st xs sc)
-- Erase argument from function type
st ((UnifiedD, _) : xs) (Bind n (Pi _ t _) sc)
= st xs sc
-- Keep types as is
st (_ : xs) (Bind n (Pi i t k) sc)
= Bind n (Pi i t k) (st xs sc)
st _ t = t
-- Erase implicit dynamic argument if existing argument shares it value,
-- by substituting the value of previous argument
unifyEq (imp@(ImplicitD, v) : xs) (Bind n (Pi i t k) sc)
= do amap <- get
case lookup imp amap of
Just n' ->
do put (amap ++ [((UnifiedD, Erased), n)])
sc' <- unifyEq xs (subst n (P Bound n' Erased) sc)
return (Bind n (Pi i t k) sc') -- erase later
_ -> do put (amap ++ [(imp, n)])
sc' <- unifyEq xs sc
return (Bind n (Pi i t k) sc')
unifyEq (x : xs) (Bind n (Pi i t k) sc)
= do args <- get
put (args ++ [(x, n)])
sc' <- unifyEq xs sc
return (Bind n (Pi i t k) sc')
unifyEq xs t = do args <- get
put (args ++ (zip xs (repeat (sUN "_"))))
return t
-- | Creates an Idris type declaration given current state and a
-- specialised TT function application type.
-- Can be used in combination with the output of 'specType'.
--
-- This should: specialise any static argument position, then generalise
-- over any function applications in the result.
mkPE_TyDecl :: IState -> [(PEArgType, Term)] -> Type -> PTerm
mkPE_TyDecl ist args ty = mkty args ty
where
mkty ((ExplicitD, v) : xs) (Bind n (Pi _ t k) sc)
= PPi expl n NoFC (delab ist (generaliseIn t)) (mkty xs sc)
mkty ((ImplicitD, v) : xs) (Bind n (Pi _ t k) sc)
| concreteClass ist t = mkty xs sc
| classConstraint ist t
= PPi constraint n NoFC (delab ist (generaliseIn t)) (mkty xs sc)
| otherwise = PPi impl n NoFC (delab ist (generaliseIn t)) (mkty xs sc)
mkty (_ : xs) t
= mkty xs t
mkty [] t = delab ist t
generaliseIn tm = evalState (gen tm) 0
gen tm | (P _ fn _, args) <- unApply tm,
isFnName fn (tt_ctxt ist)
= do nm <- get
put (nm + 1)
return (P Bound (sMN nm "spec") Erased)
gen (App s f a) = App s <$> gen f <*> gen a
gen tm = return tm
-- | Checks if a given argument is a type class constraint argument
classConstraint :: Idris.AbsSyntax.IState -> TT Name -> Bool
classConstraint ist v
| (P _ c _, args) <- unApply v = case lookupCtxt c (idris_classes ist) of
[_] -> True
_ -> False
| otherwise = False
-- | Checks if the given arguments of a type class constraint are all either constants
-- or references (i.e. that it doesn't contain any complex terms).
concreteClass :: IState -> TT Name -> Bool
concreteClass ist v
| not (classConstraint ist v) = False
| (P _ c _, args) <- unApply v = all concrete args
| otherwise = False
where concrete (Constant _) = True
concrete tm | (P _ n _, args) <- unApply tm
= case lookupTy n (tt_ctxt ist) of
[_] -> all concrete args
_ -> False
| otherwise = False
mkNewPats :: IState ->
[(Term, Term)] -> -- definition to specialise
[(PEArgType, Term)] -> -- arguments to specialise with
Name -> -- New name
Name -> -- Specialised function name
PTerm -> -- Default lhs
PTerm -> -- Default rhs
PEDecl
-- If all of the dynamic positions on the lhs are variables (rather than
-- patterns or constants) then we can just make a simple definition
-- directly applying the specialised function, since we know the
-- definition isn't going to block on any of the dynamic arguments
-- in this case
mkNewPats ist d ns newname sname lhs rhs | all dynVar (map fst d)
= PEDecl lhs rhs [(lhs, rhs)] True
where dynVar ap = case unApply ap of
(_, args) -> dynArgs ns args
dynArgs _ [] = True -- can definitely reduce from here
-- if Static, doesn't matter what the argument is
dynArgs ((ImplicitS, _) : ns) (a : as) = dynArgs ns as
dynArgs ((ExplicitS, _) : ns) (a : as) = dynArgs ns as
-- if Dynamic, it had better be a variable or we'll need to
-- do some more work
dynArgs (_ : ns) (V _ : as) = dynArgs ns as
dynArgs (_ : ns) (P _ _ _ : as) = dynArgs ns as
dynArgs _ _ = False -- and now we'll get stuck
mkNewPats ist d ns newname sname lhs rhs =
PEDecl lhs rhs (map mkClause d) False
where
mkClause :: (Term, Term) -> (PTerm, PTerm)
mkClause (oldlhs, oldrhs)
= let (_, as) = unApply oldlhs
lhsargs = mkLHSargs [] ns as
lhs = PApp emptyFC (PRef emptyFC newname) lhsargs
rhs = PApp emptyFC (PRef emptyFC sname)
(mkRHSargs ns lhsargs) in
(lhs, rhs)
mkLHSargs _ [] _ = []
-- dynamics don't appear if they're implicit
mkLHSargs sub ((ExplicitD, t) : ns) (a : as)
= pexp (delab ist (substNames sub a)) : mkLHSargs sub ns as
mkLHSargs sub ((ImplicitD, _) : ns) (a : as)
= mkLHSargs sub ns as
mkLHSargs sub ((UnifiedD, _) : ns) (a : as)
= mkLHSargs sub ns as
-- statics get dropped in any case
mkLHSargs sub ((ImplicitS, t) : ns) (a : as)
= mkLHSargs (extend a t sub) ns as
mkLHSargs sub ((ExplicitS, t) : ns) (a : as)
= mkLHSargs (extend a t sub) ns as
mkLHSargs sub _ [] = [] -- no more LHS
extend (P _ n _) t sub = (n, t) : sub
extend _ _ sub = sub
mkRHSargs ((ExplicitS, t) : ns) as = pexp (delab ist t) : mkRHSargs ns as
mkRHSargs ((ExplicitD, t) : ns) (a : as) = a : mkRHSargs ns as
mkRHSargs (_ : ns) as = mkRHSargs ns as
mkRHSargs _ _ = []
mkSubst :: (Term, Term) -> Maybe (Name, Term)
mkSubst (P _ n _, t) = Just (n, t)
mkSubst _ = Nothing
-- | Creates a new declaration for a specialised function application.
-- Simple version at the moment: just create a version which is a direct
-- application of the function to be specialised.
-- More complex version to do: specialise the definition clause by clause
mkPE_TermDecl :: IState -> Name -> Name ->
[(PEArgType, Term)] -> PEDecl
mkPE_TermDecl ist newname sname ns
= let lhs = PApp emptyFC (PRef emptyFC newname) (map pexp (mkp ns))
rhs = eraseImps $ delab ist (mkApp (P Ref sname Erased) (map snd ns))
patdef = lookupCtxtExact sname (idris_patdefs ist)
newpats = case patdef of
Nothing -> PEDecl lhs rhs [(lhs, rhs)] True
Just d -> mkNewPats ist (getPats d) ns
newname sname lhs rhs in
newpats where
getPats (ps, _) = map (\(_, lhs, rhs) -> (lhs, rhs)) ps
mkp [] = []
mkp ((ExplicitD, tm) : tms) = delab ist tm : mkp tms
mkp (_ : tms) = mkp tms
eraseImps tm = mapPT deImp tm
deImp (PApp fc t as) = PApp fc t (map deImpArg as)
deImp t = t
deImpArg a@(PImp _ _ _ _ _) = a { getTm = Placeholder }
deImpArg a = a
-- | Get specialised applications for a given function
getSpecApps :: IState -> [Name] -> Term ->
[(Name, [(PEArgType, Term)])]
getSpecApps ist env tm = ga env (explicitNames tm) where
-- staticArg env True _ tm@(P _ n _) _ | n `elem` env = Just (True, tm)
-- staticArg env True _ tm@(App f a) _ | (P _ n _, args) <- unApply tm,
-- n `elem` env = Just (True, tm)
staticArg env x imp tm n
| x && imparg imp = (ImplicitS, tm)
| x = (ExplicitS, tm)
| imparg imp = (ImplicitD, tm)
| otherwise = (ExplicitD, (P Ref (sUN (show n ++ "arg")) Erased))
imparg (PExp _ _ _ _) = False
imparg _ = True
buildApp env [] [] _ _ = []
buildApp env (s:ss) (i:is) (a:as) (n:ns)
= let s' = staticArg env s i a n
ss' = buildApp env ss is as ns in
(s' : ss')
-- if we have a *defined* function that has static arguments,
-- it will become a specialised application
ga env tm@(App _ f a) | (P _ n _, args) <- unApply tm,
n `notElem` map fst (idris_metavars ist) =
ga env f ++ ga env a ++
case (lookupCtxtExact n (idris_statics ist),
lookupCtxtExact n (idris_implicits ist)) of
(Just statics, Just imps) ->
if (length statics == length args && or statics) then
case buildApp env statics imps args [0..] of
args -> [(n, args)]
-- _ -> []
else []
_ -> []
ga env (Bind n (Let t v) sc) = ga env v ++ ga (n : env) sc
ga env (Bind n t sc) = ga (n : env) sc
ga env t = []
| BartAdv/Idris-dev | src/Idris/PartialEval.hs | bsd-3-clause | 13,072 | 1 | 20 | 4,382 | 4,286 | 2,230 | 2,056 | 215 | 17 |
import Control.Concurrent
import Control.Monad
foreign import ccall input :: Int
foreign import ccall output :: Int -> IO ()
main :: IO ()
main = do
m <- newEmptyMVar
forkIO $ putMVar m input
r <- takeMVar m
output r
| bosu/josh | t/progs/MVar.hs | bsd-3-clause | 235 | 1 | 8 | 58 | 93 | 44 | 49 | 10 | 1 |
{-# LANGUAGE CPP, GADTs #-}
-----------------------------------------------------------------------------
-- |
-- Module : Math.Combinatorics.Species.AST.Instances
-- Copyright : (c) Brent Yorgey 2010
-- License : BSD-style (see LICENSE)
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Type class instances for 'TSpeciesAST', 'ESpeciesAST', and
-- 'SpeciesAST', in a separate module to avoid a dependency cycle
-- between "Math.Combinatorics.Species.AST" and
-- "Math.Combinatorics.Species.Class".
--
-- This module also contains functions for reifying species
-- expressions to ASTs and reflecting ASTs back into other species
-- instances, which are in this module since they depend on the AST
-- type class instances.
--
-----------------------------------------------------------------------------
module Math.Combinatorics.Species.AST.Instances
( reify, reifyE, reflect, reflectT, reflectE )
where
#if MIN_VERSION_numeric_prelude(0,2,0)
import NumericPrelude hiding (cycle)
#else
import NumericPrelude
import PreludeBase hiding (cycle)
#endif
import Math.Combinatorics.Species.Class
import Math.Combinatorics.Species.AST
import Math.Combinatorics.Species.Util.Interval hiding (omega)
import qualified Math.Combinatorics.Species.Util.Interval as I
import qualified Algebra.Additive as Additive
import qualified Algebra.Ring as Ring
import qualified Algebra.Differential as Differential
import Data.Typeable
------------------------------------------------------------
-- SpeciesAST instances ----------------------------------
------------------------------------------------------------
-- grr -- can't autoderive this because of Rec constructor! =P
-- | Species expressions can be compared for /structural/ equality.
-- (Note that if @s1@ and @s2@ are /isomorphic/ species we do not
-- necessarily have @s1 == s2@.)
--
-- Note, however, that species containing an 'OfSize' constructor
-- will always compare as @False@ with any other species, since we
-- cannot decide function equality.
instance Eq SpeciesAST where
Zero == Zero = True
One == One = True
(N m) == (N n) = m == n
X == X = True
E == E = True
C == C = True
L == L = True
Subset == Subset = True
(KSubset k) == (KSubset j) = k == j
Elt == Elt = True
(f1 :+ g1) == (f2 :+ g2) = f1 == f2 && g1 == g2
(f1 :* g1) == (f2 :* g2) = f1 == f2 && g1 == g2
(f1 :. g1) == (f2 :. g2) = f1 == f2 && g1 == g2
(f1 :>< g1) == (f2 :>< g2) = f1 == f2 && g1 == g2
(f1 :@ g1) == (f2 :@ g2) = f1 == f2 && g1 == g2
Der f1 == Der f2 = f1 == f2
-- note, OfSize will always compare False since we can't compare the functions for equality
OfSizeExactly f1 k1 == OfSizeExactly f2 k2 = f1 == f2 && k1 == k2
NonEmpty f1 == NonEmpty f2 = f1 == f2
Rec f1 == Rec f2 = typeOf f1 == typeOf f2
Omega == Omega = True
_ == _ = False
-- argh, can't derive this either. ugh.
-- | An (arbitrary) 'Ord' instance, so that we can put species
-- expressions in canonical order when simplifying.
instance Ord SpeciesAST where
compare x y | x == y = EQ
compare Zero _ = LT
compare _ Zero = GT
compare One _ = LT
compare _ One = GT
compare (N m) (N n) = compare m n
compare (N _) _ = LT
compare _ (N _) = GT
compare X _ = LT
compare _ X = GT
compare E _ = LT
compare _ E = GT
compare C _ = LT
compare _ C = GT
compare L _ = LT
compare _ L = GT
compare Subset _ = LT
compare _ Subset = GT
compare (KSubset j) (KSubset k) = compare j k
compare (KSubset _) _ = LT
compare _ (KSubset _) = GT
compare Elt _ = LT
compare _ Elt = GT
compare (f1 :+ g1) (f2 :+ g2) | f1 == f2 = compare g1 g2
| otherwise = compare f1 f2
compare (_ :+ _) _ = LT
compare _ (_ :+ _) = GT
compare (f1 :* g1) (f2 :* g2) | f1 == f2 = compare g1 g2
| otherwise = compare f1 f2
compare (_ :* _) _ = LT
compare _ (_ :* _) = GT
compare (f1 :. g1) (f2 :. g2) | f1 == f2 = compare g1 g2
| otherwise = compare f1 f2
compare (_ :. _) _ = LT
compare _ (_ :. _) = GT
compare (f1 :>< g1) (f2 :>< g2) | f1 == f2 = compare g1 g2
| otherwise = compare f1 f2
compare (_ :>< _) _ = LT
compare _ (_ :>< _) = GT
compare (f1 :@ g1) (f2 :@ g2) | f1 == f2 = compare g1 g2
| otherwise = compare f1 f2
compare (_ :@ _) _ = LT
compare _ (_ :@ _) = GT
compare (Der f1) (Der f2) = compare f1 f2
compare (Der _) _ = LT
compare _ (Der _) = GT
compare (OfSize f1 p1) (OfSize f2 p2)
= compare f1 f2
compare (OfSize _ _) _ = LT
compare _ (OfSize _ _) = GT
compare (OfSizeExactly f1 k1) (OfSizeExactly f2 k2)
| f1 == f2 = compare k1 k2
| otherwise = compare f1 f2
compare (OfSizeExactly _ _) _ = LT
compare _ (OfSizeExactly _ _) = GT
compare (NonEmpty f1) (NonEmpty f2)
= compare f1 f2
compare (NonEmpty _) _ = LT
compare _ (NonEmpty _) = GT
compare (Rec f1) (Rec f2) = compare (show $ typeOf f1) (show $ typeOf f2)
compare Omega _ = LT
compare _ Omega = GT
-- | Display species expressions in a nice human-readable form. Note
-- that we commit the unforgivable sin of omitting a corresponding
-- Read instance. This will hopefully be remedied in a future
-- version.
instance Show SpeciesAST where
showsPrec _ Zero = shows (0 :: Int)
showsPrec _ One = shows (1 :: Int)
showsPrec _ (N n) = shows n
showsPrec _ X = showChar 'X'
showsPrec _ E = showChar 'E'
showsPrec _ C = showChar 'C'
showsPrec _ L = showChar 'L'
showsPrec _ Subset = showChar 'p'
showsPrec _ (KSubset n) = showChar 'p' . shows n
showsPrec _ (Elt) = showChar 'e'
showsPrec p (f :+ g) = showParen (p>6) $ showsPrec 6 f
. showString " + "
. showsPrec 6 g
showsPrec p (f :* g) = showParen (p>=7) $ showsPrec 7 f
. showString " * "
. showsPrec 7 g
showsPrec p (f :. g) = showParen (p>=7) $ showsPrec 7 f
. showString " . "
. showsPrec 7 g
showsPrec p (f :>< g) = showParen (p>=7) $ showsPrec 7 f
. showString " >< "
. showsPrec 7 g
showsPrec p (f :@ g) = showParen (p>=7) $ showsPrec 7 f
. showString " @ "
. showsPrec 7 g
showsPrec p (Der f) = showsPrec 11 f . showChar '\''
showsPrec _ (OfSize f p) = showChar '<' . showsPrec 0 f . showChar '>'
showsPrec _ (OfSizeExactly f n) = showsPrec 11 f . shows n
showsPrec _ (NonEmpty f) = showsPrec 11 f . showChar '+'
showsPrec _ (Rec f) = shows f
-- | Species expressions are additive.
instance Additive.C SpeciesAST where
zero = Zero
(+) = (:+)
negate = error "negation is not implemented yet! wait until virtual species..."
-- | Species expressions form a ring. Well, sort of. Of course the
-- ring laws actually only hold up to isomorphism of species, not up
-- to structural equality.
instance Ring.C SpeciesAST where
(*) = (:*)
one = One
fromInteger 0 = zero
fromInteger 1 = one
fromInteger n = N n
_ ^ 0 = one
w ^ 1 = w
f ^ n = f * (f ^ (n-1))
-- | Species expressions are differentiable.
instance Differential.C SpeciesAST where
differentiate = Der
-- | Species expressions are an instance of the 'Species' class, so we
-- can use the Species class DSL to build species expression ASTs.
instance Species SpeciesAST where
singleton = X
set = E
cycle = C
linOrd = L
subset = Subset
ksubset k = KSubset k
element = Elt
o = (:.)
(><) = (:><)
(@@) = (:@)
ofSize = OfSize
ofSizeExactly = OfSizeExactly
nonEmpty = NonEmpty
rec = Rec
omega = Omega
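-- For example, the class methods above can be used to assemble an untyped
-- AST directly (a sketch; the rendering follows from the 'Show' instance
-- defined earlier):
--
-- > > set `o` nonEmpty linOrd :: SpeciesAST
-- > E . L+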
instance Show (TSpeciesAST s) where
show = show . erase'
instance Show ESpeciesAST where
show = show . erase
instance Additive.C ESpeciesAST where
zero = wrap TZero
Wrap f + Wrap g = wrap $ f :+:: g
negate = error "negation is not implemented yet! wait until virtual species..."
instance Ring.C ESpeciesAST where
Wrap f * Wrap g = wrap $ f :*:: g
one = wrap TOne
fromInteger 0 = zero
fromInteger 1 = one
fromInteger n = wrap $ TN n
_ ^ 0 = one
w@(Wrap{}) ^ 1 = w
(Wrap f) ^ n = case (Wrap f) ^ (n-1) of
(Wrap f') -> wrap $ f :*:: f'
instance Differential.C ESpeciesAST where
differentiate (Wrap f) = wrap (TDer f)
instance Species ESpeciesAST where
singleton = wrap TX
set = wrap TE
cycle = wrap TC
linOrd = wrap TL
subset = wrap TSubset
ksubset k = wrap $ TKSubset k
element = wrap TElt
o (Wrap f) (Wrap g) = wrap $ f :.:: g
Wrap f >< Wrap g = wrap $ f :><:: g
Wrap f @@ Wrap g = wrap $ f :@:: g
ofSize (Wrap f) p = wrap $ TOfSize f p
ofSizeExactly (Wrap f) n = wrap $ TOfSizeExactly f n
nonEmpty (Wrap f) = wrap $ TNonEmpty f
rec f = wrap $ TRec f
omega = wrap TOmega
------------------------------------------------------------
-- Reify/reflect -----------------------------------------
------------------------------------------------------------
-- | Reify a species expression into an AST. (Actually, this is just
-- the identity function with a usefully restricted type.) For
-- example:
--
-- > > reify octopus
-- > C . TL+
-- > > reify (ksubset 3)
-- > E3 * TE
reify :: SpeciesAST -> SpeciesAST
reify = id
-- | The same as 'reify', but produces a typed, size-annotated AST.
reifyE :: ESpeciesAST -> ESpeciesAST
reifyE = id
-- | Reflect an AST back into any instance of the 'Species' class.
reflect :: Species s => SpeciesAST -> s
reflect Zero = zero
reflect One = one
reflect (N n) = fromInteger n
reflect X = singleton
reflect E = set
reflect C = cycle
reflect L = linOrd
reflect Subset = subset
reflect (KSubset k) = ksubset k
reflect Elt = element
reflect (f :+ g) = reflect f + reflect g
reflect (f :* g) = reflect f * reflect g
reflect (f :. g) = reflect f `o` reflect g
reflect (f :>< g) = reflect f >< reflect g
reflect (f :@ g) = reflect f @@ reflect g
reflect (Der f) = oneHole (reflect f)
reflect (OfSize f p) = ofSize (reflect f) p
reflect (OfSizeExactly f n) = ofSizeExactly (reflect f) n
reflect (NonEmpty f) = nonEmpty (reflect f)
reflect (Rec f) = rec f
reflect Omega = omega
-- | Reflect a typed AST back into any instance of the 'Species'
-- class.
reflectT :: Species s => TSpeciesAST f -> s
reflectT = reflect . erase'
-- | Reflect an existentially wrapped typed AST back into any
-- instance of the 'Species' class.
reflectE :: Species s => ESpeciesAST -> s
reflectE = reflect . erase
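-- For example, a sketch of reflecting an untyped AST back into the typed,
-- size-annotated representation (the rendering again follows from the
-- 'Show' instances above):
--
-- > > reflect (X :+ E) :: ESpeciesAST
-- > X + E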
| timsears/species | Math/Combinatorics/Species/AST/Instances.hs | bsd-3-clause | 13,095 | 0 | 11 | 5,227 | 3,514 | 1,786 | 1,728 | 226 | 1 |
{-# language CPP #-}
-- | = Name
--
-- VK_KHR_ray_tracing_pipeline - device extension
--
-- == VK_KHR_ray_tracing_pipeline
--
-- [__Name String__]
-- @VK_KHR_ray_tracing_pipeline@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 348
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.1
--
-- - Requires @VK_KHR_spirv_1_4@
--
-- - Requires @VK_KHR_acceleration_structure@
--
-- [__Contact__]
--
-- - Daniel Koch
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_KHR_ray_tracing_pipeline] @dgkoch%0A<<Here describe the issue or question you have about the VK_KHR_ray_tracing_pipeline extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2020-11-12
--
-- [__Interactions and External Dependencies__]
--
-- - This extension requires
-- <https://htmlpreview.github.io/?https://github.com/KhronosGroup/SPIRV-Registry/blob/master/extensions/KHR/SPV_KHR_ray_tracing.html SPV_KHR_ray_tracing>
--
-- - This extension provides API support for
-- <https://github.com/KhronosGroup/GLSL/blob/master/extensions/ext/GLSL_EXT_ray_tracing.txt GLSL_EXT_ray_tracing>
--
-- - This extension interacts with
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#versions-1.2 Vulkan 1.2>
-- and @VK_KHR_vulkan_memory_model@, adding the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#shader-call-related shader-call-related>
-- relation of invocations,
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#shader-call-order shader-call-order>
-- partial order of dynamic instances of instructions, and the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#shaders-scope-shadercall ShaderCallKHR>
-- scope.
--
-- - This extension interacts with @VK_KHR_pipeline_library@,
-- enabling pipeline libraries to be used with ray tracing
-- pipelines and enabling usage of
-- 'RayTracingPipelineInterfaceCreateInfoKHR'.
--
-- [__Contributors__]
--
-- - Matthäus Chajdas, AMD
--
-- - Greg Grebe, AMD
--
-- - Nicolai Hähnle, AMD
--
-- - Tobias Hector, AMD
--
-- - Dave Oldcorn, AMD
--
-- - Skyler Saleh, AMD
--
-- - Mathieu Robart, Arm
--
-- - Marius Bjorge, Arm
--
-- - Tom Olson, Arm
--
-- - Sebastian Tafuri, EA
--
-- - Henrik Rydgard, Embark
--
-- - Juan Cañada, Epic Games
--
-- - Patrick Kelly, Epic Games
--
-- - Yuriy O’Donnell, Epic Games
--
-- - Michael Doggett, Facebook\/Oculus
--
-- - Andrew Garrard, Imagination
--
-- - Don Scorgie, Imagination
--
-- - Dae Kim, Imagination
--
-- - Joshua Barczak, Intel
--
-- - Slawek Grajewski, Intel
--
-- - Jeff Bolz, NVIDIA
--
-- - Pascal Gautron, NVIDIA
--
-- - Daniel Koch, NVIDIA
--
-- - Christoph Kubisch, NVIDIA
--
-- - Ashwin Lele, NVIDIA
--
-- - Robert Stepinski, NVIDIA
--
-- - Martin Stich, NVIDIA
--
-- - Nuno Subtil, NVIDIA
--
-- - Eric Werness, NVIDIA
--
-- - Jon Leech, Khronos
--
-- - Jeroen van Schijndel, OTOY
--
-- - Juul Joosten, OTOY
--
-- - Alex Bourd, Qualcomm
--
-- - Roman Larionov, Qualcomm
--
-- - David McAllister, Qualcomm
--
-- - Spencer Fricke, Samsung
--
-- - Lewis Gordon, Samsung
--
-- - Ralph Potter, Samsung
--
-- - Jasper Bekkers, Traverse Research
--
-- - Jesse Barker, Unity
--
-- - Baldur Karlsson, Valve
--
-- == Description
--
-- Rasterization has been the dominant method to produce interactive
-- graphics, but increasing performance of graphics hardware has made ray
-- tracing a viable option for interactive rendering. Being able to
-- integrate ray tracing with traditional rasterization makes it easier for
-- applications to incrementally add ray traced effects to existing
-- applications or to do hybrid approaches with rasterization for primary
-- visibility and ray tracing for secondary queries.
--
-- To enable ray tracing, this extension adds a few different categories of
-- new functionality:
--
-- - A new ray tracing pipeline type with new shader domains: ray
-- generation, intersection, any-hit, closest hit, miss, and callable
--
-- - A shader binding indirection table to link shader groups with
-- acceleration structure items
--
-- - Ray tracing commands which initiate the ray pipeline traversal and
-- invocation of the various new shader domains depending on which
-- traversal conditions are met
--
-- This extension adds support for the following SPIR-V extension in
-- Vulkan:
--
-- - @SPV_KHR_ray_tracing@
--
-- == New Commands
--
-- - 'cmdSetRayTracingPipelineStackSizeKHR'
--
-- - 'cmdTraceRaysIndirectKHR'
--
-- - 'cmdTraceRaysKHR'
--
-- - 'createRayTracingPipelinesKHR'
--
-- - 'getRayTracingCaptureReplayShaderGroupHandlesKHR'
--
-- - 'getRayTracingShaderGroupHandlesKHR'
--
-- - 'getRayTracingShaderGroupStackSizeKHR'
--
-- == New Structures
--
-- - 'RayTracingPipelineCreateInfoKHR'
--
-- - 'RayTracingPipelineInterfaceCreateInfoKHR'
--
-- - 'RayTracingShaderGroupCreateInfoKHR'
--
-- - 'StridedDeviceAddressRegionKHR'
--
-- - 'TraceRaysIndirectCommandKHR'
--
-- - Extending
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2',
-- 'Vulkan.Core10.Device.DeviceCreateInfo':
--
-- - 'PhysicalDeviceRayTracingPipelineFeaturesKHR'
--
-- - Extending
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceProperties2':
--
-- - 'PhysicalDeviceRayTracingPipelinePropertiesKHR'
--
-- == New Enums
--
-- - 'RayTracingShaderGroupTypeKHR'
--
-- - 'ShaderGroupShaderKHR'
--
-- == New Enum Constants
--
-- - 'KHR_RAY_TRACING_PIPELINE_EXTENSION_NAME'
--
-- - 'KHR_RAY_TRACING_PIPELINE_SPEC_VERSION'
--
-- - 'Vulkan.Core10.APIConstants.SHADER_UNUSED_KHR'
--
-- - Extending
-- 'Vulkan.Core10.Enums.BufferUsageFlagBits.BufferUsageFlagBits':
--
-- - 'Vulkan.Core10.Enums.BufferUsageFlagBits.BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR'
--
-- - Extending 'Vulkan.Core10.Enums.DynamicState.DynamicState':
--
-- - 'Vulkan.Core10.Enums.DynamicState.DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR'
--
-- - Extending 'Vulkan.Core10.Enums.PipelineBindPoint.PipelineBindPoint':
--
-- - 'Vulkan.Core10.Enums.PipelineBindPoint.PIPELINE_BIND_POINT_RAY_TRACING_KHR'
--
-- - Extending
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PipelineCreateFlagBits':
--
-- - 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR'
--
-- - 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR'
--
-- - 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR'
--
-- - 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR'
--
-- - 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR'
--
-- - 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR'
--
-- - 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR'
--
-- - Extending
-- 'Vulkan.Core10.Enums.PipelineStageFlagBits.PipelineStageFlagBits':
--
-- - 'Vulkan.Core10.Enums.PipelineStageFlagBits.PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR'
--
-- - Extending
-- 'Vulkan.Core10.Enums.ShaderStageFlagBits.ShaderStageFlagBits':
--
-- - 'Vulkan.Core10.Enums.ShaderStageFlagBits.SHADER_STAGE_ANY_HIT_BIT_KHR'
--
-- - 'Vulkan.Core10.Enums.ShaderStageFlagBits.SHADER_STAGE_CALLABLE_BIT_KHR'
--
-- - 'Vulkan.Core10.Enums.ShaderStageFlagBits.SHADER_STAGE_CLOSEST_HIT_BIT_KHR'
--
-- - 'Vulkan.Core10.Enums.ShaderStageFlagBits.SHADER_STAGE_INTERSECTION_BIT_KHR'
--
-- - 'Vulkan.Core10.Enums.ShaderStageFlagBits.SHADER_STAGE_MISS_BIT_KHR'
--
-- - 'Vulkan.Core10.Enums.ShaderStageFlagBits.SHADER_STAGE_RAYGEN_BIT_KHR'
--
-- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType':
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR'
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR'
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR'
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR'
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR'
--
-- == New or Modified Built-In Variables
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#interfaces-builtin-variables-launchid LaunchIdKHR>
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#interfaces-builtin-variables-launchsize LaunchSizeKHR>
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#interfaces-builtin-variables-worldrayorigin WorldRayOriginKHR>
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#interfaces-builtin-variables-worldraydirection WorldRayDirectionKHR>
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#interfaces-builtin-variables-objectrayorigin ObjectRayOriginKHR>
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#interfaces-builtin-variables-objectraydirection ObjectRayDirectionKHR>
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#interfaces-builtin-variables-raytmin RayTminKHR>
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#interfaces-builtin-variables-raytmax RayTmaxKHR>
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#interfaces-builtin-variables-instancecustomindex InstanceCustomIndexKHR>
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#interfaces-builtin-variables-instanceid InstanceId>
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#interfaces-builtin-variables-objecttoworld ObjectToWorldKHR>
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#interfaces-builtin-variables-worldtoobject WorldToObjectKHR>
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#interfaces-builtin-variables-hitkind HitKindKHR>
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#interfaces-builtin-variables-incomingrayflags IncomingRayFlagsKHR>
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#interfaces-builtin-variables-raygeometryindex RayGeometryIndexKHR>
--
-- -     (modified) @PrimitiveId@
--
-- == New SPIR-V Capabilities
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#spirvenv-capabilities-table-RayTracingKHR RayTracingKHR>
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#spirvenv-capabilities-table-RayTraversalPrimitiveCullingKHR RayTraversalPrimitiveCullingKHR>
--
-- == Issues
--
-- (1) How does this extension differ from VK_NV_ray_tracing?
--
-- __DISCUSSION__:
--
-- The following is a summary of the main functional differences between
-- VK_KHR_ray_tracing_pipeline and VK_NV_ray_tracing:
--
-- - added support for indirect ray tracing ('cmdTraceRaysIndirectKHR')
--
-- - uses SPV_KHR_ray_tracing instead of SPV_NV_ray_tracing
--
-- - refer to KHR SPIR-V enums instead of NV SPIR-V enums (which are
-- functionally equivalent and aliased to the same values).
--
-- - added @RayGeometryIndexKHR@ built-in
--
-- - removed vkCompileDeferredNV compilation functionality and replaced
-- with
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#deferred-host-operations deferred host operations>
-- interactions for ray tracing
--
-- - added 'PhysicalDeviceRayTracingPipelineFeaturesKHR' structure
--
-- - extended 'PhysicalDeviceRayTracingPipelinePropertiesKHR' structure
--
-- - renamed @maxRecursionDepth@ to @maxRayRecursionDepth@ and it has
-- a minimum of 1 instead of 31
--
-- - require @shaderGroupHandleSize@ to be 32 bytes
--
-- - added @maxRayDispatchInvocationCount@,
-- @shaderGroupHandleAlignment@ and @maxRayHitAttributeSize@
--
-- - reworked geometry structures so they could be better shared between
-- device, host, and indirect builds
--
-- - changed SBT parameters to a structure and added size
-- ('StridedDeviceAddressRegionKHR')
--
-- - add parameter for requesting memory requirements for host and\/or
-- device build
--
-- - added
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#pipeline-library pipeline library>
-- support for ray tracing
--
-- - added
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#ray-traversal-watertight watertightness guarantees>
--
-- - added no-null-shader pipeline flags
-- (@VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_*_SHADERS_BIT_KHR@)
--
-- - added
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#ray-tracing-shader-call memory model interactions>
-- with ray tracing and define how subgroups work and can be repacked
--
-- (2) Can you give a more detailed comparison of differences and
-- similarities between VK_NV_ray_tracing and VK_KHR_ray_tracing_pipeline?
--
-- __DISCUSSION__:
--
-- The following is a more detailed comparison of which commands,
-- structures, and enums are aliased, changed, or removed.
--
-- - Aliased functionality — enums, structures, and commands that are
-- considered equivalent:
--
-- - 'Vulkan.Extensions.VK_NV_ray_tracing.RayTracingShaderGroupTypeNV'
-- ↔ 'RayTracingShaderGroupTypeKHR'
--
-- - 'Vulkan.Extensions.VK_NV_ray_tracing.getRayTracingShaderGroupHandlesNV'
-- ↔ 'getRayTracingShaderGroupHandlesKHR'
--
-- - Changed enums, structures, and commands:
--
-- - 'Vulkan.Extensions.VK_NV_ray_tracing.RayTracingShaderGroupCreateInfoNV'
-- → 'RayTracingShaderGroupCreateInfoKHR' (added
-- @pShaderGroupCaptureReplayHandle@)
--
-- - 'Vulkan.Extensions.VK_NV_ray_tracing.RayTracingPipelineCreateInfoNV'
-- → 'RayTracingPipelineCreateInfoKHR' (changed type of @pGroups@,
-- added @libraries@, @pLibraryInterface@, and @pDynamicState@)
--
-- - 'Vulkan.Extensions.VK_NV_ray_tracing.PhysicalDeviceRayTracingPropertiesNV'
-- → VkPhysicalDeviceRayTracingPropertiesKHR (renamed
-- @maxTriangleCount@ to @maxPrimitiveCount@, added
-- @shaderGroupHandleCaptureReplaySize@)
--
-- - 'Vulkan.Extensions.VK_NV_ray_tracing.cmdTraceRaysNV' →
-- 'cmdTraceRaysKHR' (params to struct)
--
-- - 'Vulkan.Extensions.VK_NV_ray_tracing.createRayTracingPipelinesNV'
-- → 'createRayTracingPipelinesKHR' (different struct, changed
-- functionality)
--
-- - Added enums, structures and commands:
--
-- - 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR'
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR',
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR',
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR',
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR',
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR'
-- to
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PipelineCreateFlagBits'
--
-- - 'PhysicalDeviceRayTracingPipelineFeaturesKHR' structure
--
-- - 'Vulkan.Extensions.VK_KHR_acceleration_structure.DeviceOrHostAddressKHR'
-- and
-- 'Vulkan.Extensions.VK_KHR_acceleration_structure.DeviceOrHostAddressConstKHR'
-- unions
--
-- - 'Vulkan.Extensions.VK_KHR_pipeline_library.PipelineLibraryCreateInfoKHR'
-- struct
--
-- - 'RayTracingPipelineInterfaceCreateInfoKHR' struct
--
-- - 'StridedDeviceAddressRegionKHR' struct
--
-- - 'cmdTraceRaysIndirectKHR' command and
-- 'TraceRaysIndirectCommandKHR' struct
--
-- - 'getRayTracingCaptureReplayShaderGroupHandlesKHR' (shader group
-- capture\/replay)
--
-- - 'cmdSetRayTracingPipelineStackSizeKHR' and
-- 'getRayTracingShaderGroupStackSizeKHR' commands for stack size
-- control
--
-- - Functionality removed:
--
-- - 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_DEFER_COMPILE_BIT_NV'
--
-- - 'Vulkan.Extensions.VK_NV_ray_tracing.compileDeferredNV' command
-- (replaced with @VK_KHR_deferred_host_operations@)
--
-- (3) What are the changes between the public provisional
-- (VK_KHR_ray_tracing v8) release and the internal provisional
-- (VK_KHR_ray_tracing v9) release?
--
-- - Require Vulkan 1.1 and SPIR-V 1.4
--
-- - Added interactions with Vulkan 1.2 and @VK_KHR_vulkan_memory_model@
--
-- - added creation time capture and replay flags
--
-- - added
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR'
-- to
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PipelineCreateFlagBits'
--
-- - replace @VkStridedBufferRegionKHR@ with
-- 'StridedDeviceAddressRegionKHR' and change 'cmdTraceRaysKHR',
-- 'cmdTraceRaysIndirectKHR', to take these for the shader binding
-- table and use device addresses instead of buffers.
--
-- - require the shader binding table buffers to have the
-- @VK_BUFFER_USAGE_RAY_TRACING_BIT_KHR@ set
--
-- - make @VK_KHR_pipeline_library@ an interaction instead of required
-- extension
--
-- - rename the @libraries@ member of 'RayTracingPipelineCreateInfoKHR'
-- to @pLibraryInfo@ and make it a pointer
--
-- - make @VK_KHR_deferred_host_operations@ an interaction instead of a
-- required extension (later went back on this)
--
-- - added explicit stack size management for ray tracing pipelines
--
-- - removed the @maxCallableSize@ member of
-- 'RayTracingPipelineInterfaceCreateInfoKHR'
--
-- - added the @pDynamicState@ member to
-- 'RayTracingPipelineCreateInfoKHR'
--
-- - added
-- 'Vulkan.Core10.Enums.DynamicState.DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR'
-- dynamic state for ray tracing pipelines
--
-- - added 'getRayTracingShaderGroupStackSizeKHR' and
-- 'cmdSetRayTracingPipelineStackSizeKHR' commands
--
-- - added 'ShaderGroupShaderKHR' enum
--
-- - Added @maxRayDispatchInvocationCount@ limit to
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'
--
-- - Added @shaderGroupHandleAlignment@ property to
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'
--
-- - Added @maxRayHitAttributeSize@ property to
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'
--
-- - Clarify deferred host ops for pipeline creation
--
-- - 'Vulkan.Extensions.Handles.DeferredOperationKHR' is now a
-- top-level parameter for 'createRayTracingPipelinesKHR'
--
-- - removed @VkDeferredOperationInfoKHR@ structure
--
-- - change deferred host creation\/return parameter behavior such
-- that the implementation can modify such parameters until the
-- deferred host operation completes
--
-- - @VK_KHR_deferred_host_operations@ is required again
--
-- (4) What are the changes between the internal provisional
-- (VK_KHR_ray_tracing v9) release and the final
-- (VK_KHR_acceleration_structure v11 \/ VK_KHR_ray_tracing_pipeline v1)
-- release?
--
-- - refactor VK_KHR_ray_tracing into 3 extensions, enabling
-- implementation flexibility and decoupling ray query support from ray
-- pipelines:
--
-- - @VK_KHR_acceleration_structure@ (for acceleration structure
-- operations)
--
-- - @VK_KHR_ray_tracing_pipeline@ (for ray tracing pipeline and
-- shader stages)
--
-- - @VK_KHR_ray_query@ (for ray queries in existing shader stages)
--
-- - Require @Volatile@ for the following builtins in the ray generation,
-- closest hit, miss, intersection, and callable shader stages:
--
-- - @SubgroupSize@, @SubgroupLocalInvocationId@, @SubgroupEqMask@,
-- @SubgroupGeMask@, @SubgroupGtMask@, @SubgroupLeMask@,
-- @SubgroupLtMask@
--
-- - @SMIDNV@, @WarpIDNV@
--
-- - clarify buffer usage flags for ray tracing
--
-- - 'Vulkan.Core10.Enums.BufferUsageFlagBits.BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR'
-- is added as an alias of
-- 'Vulkan.Extensions.VK_NV_ray_tracing.BUFFER_USAGE_RAY_TRACING_BIT_NV'
-- and is required on shader binding table buffers
--
-- - 'Vulkan.Core10.Enums.BufferUsageFlagBits.BUFFER_USAGE_STORAGE_BUFFER_BIT'
-- is used in @VK_KHR_acceleration_structure@ for @scratchData@
--
-- - rename @maxRecursionDepth@ to @maxRayPipelineRecursionDepth@
-- (pipeline creation) and @maxRayRecursionDepth@ (limit) to reduce
-- confusion
--
-- - Add queryable @maxRayHitAttributeSize@ limit and rename members of
-- 'RayTracingPipelineInterfaceCreateInfoKHR' to
-- @maxPipelineRayPayloadSize@ and @maxPipelineRayHitAttributeSize@ for
-- clarity
--
-- - Update SPIRV capabilities to use @RayTracingKHR@
--
-- - extension is no longer provisional
--
-- - define synchronization requirements for indirect trace rays and
-- indirect buffer
--
-- (5) This extension adds gl_InstanceID for the intersection, any-hit, and
-- closest hit shaders, but in KHR_vulkan_glsl, gl_InstanceID is replaced
-- with gl_InstanceIndex. Which should be used for Vulkan in this
-- extension?
--
-- __RESOLVED__: This extension uses gl_InstanceID and maps it to
-- @InstanceId@ in SPIR-V. It is acknowledged that this is different than
-- other shader stages in Vulkan. There are two main reasons for the
-- difference here:
--
-- - symmetry with gl_PrimitiveID which is also available in these
-- shaders
--
-- - there is no “baseInstance” relevant for these shaders, and so ID
-- makes it more obvious that this is zero-based.
--
-- == Sample Code
--
-- Example ray generation GLSL shader
--
-- > #version 450 core
-- > #extension GL_EXT_ray_tracing : require
-- > layout(set = 0, binding = 0, rgba8) uniform image2D image;
-- > layout(set = 0, binding = 1) uniform accelerationStructureEXT as;
-- > layout(location = 0) rayPayloadEXT float payload;
-- >
-- > void main()
-- > {
-- > vec4 col = vec4(0, 0, 0, 1);
-- >
-- > vec3 origin = vec3(float(gl_LaunchIDEXT.x)/float(gl_LaunchSizeEXT.x), float(gl_LaunchIDEXT.y)/float(gl_LaunchSizeEXT.y), 1.0);
-- > vec3 dir = vec3(0.0, 0.0, -1.0);
-- >
-- > traceRayEXT(as, 0, 0xff, 0, 1, 0, origin, 0.0, dir, 1000.0, 0);
-- >
-- > col.y = payload;
-- >
-- > imageStore(image, ivec2(gl_LaunchIDEXT.xy), col);
-- > }
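--
-- A corresponding sketch (not part of the original sample) of how the
-- shader binding table regions might be described on the Haskell side.
-- Here @sbtBase@ and @handleSizeAligned@ are assumed to be a device address
-- and a suitably aligned handle size obtained by the application, and the
-- field names are assumed to follow the members of
-- 'StridedDeviceAddressRegionKHR':
--
-- > raygenRegion :: StridedDeviceAddressRegionKHR
-- > raygenRegion = StridedDeviceAddressRegionKHR
-- >   { deviceAddress = sbtBase
-- >   , stride        = handleSizeAligned
-- >   , size          = handleSizeAligned -- the raygen table's size must equal its stride
-- >   }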
--
-- == Version History
--
-- - Revision 1, 2020-11-12 (Mathieu Robart, Daniel Koch, Eric Werness,
-- Tobias Hector)
--
-- - Decomposition of the specification, from VK_KHR_ray_tracing to
-- VK_KHR_ray_tracing_pipeline (#1918,!3912)
--
-- - require certain subgroup and sm_shader_builtin shader builtins
-- to be decorated as volatile in the ray generation, closest hit,
-- miss, intersection, and callable stages (#1924,!3903,!3954)
--
-- - clarify buffer usage flags for ray tracing (#2181,!3939)
--
-- - rename maxRecursionDepth to maxRayPipelineRecursionDepth and
-- maxRayRecursionDepth (#2203,!3937)
--
-- -     add queryable maxRayHitAttributeSize and rename members of
-- VkRayTracingPipelineInterfaceCreateInfoKHR (#2102,!3966)
--
-- - update to use @RayTracingKHR@ SPIR-V capability
--
-- - add VUs for matching hit group type against geometry type
-- (#2245,!3994)
--
-- - require @RayTMaxKHR@ be volatile in intersection shaders
-- (#2268,!4030)
--
-- - add numerical limits for ray parameters (#2235,!3960)
--
-- - fix SBT indexing rules for device addresses (#2308,!4079)
--
-- - relax formula for ray intersection candidate determination
-- (#2322,!4080)
--
-- - add more details on @ShaderRecordBufferKHR@ variables
-- (#2230,!4083)
--
-- - clarify valid bits for @InstanceCustomIndexKHR@
-- (GLSL\/GLSL#19,!4128)
--
-- - allow at most one @IncomingRayPayloadKHR@,
-- @IncomingCallableDataKHR@, and @HitAttributeKHR@ (!4129)
--
-- - add minimum for maxShaderGroupStride (#2353,!4131)
--
-- - require VK_KHR_pipeline_library extension to be supported
-- (#2348,!4135)
--
-- - clarify meaning of \'geometry index\' (#2272,!4137)
--
-- - restrict traces to TLAS (#2239,!4141)
--
-- - add note about maxPipelineRayPayloadSize (#2383,!4172)
--
-- - do not require raygen shader in pipeline libraries (!4185)
--
-- - define sync for indirect trace rays and indirect buffer
-- (#2407,!4208)
--
-- == See Also
--
-- 'Vulkan.Core10.APIConstants.SHADER_UNUSED_KHR',
-- 'PhysicalDeviceRayTracingPipelineFeaturesKHR',
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR',
-- 'RayTracingPipelineCreateInfoKHR',
-- 'RayTracingPipelineInterfaceCreateInfoKHR',
-- 'RayTracingShaderGroupCreateInfoKHR', 'RayTracingShaderGroupTypeKHR',
-- 'ShaderGroupShaderKHR', 'StridedDeviceAddressRegionKHR',
-- 'TraceRaysIndirectCommandKHR', 'cmdSetRayTracingPipelineStackSizeKHR',
-- 'cmdTraceRaysIndirectKHR', 'cmdTraceRaysKHR',
-- 'createRayTracingPipelinesKHR',
-- 'getRayTracingCaptureReplayShaderGroupHandlesKHR',
-- 'getRayTracingShaderGroupHandlesKHR',
-- 'getRayTracingShaderGroupStackSizeKHR'
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_KHR_ray_tracing_pipeline Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_KHR_ray_tracing_pipeline ( cmdTraceRaysKHR
, getRayTracingShaderGroupHandlesKHR
, getRayTracingCaptureReplayShaderGroupHandlesKHR
, createRayTracingPipelinesKHR
, withRayTracingPipelinesKHR
, cmdTraceRaysIndirectKHR
, getRayTracingShaderGroupStackSizeKHR
, cmdSetRayTracingPipelineStackSizeKHR
, RayTracingShaderGroupCreateInfoKHR(..)
, RayTracingPipelineCreateInfoKHR(..)
, PhysicalDeviceRayTracingPipelineFeaturesKHR(..)
, PhysicalDeviceRayTracingPipelinePropertiesKHR(..)
, StridedDeviceAddressRegionKHR(..)
, TraceRaysIndirectCommandKHR(..)
, RayTracingPipelineInterfaceCreateInfoKHR(..)
, RayTracingShaderGroupTypeKHR( RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR
, RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR
, RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR
, ..
)
, ShaderGroupShaderKHR( SHADER_GROUP_SHADER_GENERAL_KHR
, SHADER_GROUP_SHADER_CLOSEST_HIT_KHR
, SHADER_GROUP_SHADER_ANY_HIT_KHR
, SHADER_GROUP_SHADER_INTERSECTION_KHR
, ..
)
, KHR_RAY_TRACING_PIPELINE_SPEC_VERSION
, pattern KHR_RAY_TRACING_PIPELINE_SPEC_VERSION
, KHR_RAY_TRACING_PIPELINE_EXTENSION_NAME
, pattern KHR_RAY_TRACING_PIPELINE_EXTENSION_NAME
, DeferredOperationKHR(..)
, PipelineLibraryCreateInfoKHR(..)
, SHADER_UNUSED_KHR
, pattern SHADER_UNUSED_KHR
) where
import Vulkan.Internal.Utils (enumReadPrec)
import Vulkan.Internal.Utils (enumShowsPrec)
import Vulkan.Internal.Utils (traceAroundEvent)
import Control.Exception.Base (bracket)
import Control.Monad (unless)
import Control.Monad.IO.Class (liftIO)
import Data.Foldable (traverse_)
import Data.Typeable (eqT)
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Marshal.Alloc (callocBytes)
import Foreign.Marshal.Alloc (free)
import Foreign.Marshal.Utils (maybePeek)
import GHC.Base (when)
import GHC.IO (throwIO)
import GHC.Ptr (castPtr)
import GHC.Ptr (nullFunPtr)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import GHC.Show (showsPrec)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Cont (evalContT)
import Data.Vector (generateM)
import qualified Data.Vector (imapM_)
import qualified Data.Vector (length)
import Foreign.C.Types (CSize(..))
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero)
import Vulkan.Zero (Zero(..))
import Control.Monad.IO.Class (MonadIO)
import Data.String (IsString)
import Data.Type.Equality ((:~:)(Refl))
import Data.Typeable (Typeable)
import Foreign.C.Types (CSize)
import Foreign.C.Types (CSize(CSize))
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import GHC.IO.Exception (IOErrorType(..))
import GHC.IO.Exception (IOException(..))
import Data.Int (Int32)
import Foreign.Ptr (FunPtr)
import Foreign.Ptr (Ptr)
import GHC.Read (Read(readPrec))
import GHC.Show (Show(showsPrec))
import Data.Word (Word32)
import Data.Word (Word64)
import Data.Kind (Type)
import Control.Monad.Trans.Cont (ContT(..))
import Data.Vector (Vector)
import Vulkan.CStruct.Utils (advancePtrBytes)
import Vulkan.Core10.FundamentalTypes (bool32ToBool)
import Vulkan.Core10.FundamentalTypes (boolToBool32)
import Vulkan.Core10.Pipeline (destroyPipeline)
import Vulkan.CStruct.Extends (forgetExtensions)
import Vulkan.CStruct.Extends (peekSomeCStruct)
import Vulkan.CStruct.Extends (pokeSomeCStruct)
import Vulkan.NamedType ((:::))
import Vulkan.Core10.AllocationCallbacks (AllocationCallbacks)
import Vulkan.Core10.FundamentalTypes (Bool32)
import Vulkan.CStruct.Extends (Chain)
import Vulkan.Core10.Handles (CommandBuffer)
import Vulkan.Core10.Handles (CommandBuffer(..))
import Vulkan.Core10.Handles (CommandBuffer(CommandBuffer))
import Vulkan.Core10.Handles (CommandBuffer_T)
import Vulkan.Extensions.Handles (DeferredOperationKHR)
import Vulkan.Extensions.Handles (DeferredOperationKHR(..))
import Vulkan.Core10.Handles (Device)
import Vulkan.Core10.Handles (Device(..))
import Vulkan.Core10.Handles (Device(Device))
import Vulkan.Core10.FundamentalTypes (DeviceAddress)
import Vulkan.Dynamic (DeviceCmds(pVkCmdSetRayTracingPipelineStackSizeKHR))
import Vulkan.Dynamic (DeviceCmds(pVkCmdTraceRaysIndirectKHR))
import Vulkan.Dynamic (DeviceCmds(pVkCmdTraceRaysKHR))
import Vulkan.Dynamic (DeviceCmds(pVkCreateRayTracingPipelinesKHR))
import Vulkan.Dynamic (DeviceCmds(pVkGetRayTracingCaptureReplayShaderGroupHandlesKHR))
import Vulkan.Dynamic (DeviceCmds(pVkGetRayTracingShaderGroupHandlesKHR))
import Vulkan.Dynamic (DeviceCmds(pVkGetRayTracingShaderGroupStackSizeKHR))
import Vulkan.Core10.FundamentalTypes (DeviceSize)
import Vulkan.Core10.Handles (Device_T)
import Vulkan.CStruct.Extends (Extends)
import Vulkan.CStruct.Extends (Extendss)
import Vulkan.CStruct.Extends (Extensible(..))
import Vulkan.CStruct.Extends (PeekChain)
import Vulkan.CStruct.Extends (PeekChain(..))
import Vulkan.Core10.Handles (Pipeline)
import Vulkan.Core10.Handles (Pipeline(..))
import Vulkan.Core10.Handles (PipelineCache)
import Vulkan.Core10.Handles (PipelineCache(..))
import Vulkan.Core10.Enums.PipelineCreateFlagBits (PipelineCreateFlags)
import {-# SOURCE #-} Vulkan.Core13.Promoted_From_VK_EXT_pipeline_creation_feedback (PipelineCreationFeedbackCreateInfo)
import Vulkan.Core10.Pipeline (PipelineDynamicStateCreateInfo)
import Vulkan.Core10.Handles (PipelineLayout)
import Vulkan.Extensions.VK_KHR_pipeline_library (PipelineLibraryCreateInfoKHR)
import Vulkan.Core10.Pipeline (PipelineShaderStageCreateInfo)
import Vulkan.CStruct.Extends (PokeChain)
import Vulkan.CStruct.Extends (PokeChain(..))
import Vulkan.Core10.Enums.Result (Result)
import Vulkan.Core10.Enums.Result (Result(..))
import Vulkan.CStruct.Extends (SomeStruct)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Exception (VulkanException(..))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR))
import Vulkan.Core10.Enums.Result (Result(SUCCESS))
import Vulkan.Extensions.Handles (DeferredOperationKHR(..))
import Vulkan.Extensions.VK_KHR_pipeline_library (PipelineLibraryCreateInfoKHR(..))
import Vulkan.Core10.APIConstants (SHADER_UNUSED_KHR)
import Vulkan.Core10.APIConstants (pattern SHADER_UNUSED_KHR)
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkCmdTraceRaysKHR
:: FunPtr (Ptr CommandBuffer_T -> Ptr StridedDeviceAddressRegionKHR -> Ptr StridedDeviceAddressRegionKHR -> Ptr StridedDeviceAddressRegionKHR -> Ptr StridedDeviceAddressRegionKHR -> Word32 -> Word32 -> Word32 -> IO ()) -> Ptr CommandBuffer_T -> Ptr StridedDeviceAddressRegionKHR -> Ptr StridedDeviceAddressRegionKHR -> Ptr StridedDeviceAddressRegionKHR -> Ptr StridedDeviceAddressRegionKHR -> Word32 -> Word32 -> Word32 -> IO ()
-- | vkCmdTraceRaysKHR - Initialize a ray tracing dispatch
--
-- = Description
--
-- When the command is executed, a ray generation group of @width@ ×
-- @height@ × @depth@ rays is assembled.
--
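-- For example, a sketch of recording a full-frame dispatch, assuming
-- @cmdBuf@ and the populated shader binding table regions have been set up
-- elsewhere (an all-zero 'StridedDeviceAddressRegionKHR' is passed for the
-- callable table when no callable shaders are used):
--
-- > cmdTraceRaysKHR cmdBuf raygenRegion missRegion hitRegion zero 1920 1080 1
--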
-- == Valid Usage
--
-- - #VUID-vkCmdTraceRaysKHR-magFilter-04553# If a
-- 'Vulkan.Core10.Handles.Sampler' created with @magFilter@ or
-- @minFilter@ equal to 'Vulkan.Core10.Enums.Filter.FILTER_LINEAR' and
-- @compareEnable@ equal to 'Vulkan.Core10.FundamentalTypes.FALSE' is
-- used to sample a 'Vulkan.Core10.Handles.ImageView' as a result of
-- this command, then the image view’s
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#resources-image-view-format-features format features>
-- /must/ contain
-- 'Vulkan.Core10.Enums.FormatFeatureFlagBits.FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT'
--
-- - #VUID-vkCmdTraceRaysKHR-mipmapMode-04770# If a
-- 'Vulkan.Core10.Handles.Sampler' created with @mipmapMode@ equal to
-- 'Vulkan.Core10.Enums.SamplerMipmapMode.SAMPLER_MIPMAP_MODE_LINEAR'
-- and @compareEnable@ equal to 'Vulkan.Core10.FundamentalTypes.FALSE'
-- is used to sample a 'Vulkan.Core10.Handles.ImageView' as a result of
-- this command, then the image view’s
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#resources-image-view-format-features format features>
-- /must/ contain
-- 'Vulkan.Core10.Enums.FormatFeatureFlagBits.FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT'
--
-- - #VUID-vkCmdTraceRaysKHR-None-06479# If a
-- 'Vulkan.Core10.Handles.ImageView' is sampled with
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#textures-depth-compare-operation depth comparison>,
-- the image view’s
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#resources-image-view-format-features format features>
-- /must/ contain
-- 'Vulkan.Core13.Enums.FormatFeatureFlags2.FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT'
--
-- - #VUID-vkCmdTraceRaysKHR-None-02691# If a
-- 'Vulkan.Core10.Handles.ImageView' is accessed using atomic
-- operations as a result of this command, then the image view’s
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#resources-image-view-format-features format features>
-- /must/ contain
-- 'Vulkan.Core10.Enums.FormatFeatureFlagBits.FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT'
--
-- - #VUID-vkCmdTraceRaysKHR-None-02692# If a
-- 'Vulkan.Core10.Handles.ImageView' is sampled with
-- 'Vulkan.Extensions.VK_EXT_filter_cubic.FILTER_CUBIC_EXT' as a result
-- of this command, then the image view’s
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#resources-image-view-format-features format features>
-- /must/ contain
-- 'Vulkan.Extensions.VK_EXT_filter_cubic.FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT'
--
-- - #VUID-vkCmdTraceRaysKHR-filterCubic-02694# Any
-- 'Vulkan.Core10.Handles.ImageView' being sampled with
-- 'Vulkan.Extensions.VK_EXT_filter_cubic.FILTER_CUBIC_EXT' as a result
-- of this command /must/ have a
-- 'Vulkan.Core10.Enums.ImageViewType.ImageViewType' and format that
-- supports cubic filtering, as specified by
-- 'Vulkan.Extensions.VK_EXT_filter_cubic.FilterCubicImageViewImageFormatPropertiesEXT'::@filterCubic@
-- returned by
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceImageFormatProperties2'
--
-- - #VUID-vkCmdTraceRaysKHR-filterCubicMinmax-02695# Any
-- 'Vulkan.Core10.Handles.ImageView' being sampled with
-- 'Vulkan.Extensions.VK_EXT_filter_cubic.FILTER_CUBIC_EXT' with a
-- reduction mode of either
-- 'Vulkan.Core12.Enums.SamplerReductionMode.SAMPLER_REDUCTION_MODE_MIN'
-- or
-- 'Vulkan.Core12.Enums.SamplerReductionMode.SAMPLER_REDUCTION_MODE_MAX'
-- as a result of this command /must/ have a
-- 'Vulkan.Core10.Enums.ImageViewType.ImageViewType' and format that
-- supports cubic filtering together with minmax filtering, as
-- specified by
-- 'Vulkan.Extensions.VK_EXT_filter_cubic.FilterCubicImageViewImageFormatPropertiesEXT'::@filterCubicMinmax@
-- returned by
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceImageFormatProperties2'
--
-- - #VUID-vkCmdTraceRaysKHR-flags-02696# Any
-- 'Vulkan.Core10.Handles.Image' created with a
-- 'Vulkan.Core10.Image.ImageCreateInfo'::@flags@ containing
-- 'Vulkan.Core10.Enums.ImageCreateFlagBits.IMAGE_CREATE_CORNER_SAMPLED_BIT_NV'
-- sampled as a result of this command /must/ only be sampled using a
-- 'Vulkan.Core10.Enums.SamplerAddressMode.SamplerAddressMode' of
-- 'Vulkan.Core10.Enums.SamplerAddressMode.SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE'
--
-- - #VUID-vkCmdTraceRaysKHR-OpTypeImage-06423# Any
-- 'Vulkan.Core10.Handles.ImageView' or
-- 'Vulkan.Core10.Handles.BufferView' being written as a storage image
-- or storage texel buffer where the image format field of the
-- @OpTypeImage@ is @Unknown@ /must/ have image format features that
-- support
-- 'Vulkan.Core13.Enums.FormatFeatureFlags2.FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT'
--
-- - #VUID-vkCmdTraceRaysKHR-OpTypeImage-06424# Any
-- 'Vulkan.Core10.Handles.ImageView' or
-- 'Vulkan.Core10.Handles.BufferView' being read as a storage image or
-- storage texel buffer where the image format field of the
-- @OpTypeImage@ is @Unknown@ /must/ have image format features that
-- support
-- 'Vulkan.Core13.Enums.FormatFeatureFlags2.FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT'
--
-- - #VUID-vkCmdTraceRaysKHR-None-02697# For each set /n/ that is
-- statically used by the 'Vulkan.Core10.Handles.Pipeline' bound to the
-- pipeline bind point used by this command, a descriptor set /must/
-- have been bound to /n/ at the same pipeline bind point, with a
-- 'Vulkan.Core10.Handles.PipelineLayout' that is compatible for set
-- /n/, with the 'Vulkan.Core10.Handles.PipelineLayout' used to create
-- the current 'Vulkan.Core10.Handles.Pipeline', as described in
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#descriptorsets-compatibility ???>
--
-- - #VUID-vkCmdTraceRaysKHR-maintenance4-06425# If the
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#features-maintenance4 maintenance4>
-- feature is not enabled, then for each push constant that is
-- statically used by the 'Vulkan.Core10.Handles.Pipeline' bound to the
-- pipeline bind point used by this command, a push constant value
-- /must/ have been set for the same pipeline bind point, with a
-- 'Vulkan.Core10.Handles.PipelineLayout' that is compatible for push
-- constants, with the 'Vulkan.Core10.Handles.PipelineLayout' used to
-- create the current 'Vulkan.Core10.Handles.Pipeline', as described in
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#descriptorsets-compatibility ???>
--
-- - #VUID-vkCmdTraceRaysKHR-None-02699# Descriptors in each bound
-- descriptor set, specified via
-- 'Vulkan.Core10.CommandBufferBuilding.cmdBindDescriptorSets', /must/
-- be valid if they are statically used by the
-- 'Vulkan.Core10.Handles.Pipeline' bound to the pipeline bind point
-- used by this command
--
-- - #VUID-vkCmdTraceRaysKHR-None-02700# A valid pipeline /must/ be bound
-- to the pipeline bind point used by this command
--
-- - #VUID-vkCmdTraceRaysKHR-commandBuffer-02701# If the
-- 'Vulkan.Core10.Handles.Pipeline' object bound to the pipeline bind
-- point used by this command requires any dynamic state, that state
-- /must/ have been set or inherited (if the
-- @VK_NV_inherited_viewport_scissor@ extension is enabled) for
-- @commandBuffer@, and done so after any previously bound pipeline
-- with the corresponding state not specified as dynamic
--
-- - #VUID-vkCmdTraceRaysKHR-None-02859# There /must/ not have been any
-- calls to dynamic state setting commands for any state not specified
-- as dynamic in the 'Vulkan.Core10.Handles.Pipeline' object bound to
-- the pipeline bind point used by this command, since that pipeline
-- was bound
--
-- - #VUID-vkCmdTraceRaysKHR-None-02702# If the
-- 'Vulkan.Core10.Handles.Pipeline' object bound to the pipeline bind
-- point used by this command accesses a
-- 'Vulkan.Core10.Handles.Sampler' object that uses unnormalized
-- coordinates, that sampler /must/ not be used to sample from any
-- 'Vulkan.Core10.Handles.Image' with a
-- 'Vulkan.Core10.Handles.ImageView' of the type
-- 'Vulkan.Core10.Enums.ImageViewType.IMAGE_VIEW_TYPE_3D',
-- 'Vulkan.Core10.Enums.ImageViewType.IMAGE_VIEW_TYPE_CUBE',
-- 'Vulkan.Core10.Enums.ImageViewType.IMAGE_VIEW_TYPE_1D_ARRAY',
-- 'Vulkan.Core10.Enums.ImageViewType.IMAGE_VIEW_TYPE_2D_ARRAY' or
-- 'Vulkan.Core10.Enums.ImageViewType.IMAGE_VIEW_TYPE_CUBE_ARRAY', in
-- any shader stage
--
-- - #VUID-vkCmdTraceRaysKHR-None-02703# If the
-- 'Vulkan.Core10.Handles.Pipeline' object bound to the pipeline bind
-- point used by this command accesses a
-- 'Vulkan.Core10.Handles.Sampler' object that uses unnormalized
-- coordinates, that sampler /must/ not be used with any of the SPIR-V
-- @OpImageSample*@ or @OpImageSparseSample*@ instructions with
-- @ImplicitLod@, @Dref@ or @Proj@ in their name, in any shader stage
--
-- - #VUID-vkCmdTraceRaysKHR-None-02704# If the
-- 'Vulkan.Core10.Handles.Pipeline' object bound to the pipeline bind
-- point used by this command accesses a
-- 'Vulkan.Core10.Handles.Sampler' object that uses unnormalized
-- coordinates, that sampler /must/ not be used with any of the SPIR-V
-- @OpImageSample*@ or @OpImageSparseSample*@ instructions that
-- includes a LOD bias or any offset values, in any shader stage
--
-- - #VUID-vkCmdTraceRaysKHR-None-02705# If the
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#features-robustBufferAccess robust buffer access>
-- feature is not enabled, and if the 'Vulkan.Core10.Handles.Pipeline'
-- object bound to the pipeline bind point used by this command
-- accesses a uniform buffer, it /must/ not access values outside of
-- the range of the buffer as specified in the descriptor set bound to
-- the same pipeline bind point
--
-- - #VUID-vkCmdTraceRaysKHR-None-02706# If the
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#features-robustBufferAccess robust buffer access>
-- feature is not enabled, and if the 'Vulkan.Core10.Handles.Pipeline'
-- object bound to the pipeline bind point used by this command
-- accesses a storage buffer, it /must/ not access values outside of
-- the range of the buffer as specified in the descriptor set bound to
-- the same pipeline bind point
--
-- - #VUID-vkCmdTraceRaysKHR-commandBuffer-02707# If @commandBuffer@ is
-- an unprotected command buffer and
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#limits-protectedNoFault protectedNoFault>
-- is not supported, any resource accessed by the
-- 'Vulkan.Core10.Handles.Pipeline' object bound to the pipeline bind
-- point used by this command /must/ not be a protected resource
--
-- - #VUID-vkCmdTraceRaysKHR-None-04115# If a
-- 'Vulkan.Core10.Handles.ImageView' is accessed using @OpImageWrite@
-- as a result of this command, then the @Type@ of the @Texel@ operand
-- of that instruction /must/ have at least as many components as the
-- image view’s format
--
-- - #VUID-vkCmdTraceRaysKHR-OpImageWrite-04469# If a
-- 'Vulkan.Core10.Handles.BufferView' is accessed using @OpImageWrite@
-- as a result of this command, then the @Type@ of the @Texel@ operand
-- of that instruction /must/ have at least as many components as the
-- buffer view’s format
--
-- - #VUID-vkCmdTraceRaysKHR-SampledType-04470# If a
-- 'Vulkan.Core10.Handles.ImageView' with a
-- 'Vulkan.Core10.Enums.Format.Format' that has a 64-bit component
-- width is accessed as a result of this command, the @SampledType@ of
-- the @OpTypeImage@ operand of that instruction /must/ have a @Width@
-- of 64
--
-- - #VUID-vkCmdTraceRaysKHR-SampledType-04471# If a
-- 'Vulkan.Core10.Handles.ImageView' with a
-- 'Vulkan.Core10.Enums.Format.Format' that has a component width less
-- than 64-bit is accessed as a result of this command, the
-- @SampledType@ of the @OpTypeImage@ operand of that instruction
-- /must/ have a @Width@ of 32
--
-- - #VUID-vkCmdTraceRaysKHR-SampledType-04472# If a
-- 'Vulkan.Core10.Handles.BufferView' with a
-- 'Vulkan.Core10.Enums.Format.Format' that has a 64-bit component
-- width is accessed as a result of this command, the @SampledType@ of
-- the @OpTypeImage@ operand of that instruction /must/ have a @Width@
-- of 64
--
-- - #VUID-vkCmdTraceRaysKHR-SampledType-04473# If a
-- 'Vulkan.Core10.Handles.BufferView' with a
-- 'Vulkan.Core10.Enums.Format.Format' that has a component width less
-- than 64-bit is accessed as a result of this command, the
-- @SampledType@ of the @OpTypeImage@ operand of that instruction
-- /must/ have a @Width@ of 32
--
-- - #VUID-vkCmdTraceRaysKHR-sparseImageInt64Atomics-04474# If the
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#features-sparseImageInt64Atomics sparseImageInt64Atomics>
-- feature is not enabled, 'Vulkan.Core10.Handles.Image' objects
-- created with the
-- 'Vulkan.Core10.Enums.ImageCreateFlagBits.IMAGE_CREATE_SPARSE_RESIDENCY_BIT'
-- flag /must/ not be accessed by atomic instructions through an
-- @OpTypeImage@ with a @SampledType@ with a @Width@ of 64 by this
-- command
--
-- - #VUID-vkCmdTraceRaysKHR-sparseImageInt64Atomics-04475# If the
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#features-sparseImageInt64Atomics sparseImageInt64Atomics>
-- feature is not enabled, 'Vulkan.Core10.Handles.Buffer' objects
-- created with the
-- 'Vulkan.Core10.Enums.BufferCreateFlagBits.BUFFER_CREATE_SPARSE_RESIDENCY_BIT'
-- flag /must/ not be accessed by atomic instructions through an
-- @OpTypeImage@ with a @SampledType@ with a @Width@ of 64 by this
-- command
--
-- - #VUID-vkCmdTraceRaysKHR-None-03429# Any shader group handle
-- referenced by this call /must/ have been queried from the currently
-- bound ray tracing pipeline
--
-- - #VUID-vkCmdTraceRaysKHR-maxPipelineRayRecursionDepth-03679# This
-- command /must/ not cause a shader call instruction to be executed
-- from a shader invocation with a
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#ray-tracing-recursion-depth recursion depth>
-- greater than the value of @maxPipelineRayRecursionDepth@ used to
-- create the bound ray tracing pipeline
--
-- - #VUID-vkCmdTraceRaysKHR-pRayGenShaderBindingTable-03680# If the
-- buffer from which @pRayGenShaderBindingTable->deviceAddress@ was
-- queried is non-sparse then it /must/ be bound completely and
-- contiguously to a single 'Vulkan.Core10.Handles.DeviceMemory' object
--
-- - #VUID-vkCmdTraceRaysKHR-pRayGenShaderBindingTable-03681# The buffer
-- from which the @pRayGenShaderBindingTable->deviceAddress@ is queried
-- /must/ have been created with the
-- 'Vulkan.Core10.Enums.BufferUsageFlagBits.BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR'
-- usage flag
--
-- - #VUID-vkCmdTraceRaysKHR-pRayGenShaderBindingTable-03682#
-- @pRayGenShaderBindingTable->deviceAddress@ /must/ be a multiple of
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@shaderGroupBaseAlignment@
--
-- - #VUID-vkCmdTraceRaysKHR-size-04023# The @size@ member of
-- @pRayGenShaderBindingTable@ /must/ be equal to its @stride@ member
--
-- - #VUID-vkCmdTraceRaysKHR-pMissShaderBindingTable-03683# If the buffer
-- from which @pMissShaderBindingTable->deviceAddress@ was queried is
-- non-sparse then it /must/ be bound completely and contiguously to a
-- single 'Vulkan.Core10.Handles.DeviceMemory' object
--
-- - #VUID-vkCmdTraceRaysKHR-pMissShaderBindingTable-03684# The buffer
-- from which the @pMissShaderBindingTable->deviceAddress@ is queried
-- /must/ have been created with the
-- 'Vulkan.Core10.Enums.BufferUsageFlagBits.BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR'
-- usage flag
--
-- - #VUID-vkCmdTraceRaysKHR-pMissShaderBindingTable-03685#
-- @pMissShaderBindingTable->deviceAddress@ /must/ be a multiple of
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@shaderGroupBaseAlignment@
--
-- - #VUID-vkCmdTraceRaysKHR-stride-03686# The @stride@ member of
-- @pMissShaderBindingTable@ /must/ be a multiple of
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@shaderGroupHandleAlignment@
--
-- - #VUID-vkCmdTraceRaysKHR-stride-04029# The @stride@ member of
-- @pMissShaderBindingTable@ /must/ be less than or equal to
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@maxShaderGroupStride@
--
-- - #VUID-vkCmdTraceRaysKHR-pHitShaderBindingTable-03687# If the buffer
-- from which @pHitShaderBindingTable->deviceAddress@ was queried is
-- non-sparse then it /must/ be bound completely and contiguously to a
-- single 'Vulkan.Core10.Handles.DeviceMemory' object
--
-- - #VUID-vkCmdTraceRaysKHR-pHitShaderBindingTable-03688# The buffer
-- from which the @pHitShaderBindingTable->deviceAddress@ is queried
-- /must/ have been created with the
-- 'Vulkan.Core10.Enums.BufferUsageFlagBits.BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR'
-- usage flag
--
-- - #VUID-vkCmdTraceRaysKHR-pHitShaderBindingTable-03689#
-- @pHitShaderBindingTable->deviceAddress@ /must/ be a multiple of
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@shaderGroupBaseAlignment@
--
-- - #VUID-vkCmdTraceRaysKHR-stride-03690# The @stride@ member of
-- @pHitShaderBindingTable@ /must/ be a multiple of
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@shaderGroupHandleAlignment@
--
-- - #VUID-vkCmdTraceRaysKHR-stride-04035# The @stride@ member of
-- @pHitShaderBindingTable@ /must/ be less than or equal to
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@maxShaderGroupStride@
--
-- - #VUID-vkCmdTraceRaysKHR-pCallableShaderBindingTable-03691# If the
-- buffer from which @pCallableShaderBindingTable->deviceAddress@ was
-- queried is non-sparse then it /must/ be bound completely and
-- contiguously to a single 'Vulkan.Core10.Handles.DeviceMemory' object
--
-- - #VUID-vkCmdTraceRaysKHR-pCallableShaderBindingTable-03692# The
-- buffer from which the @pCallableShaderBindingTable->deviceAddress@
-- is queried /must/ have been created with the
-- 'Vulkan.Core10.Enums.BufferUsageFlagBits.BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR'
-- usage flag
--
-- - #VUID-vkCmdTraceRaysKHR-pCallableShaderBindingTable-03693#
-- @pCallableShaderBindingTable->deviceAddress@ /must/ be a multiple of
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@shaderGroupBaseAlignment@
--
-- - #VUID-vkCmdTraceRaysKHR-stride-03694# The @stride@ member of
-- @pCallableShaderBindingTable@ /must/ be a multiple of
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@shaderGroupHandleAlignment@
--
-- - #VUID-vkCmdTraceRaysKHR-stride-04041# The @stride@ member of
-- @pCallableShaderBindingTable@ /must/ be less than or equal to
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@maxShaderGroupStride@
--
-- - #VUID-vkCmdTraceRaysKHR-flags-03696# If the currently bound ray
-- tracing pipeline was created with @flags@ that included
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR',
-- the @deviceAddress@ member of @pHitShaderBindingTable@ /must/ not be
-- zero
--
-- - #VUID-vkCmdTraceRaysKHR-flags-03697# If the currently bound ray
-- tracing pipeline was created with @flags@ that included
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR',
-- the @deviceAddress@ member of @pHitShaderBindingTable@ /must/ not be
-- zero
--
-- - #VUID-vkCmdTraceRaysKHR-flags-03511# If the currently bound ray
-- tracing pipeline was created with @flags@ that included
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR',
-- the shader group handle identified by @pMissShaderBindingTable@
-- /must/ not be set to zero
--
-- - #VUID-vkCmdTraceRaysKHR-flags-03512# If the currently bound ray
-- tracing pipeline was created with @flags@ that included
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR',
-- entries in @pHitShaderBindingTable@ accessed as a result of this
-- command in order to execute an any-hit shader /must/ not be set to
-- zero
--
-- - #VUID-vkCmdTraceRaysKHR-flags-03513# If the currently bound ray
-- tracing pipeline was created with @flags@ that included
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR',
-- entries in @pHitShaderBindingTable@ accessed as a result of this
-- command in order to execute a closest hit shader /must/ not be set
-- to zero
--
-- - #VUID-vkCmdTraceRaysKHR-flags-03514# If the currently bound ray
-- tracing pipeline was created with @flags@ that included
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR',
-- entries in @pHitShaderBindingTable@ accessed as a result of this
-- command in order to execute an intersection shader /must/ not be set
-- to zero
--
-- - #VUID-vkCmdTraceRaysKHR-pHitShaderBindingTable-04735# Any non-zero
-- hit shader group entries in @pHitShaderBindingTable@ accessed by
-- this call from a geometry with a @geometryType@ of
-- 'Vulkan.Extensions.VK_KHR_acceleration_structure.GEOMETRY_TYPE_TRIANGLES_KHR'
-- /must/ have been created with
-- 'RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR'
--
-- - #VUID-vkCmdTraceRaysKHR-pHitShaderBindingTable-04736# Any non-zero
-- hit shader group entries in @pHitShaderBindingTable@ accessed by
-- this call from a geometry with a @geometryType@ of
-- 'Vulkan.Extensions.VK_KHR_acceleration_structure.GEOMETRY_TYPE_AABBS_KHR'
-- /must/ have been created with
-- 'RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR'
--
-- - #VUID-vkCmdTraceRaysKHR-commandBuffer-04625# @commandBuffer@ /must/
-- not be a protected command buffer
--
-- - #VUID-vkCmdTraceRaysKHR-width-03626# @width@ /must/ be less than or
-- equal to
-- 'Vulkan.Core10.DeviceInitialization.PhysicalDeviceLimits'::@maxComputeWorkGroupCount@[0]
-- ×
-- 'Vulkan.Core10.DeviceInitialization.PhysicalDeviceLimits'::@maxComputeWorkGroupSize@[0]
--
-- - #VUID-vkCmdTraceRaysKHR-height-03627# @height@ /must/ be less than
-- or equal to
-- 'Vulkan.Core10.DeviceInitialization.PhysicalDeviceLimits'::@maxComputeWorkGroupCount@[1]
-- ×
-- 'Vulkan.Core10.DeviceInitialization.PhysicalDeviceLimits'::@maxComputeWorkGroupSize@[1]
--
-- - #VUID-vkCmdTraceRaysKHR-depth-03628# @depth@ /must/ be less than or
-- equal to
-- 'Vulkan.Core10.DeviceInitialization.PhysicalDeviceLimits'::@maxComputeWorkGroupCount@[2]
-- ×
-- 'Vulkan.Core10.DeviceInitialization.PhysicalDeviceLimits'::@maxComputeWorkGroupSize@[2]
--
-- - #VUID-vkCmdTraceRaysKHR-width-03629# @width@ × @height@ × @depth@
-- /must/ be less than or equal to
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@maxRayDispatchInvocationCount@
--
-- == Valid Usage (Implicit)
--
-- - #VUID-vkCmdTraceRaysKHR-commandBuffer-parameter# @commandBuffer@
-- /must/ be a valid 'Vulkan.Core10.Handles.CommandBuffer' handle
--
-- - #VUID-vkCmdTraceRaysKHR-pRaygenShaderBindingTable-parameter#
-- @pRaygenShaderBindingTable@ /must/ be a valid pointer to a valid
-- 'StridedDeviceAddressRegionKHR' structure
--
-- - #VUID-vkCmdTraceRaysKHR-pMissShaderBindingTable-parameter#
-- @pMissShaderBindingTable@ /must/ be a valid pointer to a valid
-- 'StridedDeviceAddressRegionKHR' structure
--
-- - #VUID-vkCmdTraceRaysKHR-pHitShaderBindingTable-parameter#
-- @pHitShaderBindingTable@ /must/ be a valid pointer to a valid
-- 'StridedDeviceAddressRegionKHR' structure
--
-- - #VUID-vkCmdTraceRaysKHR-pCallableShaderBindingTable-parameter#
-- @pCallableShaderBindingTable@ /must/ be a valid pointer to a valid
-- 'StridedDeviceAddressRegionKHR' structure
--
-- - #VUID-vkCmdTraceRaysKHR-commandBuffer-recording# @commandBuffer@
-- /must/ be in the
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#commandbuffers-lifecycle recording state>
--
-- - #VUID-vkCmdTraceRaysKHR-commandBuffer-cmdpool# The
-- 'Vulkan.Core10.Handles.CommandPool' that @commandBuffer@ was
-- allocated from /must/ support compute operations
--
-- - #VUID-vkCmdTraceRaysKHR-renderpass# This command /must/ only be
-- called outside of a render pass instance
--
-- == Host Synchronization
--
-- - Host access to @commandBuffer@ /must/ be externally synchronized
--
-- - Host access to the 'Vulkan.Core10.Handles.CommandPool' that
-- @commandBuffer@ was allocated from /must/ be externally synchronized
--
-- == Command Properties
--
-- \'
--
-- +----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+
-- | <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VkCommandBufferLevel Command Buffer Levels> | <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#vkCmdBeginRenderPass Render Pass Scope> | <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VkQueueFlagBits Supported Queue Types> |
-- +============================================================================================================================+========================================================================================================================+=======================================================================================================================+
-- | Primary | Outside | Compute |
-- | Secondary | | |
-- +----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_ray_tracing_pipeline VK_KHR_ray_tracing_pipeline>,
-- 'Vulkan.Core10.Handles.CommandBuffer', 'StridedDeviceAddressRegionKHR'
cmdTraceRaysKHR :: forall io
. (MonadIO io)
=> -- | @commandBuffer@ is the command buffer into which the command will be
-- recorded.
CommandBuffer
-> -- | @pRaygenShaderBindingTable@ is a 'StridedDeviceAddressRegionKHR' that
-- holds the shader binding table data for the ray generation shader stage.
("raygenShaderBindingTable" ::: StridedDeviceAddressRegionKHR)
-> -- | @pMissShaderBindingTable@ is a 'StridedDeviceAddressRegionKHR' that
-- holds the shader binding table data for the miss shader stage.
("missShaderBindingTable" ::: StridedDeviceAddressRegionKHR)
-> -- | @pHitShaderBindingTable@ is a 'StridedDeviceAddressRegionKHR' that holds
-- the shader binding table data for the hit shader stage.
("hitShaderBindingTable" ::: StridedDeviceAddressRegionKHR)
-> -- | @pCallableShaderBindingTable@ is a 'StridedDeviceAddressRegionKHR' that
-- holds the shader binding table data for the callable shader stage.
("callableShaderBindingTable" ::: StridedDeviceAddressRegionKHR)
-> -- | @width@ is the width of the ray trace query dimensions.
("width" ::: Word32)
-> -- | @height@ is the height of the ray trace query dimensions.
("height" ::: Word32)
-> -- | @depth@ is the depth of the ray trace query dimensions.
("depth" ::: Word32)
-> io ()
cmdTraceRaysKHR commandBuffer raygenShaderBindingTable missShaderBindingTable hitShaderBindingTable callableShaderBindingTable width height depth = liftIO . evalContT $ do
let vkCmdTraceRaysKHRPtr = pVkCmdTraceRaysKHR (case commandBuffer of CommandBuffer{deviceCmds} -> deviceCmds)
lift $ unless (vkCmdTraceRaysKHRPtr /= nullFunPtr) $
throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkCmdTraceRaysKHR is null" Nothing Nothing
let vkCmdTraceRaysKHR' = mkVkCmdTraceRaysKHR vkCmdTraceRaysKHRPtr
pRaygenShaderBindingTable <- ContT $ withCStruct (raygenShaderBindingTable)
pMissShaderBindingTable <- ContT $ withCStruct (missShaderBindingTable)
pHitShaderBindingTable <- ContT $ withCStruct (hitShaderBindingTable)
pCallableShaderBindingTable <- ContT $ withCStruct (callableShaderBindingTable)
lift $ traceAroundEvent "vkCmdTraceRaysKHR" (vkCmdTraceRaysKHR' (commandBufferHandle (commandBuffer)) pRaygenShaderBindingTable pMissShaderBindingTable pHitShaderBindingTable pCallableShaderBindingTable (width) (height) (depth))
pure $ ()
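-- Usage sketch (not part of the generated bindings): recording a
-- full-resolution trace once the shader binding table regions have been built
-- by the application. The use of 'Vulkan.Zero.zero' for an unused callable
-- table and the concrete dimensions are assumptions for illustration only;
-- real addresses and strides must come from buffers created with the
-- @SHADER_BINDING_TABLE@ usage flag.
--
-- > import Vulkan.Zero (zero)
-- >
-- > traceScene :: CommandBuffer
-- >            -> StridedDeviceAddressRegionKHR  -- raygen region
-- >            -> StridedDeviceAddressRegionKHR  -- miss region
-- >            -> StridedDeviceAddressRegionKHR  -- hit region
-- >            -> IO ()
-- > traceScene cmd raygenRegion missRegion hitRegion =
-- >   -- an all-zero region is legal for the callable table when the bound
-- >   -- pipeline uses no callable shaders
-- >   cmdTraceRaysKHR cmd raygenRegion missRegion hitRegion zero 1920 1080 1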
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkGetRayTracingShaderGroupHandlesKHR
:: FunPtr (Ptr Device_T -> Pipeline -> Word32 -> Word32 -> CSize -> Ptr () -> IO Result) -> Ptr Device_T -> Pipeline -> Word32 -> Word32 -> CSize -> Ptr () -> IO Result
-- | vkGetRayTracingShaderGroupHandlesKHR - Query ray tracing pipeline shader
-- group handles
--
-- == Valid Usage
--
-- - #VUID-vkGetRayTracingShaderGroupHandlesKHR-pipeline-04619#
-- @pipeline@ /must/ be a ray tracing pipeline
--
-- - #VUID-vkGetRayTracingShaderGroupHandlesKHR-firstGroup-04050#
-- @firstGroup@ /must/ be less than the number of shader groups in
-- @pipeline@
--
-- - #VUID-vkGetRayTracingShaderGroupHandlesKHR-firstGroup-02419# The sum
-- of @firstGroup@ and @groupCount@ /must/ be less than or equal to the
-- number of shader groups in @pipeline@
--
-- - #VUID-vkGetRayTracingShaderGroupHandlesKHR-dataSize-02420#
-- @dataSize@ /must/ be at least
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@shaderGroupHandleSize@
-- × @groupCount@
--
-- - #VUID-vkGetRayTracingShaderGroupHandlesKHR-pipeline-03482#
-- @pipeline@ /must/ have not been created with
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_LIBRARY_BIT_KHR'
--
-- == Valid Usage (Implicit)
--
-- - #VUID-vkGetRayTracingShaderGroupHandlesKHR-device-parameter#
-- @device@ /must/ be a valid 'Vulkan.Core10.Handles.Device' handle
--
-- - #VUID-vkGetRayTracingShaderGroupHandlesKHR-pipeline-parameter#
-- @pipeline@ /must/ be a valid 'Vulkan.Core10.Handles.Pipeline' handle
--
-- - #VUID-vkGetRayTracingShaderGroupHandlesKHR-pData-parameter# @pData@
-- /must/ be a valid pointer to an array of @dataSize@ bytes
--
-- - #VUID-vkGetRayTracingShaderGroupHandlesKHR-dataSize-arraylength#
-- @dataSize@ /must/ be greater than @0@
--
-- - #VUID-vkGetRayTracingShaderGroupHandlesKHR-pipeline-parent#
-- @pipeline@ /must/ have been created, allocated, or retrieved from
-- @device@
--
-- == Return Codes
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-successcodes Success>]
--
-- - 'Vulkan.Core10.Enums.Result.SUCCESS'
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-errorcodes Failure>]
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_HOST_MEMORY'
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_DEVICE_MEMORY'
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_ray_tracing_pipeline VK_KHR_ray_tracing_pipeline>,
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_NV_ray_tracing VK_NV_ray_tracing>,
-- 'Vulkan.Core10.Handles.Device', 'Vulkan.Core10.Handles.Pipeline'
getRayTracingShaderGroupHandlesKHR :: forall io
. (MonadIO io)
=> -- | @device@ is the logical device containing the ray tracing pipeline.
Device
-> -- | @pipeline@ is the ray tracing pipeline object containing the shaders.
Pipeline
-> -- | @firstGroup@ is the index of the first group to retrieve a handle for
-- from the 'RayTracingPipelineCreateInfoKHR'::@pGroups@ or
-- 'Vulkan.Extensions.VK_NV_ray_tracing.RayTracingPipelineCreateInfoNV'::@pGroups@
-- array.
("firstGroup" ::: Word32)
-> -- | @groupCount@ is the number of shader handles to retrieve.
("groupCount" ::: Word32)
-> -- | @dataSize@ is the size in bytes of the buffer pointed to by @pData@.
("dataSize" ::: Word64)
-> -- | @pData@ is a pointer to a user-allocated buffer where the results will
-- be written.
("data" ::: Ptr ())
-> io ()
getRayTracingShaderGroupHandlesKHR device pipeline firstGroup groupCount dataSize data' = liftIO $ do
let vkGetRayTracingShaderGroupHandlesKHRPtr = pVkGetRayTracingShaderGroupHandlesKHR (case device of Device{deviceCmds} -> deviceCmds)
unless (vkGetRayTracingShaderGroupHandlesKHRPtr /= nullFunPtr) $
throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkGetRayTracingShaderGroupHandlesKHR is null" Nothing Nothing
let vkGetRayTracingShaderGroupHandlesKHR' = mkVkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHRPtr
r <- traceAroundEvent "vkGetRayTracingShaderGroupHandlesKHR" (vkGetRayTracingShaderGroupHandlesKHR' (deviceHandle (device)) (pipeline) (firstGroup) (groupCount) (CSize (dataSize)) (data'))
when (r < SUCCESS) (throwIO (VulkanException r))
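-- Usage sketch (not part of the generated bindings): copying all shader group
-- handles of a pipeline into a strict 'Data.ByteString.ByteString', e.g. as
-- the first step of building a shader binding table. @handleSize@ is assumed
-- to have been read from
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@shaderGroupHandleSize@.
--
-- > import qualified Data.ByteString as BS
-- > import Foreign.Marshal.Alloc (allocaBytes)
-- > import Foreign.Ptr (castPtr)
-- >
-- > fetchGroupHandles :: Device -> Pipeline -> Word32 -> Word32 -> IO BS.ByteString
-- > fetchGroupHandles dev pipe groupCount handleSize = do
-- >   let byteCount = fromIntegral (groupCount * handleSize)
-- >   allocaBytes byteCount $ \buf -> do
-- >     -- query handles for groups [0, groupCount) into the scratch buffer
-- >     getRayTracingShaderGroupHandlesKHR dev pipe 0 groupCount (fromIntegral byteCount) buf
-- >     BS.packCStringLen (castPtr buf, byteCount)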
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkGetRayTracingCaptureReplayShaderGroupHandlesKHR
:: FunPtr (Ptr Device_T -> Pipeline -> Word32 -> Word32 -> CSize -> Ptr () -> IO Result) -> Ptr Device_T -> Pipeline -> Word32 -> Word32 -> CSize -> Ptr () -> IO Result
-- | vkGetRayTracingCaptureReplayShaderGroupHandlesKHR - Query ray tracing
-- capture replay pipeline shader group handles
--
-- == Valid Usage
--
-- - #VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-pipeline-04620#
-- @pipeline@ /must/ be a ray tracing pipeline
--
-- - #VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-firstGroup-04051#
-- @firstGroup@ /must/ be less than the number of shader groups in
-- @pipeline@
--
-- - #VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-firstGroup-03483#
-- The sum of @firstGroup@ and @groupCount@ /must/ be less than or
-- equal to the number of shader groups in @pipeline@
--
-- - #VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-dataSize-03484#
-- @dataSize@ /must/ be at least
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@shaderGroupHandleCaptureReplaySize@
-- × @groupCount@
--
-- - #VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-rayTracingPipelineShaderGroupHandleCaptureReplay-03606#
-- 'PhysicalDeviceRayTracingPipelineFeaturesKHR'::@rayTracingPipelineShaderGroupHandleCaptureReplay@
-- /must/ be enabled to call this function
--
-- - #VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-pipeline-03607#
-- @pipeline@ /must/ have been created with a @flags@ that included
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR'
--
-- == Valid Usage (Implicit)
--
-- - #VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-device-parameter#
-- @device@ /must/ be a valid 'Vulkan.Core10.Handles.Device' handle
--
-- - #VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-pipeline-parameter#
-- @pipeline@ /must/ be a valid 'Vulkan.Core10.Handles.Pipeline' handle
--
-- - #VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-pData-parameter#
-- @pData@ /must/ be a valid pointer to an array of @dataSize@ bytes
--
-- - #VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-dataSize-arraylength#
-- @dataSize@ /must/ be greater than @0@
--
-- - #VUID-vkGetRayTracingCaptureReplayShaderGroupHandlesKHR-pipeline-parent#
-- @pipeline@ /must/ have been created, allocated, or retrieved from
-- @device@
--
-- == Return Codes
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-successcodes Success>]
--
-- - 'Vulkan.Core10.Enums.Result.SUCCESS'
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-errorcodes Failure>]
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_HOST_MEMORY'
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_DEVICE_MEMORY'
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_ray_tracing_pipeline VK_KHR_ray_tracing_pipeline>,
-- 'Vulkan.Core10.Handles.Device', 'Vulkan.Core10.Handles.Pipeline'
getRayTracingCaptureReplayShaderGroupHandlesKHR :: forall io
. (MonadIO io)
=> -- | @device@ is the logical device containing the ray tracing pipeline.
Device
-> -- | @pipeline@ is the ray tracing pipeline object containing the shaders.
Pipeline
-> -- | @firstGroup@ is the index of the first group to retrieve a handle for
-- from the 'RayTracingPipelineCreateInfoKHR'::@pGroups@ array.
("firstGroup" ::: Word32)
-> -- | @groupCount@ is the number of shader handles to retrieve.
("groupCount" ::: Word32)
-> -- | @dataSize@ is the size in bytes of the buffer pointed to by @pData@.
("dataSize" ::: Word64)
-> -- | @pData@ is a pointer to a user-allocated buffer where the results will
-- be written.
("data" ::: Ptr ())
-> io ()
getRayTracingCaptureReplayShaderGroupHandlesKHR device pipeline firstGroup groupCount dataSize data' = liftIO $ do
let vkGetRayTracingCaptureReplayShaderGroupHandlesKHRPtr = pVkGetRayTracingCaptureReplayShaderGroupHandlesKHR (case device of Device{deviceCmds} -> deviceCmds)
unless (vkGetRayTracingCaptureReplayShaderGroupHandlesKHRPtr /= nullFunPtr) $
throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkGetRayTracingCaptureReplayShaderGroupHandlesKHR is null" Nothing Nothing
let vkGetRayTracingCaptureReplayShaderGroupHandlesKHR' = mkVkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHRPtr
r <- traceAroundEvent "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" (vkGetRayTracingCaptureReplayShaderGroupHandlesKHR' (deviceHandle (device)) (pipeline) (firstGroup) (groupCount) (CSize (dataSize)) (data'))
when (r < SUCCESS) (throwIO (VulkanException r))
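-- Usage sketch (not part of the generated bindings): the call shape mirrors
-- 'getRayTracingShaderGroupHandlesKHR', except that the buffer is sized from
-- the (assumed) @shaderGroupHandleCaptureReplaySize@ limit and the
-- @rayTracingPipelineShaderGroupHandleCaptureReplay@ feature must have been
-- enabled at device creation.
--
-- > captureReplayHandles :: Device -> Pipeline -> Word32 -> Word32 -> Ptr () -> IO ()
-- > captureReplayHandles dev pipe groupCount replayHandleSize buf =
-- >   -- caller allocates @buf@ with at least groupCount * replayHandleSize bytes
-- >   getRayTracingCaptureReplayShaderGroupHandlesKHR
-- >     dev pipe 0 groupCount (fromIntegral (groupCount * replayHandleSize)) buf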
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkCreateRayTracingPipelinesKHR
:: FunPtr (Ptr Device_T -> DeferredOperationKHR -> PipelineCache -> Word32 -> Ptr (SomeStruct RayTracingPipelineCreateInfoKHR) -> Ptr AllocationCallbacks -> Ptr Pipeline -> IO Result) -> Ptr Device_T -> DeferredOperationKHR -> PipelineCache -> Word32 -> Ptr (SomeStruct RayTracingPipelineCreateInfoKHR) -> Ptr AllocationCallbacks -> Ptr Pipeline -> IO Result
-- | vkCreateRayTracingPipelinesKHR - Creates a new ray tracing pipeline
-- object
--
-- = Description
--
-- The 'Vulkan.Core10.Enums.Result.ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS'
-- error is returned if the implementation is unable to re-use the shader
-- group handles provided in
-- 'RayTracingShaderGroupCreateInfoKHR'::@pShaderGroupCaptureReplayHandle@
-- when
-- 'PhysicalDeviceRayTracingPipelineFeaturesKHR'::@rayTracingPipelineShaderGroupHandleCaptureReplay@
-- is enabled.
--
-- == Valid Usage
--
-- - #VUID-vkCreateRayTracingPipelinesKHR-flags-03415# If the @flags@
-- member of any element of @pCreateInfos@ contains the
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_DERIVATIVE_BIT'
-- flag, and the @basePipelineIndex@ member of that same element is not
-- @-1@, @basePipelineIndex@ /must/ be less than the index into
-- @pCreateInfos@ that corresponds to that element
--
-- - #VUID-vkCreateRayTracingPipelinesKHR-flags-03416# If the @flags@
-- member of any element of @pCreateInfos@ contains the
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_DERIVATIVE_BIT'
-- flag, the base pipeline /must/ have been created with the
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT'
-- flag set
--
-- - #VUID-vkCreateRayTracingPipelinesKHR-flags-03816# @flags@ /must/ not
-- contain the
-- 'Vulkan.Core11.Promoted_From_VK_KHR_device_group.PIPELINE_CREATE_DISPATCH_BASE'
-- flag
--
-- - #VUID-vkCreateRayTracingPipelinesKHR-pipelineCache-02903# If
-- @pipelineCache@ was created with
-- 'Vulkan.Core10.Enums.PipelineCacheCreateFlagBits.PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT',
-- host access to @pipelineCache@ /must/ be
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-threadingbehavior externally synchronized>
--
-- - #VUID-vkCreateRayTracingPipelinesKHR-deferredOperation-03677# If
-- @deferredOperation@ is not 'Vulkan.Core10.APIConstants.NULL_HANDLE',
-- it /must/ be a valid
-- 'Vulkan.Extensions.Handles.DeferredOperationKHR' object
--
-- - #VUID-vkCreateRayTracingPipelinesKHR-deferredOperation-03678# Any
-- previous deferred operation that was associated with
-- @deferredOperation@ /must/ be complete
--
-- - #VUID-vkCreateRayTracingPipelinesKHR-rayTracingPipeline-03586# The
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#features-rayTracingPipeline rayTracingPipeline>
-- feature /must/ be enabled
--
-- - #VUID-vkCreateRayTracingPipelinesKHR-deferredOperation-03587# If
-- @deferredOperation@ is not 'Vulkan.Core10.APIConstants.NULL_HANDLE',
-- the @flags@ member of elements of @pCreateInfos@ /must/ not include
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT'
--
-- == Valid Usage (Implicit)
--
-- - #VUID-vkCreateRayTracingPipelinesKHR-device-parameter# @device@
-- /must/ be a valid 'Vulkan.Core10.Handles.Device' handle
--
-- - #VUID-vkCreateRayTracingPipelinesKHR-deferredOperation-parameter# If
-- @deferredOperation@ is not 'Vulkan.Core10.APIConstants.NULL_HANDLE',
-- @deferredOperation@ /must/ be a valid
-- 'Vulkan.Extensions.Handles.DeferredOperationKHR' handle
--
-- - #VUID-vkCreateRayTracingPipelinesKHR-pipelineCache-parameter# If
-- @pipelineCache@ is not 'Vulkan.Core10.APIConstants.NULL_HANDLE',
-- @pipelineCache@ /must/ be a valid
-- 'Vulkan.Core10.Handles.PipelineCache' handle
--
-- - #VUID-vkCreateRayTracingPipelinesKHR-pCreateInfos-parameter#
-- @pCreateInfos@ /must/ be a valid pointer to an array of
-- @createInfoCount@ valid 'RayTracingPipelineCreateInfoKHR' structures
--
-- - #VUID-vkCreateRayTracingPipelinesKHR-pAllocator-parameter# If
-- @pAllocator@ is not @NULL@, @pAllocator@ /must/ be a valid pointer
-- to a valid 'Vulkan.Core10.AllocationCallbacks.AllocationCallbacks'
-- structure
--
-- - #VUID-vkCreateRayTracingPipelinesKHR-pPipelines-parameter#
-- @pPipelines@ /must/ be a valid pointer to an array of
-- @createInfoCount@ 'Vulkan.Core10.Handles.Pipeline' handles
--
-- - #VUID-vkCreateRayTracingPipelinesKHR-createInfoCount-arraylength#
-- @createInfoCount@ /must/ be greater than @0@
--
-- - #VUID-vkCreateRayTracingPipelinesKHR-deferredOperation-parent# If
-- @deferredOperation@ is a valid handle, it /must/ have been created,
-- allocated, or retrieved from @device@
--
-- - #VUID-vkCreateRayTracingPipelinesKHR-pipelineCache-parent# If
-- @pipelineCache@ is a valid handle, it /must/ have been created,
-- allocated, or retrieved from @device@
--
-- == Return Codes
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-successcodes Success>]
--
-- - 'Vulkan.Core10.Enums.Result.SUCCESS'
--
-- - 'Vulkan.Core10.Enums.Result.OPERATION_DEFERRED_KHR'
--
-- - 'Vulkan.Core10.Enums.Result.OPERATION_NOT_DEFERRED_KHR'
--
-- - 'Vulkan.Extensions.VK_EXT_pipeline_creation_cache_control.PIPELINE_COMPILE_REQUIRED_EXT'
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-errorcodes Failure>]
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_HOST_MEMORY'
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_DEVICE_MEMORY'
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS'
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_ray_tracing_pipeline VK_KHR_ray_tracing_pipeline>,
-- 'Vulkan.Core10.AllocationCallbacks.AllocationCallbacks',
-- 'Vulkan.Extensions.Handles.DeferredOperationKHR',
-- 'Vulkan.Core10.Handles.Device', 'Vulkan.Core10.Handles.Pipeline',
-- 'Vulkan.Core10.Handles.PipelineCache', 'RayTracingPipelineCreateInfoKHR'
createRayTracingPipelinesKHR :: forall io
. (MonadIO io)
=> -- | @device@ is the logical device that creates the ray tracing pipelines.
Device
-> -- | @deferredOperation@ is 'Vulkan.Core10.APIConstants.NULL_HANDLE' or the
-- handle of a valid 'Vulkan.Extensions.Handles.DeferredOperationKHR'
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#deferred-host-operations-requesting request deferral>
-- object for this command.
DeferredOperationKHR
-> -- | @pipelineCache@ is either 'Vulkan.Core10.APIConstants.NULL_HANDLE',
-- indicating that pipeline caching is disabled, or the handle of a valid
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#pipelines-cache pipeline cache>
-- object, in which case use of that cache is enabled for the duration of
-- the command.
PipelineCache
-> -- | @pCreateInfos@ is a pointer to an array of
-- 'RayTracingPipelineCreateInfoKHR' structures.
("createInfos" ::: Vector (SomeStruct RayTracingPipelineCreateInfoKHR))
-> -- | @pAllocator@ controls host memory allocation as described in the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#memory-allocation Memory Allocation>
-- chapter.
("allocator" ::: Maybe AllocationCallbacks)
-> io (Result, ("pipelines" ::: Vector Pipeline))
createRayTracingPipelinesKHR device deferredOperation pipelineCache createInfos allocator = liftIO . evalContT $ do
let vkCreateRayTracingPipelinesKHRPtr = pVkCreateRayTracingPipelinesKHR (case device of Device{deviceCmds} -> deviceCmds)
lift $ unless (vkCreateRayTracingPipelinesKHRPtr /= nullFunPtr) $
throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkCreateRayTracingPipelinesKHR is null" Nothing Nothing
let vkCreateRayTracingPipelinesKHR' = mkVkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHRPtr
pPCreateInfos <- ContT $ allocaBytes @(RayTracingPipelineCreateInfoKHR _) ((Data.Vector.length (createInfos)) * 104)
Data.Vector.imapM_ (\i e -> ContT $ pokeSomeCStruct (forgetExtensions (pPCreateInfos `plusPtr` (104 * (i)) :: Ptr (RayTracingPipelineCreateInfoKHR _))) (e) . ($ ())) (createInfos)
pAllocator <- case (allocator) of
Nothing -> pure nullPtr
Just j -> ContT $ withCStruct (j)
pPPipelines <- ContT $ bracket (callocBytes @Pipeline ((fromIntegral ((fromIntegral (Data.Vector.length $ (createInfos)) :: Word32))) * 8)) free
r <- lift $ traceAroundEvent "vkCreateRayTracingPipelinesKHR" (vkCreateRayTracingPipelinesKHR' (deviceHandle (device)) (deferredOperation) (pipelineCache) ((fromIntegral (Data.Vector.length $ (createInfos)) :: Word32)) (forgetExtensions (pPCreateInfos)) pAllocator (pPPipelines))
lift $ when (r < SUCCESS) (throwIO (VulkanException r))
pPipelines <- lift $ generateM (fromIntegral ((fromIntegral (Data.Vector.length $ (createInfos)) :: Word32))) (\i -> peek @Pipeline ((pPPipelines `advancePtrBytes` (8 * (i)) :: Ptr Pipeline)))
pure $ (r, pPipelines)
-- | A convenience wrapper to make a compatible pair of calls to
-- 'createRayTracingPipelinesKHR' and 'destroyPipeline'
--
-- To ensure that 'destroyPipeline' is always called: pass
-- 'Control.Exception.bracket' (or the allocate function from your
-- favourite resource management library) as the last argument.
-- To just extract the pair pass '(,)' as the last argument.
--
withRayTracingPipelinesKHR :: forall io r . MonadIO io => Device -> DeferredOperationKHR -> PipelineCache -> Vector (SomeStruct RayTracingPipelineCreateInfoKHR) -> Maybe AllocationCallbacks -> (io (Result, Vector Pipeline) -> ((Result, Vector Pipeline) -> io ()) -> r) -> r
withRayTracingPipelinesKHR device deferredOperation pipelineCache pCreateInfos pAllocator b =
b (createRayTracingPipelinesKHR device deferredOperation pipelineCache pCreateInfos pAllocator)
(\(_, o1) -> traverse_ (\o1Elem -> destroyPipeline device o1Elem pAllocator) o1)
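-- Usage sketch (not part of the generated bindings): pairing pipeline creation
-- and destruction with 'Control.Exception.bracket' via the wrapper above.
-- Passing 'Vulkan.Core10.APIConstants.NULL_HANDLE' for both the deferred
-- operation and the pipeline cache is an assumption for the simplest case
-- (no deferral, no caching).
--
-- > import Control.Exception (bracket)
-- >
-- > buildPipelines :: Device
-- >                -> Vector (SomeStruct RayTracingPipelineCreateInfoKHR)
-- >                -> ((Result, Vector Pipeline) -> IO a)
-- >                -> IO a
-- > buildPipelines dev createInfos use =
-- >   -- the pipelines are destroyed when @use@ returns or throws
-- >   withRayTracingPipelinesKHR dev NULL_HANDLE NULL_HANDLE createInfos Nothing bracket use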
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkCmdTraceRaysIndirectKHR
:: FunPtr (Ptr CommandBuffer_T -> Ptr StridedDeviceAddressRegionKHR -> Ptr StridedDeviceAddressRegionKHR -> Ptr StridedDeviceAddressRegionKHR -> Ptr StridedDeviceAddressRegionKHR -> DeviceAddress -> IO ()) -> Ptr CommandBuffer_T -> Ptr StridedDeviceAddressRegionKHR -> Ptr StridedDeviceAddressRegionKHR -> Ptr StridedDeviceAddressRegionKHR -> Ptr StridedDeviceAddressRegionKHR -> DeviceAddress -> IO ()
-- | vkCmdTraceRaysIndirectKHR - Initialize an indirect ray tracing dispatch
--
-- = Description
--
-- 'cmdTraceRaysIndirectKHR' behaves similarly to 'cmdTraceRaysKHR' except
-- that the ray trace query dimensions are read by the device from
-- @indirectDeviceAddress@ during execution.
--
-- == Valid Usage
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-magFilter-04553# If a
-- 'Vulkan.Core10.Handles.Sampler' created with @magFilter@ or
-- @minFilter@ equal to 'Vulkan.Core10.Enums.Filter.FILTER_LINEAR' and
-- @compareEnable@ equal to 'Vulkan.Core10.FundamentalTypes.FALSE' is
-- used to sample a 'Vulkan.Core10.Handles.ImageView' as a result of
-- this command, then the image view’s
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#resources-image-view-format-features format features>
-- /must/ contain
-- 'Vulkan.Core10.Enums.FormatFeatureFlagBits.FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT'
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-mipmapMode-04770# If a
-- 'Vulkan.Core10.Handles.Sampler' created with @mipmapMode@ equal to
-- 'Vulkan.Core10.Enums.SamplerMipmapMode.SAMPLER_MIPMAP_MODE_LINEAR'
-- and @compareEnable@ equal to 'Vulkan.Core10.FundamentalTypes.FALSE'
-- is used to sample a 'Vulkan.Core10.Handles.ImageView' as a result of
-- this command, then the image view’s
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#resources-image-view-format-features format features>
-- /must/ contain
-- 'Vulkan.Core10.Enums.FormatFeatureFlagBits.FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT'
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-None-06479# If a
-- 'Vulkan.Core10.Handles.ImageView' is sampled with
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#textures-depth-compare-operation depth comparison>,
-- the image view’s
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#resources-image-view-format-features format features>
-- /must/ contain
-- 'Vulkan.Core13.Enums.FormatFeatureFlags2.FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT'
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-None-02691# If a
-- 'Vulkan.Core10.Handles.ImageView' is accessed using atomic
-- operations as a result of this command, then the image view’s
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#resources-image-view-format-features format features>
-- /must/ contain
-- 'Vulkan.Core10.Enums.FormatFeatureFlagBits.FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT'
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-None-02692# If a
-- 'Vulkan.Core10.Handles.ImageView' is sampled with
-- 'Vulkan.Extensions.VK_EXT_filter_cubic.FILTER_CUBIC_EXT' as a result
-- of this command, then the image view’s
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#resources-image-view-format-features format features>
-- /must/ contain
-- 'Vulkan.Extensions.VK_EXT_filter_cubic.FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT'
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-filterCubic-02694# Any
-- 'Vulkan.Core10.Handles.ImageView' being sampled with
-- 'Vulkan.Extensions.VK_EXT_filter_cubic.FILTER_CUBIC_EXT' as a result
-- of this command /must/ have a
-- 'Vulkan.Core10.Enums.ImageViewType.ImageViewType' and format that
-- supports cubic filtering, as specified by
-- 'Vulkan.Extensions.VK_EXT_filter_cubic.FilterCubicImageViewImageFormatPropertiesEXT'::@filterCubic@
-- returned by
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceImageFormatProperties2'
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-filterCubicMinmax-02695# Any
-- 'Vulkan.Core10.Handles.ImageView' being sampled with
-- 'Vulkan.Extensions.VK_EXT_filter_cubic.FILTER_CUBIC_EXT' with a
-- reduction mode of either
-- 'Vulkan.Core12.Enums.SamplerReductionMode.SAMPLER_REDUCTION_MODE_MIN'
-- or
-- 'Vulkan.Core12.Enums.SamplerReductionMode.SAMPLER_REDUCTION_MODE_MAX'
-- as a result of this command /must/ have a
-- 'Vulkan.Core10.Enums.ImageViewType.ImageViewType' and format that
-- supports cubic filtering together with minmax filtering, as
-- specified by
-- 'Vulkan.Extensions.VK_EXT_filter_cubic.FilterCubicImageViewImageFormatPropertiesEXT'::@filterCubicMinmax@
-- returned by
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceImageFormatProperties2'
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-flags-02696# Any
-- 'Vulkan.Core10.Handles.Image' created with a
-- 'Vulkan.Core10.Image.ImageCreateInfo'::@flags@ containing
-- 'Vulkan.Core10.Enums.ImageCreateFlagBits.IMAGE_CREATE_CORNER_SAMPLED_BIT_NV'
-- sampled as a result of this command /must/ only be sampled using a
-- 'Vulkan.Core10.Enums.SamplerAddressMode.SamplerAddressMode' of
-- 'Vulkan.Core10.Enums.SamplerAddressMode.SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE'
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-OpTypeImage-06423# Any
-- 'Vulkan.Core10.Handles.ImageView' or
-- 'Vulkan.Core10.Handles.BufferView' being written as a storage image
-- or storage texel buffer where the image format field of the
-- @OpTypeImage@ is @Unknown@ /must/ have image format features that
-- support
-- 'Vulkan.Core13.Enums.FormatFeatureFlags2.FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT'
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-OpTypeImage-06424# Any
-- 'Vulkan.Core10.Handles.ImageView' or
-- 'Vulkan.Core10.Handles.BufferView' being read as a storage image or
-- storage texel buffer where the image format field of the
-- @OpTypeImage@ is @Unknown@ /must/ have image format features that
-- support
-- 'Vulkan.Core13.Enums.FormatFeatureFlags2.FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT'
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-None-02697# For each set /n/ that is
-- statically used by the 'Vulkan.Core10.Handles.Pipeline' bound to the
-- pipeline bind point used by this command, a descriptor set /must/
-- have been bound to /n/ at the same pipeline bind point, with a
-- 'Vulkan.Core10.Handles.PipelineLayout' that is compatible for set
-- /n/, with the 'Vulkan.Core10.Handles.PipelineLayout' used to create
-- the current 'Vulkan.Core10.Handles.Pipeline', as described in
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#descriptorsets-compatibility ???>
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-maintenance4-06425# If the
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#features-maintenance4 maintenance4>
-- feature is not enabled, then for each push constant that is
-- statically used by the 'Vulkan.Core10.Handles.Pipeline' bound to the
-- pipeline bind point used by this command, a push constant value
-- /must/ have been set for the same pipeline bind point, with a
-- 'Vulkan.Core10.Handles.PipelineLayout' that is compatible for push
-- constants, with the 'Vulkan.Core10.Handles.PipelineLayout' used to
-- create the current 'Vulkan.Core10.Handles.Pipeline', as described in
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#descriptorsets-compatibility ???>
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-None-02699# Descriptors in each
-- bound descriptor set, specified via
-- 'Vulkan.Core10.CommandBufferBuilding.cmdBindDescriptorSets', /must/
-- be valid if they are statically used by the
-- 'Vulkan.Core10.Handles.Pipeline' bound to the pipeline bind point
-- used by this command
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-None-02700# A valid pipeline /must/
-- be bound to the pipeline bind point used by this command
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-commandBuffer-02701# If the
-- 'Vulkan.Core10.Handles.Pipeline' object bound to the pipeline bind
-- point used by this command requires any dynamic state, that state
-- /must/ have been set or inherited (if the
-- @VK_NV_inherited_viewport_scissor@ extension is enabled) for
-- @commandBuffer@, and done so after any previously bound pipeline
-- with the corresponding state not specified as dynamic
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-None-02859# There /must/ not have
-- been any calls to dynamic state setting commands for any state not
-- specified as dynamic in the 'Vulkan.Core10.Handles.Pipeline' object
-- bound to the pipeline bind point used by this command, since that
-- pipeline was bound
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-None-02702# If the
-- 'Vulkan.Core10.Handles.Pipeline' object bound to the pipeline bind
-- point used by this command accesses a
-- 'Vulkan.Core10.Handles.Sampler' object that uses unnormalized
-- coordinates, that sampler /must/ not be used to sample from any
-- 'Vulkan.Core10.Handles.Image' with a
-- 'Vulkan.Core10.Handles.ImageView' of the type
-- 'Vulkan.Core10.Enums.ImageViewType.IMAGE_VIEW_TYPE_3D',
-- 'Vulkan.Core10.Enums.ImageViewType.IMAGE_VIEW_TYPE_CUBE',
-- 'Vulkan.Core10.Enums.ImageViewType.IMAGE_VIEW_TYPE_1D_ARRAY',
-- 'Vulkan.Core10.Enums.ImageViewType.IMAGE_VIEW_TYPE_2D_ARRAY' or
-- 'Vulkan.Core10.Enums.ImageViewType.IMAGE_VIEW_TYPE_CUBE_ARRAY', in
-- any shader stage
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-None-02703# If the
-- 'Vulkan.Core10.Handles.Pipeline' object bound to the pipeline bind
-- point used by this command accesses a
-- 'Vulkan.Core10.Handles.Sampler' object that uses unnormalized
-- coordinates, that sampler /must/ not be used with any of the SPIR-V
-- @OpImageSample*@ or @OpImageSparseSample*@ instructions with
-- @ImplicitLod@, @Dref@ or @Proj@ in their name, in any shader stage
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-None-02704# If the
-- 'Vulkan.Core10.Handles.Pipeline' object bound to the pipeline bind
-- point used by this command accesses a
-- 'Vulkan.Core10.Handles.Sampler' object that uses unnormalized
-- coordinates, that sampler /must/ not be used with any of the SPIR-V
-- @OpImageSample*@ or @OpImageSparseSample*@ instructions that
-- includes a LOD bias or any offset values, in any shader stage
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-None-02705# If the
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#features-robustBufferAccess robust buffer access>
-- feature is not enabled, and if the 'Vulkan.Core10.Handles.Pipeline'
-- object bound to the pipeline bind point used by this command
-- accesses a uniform buffer, it /must/ not access values outside of
-- the range of the buffer as specified in the descriptor set bound to
-- the same pipeline bind point
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-None-02706# If the
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#features-robustBufferAccess robust buffer access>
-- feature is not enabled, and if the 'Vulkan.Core10.Handles.Pipeline'
-- object bound to the pipeline bind point used by this command
-- accesses a storage buffer, it /must/ not access values outside of
-- the range of the buffer as specified in the descriptor set bound to
-- the same pipeline bind point
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-commandBuffer-02707# If
-- @commandBuffer@ is an unprotected command buffer and
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#limits-protectedNoFault protectedNoFault>
-- is not supported, any resource accessed by the
-- 'Vulkan.Core10.Handles.Pipeline' object bound to the pipeline bind
-- point used by this command /must/ not be a protected resource
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-None-04115# If a
-- 'Vulkan.Core10.Handles.ImageView' is accessed using @OpImageWrite@
-- as a result of this command, then the @Type@ of the @Texel@ operand
-- of that instruction /must/ have at least as many components as the
-- image view’s format
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-OpImageWrite-04469# If a
-- 'Vulkan.Core10.Handles.BufferView' is accessed using @OpImageWrite@
-- as a result of this command, then the @Type@ of the @Texel@ operand
-- of that instruction /must/ have at least as many components as the
-- buffer view’s format
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-SampledType-04470# If a
-- 'Vulkan.Core10.Handles.ImageView' with a
-- 'Vulkan.Core10.Enums.Format.Format' that has a 64-bit component
-- width is accessed as a result of this command, the @SampledType@ of
-- the @OpTypeImage@ operand of that instruction /must/ have a @Width@
-- of 64
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-SampledType-04471# If a
-- 'Vulkan.Core10.Handles.ImageView' with a
-- 'Vulkan.Core10.Enums.Format.Format' that has a component width less
-- than 64-bit is accessed as a result of this command, the
-- @SampledType@ of the @OpTypeImage@ operand of that instruction
-- /must/ have a @Width@ of 32
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-SampledType-04472# If a
-- 'Vulkan.Core10.Handles.BufferView' with a
-- 'Vulkan.Core10.Enums.Format.Format' that has a 64-bit component
-- width is accessed as a result of this command, the @SampledType@ of
-- the @OpTypeImage@ operand of that instruction /must/ have a @Width@
-- of 64
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-SampledType-04473# If a
-- 'Vulkan.Core10.Handles.BufferView' with a
-- 'Vulkan.Core10.Enums.Format.Format' that has a component width less
-- than 64-bit is accessed as a result of this command, the
-- @SampledType@ of the @OpTypeImage@ operand of that instruction
-- /must/ have a @Width@ of 32
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-sparseImageInt64Atomics-04474# If
-- the
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#features-sparseImageInt64Atomics sparseImageInt64Atomics>
-- feature is not enabled, 'Vulkan.Core10.Handles.Image' objects
-- created with the
-- 'Vulkan.Core10.Enums.ImageCreateFlagBits.IMAGE_CREATE_SPARSE_RESIDENCY_BIT'
-- flag /must/ not be accessed by atomic instructions through an
-- @OpTypeImage@ with a @SampledType@ with a @Width@ of 64 by this
-- command
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-sparseImageInt64Atomics-04475# If
-- the
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#features-sparseImageInt64Atomics sparseImageInt64Atomics>
-- feature is not enabled, 'Vulkan.Core10.Handles.Buffer' objects
-- created with the
-- 'Vulkan.Core10.Enums.BufferCreateFlagBits.BUFFER_CREATE_SPARSE_RESIDENCY_BIT'
-- flag /must/ not be accessed by atomic instructions through an
-- @OpTypeImage@ with a @SampledType@ with a @Width@ of 64 by this
-- command
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-None-03429# Any shader group handle
-- referenced by this call /must/ have been queried from the currently
-- bound ray tracing pipeline
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-maxPipelineRayRecursionDepth-03679#
-- This command /must/ not cause a shader call instruction to be
-- executed from a shader invocation with a
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#ray-tracing-recursion-depth recursion depth>
-- greater than the value of @maxPipelineRayRecursionDepth@ used to
-- create the bound ray tracing pipeline
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-pRayGenShaderBindingTable-03680# If
-- the buffer from which @pRayGenShaderBindingTable->deviceAddress@ was
-- queried is non-sparse then it /must/ be bound completely and
-- contiguously to a single 'Vulkan.Core10.Handles.DeviceMemory' object
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-pRayGenShaderBindingTable-03681# The
-- buffer from which the @pRayGenShaderBindingTable->deviceAddress@ is
-- queried /must/ have been created with the
-- 'Vulkan.Core10.Enums.BufferUsageFlagBits.BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR'
-- usage flag
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-pRayGenShaderBindingTable-03682#
-- @pRayGenShaderBindingTable->deviceAddress@ /must/ be a multiple of
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@shaderGroupBaseAlignment@
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-size-04023# The @size@ member of
-- @pRayGenShaderBindingTable@ /must/ be equal to its @stride@ member
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-pMissShaderBindingTable-03683# If
-- the buffer from which @pMissShaderBindingTable->deviceAddress@ was
-- queried is non-sparse then it /must/ be bound completely and
-- contiguously to a single 'Vulkan.Core10.Handles.DeviceMemory' object
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-pMissShaderBindingTable-03684# The
-- buffer from which the @pMissShaderBindingTable->deviceAddress@ is
-- queried /must/ have been created with the
-- 'Vulkan.Core10.Enums.BufferUsageFlagBits.BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR'
-- usage flag
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-pMissShaderBindingTable-03685#
-- @pMissShaderBindingTable->deviceAddress@ /must/ be a multiple of
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@shaderGroupBaseAlignment@
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-stride-03686# The @stride@ member of
-- @pMissShaderBindingTable@ /must/ be a multiple of
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@shaderGroupHandleAlignment@
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-stride-04029# The @stride@ member of
-- @pMissShaderBindingTable@ /must/ be less than or equal to
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@maxShaderGroupStride@
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-pHitShaderBindingTable-03687# If the
-- buffer from which @pHitShaderBindingTable->deviceAddress@ was
-- queried is non-sparse then it /must/ be bound completely and
-- contiguously to a single 'Vulkan.Core10.Handles.DeviceMemory' object
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-pHitShaderBindingTable-03688# The
-- buffer from which the @pHitShaderBindingTable->deviceAddress@ is
-- queried /must/ have been created with the
-- 'Vulkan.Core10.Enums.BufferUsageFlagBits.BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR'
-- usage flag
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-pHitShaderBindingTable-03689#
-- @pHitShaderBindingTable->deviceAddress@ /must/ be a multiple of
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@shaderGroupBaseAlignment@
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-stride-03690# The @stride@ member of
-- @pHitShaderBindingTable@ /must/ be a multiple of
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@shaderGroupHandleAlignment@
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-stride-04035# The @stride@ member of
-- @pHitShaderBindingTable@ /must/ be less than or equal to
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@maxShaderGroupStride@
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-pCallableShaderBindingTable-03691#
-- If the buffer from which
-- @pCallableShaderBindingTable->deviceAddress@ was queried is
-- non-sparse then it /must/ be bound completely and contiguously to a
-- single 'Vulkan.Core10.Handles.DeviceMemory' object
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-pCallableShaderBindingTable-03692#
-- The buffer from which the
-- @pCallableShaderBindingTable->deviceAddress@ is queried /must/ have
-- been created with the
-- 'Vulkan.Core10.Enums.BufferUsageFlagBits.BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR'
-- usage flag
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-pCallableShaderBindingTable-03693#
-- @pCallableShaderBindingTable->deviceAddress@ /must/ be a multiple of
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@shaderGroupBaseAlignment@
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-stride-03694# The @stride@ member of
-- @pCallableShaderBindingTable@ /must/ be a multiple of
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@shaderGroupHandleAlignment@
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-stride-04041# The @stride@ member of
-- @pCallableShaderBindingTable@ /must/ be less than or equal to
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@maxShaderGroupStride@
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-flags-03696# If the currently bound
-- ray tracing pipeline was created with @flags@ that included
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR',
-- the @deviceAddress@ member of @pHitShaderBindingTable@ /must/ not be
-- zero
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-flags-03697# If the currently bound
-- ray tracing pipeline was created with @flags@ that included
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR',
-- the @deviceAddress@ member of @pHitShaderBindingTable@ /must/ not be
-- zero
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-flags-03511# If the currently bound
-- ray tracing pipeline was created with @flags@ that included
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR',
-- the shader group handle identified by @pMissShaderBindingTable@
-- /must/ not be set to zero
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-flags-03512# If the currently bound
-- ray tracing pipeline was created with @flags@ that included
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR',
-- entries in @pHitShaderBindingTable@ accessed as a result of this
-- command in order to execute an any-hit shader /must/ not be set to
-- zero
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-flags-03513# If the currently bound
-- ray tracing pipeline was created with @flags@ that included
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR',
-- entries in @pHitShaderBindingTable@ accessed as a result of this
-- command in order to execute a closest hit shader /must/ not be set
-- to zero
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-flags-03514# If the currently bound
-- ray tracing pipeline was created with @flags@ that included
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR',
-- entries in @pHitShaderBindingTable@ accessed as a result of this
-- command in order to execute an intersection shader /must/ not be set
-- to zero
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-pHitShaderBindingTable-04735# Any
-- non-zero hit shader group entries in @pHitShaderBindingTable@
-- accessed by this call from a geometry with a @geometryType@ of
-- 'Vulkan.Extensions.VK_KHR_acceleration_structure.GEOMETRY_TYPE_TRIANGLES_KHR'
-- /must/ have been created with
-- 'RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR'
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-pHitShaderBindingTable-04736# Any
-- non-zero hit shader group entries in @pHitShaderBindingTable@
-- accessed by this call from a geometry with a @geometryType@ of
-- 'Vulkan.Extensions.VK_KHR_acceleration_structure.GEOMETRY_TYPE_AABBS_KHR'
-- /must/ have been created with
-- 'RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR'
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-indirectDeviceAddress-03632# If the
-- buffer from which @indirectDeviceAddress@ was queried is non-sparse
-- then it /must/ be bound completely and contiguously to a single
-- 'Vulkan.Core10.Handles.DeviceMemory' object
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-indirectDeviceAddress-03633# The
-- buffer from which @indirectDeviceAddress@ was queried /must/ have
-- been created with the
-- 'Vulkan.Core10.Enums.BufferUsageFlagBits.BUFFER_USAGE_INDIRECT_BUFFER_BIT'
-- bit set
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-indirectDeviceAddress-03634#
-- @indirectDeviceAddress@ /must/ be a multiple of @4@
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-commandBuffer-03635# @commandBuffer@
-- /must/ not be a protected command buffer
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-indirectDeviceAddress-03636# All
-- device addresses between @indirectDeviceAddress@ and
-- @indirectDeviceAddress@ + @sizeof@('TraceRaysIndirectCommandKHR') -
-- 1 /must/ be in the buffer device address range of the same buffer
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-rayTracingPipelineTraceRaysIndirect-03637#
--   The
--   <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#features-rayTracingPipelineTraceRaysIndirect rayTracingPipelineTraceRaysIndirect>
--   feature /must/ be enabled
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-rayTracingMotionBlurPipelineTraceRaysIndirect-04951#
--   If the bound ray tracing pipeline was created with
--   'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV',
--   the
--   'Vulkan.Extensions.VK_NV_ray_tracing_motion_blur.PhysicalDeviceRayTracingMotionBlurFeaturesNV'::@rayTracingMotionBlurPipelineTraceRaysIndirect@
--   feature /must/ be enabled
--
-- == Valid Usage (Implicit)
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-commandBuffer-parameter#
-- @commandBuffer@ /must/ be a valid
-- 'Vulkan.Core10.Handles.CommandBuffer' handle
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-pRaygenShaderBindingTable-parameter#
-- @pRaygenShaderBindingTable@ /must/ be a valid pointer to a valid
-- 'StridedDeviceAddressRegionKHR' structure
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-pMissShaderBindingTable-parameter#
-- @pMissShaderBindingTable@ /must/ be a valid pointer to a valid
-- 'StridedDeviceAddressRegionKHR' structure
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-pHitShaderBindingTable-parameter#
-- @pHitShaderBindingTable@ /must/ be a valid pointer to a valid
-- 'StridedDeviceAddressRegionKHR' structure
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-pCallableShaderBindingTable-parameter#
-- @pCallableShaderBindingTable@ /must/ be a valid pointer to a valid
-- 'StridedDeviceAddressRegionKHR' structure
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-commandBuffer-recording#
-- @commandBuffer@ /must/ be in the
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#commandbuffers-lifecycle recording state>
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-commandBuffer-cmdpool# The
-- 'Vulkan.Core10.Handles.CommandPool' that @commandBuffer@ was
-- allocated from /must/ support compute operations
--
-- - #VUID-vkCmdTraceRaysIndirectKHR-renderpass# This command /must/ only
-- be called outside of a render pass instance
--
-- == Host Synchronization
--
-- - Host access to @commandBuffer@ /must/ be externally synchronized
--
-- - Host access to the 'Vulkan.Core10.Handles.CommandPool' that
-- @commandBuffer@ was allocated from /must/ be externally synchronized
--
-- == Command Properties
--
-- \'
--
-- +----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+
-- | <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VkCommandBufferLevel Command Buffer Levels> | <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#vkCmdBeginRenderPass Render Pass Scope> | <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VkQueueFlagBits Supported Queue Types> |
-- +============================================================================================================================+========================================================================================================================+=======================================================================================================================+
-- | Primary | Outside | Compute |
-- | Secondary | | |
-- +----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_ray_tracing_pipeline VK_KHR_ray_tracing_pipeline>,
-- 'Vulkan.Core10.Handles.CommandBuffer',
-- 'Vulkan.Core10.FundamentalTypes.DeviceAddress',
-- 'StridedDeviceAddressRegionKHR'
cmdTraceRaysIndirectKHR :: forall io
. (MonadIO io)
=> -- | @commandBuffer@ is the command buffer into which the command will be
-- recorded.
CommandBuffer
-> -- | @pRaygenShaderBindingTable@ is a 'StridedDeviceAddressRegionKHR' that
-- holds the shader binding table data for the ray generation shader stage.
("raygenShaderBindingTable" ::: StridedDeviceAddressRegionKHR)
-> -- | @pMissShaderBindingTable@ is a 'StridedDeviceAddressRegionKHR' that
-- holds the shader binding table data for the miss shader stage.
("missShaderBindingTable" ::: StridedDeviceAddressRegionKHR)
-> -- | @pHitShaderBindingTable@ is a 'StridedDeviceAddressRegionKHR' that holds
-- the shader binding table data for the hit shader stage.
("hitShaderBindingTable" ::: StridedDeviceAddressRegionKHR)
-> -- | @pCallableShaderBindingTable@ is a 'StridedDeviceAddressRegionKHR' that
-- holds the shader binding table data for the callable shader stage.
("callableShaderBindingTable" ::: StridedDeviceAddressRegionKHR)
-> -- | @indirectDeviceAddress@ is a buffer device address which is a pointer to
-- a 'TraceRaysIndirectCommandKHR' structure containing the trace ray
-- parameters.
("indirectDeviceAddress" ::: DeviceAddress)
-> io ()
cmdTraceRaysIndirectKHR commandBuffer raygenShaderBindingTable missShaderBindingTable hitShaderBindingTable callableShaderBindingTable indirectDeviceAddress = liftIO . evalContT $ do
let vkCmdTraceRaysIndirectKHRPtr = pVkCmdTraceRaysIndirectKHR (case commandBuffer of CommandBuffer{deviceCmds} -> deviceCmds)
lift $ unless (vkCmdTraceRaysIndirectKHRPtr /= nullFunPtr) $
throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkCmdTraceRaysIndirectKHR is null" Nothing Nothing
let vkCmdTraceRaysIndirectKHR' = mkVkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHRPtr
pRaygenShaderBindingTable <- ContT $ withCStruct (raygenShaderBindingTable)
pMissShaderBindingTable <- ContT $ withCStruct (missShaderBindingTable)
pHitShaderBindingTable <- ContT $ withCStruct (hitShaderBindingTable)
pCallableShaderBindingTable <- ContT $ withCStruct (callableShaderBindingTable)
lift $ traceAroundEvent "vkCmdTraceRaysIndirectKHR" (vkCmdTraceRaysIndirectKHR' (commandBufferHandle (commandBuffer)) pRaygenShaderBindingTable pMissShaderBindingTable pHitShaderBindingTable pCallableShaderBindingTable (indirectDeviceAddress))
pure $ ()
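-- Illustrative sketch (not part of the generated bindings): recording an
-- indirect trace. The names @cmdBuffer@, @raygenRegion@, @missRegion@,
-- @hitRegion@ and @indirectAddr@ are assumed to be set up elsewhere, and
-- @indirectAddr@ is assumed to point at a device-local
-- 'TraceRaysIndirectCommandKHR'. An all-'zero' region marks the callable
-- shader binding table as unused.
--
-- > cmdTraceRaysIndirectKHR
-- >   cmdBuffer
-- >   raygenRegion
-- >   missRegion
-- >   hitRegion
-- >   zero         -- no callable shaders in this sketch
-- >   indirectAddr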
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkGetRayTracingShaderGroupStackSizeKHR
:: FunPtr (Ptr Device_T -> Pipeline -> Word32 -> ShaderGroupShaderKHR -> IO DeviceSize) -> Ptr Device_T -> Pipeline -> Word32 -> ShaderGroupShaderKHR -> IO DeviceSize
-- | vkGetRayTracingShaderGroupStackSizeKHR - Query ray tracing pipeline
-- shader group shader stack size
--
-- = Description
--
-- The return value is the ray tracing pipeline stack size in bytes for the
-- specified shader as called from the specified shader group.
--
-- == Valid Usage
--
-- - #VUID-vkGetRayTracingShaderGroupStackSizeKHR-pipeline-04622#
-- @pipeline@ /must/ be a ray tracing pipeline
--
-- - #VUID-vkGetRayTracingShaderGroupStackSizeKHR-group-03608# The value
--     of @group@ /must/ be less than the number of shader groups in
-- @pipeline@
--
-- - #VUID-vkGetRayTracingShaderGroupStackSizeKHR-groupShader-03609# The
-- shader identified by @groupShader@ in @group@ /must/ not be
-- 'Vulkan.Core10.APIConstants.SHADER_UNUSED_KHR'
--
-- == Valid Usage (Implicit)
--
-- - #VUID-vkGetRayTracingShaderGroupStackSizeKHR-device-parameter#
-- @device@ /must/ be a valid 'Vulkan.Core10.Handles.Device' handle
--
-- - #VUID-vkGetRayTracingShaderGroupStackSizeKHR-pipeline-parameter#
-- @pipeline@ /must/ be a valid 'Vulkan.Core10.Handles.Pipeline' handle
--
-- - #VUID-vkGetRayTracingShaderGroupStackSizeKHR-groupShader-parameter#
-- @groupShader@ /must/ be a valid 'ShaderGroupShaderKHR' value
--
-- - #VUID-vkGetRayTracingShaderGroupStackSizeKHR-pipeline-parent#
-- @pipeline@ /must/ have been created, allocated, or retrieved from
-- @device@
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_ray_tracing_pipeline VK_KHR_ray_tracing_pipeline>,
-- 'Vulkan.Core10.Handles.Device', 'Vulkan.Core10.Handles.Pipeline',
-- 'ShaderGroupShaderKHR'
getRayTracingShaderGroupStackSizeKHR :: forall io
. (MonadIO io)
=> -- | @device@ is the logical device containing the ray tracing pipeline.
Device
-> -- | @pipeline@ is the ray tracing pipeline object containing the shaders
-- groups.
Pipeline
-> -- | @group@ is the index of the shader group to query.
("group" ::: Word32)
-> -- | @groupShader@ is the type of shader from the group to query.
ShaderGroupShaderKHR
-> io (DeviceSize)
getRayTracingShaderGroupStackSizeKHR device pipeline group groupShader = liftIO $ do
let vkGetRayTracingShaderGroupStackSizeKHRPtr = pVkGetRayTracingShaderGroupStackSizeKHR (case device of Device{deviceCmds} -> deviceCmds)
unless (vkGetRayTracingShaderGroupStackSizeKHRPtr /= nullFunPtr) $
throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkGetRayTracingShaderGroupStackSizeKHR is null" Nothing Nothing
let vkGetRayTracingShaderGroupStackSizeKHR' = mkVkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHRPtr
r <- traceAroundEvent "vkGetRayTracingShaderGroupStackSizeKHR" (vkGetRayTracingShaderGroupStackSizeKHR' (deviceHandle (device)) (pipeline) (group) (groupShader))
pure $ (r)
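-- Illustrative sketch (assumed names, not generated code): querying the stack
-- size of the closest hit shader in shader group 0 of @rtPipeline@. The
-- 'ShaderGroupShaderKHR' value used here follows the package's usual naming
-- for @VK_SHADER_GROUP_SHADER_CLOSEST_HIT_KHR@.
--
-- > hitStack <- getRayTracingShaderGroupStackSizeKHR
-- >   device
-- >   rtPipeline
-- >   0
-- >   SHADER_GROUP_SHADER_CLOSEST_HIT_KHR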
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkCmdSetRayTracingPipelineStackSizeKHR
:: FunPtr (Ptr CommandBuffer_T -> Word32 -> IO ()) -> Ptr CommandBuffer_T -> Word32 -> IO ()
-- | vkCmdSetRayTracingPipelineStackSizeKHR - Set the stack size dynamically
-- for a ray tracing pipeline
--
-- = Description
--
-- This command sets the stack size for subsequent ray tracing commands
-- when the ray tracing pipeline is created with
-- 'Vulkan.Core10.Enums.DynamicState.DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR'
-- set in
-- 'Vulkan.Core10.Pipeline.PipelineDynamicStateCreateInfo'::@pDynamicStates@.
-- Otherwise, the stack size is computed as described in
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#ray-tracing-pipeline-stack Ray Tracing Pipeline Stack>.
--
-- == Valid Usage
--
-- - #VUID-vkCmdSetRayTracingPipelineStackSizeKHR-pipelineStackSize-03610#
-- @pipelineStackSize@ /must/ be large enough for any dynamic execution
-- through the shaders in the ray tracing pipeline used by a subsequent
-- trace call
--
-- == Valid Usage (Implicit)
--
-- - #VUID-vkCmdSetRayTracingPipelineStackSizeKHR-commandBuffer-parameter#
-- @commandBuffer@ /must/ be a valid
-- 'Vulkan.Core10.Handles.CommandBuffer' handle
--
-- - #VUID-vkCmdSetRayTracingPipelineStackSizeKHR-commandBuffer-recording#
-- @commandBuffer@ /must/ be in the
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#commandbuffers-lifecycle recording state>
--
-- - #VUID-vkCmdSetRayTracingPipelineStackSizeKHR-commandBuffer-cmdpool#
-- The 'Vulkan.Core10.Handles.CommandPool' that @commandBuffer@ was
-- allocated from /must/ support compute operations
--
-- - #VUID-vkCmdSetRayTracingPipelineStackSizeKHR-renderpass# This
-- command /must/ only be called outside of a render pass instance
--
-- == Host Synchronization
--
-- - Host access to @commandBuffer@ /must/ be externally synchronized
--
-- - Host access to the 'Vulkan.Core10.Handles.CommandPool' that
-- @commandBuffer@ was allocated from /must/ be externally synchronized
--
-- == Command Properties
--
-- \'
--
-- +----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+
-- | <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VkCommandBufferLevel Command Buffer Levels> | <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#vkCmdBeginRenderPass Render Pass Scope> | <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VkQueueFlagBits Supported Queue Types> |
-- +============================================================================================================================+========================================================================================================================+=======================================================================================================================+
-- | Primary | Outside | Compute |
-- | Secondary | | |
-- +----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_ray_tracing_pipeline VK_KHR_ray_tracing_pipeline>,
-- 'Vulkan.Core10.Handles.CommandBuffer'
cmdSetRayTracingPipelineStackSizeKHR :: forall io
. (MonadIO io)
=> -- | @commandBuffer@ is the command buffer into which the command will be
-- recorded.
CommandBuffer
-> -- | @pipelineStackSize@ is the stack size to use for subsequent ray tracing
-- trace commands.
("pipelineStackSize" ::: Word32)
-> io ()
cmdSetRayTracingPipelineStackSizeKHR commandBuffer pipelineStackSize = liftIO $ do
let vkCmdSetRayTracingPipelineStackSizeKHRPtr = pVkCmdSetRayTracingPipelineStackSizeKHR (case commandBuffer of CommandBuffer{deviceCmds} -> deviceCmds)
unless (vkCmdSetRayTracingPipelineStackSizeKHRPtr /= nullFunPtr) $
throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkCmdSetRayTracingPipelineStackSizeKHR is null" Nothing Nothing
let vkCmdSetRayTracingPipelineStackSizeKHR' = mkVkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHRPtr
traceAroundEvent "vkCmdSetRayTracingPipelineStackSizeKHR" (vkCmdSetRayTracingPipelineStackSizeKHR' (commandBufferHandle (commandBuffer)) (pipelineStackSize))
pure $ ()
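-- Illustrative sketch (assumed names): pairing the stack size query with the
-- dynamic stack size command, for a pipeline created with
-- 'Vulkan.Core10.Enums.DynamicState.DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR'.
-- A real application would combine the per-group sizes as described in the
-- Ray Tracing Pipeline Stack section; this sketch only uses the ray
-- generation shader's size from group 0.
--
-- > raygenStack <- getRayTracingShaderGroupStackSizeKHR device rtPipeline 0 SHADER_GROUP_SHADER_GENERAL_KHR
-- > cmdSetRayTracingPipelineStackSizeKHR cmdBuffer (fromIntegral raygenStack)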
-- | VkRayTracingShaderGroupCreateInfoKHR - Structure specifying shaders in a
-- shader group
--
-- == Valid Usage
--
-- - #VUID-VkRayTracingShaderGroupCreateInfoKHR-type-03474# If @type@ is
-- 'RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR' then @generalShader@
-- /must/ be a valid index into
-- 'RayTracingPipelineCreateInfoKHR'::@pStages@ referring to a shader
-- of
-- 'Vulkan.Core10.Enums.ShaderStageFlagBits.SHADER_STAGE_RAYGEN_BIT_KHR',
-- 'Vulkan.Core10.Enums.ShaderStageFlagBits.SHADER_STAGE_MISS_BIT_KHR',
-- or
-- 'Vulkan.Core10.Enums.ShaderStageFlagBits.SHADER_STAGE_CALLABLE_BIT_KHR'
--
-- - #VUID-VkRayTracingShaderGroupCreateInfoKHR-type-03475# If @type@ is
-- 'RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR' then @closestHitShader@,
-- @anyHitShader@, and @intersectionShader@ /must/ be
-- 'Vulkan.Core10.APIConstants.SHADER_UNUSED_KHR'
--
-- - #VUID-VkRayTracingShaderGroupCreateInfoKHR-type-03476# If @type@ is
-- 'RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR' then
-- @intersectionShader@ /must/ be a valid index into
-- 'RayTracingPipelineCreateInfoKHR'::@pStages@ referring to a shader
-- of
-- 'Vulkan.Core10.Enums.ShaderStageFlagBits.SHADER_STAGE_INTERSECTION_BIT_KHR'
--
-- - #VUID-VkRayTracingShaderGroupCreateInfoKHR-type-03477# If @type@ is
-- 'RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR' then
-- @intersectionShader@ /must/ be
-- 'Vulkan.Core10.APIConstants.SHADER_UNUSED_KHR'
--
-- - #VUID-VkRayTracingShaderGroupCreateInfoKHR-closestHitShader-03478#
-- @closestHitShader@ /must/ be either
-- 'Vulkan.Core10.APIConstants.SHADER_UNUSED_KHR' or a valid index into
-- 'RayTracingPipelineCreateInfoKHR'::@pStages@ referring to a shader
-- of
-- 'Vulkan.Core10.Enums.ShaderStageFlagBits.SHADER_STAGE_CLOSEST_HIT_BIT_KHR'
--
-- - #VUID-VkRayTracingShaderGroupCreateInfoKHR-anyHitShader-03479#
-- @anyHitShader@ /must/ be either
-- 'Vulkan.Core10.APIConstants.SHADER_UNUSED_KHR' or a valid index into
-- 'RayTracingPipelineCreateInfoKHR'::@pStages@ referring to a shader
-- of
-- 'Vulkan.Core10.Enums.ShaderStageFlagBits.SHADER_STAGE_ANY_HIT_BIT_KHR'
--
-- - #VUID-VkRayTracingShaderGroupCreateInfoKHR-rayTracingPipelineShaderGroupHandleCaptureReplayMixed-03603#
-- If
-- 'PhysicalDeviceRayTracingPipelineFeaturesKHR'::@rayTracingPipelineShaderGroupHandleCaptureReplayMixed@
-- is 'Vulkan.Core10.FundamentalTypes.FALSE' then
-- @pShaderGroupCaptureReplayHandle@ /must/ not be provided if it has
-- not been provided on a previous call to ray tracing pipeline
-- creation
--
-- - #VUID-VkRayTracingShaderGroupCreateInfoKHR-rayTracingPipelineShaderGroupHandleCaptureReplayMixed-03604#
-- If
-- 'PhysicalDeviceRayTracingPipelineFeaturesKHR'::@rayTracingPipelineShaderGroupHandleCaptureReplayMixed@
-- is 'Vulkan.Core10.FundamentalTypes.FALSE' then the caller /must/
-- guarantee that no ray tracing pipeline creation commands with
-- @pShaderGroupCaptureReplayHandle@ provided execute simultaneously
-- with ray tracing pipeline creation commands without
-- @pShaderGroupCaptureReplayHandle@ provided
--
-- == Valid Usage (Implicit)
--
-- - #VUID-VkRayTracingShaderGroupCreateInfoKHR-sType-sType# @sType@
-- /must/ be
-- 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR'
--
-- - #VUID-VkRayTracingShaderGroupCreateInfoKHR-pNext-pNext# @pNext@
-- /must/ be @NULL@
--
-- - #VUID-VkRayTracingShaderGroupCreateInfoKHR-type-parameter# @type@
-- /must/ be a valid 'RayTracingShaderGroupTypeKHR' value
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_ray_tracing_pipeline VK_KHR_ray_tracing_pipeline>,
-- 'RayTracingPipelineCreateInfoKHR', 'RayTracingShaderGroupTypeKHR',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
data RayTracingShaderGroupCreateInfoKHR = RayTracingShaderGroupCreateInfoKHR
{ -- | @type@ is the type of hit group specified in this structure.
type' :: RayTracingShaderGroupTypeKHR
, -- | @generalShader@ is the index of the ray generation, miss, or callable
-- shader from 'RayTracingPipelineCreateInfoKHR'::@pStages@ in the group if
-- the shader group has @type@ of
-- 'RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR', and
-- 'Vulkan.Core10.APIConstants.SHADER_UNUSED_KHR' otherwise.
generalShader :: Word32
, -- | @closestHitShader@ is the optional index of the closest hit shader from
-- 'RayTracingPipelineCreateInfoKHR'::@pStages@ in the group if the shader
-- group has @type@ of
-- 'RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR' or
-- 'RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR', and
-- 'Vulkan.Core10.APIConstants.SHADER_UNUSED_KHR' otherwise.
closestHitShader :: Word32
, -- | @anyHitShader@ is the optional index of the any-hit shader from
-- 'RayTracingPipelineCreateInfoKHR'::@pStages@ in the group if the shader
-- group has @type@ of
-- 'RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR' or
-- 'RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR', and
-- 'Vulkan.Core10.APIConstants.SHADER_UNUSED_KHR' otherwise.
anyHitShader :: Word32
, -- | @intersectionShader@ is the index of the intersection shader from
-- 'RayTracingPipelineCreateInfoKHR'::@pStages@ in the group if the shader
-- group has @type@ of
-- 'RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR', and
-- 'Vulkan.Core10.APIConstants.SHADER_UNUSED_KHR' otherwise.
intersectionShader :: Word32
, -- | @pShaderGroupCaptureReplayHandle@ is @NULL@ or a pointer to replay
-- information for this shader group. Ignored if
-- 'PhysicalDeviceRayTracingPipelineFeaturesKHR'::@rayTracingPipelineShaderGroupHandleCaptureReplay@
-- is 'Vulkan.Core10.FundamentalTypes.FALSE'.
shaderGroupCaptureReplayHandle :: Ptr ()
}
deriving (Typeable)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (RayTracingShaderGroupCreateInfoKHR)
#endif
deriving instance Show RayTracingShaderGroupCreateInfoKHR
instance ToCStruct RayTracingShaderGroupCreateInfoKHR where
withCStruct x f = allocaBytes 48 $ \p -> pokeCStruct p x (f p)
pokeCStruct p RayTracingShaderGroupCreateInfoKHR{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr RayTracingShaderGroupTypeKHR)) (type')
poke ((p `plusPtr` 20 :: Ptr Word32)) (generalShader)
poke ((p `plusPtr` 24 :: Ptr Word32)) (closestHitShader)
poke ((p `plusPtr` 28 :: Ptr Word32)) (anyHitShader)
poke ((p `plusPtr` 32 :: Ptr Word32)) (intersectionShader)
poke ((p `plusPtr` 40 :: Ptr (Ptr ()))) (shaderGroupCaptureReplayHandle)
f
cStructSize = 48
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr RayTracingShaderGroupTypeKHR)) (zero)
poke ((p `plusPtr` 20 :: Ptr Word32)) (zero)
poke ((p `plusPtr` 24 :: Ptr Word32)) (zero)
poke ((p `plusPtr` 28 :: Ptr Word32)) (zero)
poke ((p `plusPtr` 32 :: Ptr Word32)) (zero)
f
instance FromCStruct RayTracingShaderGroupCreateInfoKHR where
peekCStruct p = do
type' <- peek @RayTracingShaderGroupTypeKHR ((p `plusPtr` 16 :: Ptr RayTracingShaderGroupTypeKHR))
generalShader <- peek @Word32 ((p `plusPtr` 20 :: Ptr Word32))
closestHitShader <- peek @Word32 ((p `plusPtr` 24 :: Ptr Word32))
anyHitShader <- peek @Word32 ((p `plusPtr` 28 :: Ptr Word32))
intersectionShader <- peek @Word32 ((p `plusPtr` 32 :: Ptr Word32))
pShaderGroupCaptureReplayHandle <- peek @(Ptr ()) ((p `plusPtr` 40 :: Ptr (Ptr ())))
pure $ RayTracingShaderGroupCreateInfoKHR
type' generalShader closestHitShader anyHitShader intersectionShader pShaderGroupCaptureReplayHandle
instance Storable RayTracingShaderGroupCreateInfoKHR where
sizeOf ~_ = 48
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero RayTracingShaderGroupCreateInfoKHR where
zero = RayTracingShaderGroupCreateInfoKHR
zero
zero
zero
zero
zero
zero
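-- Illustrative sketch (assumed stage indices): a ray generation group and a
-- triangles hit group built by overriding the 'Zero' value. The indices @0@
-- and @1@ refer to hypothetical entries in
-- 'RayTracingPipelineCreateInfoKHR'::@pStages@; all unused members are set to
-- 'Vulkan.Core10.APIConstants.SHADER_UNUSED_KHR' explicitly, since 'zero'
-- initialises them to @0@.
--
-- > raygenGroup = (zero :: RayTracingShaderGroupCreateInfoKHR)
-- >   { type'              = RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR
-- >   , generalShader      = 0
-- >   , closestHitShader   = SHADER_UNUSED_KHR
-- >   , anyHitShader       = SHADER_UNUSED_KHR
-- >   , intersectionShader = SHADER_UNUSED_KHR
-- >   }
-- > hitGroup = (zero :: RayTracingShaderGroupCreateInfoKHR)
-- >   { type'              = RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR
-- >   , generalShader      = SHADER_UNUSED_KHR
-- >   , closestHitShader   = 1
-- >   , anyHitShader       = SHADER_UNUSED_KHR
-- >   , intersectionShader = SHADER_UNUSED_KHR
-- >   }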
-- | VkRayTracingPipelineCreateInfoKHR - Structure specifying parameters of a
-- newly created ray tracing pipeline
--
-- = Description
--
-- The parameters @basePipelineHandle@ and @basePipelineIndex@ are
-- described in more detail in
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#pipelines-pipeline-derivatives Pipeline Derivatives>.
--
-- When
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_LIBRARY_BIT_KHR'
-- is specified, this pipeline defines a /pipeline library/ which /cannot/
-- be bound as a ray tracing pipeline directly. Instead, pipeline libraries
-- define common shaders and shader groups which /can/ be included in
-- future pipeline creation.
--
-- If pipeline libraries are included in @pLibraryInfo@, shaders defined in
-- those libraries are treated as if they were defined as additional
-- entries in @pStages@, appended in the order they appear in the
-- @pLibraries@ array and in the @pStages@ array when those libraries were
-- defined.
--
-- When referencing shader groups in order to obtain a shader group handle,
-- groups defined in those libraries are treated as if they were defined as
-- additional entries in @pGroups@, appended in the order they appear in
-- the @pLibraries@ array and in the @pGroups@ array when those libraries
-- were defined. The shaders these groups reference are set when the
-- pipeline library is created, referencing those specified in the pipeline
-- library, not in the pipeline that includes it.
--
-- The default stack size for a pipeline if
-- 'Vulkan.Core10.Enums.DynamicState.DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR'
-- is not provided is computed as described in
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#ray-tracing-pipeline-stack Ray Tracing Pipeline Stack>.
--
-- == Valid Usage
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-flags-03421# If @flags@
-- contains the
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_DERIVATIVE_BIT'
-- flag, and @basePipelineIndex@ is @-1@, @basePipelineHandle@ /must/
-- be a valid handle to a ray tracing 'Vulkan.Core10.Handles.Pipeline'
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-flags-03422# If @flags@
-- contains the
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_DERIVATIVE_BIT'
-- flag, and @basePipelineHandle@ is
-- 'Vulkan.Core10.APIConstants.NULL_HANDLE', @basePipelineIndex@ /must/
-- be a valid index into the calling command’s @pCreateInfos@ parameter
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-flags-03423# If @flags@
-- contains the
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_DERIVATIVE_BIT'
-- flag, and @basePipelineIndex@ is not @-1@, @basePipelineHandle@
-- /must/ be 'Vulkan.Core10.APIConstants.NULL_HANDLE'
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-flags-03424# If @flags@
-- contains the
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_DERIVATIVE_BIT'
-- flag, and @basePipelineHandle@ is not
-- 'Vulkan.Core10.APIConstants.NULL_HANDLE', @basePipelineIndex@ /must/
-- be @-1@
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-pStages-03426# The shader
-- code for the entry points identified by @pStages@, and the rest of
-- the state identified by this structure /must/ adhere to the pipeline
-- linking rules described in the
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#interfaces Shader Interfaces>
-- chapter
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-layout-03427# @layout@
-- /must/ be
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#descriptorsets-pipelinelayout-consistency consistent>
-- with all shaders specified in @pStages@
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-layout-03428# The number of
-- resources in @layout@ accessible to each shader stage that is used
-- by the pipeline /must/ be less than or equal to
-- 'Vulkan.Core10.DeviceInitialization.PhysicalDeviceLimits'::@maxPerStageResources@
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-flags-02904# @flags@ /must/
-- not include
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV'
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-pipelineCreationCacheControl-02905#
-- If the
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#features-pipelineCreationCacheControl pipelineCreationCacheControl>
-- feature is not enabled, @flags@ /must/ not include
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT'
-- or
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT'
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-stage-03425# If @flags@ does
-- not include
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_LIBRARY_BIT_KHR',
-- the @stage@ member of at least one element of @pStages@, including
-- those implicitly added by @pLibraryInfo@, /must/ be
-- 'Vulkan.Core10.Enums.ShaderStageFlagBits.SHADER_STAGE_RAYGEN_BIT_KHR'
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-maxPipelineRayRecursionDepth-03589#
-- @maxPipelineRayRecursionDepth@ /must/ be less than or equal to
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@maxRayRecursionDepth@
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-flags-03465# If @flags@
-- includes
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_LIBRARY_BIT_KHR',
-- @pLibraryInterface@ /must/ not be @NULL@
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-pLibraryInfo-03590# If
-- @pLibraryInfo@ is not @NULL@ and its @libraryCount@ member is
-- greater than @0@, its @pLibraryInterface@ member /must/ not be
-- @NULL@
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-pLibraries-03591# Each
-- element of @pLibraryInfo->pLibraries@ /must/ have been created with
-- the value of @maxPipelineRayRecursionDepth@ equal to that in this
-- pipeline
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-pLibraryInfo-03592# If
-- @pLibraryInfo@ is not @NULL@, each element of its @pLibraries@
-- member /must/ have been created with a @layout@ that is compatible
-- with the @layout@ in this pipeline
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-pLibraryInfo-03593# If
-- @pLibraryInfo@ is not @NULL@, each element of its @pLibraries@
-- member /must/ have been created with values of the
-- @maxPipelineRayPayloadSize@ and @maxPipelineRayHitAttributeSize@
-- members of @pLibraryInterface@ equal to those in this pipeline
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-flags-03594# If @flags@
-- includes
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR',
-- each element of @pLibraryInfo->pLibraries@ /must/ have been created
-- with the
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR'
-- bit set
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-flags-04718# If @flags@
-- includes
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR',
-- each element of @pLibraryInfo->pLibraries@ /must/ have been created
-- with the
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR'
-- bit set
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-flags-04719# If @flags@
-- includes
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR',
-- each element of @pLibraryInfo->pLibraries@ /must/ have been created
-- with the
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR'
-- bit set
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-flags-04720# If @flags@
-- includes
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR',
-- each element of @pLibraryInfo->pLibraries@ /must/ have been created
-- with the
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR'
-- bit set
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-flags-04721# If @flags@
-- includes
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR',
-- each element of @pLibraryInfo->pLibraries@ /must/ have been created
-- with the
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR'
-- bit set
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-flags-04722# If @flags@
-- includes
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR',
-- each element of @pLibraryInfo->pLibraries@ /must/ have been created
-- with the
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR'
-- bit set
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-flags-04723# If @flags@
-- includes
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR',
-- each element of @pLibraryInfo->pLibraries@ /must/ have been created
-- with the
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR'
-- bit set
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-pLibraryInfo-03595# If the
-- @VK_KHR_pipeline_library@ extension is not enabled, @pLibraryInfo@
-- and @pLibraryInterface@ /must/ be @NULL@
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-flags-03470# If @flags@
-- includes
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR',
-- for any element of @pGroups@ with a @type@ of
-- 'RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR' or
-- 'RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR', the
-- @anyHitShader@ of that element /must/ not be
-- 'Vulkan.Core10.APIConstants.SHADER_UNUSED_KHR'
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-flags-03471# If @flags@
-- includes
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR',
-- for any element of @pGroups@ with a @type@ of
-- 'RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR' or
-- 'RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR', the
-- @closestHitShader@ of that element /must/ not be
-- 'Vulkan.Core10.APIConstants.SHADER_UNUSED_KHR'
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-rayTraversalPrimitiveCulling-03596#
-- If the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#features-rayTraversalPrimitiveCulling rayTraversalPrimitiveCulling>
-- feature is not enabled, @flags@ /must/ not include
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR'
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-rayTraversalPrimitiveCulling-03597#
-- If the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#features-rayTraversalPrimitiveCulling rayTraversalPrimitiveCulling>
-- feature is not enabled, @flags@ /must/ not include
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR'
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-flags-03598# If @flags@
-- includes
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR',
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#features-rayTracingPipelineShaderGroupHandleCaptureReplay rayTracingPipelineShaderGroupHandleCaptureReplay>
-- /must/ be enabled
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-rayTracingPipelineShaderGroupHandleCaptureReplay-03599#
-- If
-- 'PhysicalDeviceRayTracingPipelineFeaturesKHR'::@rayTracingPipelineShaderGroupHandleCaptureReplay@
-- is 'Vulkan.Core10.FundamentalTypes.TRUE' and the
-- @pShaderGroupCaptureReplayHandle@ member of any element of @pGroups@
-- is not @NULL@, @flags@ /must/ include
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR'
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-pLibraryInfo-03600# If
-- @pLibraryInfo@ is not @NULL@ and its @libraryCount@ is @0@,
-- @stageCount@ /must/ not be @0@
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-pLibraryInfo-03601# If
-- @pLibraryInfo@ is not @NULL@ and its @libraryCount@ is @0@,
-- @groupCount@ /must/ not be @0@
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-pDynamicStates-03602# Any
-- element of the @pDynamicStates@ member of @pDynamicState@ /must/ be
-- 'Vulkan.Core10.Enums.DynamicState.DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR'
--
-- == Valid Usage (Implicit)
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-sType-sType# @sType@ /must/
-- be
-- 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR'
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-pNext-pNext# @pNext@ /must/
-- be @NULL@ or a pointer to a valid instance of
-- 'Vulkan.Core13.Promoted_From_VK_EXT_pipeline_creation_feedback.PipelineCreationFeedbackCreateInfo'
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-sType-unique# The @sType@
-- value of each struct in the @pNext@ chain /must/ be unique
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-flags-parameter# @flags@
-- /must/ be a valid combination of
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PipelineCreateFlagBits'
-- values
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-pStages-parameter# If
-- @stageCount@ is not @0@, @pStages@ /must/ be a valid pointer to an
-- array of @stageCount@ valid
-- 'Vulkan.Core10.Pipeline.PipelineShaderStageCreateInfo' structures
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-pGroups-parameter# If
-- @groupCount@ is not @0@, @pGroups@ /must/ be a valid pointer to an
-- array of @groupCount@ valid 'RayTracingShaderGroupCreateInfoKHR'
-- structures
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-pLibraryInfo-parameter# If
-- @pLibraryInfo@ is not @NULL@, @pLibraryInfo@ /must/ be a valid
-- pointer to a valid
-- 'Vulkan.Extensions.VK_KHR_pipeline_library.PipelineLibraryCreateInfoKHR'
-- structure
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-pLibraryInterface-parameter#
-- If @pLibraryInterface@ is not @NULL@, @pLibraryInterface@ /must/ be
-- a valid pointer to a valid
-- 'RayTracingPipelineInterfaceCreateInfoKHR' structure
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-pDynamicState-parameter# If
-- @pDynamicState@ is not @NULL@, @pDynamicState@ /must/ be a valid
-- pointer to a valid
-- 'Vulkan.Core10.Pipeline.PipelineDynamicStateCreateInfo' structure
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-layout-parameter# @layout@
-- /must/ be a valid 'Vulkan.Core10.Handles.PipelineLayout' handle
--
-- - #VUID-VkRayTracingPipelineCreateInfoKHR-commonparent# Both of
-- @basePipelineHandle@, and @layout@ that are valid handles of
-- non-ignored parameters /must/ have been created, allocated, or
-- retrieved from the same 'Vulkan.Core10.Handles.Device'
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_ray_tracing_pipeline VK_KHR_ray_tracing_pipeline>,
-- 'Vulkan.Core10.Handles.Pipeline',
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PipelineCreateFlags',
-- 'Vulkan.Core10.Pipeline.PipelineDynamicStateCreateInfo',
-- 'Vulkan.Core10.Handles.PipelineLayout',
-- 'Vulkan.Extensions.VK_KHR_pipeline_library.PipelineLibraryCreateInfoKHR',
-- 'Vulkan.Core10.Pipeline.PipelineShaderStageCreateInfo',
-- 'RayTracingPipelineInterfaceCreateInfoKHR',
-- 'RayTracingShaderGroupCreateInfoKHR',
-- 'Vulkan.Core10.Enums.StructureType.StructureType',
-- 'createRayTracingPipelinesKHR'
data RayTracingPipelineCreateInfoKHR (es :: [Type]) = RayTracingPipelineCreateInfoKHR
{ -- | @pNext@ is @NULL@ or a pointer to a structure extending this structure.
next :: Chain es
, -- | @flags@ is a bitmask of
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PipelineCreateFlagBits'
-- specifying how the pipeline will be generated.
flags :: PipelineCreateFlags
, -- | @pStages@ is a pointer to an array of @stageCount@
-- 'Vulkan.Core10.Pipeline.PipelineShaderStageCreateInfo' structures
-- describing the set of the shader stages to be included in the ray
-- tracing pipeline.
stages :: Vector (SomeStruct PipelineShaderStageCreateInfo)
, -- | @pGroups@ is a pointer to an array of @groupCount@
-- 'RayTracingShaderGroupCreateInfoKHR' structures describing the set of
-- the shader stages to be included in each shader group in the ray tracing
-- pipeline.
groups :: Vector RayTracingShaderGroupCreateInfoKHR
, -- | @maxPipelineRayRecursionDepth@ is the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#ray-tracing-recursion-depth maximum recursion depth>
-- of shaders executed by this pipeline.
maxPipelineRayRecursionDepth :: Word32
, -- | @pLibraryInfo@ is a pointer to a
-- 'Vulkan.Extensions.VK_KHR_pipeline_library.PipelineLibraryCreateInfoKHR'
-- structure defining pipeline libraries to include.
libraryInfo :: Maybe PipelineLibraryCreateInfoKHR
, -- | @pLibraryInterface@ is a pointer to a
-- 'RayTracingPipelineInterfaceCreateInfoKHR' structure defining additional
-- information when using pipeline libraries.
libraryInterface :: Maybe RayTracingPipelineInterfaceCreateInfoKHR
, -- | @pDynamicState@ is a pointer to a
-- 'Vulkan.Core10.Pipeline.PipelineDynamicStateCreateInfo' structure, and
-- is used to indicate which properties of the pipeline state object are
-- dynamic and /can/ be changed independently of the pipeline state. This
-- /can/ be @NULL@, which means no state in the pipeline is considered
-- dynamic.
dynamicState :: Maybe PipelineDynamicStateCreateInfo
, -- | @layout@ is the description of binding locations used by both the
-- pipeline and descriptor sets used with the pipeline.
layout :: PipelineLayout
, -- | @basePipelineHandle@ is a pipeline to derive from.
basePipelineHandle :: Pipeline
, -- | @basePipelineIndex@ is an index into the @pCreateInfos@ parameter to use
-- as a pipeline to derive from.
basePipelineIndex :: Int32
}
deriving (Typeable)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (RayTracingPipelineCreateInfoKHR (es :: [Type]))
#endif
deriving instance Show (Chain es) => Show (RayTracingPipelineCreateInfoKHR es)
instance Extensible RayTracingPipelineCreateInfoKHR where
extensibleTypeName = "RayTracingPipelineCreateInfoKHR"
setNext RayTracingPipelineCreateInfoKHR{..} next' = RayTracingPipelineCreateInfoKHR{next = next', ..}
getNext RayTracingPipelineCreateInfoKHR{..} = next
extends :: forall e b proxy. Typeable e => proxy e -> (Extends RayTracingPipelineCreateInfoKHR e => b) -> Maybe b
extends _ f
| Just Refl <- eqT @e @PipelineCreationFeedbackCreateInfo = Just f
| otherwise = Nothing
instance (Extendss RayTracingPipelineCreateInfoKHR es, PokeChain es) => ToCStruct (RayTracingPipelineCreateInfoKHR es) where
withCStruct x f = allocaBytes 104 $ \p -> pokeCStruct p x (f p)
pokeCStruct p RayTracingPipelineCreateInfoKHR{..} f = evalContT $ do
lift $ poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR)
pNext'' <- fmap castPtr . ContT $ withChain (next)
lift $ poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) pNext''
lift $ poke ((p `plusPtr` 16 :: Ptr PipelineCreateFlags)) (flags)
lift $ poke ((p `plusPtr` 20 :: Ptr Word32)) ((fromIntegral (Data.Vector.length $ (stages)) :: Word32))
pPStages' <- ContT $ allocaBytes @(PipelineShaderStageCreateInfo _) ((Data.Vector.length (stages)) * 48)
Data.Vector.imapM_ (\i e -> ContT $ pokeSomeCStruct (forgetExtensions (pPStages' `plusPtr` (48 * (i)) :: Ptr (PipelineShaderStageCreateInfo _))) (e) . ($ ())) (stages)
lift $ poke ((p `plusPtr` 24 :: Ptr (Ptr (PipelineShaderStageCreateInfo _)))) (pPStages')
lift $ poke ((p `plusPtr` 32 :: Ptr Word32)) ((fromIntegral (Data.Vector.length $ (groups)) :: Word32))
pPGroups' <- ContT $ allocaBytes @RayTracingShaderGroupCreateInfoKHR ((Data.Vector.length (groups)) * 48)
lift $ Data.Vector.imapM_ (\i e -> poke (pPGroups' `plusPtr` (48 * (i)) :: Ptr RayTracingShaderGroupCreateInfoKHR) (e)) (groups)
lift $ poke ((p `plusPtr` 40 :: Ptr (Ptr RayTracingShaderGroupCreateInfoKHR))) (pPGroups')
lift $ poke ((p `plusPtr` 48 :: Ptr Word32)) (maxPipelineRayRecursionDepth)
pLibraryInfo'' <- case (libraryInfo) of
Nothing -> pure nullPtr
Just j -> ContT $ withCStruct (j)
lift $ poke ((p `plusPtr` 56 :: Ptr (Ptr PipelineLibraryCreateInfoKHR))) pLibraryInfo''
pLibraryInterface'' <- case (libraryInterface) of
Nothing -> pure nullPtr
Just j -> ContT $ withCStruct (j)
lift $ poke ((p `plusPtr` 64 :: Ptr (Ptr RayTracingPipelineInterfaceCreateInfoKHR))) pLibraryInterface''
pDynamicState'' <- case (dynamicState) of
Nothing -> pure nullPtr
Just j -> ContT $ withCStruct (j)
lift $ poke ((p `plusPtr` 72 :: Ptr (Ptr PipelineDynamicStateCreateInfo))) pDynamicState''
lift $ poke ((p `plusPtr` 80 :: Ptr PipelineLayout)) (layout)
lift $ poke ((p `plusPtr` 88 :: Ptr Pipeline)) (basePipelineHandle)
lift $ poke ((p `plusPtr` 96 :: Ptr Int32)) (basePipelineIndex)
lift $ f
cStructSize = 104
cStructAlignment = 8
pokeZeroCStruct p f = evalContT $ do
lift $ poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR)
pNext' <- fmap castPtr . ContT $ withZeroChain @es
lift $ poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) pNext'
lift $ poke ((p `plusPtr` 48 :: Ptr Word32)) (zero)
lift $ poke ((p `plusPtr` 80 :: Ptr PipelineLayout)) (zero)
lift $ poke ((p `plusPtr` 96 :: Ptr Int32)) (zero)
lift $ f
instance (Extendss RayTracingPipelineCreateInfoKHR es, PeekChain es) => FromCStruct (RayTracingPipelineCreateInfoKHR es) where
peekCStruct p = do
pNext <- peek @(Ptr ()) ((p `plusPtr` 8 :: Ptr (Ptr ())))
next <- peekChain (castPtr pNext)
flags <- peek @PipelineCreateFlags ((p `plusPtr` 16 :: Ptr PipelineCreateFlags))
stageCount <- peek @Word32 ((p `plusPtr` 20 :: Ptr Word32))
pStages <- peek @(Ptr (PipelineShaderStageCreateInfo _)) ((p `plusPtr` 24 :: Ptr (Ptr (PipelineShaderStageCreateInfo _))))
pStages' <- generateM (fromIntegral stageCount) (\i -> peekSomeCStruct (forgetExtensions ((pStages `advancePtrBytes` (48 * (i)) :: Ptr (PipelineShaderStageCreateInfo _)))))
groupCount <- peek @Word32 ((p `plusPtr` 32 :: Ptr Word32))
pGroups <- peek @(Ptr RayTracingShaderGroupCreateInfoKHR) ((p `plusPtr` 40 :: Ptr (Ptr RayTracingShaderGroupCreateInfoKHR)))
pGroups' <- generateM (fromIntegral groupCount) (\i -> peekCStruct @RayTracingShaderGroupCreateInfoKHR ((pGroups `advancePtrBytes` (48 * (i)) :: Ptr RayTracingShaderGroupCreateInfoKHR)))
maxPipelineRayRecursionDepth <- peek @Word32 ((p `plusPtr` 48 :: Ptr Word32))
pLibraryInfo <- peek @(Ptr PipelineLibraryCreateInfoKHR) ((p `plusPtr` 56 :: Ptr (Ptr PipelineLibraryCreateInfoKHR)))
pLibraryInfo' <- maybePeek (\j -> peekCStruct @PipelineLibraryCreateInfoKHR (j)) pLibraryInfo
pLibraryInterface <- peek @(Ptr RayTracingPipelineInterfaceCreateInfoKHR) ((p `plusPtr` 64 :: Ptr (Ptr RayTracingPipelineInterfaceCreateInfoKHR)))
pLibraryInterface' <- maybePeek (\j -> peekCStruct @RayTracingPipelineInterfaceCreateInfoKHR (j)) pLibraryInterface
pDynamicState <- peek @(Ptr PipelineDynamicStateCreateInfo) ((p `plusPtr` 72 :: Ptr (Ptr PipelineDynamicStateCreateInfo)))
pDynamicState' <- maybePeek (\j -> peekCStruct @PipelineDynamicStateCreateInfo (j)) pDynamicState
layout <- peek @PipelineLayout ((p `plusPtr` 80 :: Ptr PipelineLayout))
basePipelineHandle <- peek @Pipeline ((p `plusPtr` 88 :: Ptr Pipeline))
basePipelineIndex <- peek @Int32 ((p `plusPtr` 96 :: Ptr Int32))
pure $ RayTracingPipelineCreateInfoKHR
next flags pStages' pGroups' maxPipelineRayRecursionDepth pLibraryInfo' pLibraryInterface' pDynamicState' layout basePipelineHandle basePipelineIndex
instance es ~ '[] => Zero (RayTracingPipelineCreateInfoKHR es) where
zero = RayTracingPipelineCreateInfoKHR
()
zero
mempty
mempty
zero
Nothing
Nothing
Nothing
zero
zero
zero
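-- Illustrative sketch (assumed names): a minimal
-- 'RayTracingPipelineCreateInfoKHR' built from the 'Zero' value and passed to
-- 'createRayTracingPipelinesKHR'. @rtStages@, @rtGroups@ and @rtLayout@ are
-- assumed to exist; the argument order follows the underlying
-- @vkCreateRayTracingPipelinesKHR@ signature, with the deferred operation and
-- pipeline cache both 'Vulkan.Core10.APIConstants.NULL_HANDLE' here.
--
-- > rtCreateInfo = (zero :: RayTracingPipelineCreateInfoKHR '[])
-- >   { stages = rtStages
-- >   , groups = rtGroups
-- >   , maxPipelineRayRecursionDepth = 1
-- >   , layout = rtLayout
-- >   }
-- > (_, rtPipelines) <- createRayTracingPipelinesKHR
-- >   device
-- >   NULL_HANDLE
-- >   NULL_HANDLE
-- >   (Data.Vector.singleton (SomeStruct rtCreateInfo))
-- >   Nothing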
-- | VkPhysicalDeviceRayTracingPipelineFeaturesKHR - Structure describing the
-- ray tracing features that can be supported by an implementation
--
-- = Members
--
-- This structure describes the following features:
--
-- = Description
--
-- - @sType@ is the type of this structure.
--
-- - @pNext@ is @NULL@ or a pointer to a structure extending this
-- structure.
--
-- - #features-rayTracingPipeline# @rayTracingPipeline@ indicates whether
-- the implementation supports the ray tracing pipeline functionality.
-- See
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#ray-tracing Ray Tracing>.
--
-- - #features-rayTracingPipelineShaderGroupHandleCaptureReplay#
-- @rayTracingPipelineShaderGroupHandleCaptureReplay@ indicates whether
-- the implementation supports saving and reusing shader group handles,
-- e.g. for trace capture and replay.
--
-- - #features-rayTracingPipelineShaderGroupHandleCaptureReplayMixed#
-- @rayTracingPipelineShaderGroupHandleCaptureReplayMixed@ indicates
-- whether the implementation supports reuse of shader group handles
-- being arbitrarily mixed with creation of non-reused shader group
-- handles. If this is 'Vulkan.Core10.FundamentalTypes.FALSE', all
-- reused shader group handles /must/ be specified before any
-- non-reused handles /may/ be created.
--
-- - #features-rayTracingPipelineTraceRaysIndirect#
-- @rayTracingPipelineTraceRaysIndirect@ indicates whether the
-- implementation supports indirect ray tracing commands, e.g.
-- 'cmdTraceRaysIndirectKHR'.
--
-- - #features-rayTraversalPrimitiveCulling#
-- @rayTraversalPrimitiveCulling@ indicates whether the implementation
-- supports
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#ray-traversal-culling-primitive primitive culling during ray traversal>.
--
-- If the 'PhysicalDeviceRayTracingPipelineFeaturesKHR' structure is
-- included in the @pNext@ chain of the
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2'
-- structure passed to
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceFeatures2',
-- it is filled in to indicate whether each corresponding feature is
-- supported. 'PhysicalDeviceRayTracingPipelineFeaturesKHR' /can/ also be
-- used in the @pNext@ chain of 'Vulkan.Core10.Device.DeviceCreateInfo' to
-- selectively enable these features.
--
-- == Valid Usage
--
-- - #VUID-VkPhysicalDeviceRayTracingPipelineFeaturesKHR-rayTracingPipelineShaderGroupHandleCaptureReplayMixed-03575#
-- If @rayTracingPipelineShaderGroupHandleCaptureReplayMixed@ is
-- 'Vulkan.Core10.FundamentalTypes.TRUE',
-- @rayTracingPipelineShaderGroupHandleCaptureReplay@ /must/ also be
-- 'Vulkan.Core10.FundamentalTypes.TRUE'
--
-- == Valid Usage (Implicit)
--
-- - #VUID-VkPhysicalDeviceRayTracingPipelineFeaturesKHR-sType-sType#
-- @sType@ /must/ be
-- 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR'
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_ray_tracing_pipeline VK_KHR_ray_tracing_pipeline>,
-- 'Vulkan.Core10.FundamentalTypes.Bool32',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
data PhysicalDeviceRayTracingPipelineFeaturesKHR = PhysicalDeviceRayTracingPipelineFeaturesKHR
  { -- | @rayTracingPipeline@ indicates whether the implementation supports the
    -- ray tracing pipeline functionality.
    rayTracingPipeline :: Bool
  , -- | @rayTracingPipelineShaderGroupHandleCaptureReplay@ indicates whether
    -- the implementation supports saving and reusing shader group handles,
    -- e.g. for trace capture and replay.
    rayTracingPipelineShaderGroupHandleCaptureReplay :: Bool
  , -- | @rayTracingPipelineShaderGroupHandleCaptureReplayMixed@ indicates
    -- whether the implementation supports reuse of shader group handles being
    -- arbitrarily mixed with creation of non-reused shader group handles.
    rayTracingPipelineShaderGroupHandleCaptureReplayMixed :: Bool
  , -- | @rayTracingPipelineTraceRaysIndirect@ indicates whether the
    -- implementation supports indirect ray tracing commands, e.g.
    -- 'cmdTraceRaysIndirectKHR'.
    rayTracingPipelineTraceRaysIndirect :: Bool
  , -- | @rayTraversalPrimitiveCulling@ indicates whether the implementation
    -- supports primitive culling during ray traversal.
    rayTraversalPrimitiveCulling :: Bool
}
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDeviceRayTracingPipelineFeaturesKHR)
#endif
deriving instance Show PhysicalDeviceRayTracingPipelineFeaturesKHR
instance ToCStruct PhysicalDeviceRayTracingPipelineFeaturesKHR where
withCStruct x f = allocaBytes 40 $ \p -> pokeCStruct p x (f p)
pokeCStruct p PhysicalDeviceRayTracingPipelineFeaturesKHR{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (rayTracingPipeline))
poke ((p `plusPtr` 20 :: Ptr Bool32)) (boolToBool32 (rayTracingPipelineShaderGroupHandleCaptureReplay))
poke ((p `plusPtr` 24 :: Ptr Bool32)) (boolToBool32 (rayTracingPipelineShaderGroupHandleCaptureReplayMixed))
poke ((p `plusPtr` 28 :: Ptr Bool32)) (boolToBool32 (rayTracingPipelineTraceRaysIndirect))
poke ((p `plusPtr` 32 :: Ptr Bool32)) (boolToBool32 (rayTraversalPrimitiveCulling))
f
cStructSize = 40
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 20 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 24 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 28 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 32 :: Ptr Bool32)) (boolToBool32 (zero))
f
instance FromCStruct PhysicalDeviceRayTracingPipelineFeaturesKHR where
peekCStruct p = do
rayTracingPipeline <- peek @Bool32 ((p `plusPtr` 16 :: Ptr Bool32))
rayTracingPipelineShaderGroupHandleCaptureReplay <- peek @Bool32 ((p `plusPtr` 20 :: Ptr Bool32))
rayTracingPipelineShaderGroupHandleCaptureReplayMixed <- peek @Bool32 ((p `plusPtr` 24 :: Ptr Bool32))
rayTracingPipelineTraceRaysIndirect <- peek @Bool32 ((p `plusPtr` 28 :: Ptr Bool32))
rayTraversalPrimitiveCulling <- peek @Bool32 ((p `plusPtr` 32 :: Ptr Bool32))
pure $ PhysicalDeviceRayTracingPipelineFeaturesKHR
(bool32ToBool rayTracingPipeline) (bool32ToBool rayTracingPipelineShaderGroupHandleCaptureReplay) (bool32ToBool rayTracingPipelineShaderGroupHandleCaptureReplayMixed) (bool32ToBool rayTracingPipelineTraceRaysIndirect) (bool32ToBool rayTraversalPrimitiveCulling)
instance Storable PhysicalDeviceRayTracingPipelineFeaturesKHR where
sizeOf ~_ = 40
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero PhysicalDeviceRayTracingPipelineFeaturesKHR where
zero = PhysicalDeviceRayTracingPipelineFeaturesKHR
zero
zero
zero
zero
zero
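-- Illustrative sketch: requesting the ray tracing pipeline feature at device
-- creation time by appending this structure to the @pNext@ chain of
-- 'Vulkan.Core10.Device.DeviceCreateInfo', using the chain operators from
-- Vulkan.CStruct.Extends. @baseDeviceCreateInfo@ is an assumed, otherwise
-- complete 'Vulkan.Core10.Device.DeviceCreateInfo'.
--
-- > deviceCreateInfo = baseDeviceCreateInfo
-- >   ::& (zero :: PhysicalDeviceRayTracingPipelineFeaturesKHR)
-- >         { rayTracingPipeline = True }
-- >   :&  ()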
-- | VkPhysicalDeviceRayTracingPipelinePropertiesKHR - Properties of the
-- physical device for ray tracing
--
-- = Description
--
-- If the 'PhysicalDeviceRayTracingPipelinePropertiesKHR' structure is
-- included in the @pNext@ chain of the
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceProperties2'
-- structure passed to
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceProperties2',
-- it is filled in with each corresponding implementation-dependent
-- property.
--
-- Limits specified by this structure /must/ match those specified with the
-- same name in
-- 'Vulkan.Extensions.VK_NV_ray_tracing.PhysicalDeviceRayTracingPropertiesNV'.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_ray_tracing_pipeline VK_KHR_ray_tracing_pipeline>,
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
data PhysicalDeviceRayTracingPipelinePropertiesKHR = PhysicalDeviceRayTracingPipelinePropertiesKHR
{ -- | @shaderGroupHandleSize@ is the size in bytes of the shader header.
shaderGroupHandleSize :: Word32
, -- | #limits-maxRayRecursionDepth# @maxRayRecursionDepth@ is the maximum
-- number of levels of ray recursion allowed in a trace command.
maxRayRecursionDepth :: Word32
, -- | @maxShaderGroupStride@ is the maximum stride in bytes allowed between
-- shader groups in the shader binding table.
maxShaderGroupStride :: Word32
, -- | @shaderGroupBaseAlignment@ is the /required/ alignment in bytes for the
-- base of the shader binding table.
shaderGroupBaseAlignment :: Word32
, -- | @shaderGroupHandleCaptureReplaySize@ is the number of bytes for the
-- information required to do capture and replay for shader group handles.
shaderGroupHandleCaptureReplaySize :: Word32
, -- | @maxRayDispatchInvocationCount@ is the maximum number of ray generation
-- shader invocations which /may/ be produced by a single
-- 'cmdTraceRaysIndirectKHR' or 'cmdTraceRaysKHR' command.
maxRayDispatchInvocationCount :: Word32
, -- | @shaderGroupHandleAlignment@ is the /required/ alignment in bytes for
-- each shader binding table entry. The value /must/ be a power of two.
shaderGroupHandleAlignment :: Word32
, -- | @maxRayHitAttributeSize@ is the maximum size in bytes for a ray
    -- attribute structure.
maxRayHitAttributeSize :: Word32
}
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDeviceRayTracingPipelinePropertiesKHR)
#endif
deriving instance Show PhysicalDeviceRayTracingPipelinePropertiesKHR
instance ToCStruct PhysicalDeviceRayTracingPipelinePropertiesKHR where
withCStruct x f = allocaBytes 48 $ \p -> pokeCStruct p x (f p)
pokeCStruct p PhysicalDeviceRayTracingPipelinePropertiesKHR{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Word32)) (shaderGroupHandleSize)
poke ((p `plusPtr` 20 :: Ptr Word32)) (maxRayRecursionDepth)
poke ((p `plusPtr` 24 :: Ptr Word32)) (maxShaderGroupStride)
poke ((p `plusPtr` 28 :: Ptr Word32)) (shaderGroupBaseAlignment)
poke ((p `plusPtr` 32 :: Ptr Word32)) (shaderGroupHandleCaptureReplaySize)
poke ((p `plusPtr` 36 :: Ptr Word32)) (maxRayDispatchInvocationCount)
poke ((p `plusPtr` 40 :: Ptr Word32)) (shaderGroupHandleAlignment)
poke ((p `plusPtr` 44 :: Ptr Word32)) (maxRayHitAttributeSize)
f
cStructSize = 48
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Word32)) (zero)
poke ((p `plusPtr` 20 :: Ptr Word32)) (zero)
poke ((p `plusPtr` 24 :: Ptr Word32)) (zero)
poke ((p `plusPtr` 28 :: Ptr Word32)) (zero)
poke ((p `plusPtr` 32 :: Ptr Word32)) (zero)
poke ((p `plusPtr` 36 :: Ptr Word32)) (zero)
poke ((p `plusPtr` 40 :: Ptr Word32)) (zero)
poke ((p `plusPtr` 44 :: Ptr Word32)) (zero)
f
instance FromCStruct PhysicalDeviceRayTracingPipelinePropertiesKHR where
peekCStruct p = do
shaderGroupHandleSize <- peek @Word32 ((p `plusPtr` 16 :: Ptr Word32))
maxRayRecursionDepth <- peek @Word32 ((p `plusPtr` 20 :: Ptr Word32))
maxShaderGroupStride <- peek @Word32 ((p `plusPtr` 24 :: Ptr Word32))
shaderGroupBaseAlignment <- peek @Word32 ((p `plusPtr` 28 :: Ptr Word32))
shaderGroupHandleCaptureReplaySize <- peek @Word32 ((p `plusPtr` 32 :: Ptr Word32))
maxRayDispatchInvocationCount <- peek @Word32 ((p `plusPtr` 36 :: Ptr Word32))
shaderGroupHandleAlignment <- peek @Word32 ((p `plusPtr` 40 :: Ptr Word32))
maxRayHitAttributeSize <- peek @Word32 ((p `plusPtr` 44 :: Ptr Word32))
pure $ PhysicalDeviceRayTracingPipelinePropertiesKHR
shaderGroupHandleSize maxRayRecursionDepth maxShaderGroupStride shaderGroupBaseAlignment shaderGroupHandleCaptureReplaySize maxRayDispatchInvocationCount shaderGroupHandleAlignment maxRayHitAttributeSize
instance Storable PhysicalDeviceRayTracingPipelinePropertiesKHR where
sizeOf ~_ = 48
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero PhysicalDeviceRayTracingPipelinePropertiesKHR where
zero = PhysicalDeviceRayTracingPipelinePropertiesKHR
zero
zero
zero
zero
zero
zero
zero
zero
-- | VkStridedDeviceAddressRegionKHR - Structure specifying a region of
-- device addresses with a stride
--
-- == Valid Usage
--
-- - #VUID-VkStridedDeviceAddressRegionKHR-size-04631# If @size@ is not
-- zero, all addresses between @deviceAddress@ and @deviceAddress@ +
-- @size@ - 1 /must/ be in the buffer device address range of the same
-- buffer
--
-- - #VUID-VkStridedDeviceAddressRegionKHR-size-04632# If @size@ is not
-- zero, @stride@ /must/ be less than or equal to the size of the
-- buffer from which @deviceAddress@ was queried
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_ray_tracing_pipeline VK_KHR_ray_tracing_pipeline>,
-- 'Vulkan.Core10.FundamentalTypes.DeviceAddress',
-- 'Vulkan.Core10.FundamentalTypes.DeviceSize', 'cmdTraceRaysIndirectKHR',
-- 'cmdTraceRaysKHR'
data StridedDeviceAddressRegionKHR = StridedDeviceAddressRegionKHR
{ -- | @deviceAddress@ is the device address (as returned by the
-- 'Vulkan.Core12.Promoted_From_VK_KHR_buffer_device_address.getBufferDeviceAddress'
-- command) at which the region starts, or zero if the region is unused.
deviceAddress :: DeviceAddress
, -- | @stride@ is the byte stride between consecutive elements.
stride :: DeviceSize
, -- | @size@ is the size in bytes of the region starting at @deviceAddress@.
size :: DeviceSize
}
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (StridedDeviceAddressRegionKHR)
#endif
deriving instance Show StridedDeviceAddressRegionKHR
instance ToCStruct StridedDeviceAddressRegionKHR where
withCStruct x f = allocaBytes 24 $ \p -> pokeCStruct p x (f p)
pokeCStruct p StridedDeviceAddressRegionKHR{..} f = do
poke ((p `plusPtr` 0 :: Ptr DeviceAddress)) (deviceAddress)
poke ((p `plusPtr` 8 :: Ptr DeviceSize)) (stride)
poke ((p `plusPtr` 16 :: Ptr DeviceSize)) (size)
f
cStructSize = 24
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 8 :: Ptr DeviceSize)) (zero)
poke ((p `plusPtr` 16 :: Ptr DeviceSize)) (zero)
f
instance FromCStruct StridedDeviceAddressRegionKHR where
peekCStruct p = do
deviceAddress <- peek @DeviceAddress ((p `plusPtr` 0 :: Ptr DeviceAddress))
stride <- peek @DeviceSize ((p `plusPtr` 8 :: Ptr DeviceSize))
size <- peek @DeviceSize ((p `plusPtr` 16 :: Ptr DeviceSize))
pure $ StridedDeviceAddressRegionKHR
deviceAddress stride size
instance Storable StridedDeviceAddressRegionKHR where
sizeOf ~_ = 24
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero StridedDeviceAddressRegionKHR where
zero = StridedDeviceAddressRegionKHR
zero
zero
zero
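-- Illustrative sketch (assumed values): describing a ray generation shader
-- binding table region whose single record starts at @sbtAddr@. @props@ is an
-- assumed 'PhysicalDeviceRayTracingPipelinePropertiesKHR', and @alignUp@
-- rounds its first argument up to the next multiple of the second, so that
-- the stride respects @shaderGroupHandleAlignment@.
--
-- > alignUp x a = ((x + a - 1) `div` a) * a
-- > handleSizeAligned = alignUp (fromIntegral (shaderGroupHandleSize props))
-- >                             (fromIntegral (shaderGroupHandleAlignment props))
-- > raygenRegion = StridedDeviceAddressRegionKHR
-- >   { deviceAddress = sbtAddr
-- >   , stride        = handleSizeAligned
-- >   , size          = handleSizeAligned
-- >   }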
-- | VkTraceRaysIndirectCommandKHR - Structure specifying the parameters of
-- an indirect ray tracing command
--
-- = Description
--
-- The members of 'TraceRaysIndirectCommandKHR' have the same meaning as
-- the similarly named parameters of 'cmdTraceRaysKHR'.
--
-- == Valid Usage
--
-- - #VUID-VkTraceRaysIndirectCommandKHR-width-03638# @width@ /must/ be
-- less than or equal to
-- 'Vulkan.Core10.DeviceInitialization.PhysicalDeviceLimits'::@maxComputeWorkGroupCount@[0]
-- ×
-- 'Vulkan.Core10.DeviceInitialization.PhysicalDeviceLimits'::@maxComputeWorkGroupSize@[0]
--
-- - #VUID-VkTraceRaysIndirectCommandKHR-height-03639# @height@ /must/ be
-- less than or equal to
-- 'Vulkan.Core10.DeviceInitialization.PhysicalDeviceLimits'::@maxComputeWorkGroupCount@[1]
-- ×
-- 'Vulkan.Core10.DeviceInitialization.PhysicalDeviceLimits'::@maxComputeWorkGroupSize@[1]
--
-- - #VUID-VkTraceRaysIndirectCommandKHR-depth-03640# @depth@ /must/ be
-- less than or equal to
-- 'Vulkan.Core10.DeviceInitialization.PhysicalDeviceLimits'::@maxComputeWorkGroupCount@[2]
-- ×
-- 'Vulkan.Core10.DeviceInitialization.PhysicalDeviceLimits'::@maxComputeWorkGroupSize@[2]
--
-- - #VUID-VkTraceRaysIndirectCommandKHR-width-03641# @width@ × @height@
-- × @depth@ /must/ be less than or equal to
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@maxRayDispatchInvocationCount@
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_ray_tracing_pipeline VK_KHR_ray_tracing_pipeline>
data TraceRaysIndirectCommandKHR = TraceRaysIndirectCommandKHR
{ -- | @width@ is the width of the ray trace query dimensions.
width :: Word32
  , -- | @height@ is the height of the ray trace query dimensions.
    height :: Word32
  , -- | @depth@ is the depth of the ray trace query dimensions.
depth :: Word32
}
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (TraceRaysIndirectCommandKHR)
#endif
deriving instance Show TraceRaysIndirectCommandKHR
instance ToCStruct TraceRaysIndirectCommandKHR where
withCStruct x f = allocaBytes 12 $ \p -> pokeCStruct p x (f p)
pokeCStruct p TraceRaysIndirectCommandKHR{..} f = do
poke ((p `plusPtr` 0 :: Ptr Word32)) (width)
poke ((p `plusPtr` 4 :: Ptr Word32)) (height)
poke ((p `plusPtr` 8 :: Ptr Word32)) (depth)
f
cStructSize = 12
cStructAlignment = 4
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr Word32)) (zero)
poke ((p `plusPtr` 4 :: Ptr Word32)) (zero)
poke ((p `plusPtr` 8 :: Ptr Word32)) (zero)
f
instance FromCStruct TraceRaysIndirectCommandKHR where
peekCStruct p = do
width <- peek @Word32 ((p `plusPtr` 0 :: Ptr Word32))
height <- peek @Word32 ((p `plusPtr` 4 :: Ptr Word32))
depth <- peek @Word32 ((p `plusPtr` 8 :: Ptr Word32))
pure $ TraceRaysIndirectCommandKHR
width height depth
instance Storable TraceRaysIndirectCommandKHR where
sizeOf ~_ = 12
alignment ~_ = 4
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero TraceRaysIndirectCommandKHR where
zero = TraceRaysIndirectCommandKHR
zero
zero
zero
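-- An illustrative sketch (not part of the generated bindings): a command for
-- a 1920×1080 dispatch tracing one ray per pixel; the dimensions are example
-- values and must respect the valid-usage limits listed above.
--
-- > fullHdTrace :: TraceRaysIndirectCommandKHR
-- > fullHdTrace = TraceRaysIndirectCommandKHR { width = 1920, height = 1080, depth = 1 }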
-- | VkRayTracingPipelineInterfaceCreateInfoKHR - Structure specifying
-- additional interface information when using libraries
--
-- = Description
--
-- @maxPipelineRayPayloadSize@ is calculated as the maximum number of bytes
-- used by any block declared in the @RayPayloadKHR@ or
-- @IncomingRayPayloadKHR@ storage classes.
-- @maxPipelineRayHitAttributeSize@ is calculated as the maximum number of
-- bytes used by any block declared in the @HitAttributeKHR@ storage class.
-- As variables in these storage classes do not have explicit offsets, the
-- size should be calculated as if each variable has a
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#interfaces-alignment-requirements scalar alignment>
-- equal to the largest scalar alignment of any of the block’s members.
--
-- Note
--
-- There is no explicit upper limit for @maxPipelineRayPayloadSize@, but in
-- practice it should be kept as small as possible. Similar to invocation
-- local memory, it must be allocated for each shader invocation and for
-- devices which support many simultaneous invocations, this storage can
-- rapidly be exhausted, resulting in failure.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_ray_tracing_pipeline VK_KHR_ray_tracing_pipeline>,
-- 'RayTracingPipelineCreateInfoKHR',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
data RayTracingPipelineInterfaceCreateInfoKHR = RayTracingPipelineInterfaceCreateInfoKHR
{ -- | @maxPipelineRayPayloadSize@ is the maximum payload size in bytes used by
-- any shader in the pipeline.
maxPipelineRayPayloadSize :: Word32
, -- | @maxPipelineRayHitAttributeSize@ is the maximum attribute structure size
-- in bytes used by any shader in the pipeline.
--
-- #VUID-VkRayTracingPipelineInterfaceCreateInfoKHR-maxPipelineRayHitAttributeSize-03605#
-- @maxPipelineRayHitAttributeSize@ /must/ be less than or equal to
-- 'PhysicalDeviceRayTracingPipelinePropertiesKHR'::@maxRayHitAttributeSize@
maxPipelineRayHitAttributeSize :: Word32
}
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (RayTracingPipelineInterfaceCreateInfoKHR)
#endif
deriving instance Show RayTracingPipelineInterfaceCreateInfoKHR
instance ToCStruct RayTracingPipelineInterfaceCreateInfoKHR where
withCStruct x f = allocaBytes 24 $ \p -> pokeCStruct p x (f p)
pokeCStruct p RayTracingPipelineInterfaceCreateInfoKHR{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Word32)) (maxPipelineRayPayloadSize)
poke ((p `plusPtr` 20 :: Ptr Word32)) (maxPipelineRayHitAttributeSize)
f
cStructSize = 24
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Word32)) (zero)
poke ((p `plusPtr` 20 :: Ptr Word32)) (zero)
f
instance FromCStruct RayTracingPipelineInterfaceCreateInfoKHR where
peekCStruct p = do
maxPipelineRayPayloadSize <- peek @Word32 ((p `plusPtr` 16 :: Ptr Word32))
maxPipelineRayHitAttributeSize <- peek @Word32 ((p `plusPtr` 20 :: Ptr Word32))
pure $ RayTracingPipelineInterfaceCreateInfoKHR
maxPipelineRayPayloadSize maxPipelineRayHitAttributeSize
instance Storable RayTracingPipelineInterfaceCreateInfoKHR where
sizeOf ~_ = 24
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero RayTracingPipelineInterfaceCreateInfoKHR where
zero = RayTracingPipelineInterfaceCreateInfoKHR
zero
zero
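-- An illustrative sketch (not part of the generated bindings): an interface
-- create info for a pipeline whose shaders use at most a 16-byte ray payload
-- and an 8-byte hit attribute block (example figures only).
--
-- > exampleInterface :: RayTracingPipelineInterfaceCreateInfoKHR
-- > exampleInterface = RayTracingPipelineInterfaceCreateInfoKHR
-- >   { maxPipelineRayPayloadSize      = 16
-- >   , maxPipelineRayHitAttributeSize = 8
-- >   }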
-- | VkRayTracingShaderGroupTypeKHR - Shader group types
--
-- = Description
--
-- Note
--
-- For current group types, the hit group type could be inferred from the
-- presence or absence of the intersection shader, but we provide the type
-- explicitly for future hit groups that do not have that property.
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_ray_tracing_pipeline VK_KHR_ray_tracing_pipeline>,
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_NV_ray_tracing VK_NV_ray_tracing>,
-- 'RayTracingShaderGroupCreateInfoKHR',
-- 'Vulkan.Extensions.VK_NV_ray_tracing.RayTracingShaderGroupCreateInfoNV'
newtype RayTracingShaderGroupTypeKHR = RayTracingShaderGroupTypeKHR Int32
deriving newtype (Eq, Ord, Storable, Zero)
-- | 'RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR' indicates a shader group
-- with a single
-- 'Vulkan.Core10.Enums.ShaderStageFlagBits.SHADER_STAGE_RAYGEN_BIT_KHR',
-- 'Vulkan.Core10.Enums.ShaderStageFlagBits.SHADER_STAGE_MISS_BIT_KHR', or
-- 'Vulkan.Core10.Enums.ShaderStageFlagBits.SHADER_STAGE_CALLABLE_BIT_KHR'
-- shader in it.
pattern RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR = RayTracingShaderGroupTypeKHR 0
-- | 'RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR' specifies a
-- shader group that only hits triangles and /must/ not contain an
-- intersection shader, only closest hit and any-hit shaders.
pattern RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR = RayTracingShaderGroupTypeKHR 1
-- | 'RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR' specifies a
-- shader group that only intersects with custom geometry and /must/
-- contain an intersection shader and /may/ contain closest hit and any-hit
-- shaders.
pattern RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR = RayTracingShaderGroupTypeKHR 2
{-# complete RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR,
RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR,
RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR :: RayTracingShaderGroupTypeKHR #-}
conNameRayTracingShaderGroupTypeKHR :: String
conNameRayTracingShaderGroupTypeKHR = "RayTracingShaderGroupTypeKHR"
enumPrefixRayTracingShaderGroupTypeKHR :: String
enumPrefixRayTracingShaderGroupTypeKHR = "RAY_TRACING_SHADER_GROUP_TYPE_"
showTableRayTracingShaderGroupTypeKHR :: [(RayTracingShaderGroupTypeKHR, String)]
showTableRayTracingShaderGroupTypeKHR =
[ (RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR , "GENERAL_KHR")
, (RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR , "TRIANGLES_HIT_GROUP_KHR")
, (RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR, "PROCEDURAL_HIT_GROUP_KHR")
]
instance Show RayTracingShaderGroupTypeKHR where
showsPrec = enumShowsPrec enumPrefixRayTracingShaderGroupTypeKHR
showTableRayTracingShaderGroupTypeKHR
conNameRayTracingShaderGroupTypeKHR
(\(RayTracingShaderGroupTypeKHR x) -> x)
(showsPrec 11)
instance Read RayTracingShaderGroupTypeKHR where
readPrec = enumReadPrec enumPrefixRayTracingShaderGroupTypeKHR
showTableRayTracingShaderGroupTypeKHR
conNameRayTracingShaderGroupTypeKHR
RayTracingShaderGroupTypeKHR
-- | VkShaderGroupShaderKHR - Shader group shaders
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_ray_tracing_pipeline VK_KHR_ray_tracing_pipeline>,
-- 'getRayTracingShaderGroupStackSizeKHR'
newtype ShaderGroupShaderKHR = ShaderGroupShaderKHR Int32
deriving newtype (Eq, Ord, Storable, Zero)
-- | 'SHADER_GROUP_SHADER_GENERAL_KHR' uses the shader specified in the group
-- with 'RayTracingShaderGroupCreateInfoKHR'::@generalShader@
pattern SHADER_GROUP_SHADER_GENERAL_KHR = ShaderGroupShaderKHR 0
-- | 'SHADER_GROUP_SHADER_CLOSEST_HIT_KHR' uses the shader specified in the
-- group with 'RayTracingShaderGroupCreateInfoKHR'::@closestHitShader@
pattern SHADER_GROUP_SHADER_CLOSEST_HIT_KHR = ShaderGroupShaderKHR 1
-- | 'SHADER_GROUP_SHADER_ANY_HIT_KHR' uses the shader specified in the group
-- with 'RayTracingShaderGroupCreateInfoKHR'::@anyHitShader@
pattern SHADER_GROUP_SHADER_ANY_HIT_KHR = ShaderGroupShaderKHR 2
-- | 'SHADER_GROUP_SHADER_INTERSECTION_KHR' uses the shader specified in the
-- group with 'RayTracingShaderGroupCreateInfoKHR'::@intersectionShader@
pattern SHADER_GROUP_SHADER_INTERSECTION_KHR = ShaderGroupShaderKHR 3
{-# complete SHADER_GROUP_SHADER_GENERAL_KHR,
SHADER_GROUP_SHADER_CLOSEST_HIT_KHR,
SHADER_GROUP_SHADER_ANY_HIT_KHR,
SHADER_GROUP_SHADER_INTERSECTION_KHR :: ShaderGroupShaderKHR #-}
conNameShaderGroupShaderKHR :: String
conNameShaderGroupShaderKHR = "ShaderGroupShaderKHR"
enumPrefixShaderGroupShaderKHR :: String
enumPrefixShaderGroupShaderKHR = "SHADER_GROUP_SHADER_"
showTableShaderGroupShaderKHR :: [(ShaderGroupShaderKHR, String)]
showTableShaderGroupShaderKHR =
[ (SHADER_GROUP_SHADER_GENERAL_KHR , "GENERAL_KHR")
, (SHADER_GROUP_SHADER_CLOSEST_HIT_KHR , "CLOSEST_HIT_KHR")
, (SHADER_GROUP_SHADER_ANY_HIT_KHR , "ANY_HIT_KHR")
, (SHADER_GROUP_SHADER_INTERSECTION_KHR, "INTERSECTION_KHR")
]
instance Show ShaderGroupShaderKHR where
showsPrec = enumShowsPrec enumPrefixShaderGroupShaderKHR
showTableShaderGroupShaderKHR
conNameShaderGroupShaderKHR
(\(ShaderGroupShaderKHR x) -> x)
(showsPrec 11)
instance Read ShaderGroupShaderKHR where
readPrec = enumReadPrec enumPrefixShaderGroupShaderKHR
showTableShaderGroupShaderKHR
conNameShaderGroupShaderKHR
ShaderGroupShaderKHR
type KHR_RAY_TRACING_PIPELINE_SPEC_VERSION = 1
-- No documentation found for TopLevel "VK_KHR_RAY_TRACING_PIPELINE_SPEC_VERSION"
pattern KHR_RAY_TRACING_PIPELINE_SPEC_VERSION :: forall a . Integral a => a
pattern KHR_RAY_TRACING_PIPELINE_SPEC_VERSION = 1
type KHR_RAY_TRACING_PIPELINE_EXTENSION_NAME = "VK_KHR_ray_tracing_pipeline"
-- No documentation found for TopLevel "VK_KHR_RAY_TRACING_PIPELINE_EXTENSION_NAME"
pattern KHR_RAY_TRACING_PIPELINE_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern KHR_RAY_TRACING_PIPELINE_EXTENSION_NAME = "VK_KHR_ray_tracing_pipeline"
| expipiplus1/vulkan | src/Vulkan/Extensions/VK_KHR_ray_tracing_pipeline.hs | bsd-3-clause | 195,430 | 2 | 22 | 34,423 | 14,953 | 9,377 | 5,576 | -1 | -1 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
-- https://github.com/Gabriel439/post-rfc/blob/master/sotu.md#scripting--command-line-applications
module Lib
( someFunc
) where
import qualified Data.ByteString.Char8 as BS
import qualified Data.Text as Text
import Data.Yaml (FromJSON)
import qualified Data.Yaml as Yaml
import GHC.Generics (Generic)
import Turtle (shell, (<>))
import qualified Turtle as T
data Build = Build { language :: String
, install :: [T.Text]
, blurb :: Maybe String}
deriving (Show, Generic, FromJSON)
getYaml :: FilePath -> IO BS.ByteString
getYaml = BS.readFile
runCmd :: T.Text -> IO ()
runCmd cmd = do
print ("Running: " <> cmd)
res <- shell cmd T.empty
case res of
T.ExitSuccess -> return ()
T.ExitFailure n -> T.die ("Shell cmd: " <> cmd <> " failed with code: " <> T.repr n)
runCmds :: [T.Text] -> IO ()
runCmds cmds = T.sh $ do
res <- T.select cmds
T.liftIO (runCmd res)
someFunc :: IO ()
someFunc =
do
d <- Yaml.decodeEither <$> getYaml "build.yml"
case d of
Left err -> T.die $ Text.pack err
Right ps -> runCmds (install ps)
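-- An illustrative sketch of the @build.yml@ this program expects; the field
-- names come from the 'Build' record above, the concrete commands are made up:
--
-- > language: haskell
-- > install:
-- >   - stack setup
-- >   - stack build
-- > blurb: optional one-line description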
| minimal/build-lib | src/Lib.hs | bsd-3-clause | 1,322 | 0 | 14 | 406 | 371 | 198 | 173 | 35 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE EmptyDataDecls, MultiParamTypeClasses, FunctionalDependencies,
Rank2Types, DeriveDataTypeable, FlexibleInstances,
UndecidableInstances, FlexibleContexts,ScopedTypeVariables,
TypeFamilies
#-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.TypeLevel.Bool
-- Copyright : (c) 2008 Benedikt Huber (port to Associative types (ghc 6.9+))
-- (c) 2008 Alfonso Acosta, Oleg Kiselyov, Wolfgang Jeltsch
-- and KTH's SAM group
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental (MPTC, non-standard instances)
-- Portability : non-portable
--
-- Type-level Booleans.
--
----------------------------------------------------------------------------
module Data.TypeLevel.Bool (
-- * Type-level boolean values
-- Bool, toBool,
False, false,
True, true,
-- reifyBool,
-- * Type-level boolean operations
Not,
And,
Or
-- Not, not,
-- And, (&&),
-- Or, (||),
-- Xor, xor,
-- Impl, imp,
) where
import Data.Generics (Typeable)
import Prelude hiding (Bool, not, (&&), (||), Eq)
import qualified Prelude as P
------------------------------------
-- Definition of type-level Booleans
------------------------------------
-- | True type-level value
data True deriving Typeable
instance Show True where
show _ = "True"
-- | True value-level reflecting function
true :: True
true = undefined
-- | False type-level value
data False deriving Typeable
instance Show False where
show _ = "False"
-- | False value-level reflecting function
false :: False
false = undefined
type family And b_0 b_1
type instance And True True = True
type instance And True False = False
type instance And False True = False
type instance And False False = False
type family Or b_0 b_1
type instance Or True True = True
type instance Or True False = True
type instance Or False True = True
type instance Or False False = False
type family Not b
type instance Not True = False
type instance Not False = True
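-- A small illustration of how these families reduce (e.g. via @:kind!@ in
-- GHCi); the exact printer output depends on the GHC version:
--
-- > :kind! And True (Not False)   -- reduces to True
-- > :kind! Or False False         -- reduces to False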
#if 0
type family Id b
type family Const a b
type instance Id a = a
type instance Const b a = b
-- | Booleans, internal version
class BoolI b where
toBool :: b -> P.Bool
type Not b
type And b :: * -> *
type Or b :: * -> *
type Xor b :: * -> *
type Impl b :: * -> *
type BoolEq b :: * -> *
-- To prevent the user from adding new instances to BoolI we do NOT export
-- BoolI itself. Rather, we export the following proxy (Bool).
-- The proxy entails BoolI and so can be used to add BoolI
-- constraints in the signatures. However, all the constraints below
-- are expressed in terms of BoolI rather than the proxy. Thus, even if the
-- user adds new instances to the proxy, it would not matter.
-- Besides, because the following proxy instances are most general,
-- one may not add further instances without the overlapping instances
-- extension.
-- | Type-level Booleans
class BoolI b => Bool b
instance BoolI b => Bool b
instance BoolI True where
toBool _ = True
type Not True = False
type And True = Id
type Or True = Const True
type Xor True = Not
type Impl True = Id
type BoolEq True = Id
instance BoolI False where
toBool _ = False
type Not False = True
type And False = Const False
type Or False = Id
type Xor False = Id
type Impl False = Const True
type BoolEq False = Not
-- | Reification function. In CPS style (best possible solution)
reifyBool :: P.Bool -> (forall b . Bool b => b -> r) -> r
reifyBool True f = f true
reifyBool False f = f false
-------------
-- Operations
-------------
-- | value-level reflection function for the 'Not' type-level relation
not :: b1 -> Not b1
not = undefined
-- | 'And' type-level relation. @And b1 b2 b3@ establishes that
-- @b1 && b2 = b3@
-- | value-level reflection function for the 'And' type-level relation
(&&) :: b1 -> b2 -> And b1 b2
(&&) = undefined
infixr 3 &&
-- | Or type-level relation. @Or b1 b2 b3@ establishes that
-- @b1 || b2 = b3@
-- | value-level reflection function for the 'Or' type-level relation
(||) :: b1 -> b2 -> Or b1 b2
(||) = undefined
infixr 2 ||
-- | Exclusive or type-level relation. @Xor b1 b2 b3@ establishes that
-- @xor b1 b2 = b3@
-- | value-level reflection function for the 'Xor' type-level relation
xor :: b1 -> b2 -> Xor b1 b2
xor = undefined
-- | Implication type-level relation. @Imp b1 b2 b3@ establishes that
-- @b1 =>b2 = b3@
-- | value-level reflection function for the Imp type-level relation
imp :: b1 -> b2 -> Impl b1 b2
imp = undefined
-- Although equality can be defined as the composition of Xor and Not
-- we define it specifically
-- | Boolean equality type-level relation
-- FIXME: eq should be named (==) but it clashes with the (==) defined
-- in Data.TypeLevel.Num . The chosen (and ugly) workaround was
-- to rename it to eq.
-- | value-level reflection function for the 'Eq' type-level relation
boolEq :: b1 -> b2 -> BoolEq b1 b2
boolEq = undefined
#endif
| coreyoconnor/type-level-tf | src/Data/TypeLevel/Bool.hs | bsd-3-clause | 5,146 | 0 | 10 | 1,123 | 842 | 511 | 331 | -1 | -1 |
-- | All the solutions of the 4-queens puzzle.
module Example.Monad.Queens4All
( run )
where
import Control.Applicative
import Control.Monad ( join )
import Data.Maybe
import qualified Data.Traversable as T
import Z3.Monad
run :: IO ()
run = do
sols <- evalZ3With Nothing opts script
putStrLn "Solutions: "
mapM_ print sols
where opts = opt "MODEL" True +? opt "MODEL_COMPLETION" True
type Solution = [Integer]
getSolutions :: AST -> AST -> AST -> AST -> Z3 [Solution]
getSolutions q1 q2 q3 q4 = go []
  where
    go acc = do
      mb_sol <- getSolution
      case mb_sol of
        Nothing  -> return acc
        Just sol -> do
          restrictSolution sol
          go (sol : acc)

    restrictSolution :: Solution -> Z3 ()
    restrictSolution [c1,c2,c3,c4] =
      assert =<< mkNot =<< mkOr =<< T.sequence
        [ mkEq q1 =<< mkIntNum c1
        , mkEq q2 =<< mkIntNum c2
        , mkEq q3 =<< mkIntNum c3
        , mkEq q4 =<< mkIntNum c4
        ]
    restrictSolution _ = error "invalid argument"

    getSolution :: Z3 (Maybe Solution)
    getSolution = fmap snd $ withModel $ \m ->
      catMaybes <$> mapM (evalInt m) [q1,q2,q3,q4]
script :: Z3 [Solution]
script = do
q1 <- mkFreshIntVar "q1"
q2 <- mkFreshIntVar "q2"
q3 <- mkFreshIntVar "q3"
q4 <- mkFreshIntVar "q4"
_1 <- mkIntNum (1::Integer)
_4 <- mkIntNum (4::Integer)
-- the ith-queen is in the ith-row.
-- qi is the column of the ith-queen
assert =<< mkAnd =<< T.sequence
[ mkLe _1 q1, mkLe q1 _4 -- 1 <= q1 <= 4
, mkLe _1 q2, mkLe q2 _4
, mkLe _1 q3, mkLe q3 _4
, mkLe _1 q4, mkLe q4 _4
]
-- different columns
assert =<< mkDistinct [q1,q2,q3,q4]
-- avoid diagonal attacks
assert =<< mkNot =<< mkOr =<< T.sequence
[ diagonal 1 q1 q2 -- diagonal line of attack between q1 and q2
, diagonal 2 q1 q3
, diagonal 3 q1 q4
, diagonal 1 q2 q3
, diagonal 2 q2 q4
, diagonal 1 q3 q4
]
getSolutions q1 q2 q3 q4
where mkAbs :: AST -> Z3 AST
mkAbs x = do
_0 <- mkIntNum (0::Integer)
join $ mkIte <$> mkLe _0 x <*> pure x <*> mkUnaryMinus x
diagonal :: Integer -> AST -> AST -> Z3 AST
diagonal d c c' =
join $ mkEq <$> (mkAbs =<< mkSub [c',c]) <*> (mkIntNum d)
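-- For the 4-queens puzzle the solver finds exactly two solutions, which 'run'
-- prints (in an unspecified order) as lists of column positions per row:
-- [2,4,1,3] and [3,1,4,2].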
| sukwon0709/z3-haskell | examples/Example/Monad/Queens4All.hs | bsd-3-clause | 2,355 | 0 | 16 | 733 | 824 | 412 | 412 | 62 | 3 |
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE UnicodeSyntax #-}
module Test.Complexity.Pretty (prettyStats, printStats) where
import "base-unicode-symbols" Data.Function.Unicode ( (∘) )
import qualified "vector" Data.Vector as V
import "this" Test.Complexity.Types ( MeasurementStats(..), Sample )
import "pretty" Text.PrettyPrint
import "base" Text.Printf ( printf )
prettyStats ∷ MeasurementStats → Doc
prettyStats (MeasurementStats {..}) = text "desc:" <+> text msDesc
$+$ text ""
$+$ vcat (map ppSample msSamples)
where ppSample ∷ Sample → Doc
ppSample (x, y) = (text ∘ printf "%3i") x <+> char '|' <+> ppStats y
ppStats (Stats {..}) = int (V.length statsSamples)
<+> hsep (map (text ∘ printf "%13.9f")
[statsMin, statsMean2, statsMax, statsStdDev]
)
printStats ∷ [MeasurementStats] → IO ()
printStats = mapM_ (\s → do putStrLn ∘ render ∘ prettyStats $ s
putStrLn ""
)
| roelvandijk/complexity | Test/Complexity/Pretty.hs | bsd-3-clause | 1,204 | 0 | 13 | 404 | 306 | 169 | 137 | 21 | 1 |
{-# LANGUAGE ViewPatterns, GADTs, FlexibleContexts, DataKinds #-}
module Sprite.GUI where
import Graphics.UI.Gtk.OpenGL
import Graphics.UI.Gtk hiding (Point, Object)
import Graphics.Rendering.OpenGL
import Control.Concurrent.STM
import Control.Monad.Trans
import Data.List.PointedList
import Sprite.Widget (graphing)
import Sprite.Logic
import Sprite.GL
import Sprite.D2
run :: (Point -> a -> STM a)
-> (ScrollDirection -> Point -> a -> STM a)
-> (a -> IO ())
-> RenderSocket IO Input
-> RenderSocket IO Output
-> TVar (PointedList (Graph a))
-> IO ()
run setx scrollx renderx renderSI renderSO ref = do
initGUI
bootGL
window <- windowNew
onDestroy window mainQuit
set window [ containerBorderWidth := 8,
windowTitle := "tracks widget" ]
hb <- hBoxNew False 1
connects <- graphing setx scrollx renderx renderSI renderSO ref
set window [containerChild := connects]
widgetShowAll window
dat <- widgetGetDrawWindow $ window
cursorNew Tcross >>= drawWindowSetCursor dat . Just
mainGUI
| paolino/sprites | Sprite/GUI.hs | bsd-3-clause | 1,071 | 0 | 15 | 224 | 326 | 165 | 161 | 33 | 1 |
module Rules.Libffi (libffiRules, libffiDependencies) where
import Base
import Expression
import GHC
import Oracles
import Rules.Actions
import Settings.Builders.Common
import Settings.Packages.Rts
import Settings.TargetDirectory
import Settings.User
rtsBuildPath :: FilePath
rtsBuildPath = targetPath Stage1 rts -/- "build"
libffiDependencies :: [FilePath]
libffiDependencies = (rtsBuildPath -/-) <$> [ "ffi.h", "ffitarget.h" ]
libffiBuild :: FilePath
libffiBuild = "libffi/build"
libffiLibrary :: FilePath
libffiLibrary = libffiBuild -/- "inst/lib/libffi.a"
libffiMakefile :: FilePath
libffiMakefile = libffiBuild -/- "Makefile.in"
fixLibffiMakefile :: String -> String
fixLibffiMakefile = unlines . map
( replace "-MD" "-MMD"
. replace "@toolexeclibdir@" "$(libdir)"
. replace "@INSTALL@" "$(subst ../install-sh,C:/msys/home/chEEtah/ghc/install-sh,@INSTALL@)"
) . lines
target :: PartialTarget
target = PartialTarget Stage0 libffi
-- TODO: remove code duplication (see Settings/Builders/GhcCabal.hs)
configureEnvironment :: Action [CmdOption]
configureEnvironment = do
cFlags <- interpretPartial target . fromDiffExpr $ mconcat
[ cArgs
, argStagedSettingList ConfCcArgs ]
ldFlags <- interpretPartial target $ fromDiffExpr ldArgs
sequence [ builderEnv "CC" $ Gcc Stage1
, builderEnv "CXX" $ Gcc Stage1
, builderEnv "LD" Ld
, builderEnv "AR" Ar
, builderEnv "NM" Nm
, builderEnv "RANLIB" Ranlib
, return . AddEnv "CFLAGS" $ unwords cFlags ++ " -w"
, return . AddEnv "LDFLAGS" $ unwords ldFlags ++ " -w" ]
where
builderEnv var builder = do
needBuilder False builder
path <- builderPath builder
return $ AddEnv var path
configureArguments :: Action [String]
configureArguments = do
top <- topDirectory
targetPlatform <- setting TargetPlatform
return [ "--prefix=" ++ top ++ "/libffi/build/inst"
, "--libdir=" ++ top ++ "/libffi/build/inst/lib"
, "--enable-static=yes"
, "--enable-shared=no" -- TODO: add support for yes
, "--host=" ++ targetPlatform ]
libffiRules :: Rules ()
libffiRules = do
libffiDependencies &%> \_ -> do
when trackBuildSystem $ need [sourcePath -/- "Rules/Libffi.hs"]
liftIO $ removeFiles libffiBuild ["//*"]
tarballs <- getDirectoryFiles "" ["libffi-tarballs/libffi*.tar.gz"]
when (length tarballs /= 1) $
            putError $ "libffiRules: exactly one libffi tarball expected "
                     ++ "(found: " ++ show tarballs ++ ")."
need tarballs
build $ fullTarget target Tar tarballs ["libffi-tarballs"]
let libname = dropExtension . dropExtension . takeFileName $ head tarballs
moveDirectory ("libffi-tarballs" -/- libname) libffiBuild
fixFile libffiMakefile fixLibffiMakefile
forM_ ["config.guess", "config.sub"] $ \file ->
copyFile file (libffiBuild -/- file)
envs <- configureEnvironment
args <- configureArguments
runConfigure libffiBuild envs args
runMake libffiBuild ["MAKEFLAGS="]
runMake libffiBuild ["MAKEFLAGS=", "install"]
forM_ ["ffi.h", "ffitarget.h"] $ \file -> do
let src = libffiBuild -/- "inst/lib" -/- libname -/- "include" -/- file
copyFile src (rtsBuildPath -/- file)
libffiName <- rtsLibffiLibraryName
copyFile libffiLibrary (rtsBuildPath -/- "lib" ++ libffiName <.> "a")
putSuccess $ "| Successfully built custom library 'libffi'"
-- chmod +x libffi/ln
-- # wc on OS X has spaces in its output, which libffi's Makefile
-- # doesn't expect, so we tweak it to sed them out
-- mv libffi/build/Makefile libffi/build/Makefile.orig
-- sed "s#wc -w#wc -w | sed 's/ //g'#" < libffi/build/Makefile.orig > libffi/build/Makefile
| quchen/shaking-up-ghc | src/Rules/Libffi.hs | bsd-3-clause | 3,933 | 0 | 21 | 941 | 863 | 432 | 431 | 82 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Numeral.KO.Corpus
( corpus ) where
import Prelude
import Data.String
import Duckling.Locale
import Duckling.Numeral.Types
import Duckling.Resolve
import Duckling.Testing.Types
corpus :: Corpus
corpus = (testContext {locale = makeLocale KO Nothing}, testOptions, allExamples)
allExamples :: [Example]
allExamples = concat
[ examples (NumeralValue 0)
[ "0"
, "영"
, "빵"
, "공"
]
, examples (NumeralValue 1)
[ "1"
, "일"
, "하나"
, "한"
]
, examples (NumeralValue 10)
[ "10"
, "십"
, "열"
]
, examples (NumeralValue 11)
[ "11"
, "십일"
, "열하나"
, "십하나"
, "열한"
]
, examples (NumeralValue 20)
[ "20"
, "이십"
, "스물"
]
, examples (NumeralValue 35)
[ "35"
, "삼십오"
, "서른다섯"
]
, examples (NumeralValue 47)
[ "47"
, "사십칠"
, "마흔일곱"
]
, examples (NumeralValue 52)
[ "52"
, "오십이"
, "쉰둘"
, "쉰두"
]
, examples (NumeralValue 69)
[ "69"
, "육십구"
, "예순아홉"
]
, examples (NumeralValue 71)
[ "71"
, "칠십일"
, "일흔하나"
, "일흔한"
]
, examples (NumeralValue 84)
[ "84"
, "팔십사"
, "여든넷"
]
, examples (NumeralValue 93)
[ "93"
, "구십삼"
, "아흔셋"
]
, examples (NumeralValue 100)
[ "100"
, "백"
]
, examples (NumeralValue 123)
[ "123"
, "백이십삼"
]
, examples (NumeralValue 579)
[ "579"
, "오백칠십구"
]
, examples (NumeralValue 1000)
[ "1000"
, "천"
]
, examples (NumeralValue 1723)
[ "1723"
, "천칠백이십삼"
]
, examples (NumeralValue 5619)
[ "5619"
, "오천육백십구"
]
, examples (NumeralValue 10000)
[ "10000"
, "만"
, "일만"
]
, examples (NumeralValue 12345)
[ "12345"
, "만이천삼백사십오"
, "일만이천삼백사십오"
]
, examples (NumeralValue 58194)
[ "58194"
, "오만팔천백구십사"
]
, examples (NumeralValue 581900)
[ "581900"
, "오십팔만천구백"
]
, examples (NumeralValue 5819014)
[ "5819014"
, "오백팔십일만구천십사"
]
, examples (NumeralValue 58190148)
[ "58190148"
, "오천팔백십구만백사십팔"
]
, examples (NumeralValue 100000000)
[ "100000000"
, "일억"
]
, examples (NumeralValue 274500000000)
[ "274500000000"
, "이천칠백사십오억"
]
, examples (NumeralValue 100000002)
[ "100000002"
, "일억이"
]
, examples (NumeralValue 27350000)
[ "27350000"
, "이천칠백삼십오만"
]
, examples (NumeralValue 3235698120)
[ "3235698120"
, "삼십이억삼천오백육십구만팔천백이십"
]
, examples (NumeralValue 40234985729)
[ "40234985729"
, "사백이억삼천사백구십팔만오천칠백이십구"
]
, examples (NumeralValue 701239801123)
[ "701239801123"
, "칠천십이억삼천구백팔십만천백이십삼"
]
, examples (NumeralValue 3.4)
[ "3.4"
, "삼점사"
]
, examples (NumeralValue 4123.3)
[ "4123.3"
, "사천백이십삼점삼"
]
, examples (NumeralValue 1.23)
[ "일점이삼"
]
, examples (NumeralValue (-3))
[ "-3"
, "마이너스3"
, "마이너스삼"
, "마이너스 3"
, "마이나스3"
, "마이나스 3"
]
, examples (NumeralValue 0.75)
[ "3/4"
, "사분의삼"
]
]
| facebookincubator/duckling | Duckling/Numeral/KO/Corpus.hs | bsd-3-clause | 5,023 | 0 | 11 | 2,235 | 891 | 507 | 384 | 144 | 1 |
-- | Yesod.Test.Json provides convenience functions for working
-- with Test.Hspec and Network.Wai.Test on JSON data.
module Yesod.Test.Json (
testApp,
APIFunction,
assertBool,
assertString,
assertOK,
assertJSON,
Session(..),
H.Assertion,
module Test.Hspec,
module Data.Aeson,
SResponse(..)
) where
import qualified Test.HUnit as H
import qualified Data.ByteString.Lazy.Char8 as L8
import qualified Data.ByteString.Lazy as L
import Data.ByteString (ByteString)
import Data.Text (Text)
import Data.Aeson
import Network.HTTP.Types
import Test.Hspec
import Network.Wai
import Network.Wai.Test
import Control.Monad.IO.Class
import Yesod.Default.Config
import Data.Conduit.List
-- | A request to your server.
type APIFunction = ByteString      -- ^ method
                   -> [Text]       -- ^ path
                   -> Maybe Value  -- ^ JSON data
                   -> Session SResponse
-- | Assert a boolean value
assertBool :: String -> Bool -> Session ()
assertBool s b = liftIO $ H.assertBool s b
-- | Fail a test with an error string
assertString :: String -> Session ()
assertString = liftIO . H.assertString
-- | Assert a 200 response code
assertOK :: SResponse -> Session ()
assertOK SResponse{simpleStatus = s, simpleBody = b} = assertBool (concat
[ "Expected status code 200, but received "
, show sc
, ". Response body: "
, show (L8.unpack b)
]) $ sc == 200
where
sc = statusCode s
-- | Assert a JSON body meeting some requirement
assertJSON :: (ToJSON a, FromJSON a) => (a -> (String, Bool)) -> SResponse -> Session ()
assertJSON f SResponse{simpleBody = lbs} = do
case decode lbs of
Nothing -> assertString $ "Invalid JSON: " ++ show (L8.unpack lbs)
Just a ->
case fromJSON a of
Error s -> assertString (concat [s, "\nInput JSON: ", show a])
Success x -> uncurry assertBool (f x)
-- | Make a request to your server
apiRequest :: AppConfig env extra -> APIFunction
apiRequest conf m p x = srequest $ SRequest r (maybe L.empty encode x) where
r = defaultRequest {
serverPort = appPort conf,
requestBody = sourceList . L.toChunks $ encode x,
requestMethod = m,
pathInfo = p
}
-- | Run a test suite for your 'Application'
testApp :: Application -> AppConfig env extra ->
(((APIFunction -> Session ()) -> H.Assertion) -> Spec) -> IO ()
testApp app conf specfun = do
let apiTest f = runSession (f (apiRequest conf)) app
hspec $ (specfun apiTest)
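-- An illustrative sketch of a spec run through 'testApp'; the application
-- value, config and the @status@ endpoint are assumptions, not part of this
-- module:
--
-- > main = testApp myApp conf $ \apiTest ->
-- >   describe "status endpoint" $
-- >     it "answers 200" $ apiTest $ \api -> do
-- >       resp <- api "GET" ["status"] Nothing
-- >       assertOK resp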
| bogiebro/yesod-test-json | Yesod/Test/Json.hs | bsd-3-clause | 2,449 | 26 | 17 | 539 | 743 | 407 | 336 | 61 | 3 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DeriveDataTypeable #-}
module Plots.Axis.Labels where
import Control.Lens hiding (( # ))
import Data.Default
import Data.Data
import Numeric
import Diagrams.Prelude hiding (view)
import Diagrams.TwoD.Text
import Data.Monoid.Recommend
------------------------------------------------------------------------
-- Axis labels
------------------------------------------------------------------------
-- Labels for the axis. Pretty basic right now.
data AxisLabelPosition
= MiddleAxisLabel
| LowerAxisLabel
| UpperAxisLabel
data AxisLabelPlacement
= InsideAxisLabel
| OutsideAxisLabel
-- | Function to render the axis label from a string. This is very basic
-- now and will be replace by a more sophisticated system.
type AxisLabelFunction b v n = TextAlignment n -> String -> QDiagram b v n Any
data AxisLabel b v n = AxisLabel
{ _axisLabelFunction :: AxisLabelFunction b v n
, _axisLabelText :: String
, _axisLabelStyle :: Style v n
, _axisLabelGap :: n
, _axisLabelPos :: AxisLabelPosition
, _axisLabelPlacement :: AxisLabelPlacement
}
makeLenses ''AxisLabel
instance (TypeableFloat n, Renderable (Text n) b)
=> Default (AxisLabel b V2 n) where
def = AxisLabel
{ _axisLabelFunction = mkText'
, _axisLabelText = ""
, _axisLabelStyle = mempty & recommendFontSize (output 8)
, _axisLabelGap = 20
, _axisLabelPos = MiddleAxisLabel
, _axisLabelPlacement = OutsideAxisLabel
}
type AxisLabels b v n = v (AxisLabel b v n)
------------------------------------------------------------------------
-- Tick labels
------------------------------------------------------------------------
-- Labels that are placed next to the ticks (usually) of an axis.
-- | Tick labels functions are used to draw the tick labels. They has access to
-- the major ticks and the current bounds. Returns the position of the
-- tick and label to use at that position.
type TickLabelFunction b v n
= [n] -> (n,n) -> TextAlignment n -> [(n, QDiagram b v n Any)]
data TickLabels b v n = TickLabels
{ _tickLabelFunction :: TickLabelFunction b v n
, _tickLabelStyle :: Style v n
, _tickGap :: n
} deriving Typeable
makeLenses ''TickLabels
type AxisTickLabels b v n = v (TickLabels b v n)
instance (TypeableFloat n, Renderable (Text n) b)
=> Default (TickLabels b V2 n) where
def = TickLabels
{ _tickLabelFunction = atMajorTicks label
, _tickLabelStyle = mempty & recommendFontSize (output 8)
, _tickGap = 8
}
-- | Make a 'TickLabelFunction' by specifying how to draw a single label
-- from a position on the axis.
atMajorTicks
:: (TextAlignment n -> n -> QDiagram b v n Any)
-> TickLabelFunction b v n
atMajorTicks f ticks _ a = map ((,) <*> f a) ticks
-- | Standard way to render a label by using 'Text'.
label
:: (TypeableFloat n, Renderable (Text n) b)
=> TextAlignment n
-> n
-> QDiagram b V2 n Any
label a n = mkText' a $ showFFloat (Just 2) n ""
leftLabel :: (TypeableFloat n, Renderable (Text n) b) => n -> QDiagram b V2 n Any
leftLabel n = alignedText 1 0.5 (showFFloat (Just 2) n "")
-- | Use the list of strings as the labels for the axis, starting at 0
-- and going to 1, 2, 3 ...
stringLabels :: Num n
=> (TextAlignment n -> String -> QDiagram b v n Any)
-> [String]
-> TickLabelFunction b v n
stringLabels f ls _ _ a = imap (\i l -> (fromIntegral i, f a l)) ls
-- -- horrible name
-- labelFunctionFromTicks :: (Double -> Diagram b R2) -> TickFunction -> LabelFunction b
-- labelFunctionFromTickFunction f aF bounds = map ((,) <*> f) (aF bounds)
| AjayRamanathan/plots | src/Plots/Axis/Labels.hs | bsd-3-clause | 3,989 | 0 | 11 | 927 | 850 | 478 | 372 | 73 | 1 |
module Rules.Generators.GhcVersionH (generateGhcVersionH) where
import Base
import Expression
import Oracles
import Settings.User
generateGhcVersionH :: Expr String
generateGhcVersionH = do
when trackBuildSystem . lift $
need [sourcePath -/- "Rules/Generators/GhcVersionH.hs"]
version <- getSetting ProjectVersionInt
patchLevel1 <- getSetting ProjectPatchLevel1
patchLevel2 <- getSetting ProjectPatchLevel2
return . unlines $
[ "#ifndef __GHCVERSION_H__"
, "#define __GHCVERSION_H__"
, ""
, "#ifndef __GLASGOW_HASKELL__"
, "# define __GLASGOW_HASKELL__ " ++ version
, "#endif"
, ""]
++
[ "#define __GLASGOW_HASKELL_PATCHLEVEL1__ " ++ patchLevel1 | patchLevel1 /= "" ]
++
[ "#define __GLASGOW_HASKELL_PATCHLEVEL2__ " ++ patchLevel2 | patchLevel2 /= "" ]
++
[ ""
, "#define MIN_VERSION_GLASGOW_HASKELL(ma,mi,pl1,pl2) (\\"
, " ((ma)*100+(mi)) < __GLASGOW_HASKELL__ || \\"
, " ((ma)*100+(mi)) == __GLASGOW_HASKELL__ \\"
, " && (pl1) < __GLASGOW_HASKELL_PATCHLEVEL1__ || \\"
, " ((ma)*100+(mi)) == __GLASGOW_HASKELL__ \\"
, " && (pl1) == __GLASGOW_HASKELL_PATCHLEVEL1__ \\"
, " && (pl2) <= __GLASGOW_HASKELL_PATCHLEVEL2__ )"
, ""
, "#endif /* __GHCVERSION_H__ */" ]
| quchen/shaking-up-ghc | src/Rules/Generators/GhcVersionH.hs | bsd-3-clause | 1,412 | 0 | 12 | 397 | 201 | 110 | 91 | 35 | 1 |
{-# LANGUAGE DeriveDataTypeable, RecordWildCards
, OverloadedStrings, StandaloneDeriving
, ScopedTypeVariables, CPP #-}
module Database where
import Data.Typeable
import Data.TCache.IndexQuery
import Data.TCache.DefaultPersistence
import Data.TCache.AWS
import Data.Monoid
import qualified Data.Text as T
import Data.String
import Data.ByteString.Lazy.Char8 hiding (index)
-- #define ALONE -- to execute it alone, uncomment this
#ifdef ALONE
import MFlow.Wai.Blaze.Html.All
main= do
syncWrite $ Asyncronous 120 defaultCheck 1000
index idnumber
runNavigation "" $ transientNav grid
#else
import MFlow.Wai.Blaze.Html.All hiding(select, page)
import Menu
#endif
-- to run it alone, remove Menu.hs and uncomment this:
--askm= ask
--
--main= do
-- syncWrite $ Asyncronous 120 defaultCheck 1000
-- index idnumber
-- runNavigation "" $ step database
data MyData= MyData{idnumber :: Int, textdata :: T.Text} deriving (Typeable, Read, Show) -- that is enough for file persistence
instance Indexable MyData where
key= show . idnumber -- the key of the register
domain= "mflowdemo"
instance Serializable MyData where
serialize= pack . show
deserialize= read . unpack
setPersist = const . Just $ amazonS3Persist domain -- False
data Options= NewText | Exit deriving (Show, Typeable)
database= do
liftIO $ index idnumber
database'
database'= do
all <- allTexts
r <- page $ listtexts all
case r of
NewText -> do
text <- page $ p "Insert the text"
++> htmlEdit ["bold","italic"] "" -- rich text editor with bold and italic buttons
(getMultilineText "" <! [("rows","3"),("cols","80")]) <++ br
<** submitButton "enter"
addtext all text -- store the name in the cache (later will be written to disk automatically)
database'
Exit -> return ()
where
menu= wlink NewText << p "enter a new text" <|>
wlink Exit << p "exit to the home page"
listtexts all = do
h3 "list of all texts"
++> mconcat[p $ preEscapedToHtml t >> hr | t <- all]
++> menu
<++ b "or press the back button or enter the URL any other page in the web site"
addtext all text= liftIO . atomically . newDBRef $ MyData (Prelude.length all) text
allTexts= liftIO . atomically . select textdata $ idnumber .>=. (0 :: Int)
| agocorona/MFlow | Demos/Database.hs | bsd-3-clause | 2,521 | 0 | 20 | 689 | 543 | 290 | 253 | 47 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
module Network.Sock.Handler.Common
where
------------------------------------------------------------------------------
import Control.Concurrent.MVar.Lifted
import Control.Concurrent.STM
import Control.Concurrent.STM.TMChan
import Control.Concurrent.STM.TMChan.Extra
import Control.Monad
import Control.Monad.Trans (MonadIO, liftIO, lift)
import Control.Monad.Trans.Resource.Extra as R
------------------------------------------------------------------------------
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString as BS
import qualified Data.Conduit as C
import Data.Proxy
import Data.Int (Int64)
------------------------------------------------------------------------------
import qualified Network.HTTP.Types as H (Status)
import qualified Network.HTTP.Types.Request as H
import qualified Network.HTTP.Types.Response as H
import qualified Network.HTTP.Types.Extra as H
------------------------------------------------------------------------------
import qualified Blaze.ByteString.Builder as B
------------------------------------------------------------------------------
import Network.Sock.Frame
import Network.Sock.Request
import Network.Sock.Session
import Network.Sock.Types.Protocol
import Network.Sock.Types.Handler
------------------------------------------------------------------------------
-- | Ideally we would C.register putMVar and put a different value there depending on context,
-- but currently register does not support registering a function of the form (a -> IO ()); it should be
-- quite an easy fix.
-- | The default Source for polling transports.
pollingSource :: Handler tag
=> Proxy tag -- ^ Handler tag.
-> Request -- ^ Request we are responsing to.
-> Session -- ^ Associated session.
-> C.Source (C.ResourceT IO) (C.Flush B.Builder)
pollingSource tag req ses = do
status <- tryTakeMVar $! sessionStatus ses
case status of
Just (SessionFresh) -> addCleanup ses $ \key ->
initialize >> yieldAndFlush (format' FrameOpen) >> releaseOpened key
Just (SessionOpened) -> addCleanup ses $ \key ->
liftSTM (getTMChanContents $ sessionOutgoingBuffer ses) >>= loop key id
Just (SessionClosed code reason) -> addCleanup ses $ \key ->
yieldAndFlush (format' $ FrameClose code reason) >> releaseClosed key
Nothing -> yieldAndFlush $ format' $ FrameClose 2010 "Another connection still open"
where
loop key front [] = yieldAndFlush (format' $ FrameMessages $ front []) >> releaseOpened key
loop key front (x:xs) =
case x of
Message s -> loop key (front . (s:)) xs
Raw s -> yieldAndFlush s >> releaseOpened key
Control Close -> do
-- yieldAndFlush (format' $ FrameMessages $ front []) -- ^ We mimic the way sockjs-node behaves.
yieldAndFlush (format' $ FrameClose 3000 "Go away!")
finalize >> releaseClosed key
format' = format tag req
initialize = lift $! initializeSession ses $! requestApplication req
finalize = lift $! finalizeSession ses
{-# INLINE pollingSource #-}
-- | The default Source for streaming transports.
streamingSource :: Handler tag
=> Proxy tag -- ^ Handler tag.
-> Request -- ^ Request we are responsing to.
-> Int64 -- ^ Maximum amount of bytes to be transfered (we can exceed the maximum if the last message is long, but the loop will stop).
-> C.Source (C.ResourceT IO) (C.Flush B.Builder) -- ^ Prelude sent before any other data.
-> Session -- ^ Associated session.
-> C.Source (C.ResourceT IO) (C.Flush B.Builder)
streamingSource tag req limit prelude ses = do
status <- tryTakeMVar $! sessionStatus ses
case status of
Just (SessionFresh) -> addCleanup ses $ \key ->
initialize >> prelude >> yieldOpenFrame >> loop key 0
Just (SessionOpened) -> addCleanup ses $ \key ->
prelude >> yieldOpenFrame >> loop key 0
Just (SessionClosed code reason) -> addCleanup ses $ \key ->
prelude >> yieldAndFlush (format' $ FrameClose code reason) >> releaseClosed key
Nothing -> prelude >> yieldAndFlush (format' $ FrameClose 2010 "Another connection still open")
where
loop key n = liftSTM (readTMChan $ sessionOutgoingBuffer ses) >>=
maybe (return ())
(\x -> do
case x of
Message s -> do
let load = format' (FrameMessages [s])
let newn = n + BL.length load
yieldAndFlush load
if newn < limit
then loop key newn
else releaseOpened key
Raw s -> do
let load = s
let newn = n + BL.length load
yieldAndFlush load
if newn < limit
then loop key newn
else releaseOpened key
Control Close -> do
yieldAndFlush (format' $ FrameClose 3000 "Go away!")
finalize >> releaseClosed key
)
format' = format tag req
initialize = lift $! initializeSession ses $! requestApplication req
finalize = lift $! finalizeSession ses
yieldOpenFrame = yieldAndFlush $ format' FrameOpen
{-# INLINE streamingSource #-}
------------------------------------------------------------------------------
-- | Common utility functions
addCleanup :: Session
-> (ReleaseKeyF SessionStatus -> C.Source (C.ResourceT IO) (C.Flush B.Builder))
-> C.Source (C.ResourceT IO) (C.Flush B.Builder)
addCleanup ses fsrc = do
key <- registerPutStatus ses
C.addCleanup (flip unless $! R.releaseF key $! SessionClosed 1002 "Connection interrupted")
(fsrc key)
registerPutStatus :: Session -> C.Pipe l i o u (C.ResourceT IO) (ReleaseKeyF SessionStatus)
registerPutStatus ses =
lift $! R.registerF (void . tryPutMVar (sessionStatus ses))
(SessionClosed 1002 "Connection interrupted")
{-# INLINE registerPutStatus #-}
releaseClosed :: ReleaseKeyF SessionStatus -> C.Pipe l i o u (C.ResourceT IO) ()
releaseClosed key = lift $! R.releaseF key $! SessionClosed 3000 "Go away!"
{-# INLINE releaseClosed #-}
releaseOpened :: ReleaseKeyF SessionStatus -> C.Pipe l i o u (C.ResourceT IO) ()
releaseOpened key = lift $! R.releaseF key $! SessionOpened
{-# INLINE releaseOpened #-}
liftSTM :: MonadIO m => STM a -> m a
liftSTM = liftIO . atomically
{-# INLINE liftSTM #-}
------------------------------------------------------------------------------
-- | Used as a response to http://example.com/<application_prefix>/<server_id>/<session_id>/<transport>
--
-- Documentation: http://sockjs.github.com/sockjs-protocol/sockjs-protocol-0.3.html#section-7
-- TODO: Put somewhere else.
responseOptions :: (H.IsResponse res, H.IsRequest req)
=> [BS.ByteString]
-> req
-> res
responseOptions methods req = H.response204 headers ""
where headers = [("Access-Control-Allow-Methods", BS.intercalate ", " methods)]
++ H.headerCached
++ H.headerCORS "*" req
{-# INLINE responseOptions #-}
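-- For example, the OPTIONS pre-flight for an XHR-polling endpoint would
-- typically be answered with (illustrative):
--
-- > responseOptions ["OPTIONS", "POST"] req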
respondSource :: (H.IsResponse res, Handler tag)
=> Proxy tag
-> Request
-> H.Status
-> C.Source (C.ResourceT IO) (C.Flush B.Builder)
-> res
respondSource tag req status source = H.responseSource status (headers tag req) source
{-# INLINE respondSource #-}
respondFrame :: (H.IsResponse res, Handler tag)
=> Proxy tag
-> Request
-> H.Status
-> Frame
-> res
respondFrame tag req st fr = respondLBS tag req st (format tag req fr)
{-# INLINE respondFrame #-}
respondLBS :: (H.IsResponse res, Handler tag)
=> Proxy tag
-> Request
-> H.Status
-> BL.ByteString
-> res
respondLBS tag req status body = H.responseLBS status (headers tag req) body
{-# INLINE respondLBS #-}
-- | Yields a Chunk (a ByteString) and then Flushes.
yieldAndFlush :: Monad m => BL.ByteString -> C.Pipe l i (C.Flush B.Builder) u m ()
yieldAndFlush load = C.yield (C.Chunk $ B.fromLazyByteString load) >> C.yield C.Flush
{-# INLINE yieldAndFlush #-}
| Palmik/wai-sockjs | src/Network/Sock/Handler/Common.hs | bsd-3-clause | 9,309 | 0 | 24 | 2,940 | 2,031 | 1,044 | 987 | 152 | 8 |
{-# LANGUAGE OverloadedStrings #-}
module Definition where
import qualified Data.Text as T
import Language
data Definition = Definition
{ word :: T.Text
, sourceLang :: Language
, definitionLang :: Language
, partOfSpeechList :: [(T.Text, [T.Text])]
}
showPartOfSpeech :: (T.Text, [T.Text]) -> T.Text
showPartOfSpeech (pos, ds) = T.concat [ pos
, "\n"
, T.intercalate "\n" ds
, "\n"
]
prettyPrintDefinition :: Definition -> T.Text
prettyPrintDefinition def =
let ps = partOfSpeechList def
in T.concat [ "----------------------------\n"
, word def
, "\n\n"
, T.concat $ map showPartOfSpeech ps
, "----------------------------"
]
| aupiff/def | src/Definition.hs | bsd-3-clause | 963 | 0 | 12 | 424 | 196 | 113 | 83 | 22 | 1 |
{-# LANGUAGE FlexibleContexts #-}
module PeerTrader where
import Control.Concurrent
import Control.Concurrent.Async
import Control.Concurrent.STM (atomically, dupTChan,
readTChan, writeTChan)
import Control.Exception (SomeException)
import Control.Exception.Enclosed (catchAny)
import Control.Monad.Reader
import Data.Maybe (isNothing)
import Data.Traversable as T
import Database.Groundhog.Postgresql (runDbConn)
import Logging
import NoteScript
import P2PPicks.Keys
import P2PPicks.Types
import Prosper
import Prosper.Monad
import PeerTrader.Account
import PeerTrader.Investment.Database
import PeerTrader.Ops
import PeerTrader.P2PPicks.Account
import PeerTrader.StrategyType
import PeerTrader.Types (UserLogin)
-- | Given 'Prosper' actions per 'Listing', read from the 'Listing' 'TChan'
-- and automatically invest based upon those Listings.
--
-- Write all 'InvestResponse' to the database
processListings
:: UserLogin
-> StrategyType
-> (Listing -> Prosper (Maybe (Async InvestResponse)))
-> OpsReader (Maybe (ThreadId, ThreadId))
processListings n stratType s =
getAccountVar n prosperChans >>= setUpChans
where
processingQueue AutoFilterStrategy = do
ps <- asks _prosperState
return $ prosperProcessingQueue ps
processingQueue (P2PPicksStrategy ProfitMax) = asks _p2pProfitMaxResults
processingQueue (P2PPicksStrategy LossMin) = asks _p2pLossMinResults
setUpChans (Just (ProsperChans (Just pq) i r)) = initializeThreads pq i r
setUpChans (Just (ProsperChans Nothing i r)) = do
-- Duplicate the read listings TChan
pq <- processingQueue stratType
pq' <- (liftIO . atomically . dupTChan) pq
putAccountVar n prosperChans $ ProsperChans (Just pq') i r
initializeThreads pq' i r
setUpChans Nothing =
warningM (show n) "Could not retrieve Prosper Chans!" >>
return Nothing
initializeThreads pq is readInvests = do
responseThread <- do
app <- ask
liftIO . forkIO . forever $ do
investResp <- atomically $ readTChan readInvests
runDbConn (insertResponse n stratType investResp) app
strategyTypeResponse app stratType investResp
scriptThread <- evalProsper . forkP . forever $ do
ls <- liftIO . atomically $ readTChan pq
forkP $ do
mResp <- s ls
maybe (return ()) (liftIO . atomically . writeTChan is <=< waitProsper) mResp
return $ Just (scriptThread, responseThread)
strategyTypeResponse app (P2PPicksStrategy p2pType)
ir@(InvestResponse { investStatus = Success }) =
flip runReaderT app $ do
keys <- asks _p2ppicksKeys
Just p2pacct <- runDbConn (getP2PPicksAccount n) app
let lid = investListingId ir
amt = requestedAmount ir
sid = subscriberId p2pacct
-- TODO Handle failed report to P2P Picks
reportResult <- flip runReaderT keys $ reportInvestment sid p2pType lid amt
debugM "P2PPicks" $ "Sending report to P2PPicks " ++ show reportResult
strategyTypeResponse _ _ _ = return ()
-- | Interpret a NoteScript into Prosper commands
startProsperScript
:: UserLogin
-> StrategyType
-> ProsperScript (Maybe (Async InvestResponse))
-> OpsReader (Maybe (ThreadId, ThreadId))
startProsperScript n stratType pscript = do
tid <- join <$> getAccountVar n prosperScriptThread
ui <- getAccountVar n prosperUser
-- Kill current NoteScript
_ <- T.mapM (liftIO . (\(t1, t2) -> killThread t1 >> killThread t2)) tid
case ui of
Just i -> do
let lg = show n
debugM lg $ "Setting prosper script for user: " ++ show n
newTid <- processListings n stratType (\l -> prosperScript i l pscript)
when (isNothing newTid) $ warningM lg "Unable to set new thread..."
putAccountVar n prosperScriptThread newTid
return newTid
_ -> warningM (show n) "Could not find prosper user." >> return Nothing
-- | Kill the strategy thread. Stop automatically investing.
killProsper :: UserLogin -> OpsReader ()
killProsper n = do
tid <- join <$> getAccountVar n prosperScriptThread
stateTid <- join <$> getAccountVar n prosperInvestThread
_ <- T.mapM (liftIO . (\(t1, t2) -> killThread t1 >> killThread t2)) tid
_ <- T.mapM (liftIO . killThread) stateTid
putAccountVar n prosperScriptThread Nothing
putAccountVar n (prosperChans.readListings) Nothing
putAccountVar n prosperInvestThread Nothing
putAccountVar n prosperInvestState Nothing
initializeProsper' :: IO ProsperState
initializeProsper' = do
ps <- initializeProsper "prosper.cfg"
_ <- runProsper ps updateAllListings -- Synchronize
_ <- forkIO $ updateListingsLoop ps
return ps
where
-- TODO Do something with the retry package here. Need to retry at a non-constant rate.
updateListingsLoop :: ProsperState -> IO ()
updateListingsLoop ps = forever $
runProsper ps updateAllListings `catchAny` statusExceptionHandler
statusExceptionHandler :: SomeException -> IO ()
statusExceptionHandler e = debugM "MarketData" $
"Caught some exception... " ++ show e ++ " Continuing..."
-- | Initialize logs and initialize ProsperState
--
-- ProsperState reads configuration from prosper.cfg. This involves initializing
-- Listings poll thread
initializePeerTrader :: MonadIO m => m ProsperState
initializePeerTrader = liftIO $ do
initializeLogs
debugM "Prosper" "Initializing Prosper..."
initializeProsper'
| WraithM/peertrader-backend | src/PeerTrader.hs | bsd-3-clause | 5,958 | 0 | 20 | 1,621 | 1,412 | 691 | 721 | -1 | -1 |
module NAA.Loop (runNoughtsAndArrs) where
import NAA.AI
import NAA.Logic
import NAA.State
import NAA.Data hiding (player)
import NAA.Interface
import NAA.Interface.CLI
import Control.Monad.Trans
import Control.Monad.State.Lazy
-- So not only do we need to keep running the GameState over IO but we probably
-- want some scene state information.
type NoughtsAndArrs a = StateT GameState IO a
-- We start a game, loop as necessary, and end a game.
-- The end of the game is handled by the loop!
runNoughtsAndArrs :: UserInterface -> NoughtsAndArrs ()
runNoughtsAndArrs ui = do
gs <- get
liftIO $ onGameStart ui gs
runGameLoop ui -- >> onGameEnd ui
-- | Loop maintains the liftime of a single game. It carries out a game with
-- they help of an Interface.
--
-- 1) onRetrieveMove. This is a blocking call. It only returns when the player has
-- chosen a move to make. It can be an invalid one, and no input validation
-- is expected.
-- It is the Interface's responsibility to show the game state in a way that
-- a human being can make an informed decision about what
-- move to take. It is also the Interface's responsibility to deal with the
-- necessary IO to retrieve the move's coordinates.
--
-- 2) onInvalidMove. This is a non-blocking call. This notifies the interface
-- that the move given by onRetrieveMove was found to be not valid for the
-- current GameState. This permits the error to be fed back to the player in
-- some form. onRetrieveMove is invoked again.
--
-- 3) onPlayerMove. This is a non-blocking call. This is invoked when a player
-- has made a valid move, and is an opportunity for the Interface to update
-- itself.
--
-- 4) onGameEnd. This is a non-blocking call. This is invoked when the game has
-- reached an end state: either draw, or win.
--
runGameLoop :: UserInterface -> NoughtsAndArrs ()
runGameLoop ui = do
-- Get the current game state and prompt the user to make a move
-----------------------------------------------------------------
gs@GameState{human=plyr,computer=comp} <- get
gs' <- withGameState gs $ \gs -> do
let bs@(BoardState {turn=currentTurn}) = boardState gs
let retrieveMove = if plyr == currentTurn
then retrieveMoveFromPlayer plyr
else unbeatableAI
move <- retrieveMove gs -- obtain a move from the AI or player
-- Compute the upated game state
---------------------------------
let bs' = bs `apply` move
let gs' = gs {boardState=bs'}
onPlayerMove ui move gs'
return gs'
put gs'
-- Here, we decide how to continue after the move has been made.
-- If it yielded an end-game result then we will notify the interface
-- Otherwise we continue looping, running the Noughts and Arrs game.
-----------------------------------------------------------------
let BoardState {board=brd} = boardState gs'
case judge brd of
Just result -> liftIO $ onGameEnd ui gs' result -- when the game ends with result
Nothing -> runGameLoop ui -- neither won or lost. Continue.
where
withGameState gs mf = liftIO (mf gs)
retrieveMoveFromPlayer :: Player -> GameState -> IO Move
retrieveMoveFromPlayer plyr gs = msum . repeat $ do
m <- onRetrieveMove ui plyr gs
validate m gs
return m
validate :: Move -> GameState -> IO (Idx2D)
validate m (GameState {boardState=bs})
| m `isValidIn` bs = return $ snd m
| otherwise = onInvalidMove ui m >> mzero
| fatuhoku/haskell-noughts-and-crosses | src/NAA/Loop.hs | bsd-3-clause | 3,522 | 0 | 19 | 790 | 557 | 300 | 257 | 43 | 3 |
module Math.Misc.Permutation where
import Math.Misc.Parity
import Misc.Misc
import Math.Algebra.Monoid
-- | A permutation is represented as a pair of lists. The condition
-- that the two lists contain precisely the same elements is /not
-- checked/.
type Permutation a = ([a], [a])
-- | Calculate the parity of a given permutation. The parity is 'Even'
-- if the permutation can be decomposed into an even number of
-- transpositions, and 'Odd' otherwise.
parity :: (Eq a) => Permutation a -> Parity
parity = go Even
where
go p (u, v)
| u == v = p
| otherwise = go (p <*> fromLength v1) (tail u, v1 ++ tail' v2)
where
pivot = head u
(v1, v2) = break (== pivot) v | michiexile/hplex | pershom/src/Math/Misc/Permutation.hs | bsd-3-clause | 748 | 0 | 11 | 207 | 177 | 100 | 77 | 12 | 1 |
{-# LANGUAGE OverloadedStrings, DeriveDataTypeable #-}
module Himpress.Plumbing where
import qualified Himpress.Format as F
import Paths_himpress
import System.INotify
import System.FilePath (takeExtension)
import Control.Concurrent (threadDelay)
import Control.Arrow ((&&&),(***))
import Control.Monad (mapM_)
import Prelude hiding (writeFile,readFile,putStrLn,getContents)
import Data.Text.IO (readFile,getContents)
import Data.Text.Lazy.IO (writeFile,putStrLn)
import Data.Text (pack,Text)
import System.Console.CmdArgs
data Opts = Opts {
size::(Int,Int) -- "window" size for calculating presentation coordinates
, input::String -- Input file - defaults to stdin
, output::String -- Output file - defaults to stdout
, title::String -- Presentation title
, include::String -- CSS and JS files to directly include in the presentation.
-- CSS is placed in the head, JS as the last elements of the body.
, link::String -- CSS and JS files to link to
  , watch::Bool      -- If set, himpress watches the input file and the included files using inotify, and recompiles on changes.
, nodefault::Bool -- If set, doesn't include the default CSS and JS files.
, bare::Bool -- If set, himpress will only generate the markup for slides, without a full document.
} deriving (Show,Data,Typeable)
opts = Opts {
size = (1000,1000) &= help "Default size (in pixels) for each step"
, input = "" &= typFile
, output = "" &= typFile
, title = "" &= help "Presentation title"
, include = "" &= typ listType &= help "CSS and JS files to include in the presentation - path from current directory."
, link = "" &= typ listType &= help "CSS and JS files to link to from the presentation"
, bare = False &= help "Only generate markup for slides, and not the full document"
, nodefault = False &= help "Don't include the default CSS and JS"
, watch = False &= help "Recompile presentation on changes"
} &= summary "himpress v1.0, (c) Matthew Sorensen 2012"
listType="<space-separated list of paths>"
--- Overall interface this file spits out:
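-- A note on the nested IO in 'format' (as read from the code below): the
-- outer action resolves which CSS/JS files to include, while the inner action
-- re-reads those files each time it runs, presumably so that --watch picks up
-- changes to the included files on every recompile.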
format::Opts->IO (IO F.Format)
format o = fmap (dynamicOptions $ staticOptions o) $ includes o
dependencies::Opts->[FilePath]
dependencies o = if null $ input o
then words $ include o
else input o : words (include o)
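-- | Watch the given files with inotify and run the supplied action each time
-- one of them is closed after writing; this never returns (the delay loop
-- merely keeps the watcher alive).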
watchFiles::[FilePath]->IO () ->IO ()
watchFiles files act = withINotify $ \inot -> mapM_ (watch inot) files >> loop
where watch inot f = addWatch inot [CloseWrite] f $ const act
loop = threadDelay 1000000 >> loop
-- The assumption is made that you're an idiot (read: "I'm lazy") if this tool is
-- set up to watch a file that it potentially modifies.
outputStream o | output o == "" = putStrLn
| otherwise = writeFile $ output o
inputStream::Opts->IO Text
inputStream o = case (input o,watch o) of
("",True) -> error "An input file must be provided if --watch is used."
("",False)-> getContents
(f,_) -> readFile f
splitCssJs = (hasExt ".css" &&& hasExt ".js") . words
where hasExt e = filter $ (e==) . takeExtension
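-- For example (derived from the definition above):
-- splitCssJs "style.css app.js notes.md" == (["style.css"], ["app.js"])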
staticOptions o = let (css,js) = splitCssJs $ link o
plinks = map $ F.Link . pack
in F.Format {
F.meta = []
, F.title = pack $ title o
, F.bare = bare o
, F.scripts = plinks js
, F.style = plinks css
}
dynamicOptions::F.Format->([FilePath],[FilePath])->IO F.Format
dynamicOptions base (css,js) = do
css <- mapM readFile css
js <- mapM readFile js
return base { F.scripts = F.scripts base ++ map F.Inline js ,
F.style = F.style base ++ map F.Inline css}
includes::Opts->IO ([FilePath],[FilePath])
includes o
| nodefault o = return $ splitCssJs $ include o
| otherwise = do
imp <- getDataFileName "js/impress.min.js"
start <- getDataFileName "js/start.js"
sty <- getDataFileName "css/style.min.css"
return $ ((sty:) *** ([imp,start]++)) $ splitCssJs $ include o
| matthewSorensen/himpress | Himpress/Plumbing.hs | bsd-3-clause | 4,258 | 0 | 14 | 1,151 | 1,117 | 604 | 513 | 78 | 3 |
module Pos.Chain.Ssc.Error
( module Pos.Chain.Ssc.Error.Seed
, module Pos.Chain.Ssc.Error.Verify
) where
import Pos.Chain.Ssc.Error.Seed
import Pos.Chain.Ssc.Error.Verify
| input-output-hk/pos-haskell-prototype | chain/src/Pos/Chain/Ssc/Error.hs | mit | 213 | 0 | 5 | 56 | 44 | 33 | 11 | 5 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{- |
Module : ./CSMOF/As.hs
Description : abstract CSMOF syntax
Copyright : (c) Daniel Calegari Universidad de la Republica, Uruguay 2013
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
-}
module CSMOF.As where
import Common.Id
import Data.Data
-- Simplified MOF Metamodel
data Metamodel = Metamodel
{ metamodelName :: String
, element :: [NamedElement]
, model :: [Model]
} deriving (Eq, Ord, Typeable, Data)
instance GetRange Metamodel where
getRange _ = nullRange
rangeSpan _ = []
data NamedElement = NamedElement
{ namedElementName :: String
, namedElementOwner :: Metamodel
, namedElementSubClasses :: TypeOrTypedElement
} deriving (Eq, Ord, Typeable, Data)
instance GetRange NamedElement where
getRange _ = nullRange
rangeSpan _ = []
data TypeOrTypedElement = TType { getType :: Type }
| TTypedElement { getTypeElement :: TypedElement }
deriving (Eq, Ord, Typeable, Data)
instance GetRange TypeOrTypedElement where
getRange _ = nullRange
rangeSpan _ = []
-- When going bottom-up, we can skip the auxiliary class TypeOrTypedElement and give the super field the type NamedElement
data Type = Type { typeSuper :: NamedElement
, typeSubClasses :: DataTypeOrClass
} deriving (Eq, Ord, Typeable, Data)
instance GetRange Type where
getRange _ = nullRange
rangeSpan _ = []
data DataTypeOrClass = DDataType { getDataType :: Datatype }
| DClass { getClass :: Class }
deriving (Eq, Ord, Typeable, Data)
instance GetRange DataTypeOrClass where
getRange _ = nullRange
rangeSpan _ = []
-- When going bottom-up, we can skip the auxiliary class DataTypeOrClass and give the super field the type Type
data Datatype = Datatype { classSuper :: Type } deriving (Eq, Ord, Typeable, Data)
instance GetRange Datatype where
getRange _ = nullRange
rangeSpan _ = []
-- When going bottom-up, we can skip the auxiliary class DataTypeOrClass and give the super field the type Type
data Class = Class
{ classSuperType :: Type
, isAbstract :: Bool
, superClass :: [Class]
, ownedAttribute :: [Property]
} deriving (Eq, Ord, Typeable, Data)
instance GetRange Class where
getRange _ = nullRange
rangeSpan _ = []
-- When going bottom-up, we can skip the auxiliary class TypeOrTypedElement and give the super field the type NamedElement
data TypedElement = TypedElement
{ typedElementSuper :: NamedElement
, typedElementType :: Type
, typedElementSubClasses :: Property
} deriving (Eq, Ord, Typeable, Data)
instance GetRange TypedElement where
getRange _ = nullRange
rangeSpan _ = []
data Property = Property
{ propertySuper :: TypedElement
, multiplicityElement :: MultiplicityElement
, opposite :: Maybe Property
, propertyClass :: Class
} deriving (Eq, Ord, Typeable, Data)
instance GetRange Property where
getRange _ = nullRange
rangeSpan _ = []
data MultiplicityElement = MultiplicityElement
{ lower :: Integer
, upper :: Integer
, multiplicityElementSubClasses :: Property
} deriving (Eq, Ord, Typeable, Data)
instance GetRange MultiplicityElement where
getRange _ = nullRange
rangeSpan _ = []
-- Model part of CSMOF
data Model = Model
{ modelName :: String
, object :: [Object]
, link :: [Link]
, modelType :: Metamodel
} deriving (Eq, Ord, Typeable, Data)
instance GetRange Model where
getRange _ = nullRange
rangeSpan _ = []
data Object = Object
{ objectName :: String
, objectType :: Type
, objectOwner :: Model
} deriving (Eq, Ord, Typeable, Data)
instance GetRange Object where
getRange _ = nullRange
rangeSpan _ = []
data Link = Link
{ linkType :: Property
, source :: Object
, target :: Object
, linkOwner :: Model
} deriving (Eq, Ord, Typeable, Data)
instance GetRange Link where
getRange _ = nullRange
rangeSpan _ = []
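-- A minimal construction sketch (illustrative only; names are made up). The
-- owner/owned links are mutually recursive, so they are tied with a
-- recursive @where@:
--
-- > emptyExample :: Metamodel
-- > emptyExample = mm
-- >   where
-- >     mm = Metamodel { metamodelName = "MM", element = [], model = [m] }
-- >     m  = Model { modelName = "M", object = [], link = [], modelType = mm }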
| spechub/Hets | CSMOF/As.hs | gpl-2.0 | 4,559 | 0 | 9 | 1,448 | 985 | 553 | 432 | 102 | 0 |
{-| Implementation of the generic daemon functionality.
-}
{-
Copyright (C) 2011, 2012 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Daemon
( DaemonOptions(..)
, OptType
, CheckFn
, PrepFn
, MainFn
, defaultOptions
, oShowHelp
, oShowVer
, oNoDaemonize
, oNoUserChecks
, oDebug
, oPort
, oBindAddress
, oSyslogUsage
, oForceNode
, oNoVoting
, oYesDoIt
, parseArgs
, parseAddress
, cleanupSocket
, describeError
, genericMain
, getFQDN
) where
import Control.Concurrent
import Control.Exception
import Control.Monad
import Data.Maybe (fromMaybe, listToMaybe)
import Text.Printf
import Data.Word
import GHC.IO.Handle (hDuplicateTo)
import Network.BSD (getHostName)
import qualified Network.Socket as Socket
import System.Console.GetOpt
import System.Directory
import System.Exit
import System.Environment
import System.IO
import System.IO.Error (isDoesNotExistError, modifyIOError, annotateIOError)
import System.Posix.Directory
import System.Posix.Files
import System.Posix.IO
import System.Posix.Process
import System.Posix.Types
import System.Posix.Signals
import Ganeti.Common as Common
import Ganeti.Logging
import Ganeti.Runtime
import Ganeti.BasicTypes
import Ganeti.Utils
import qualified Ganeti.Constants as C
import qualified Ganeti.Ssconf as Ssconf
-- * Constants
-- | \/dev\/null path.
devNull :: FilePath
devNull = "/dev/null"
-- | Error message prefix, used in two separate paths (when forking
-- and when not).
daemonStartupErr :: String -> String
daemonStartupErr = ("Error when starting the daemon process: " ++)
-- * Data types
-- | Command line options structure.
data DaemonOptions = DaemonOptions
{ optShowHelp :: Bool -- ^ Just show the help
, optShowVer :: Bool -- ^ Just show the program version
, optShowComp :: Bool -- ^ Just show the completion info
, optDaemonize :: Bool -- ^ Whether to daemonize or not
, optPort :: Maybe Word16 -- ^ Override for the network port
, optDebug :: Bool -- ^ Enable debug messages
, optNoUserChecks :: Bool -- ^ Ignore user checks
, optBindAddress :: Maybe String -- ^ Override for the bind address
, optSyslogUsage :: Maybe SyslogUsage -- ^ Override for Syslog usage
, optForceNode :: Bool -- ^ Ignore node checks
, optNoVoting :: Bool -- ^ skip voting for master
, optYesDoIt :: Bool -- ^ force dangerous options
}
-- | Default values for the command line options.
defaultOptions :: DaemonOptions
defaultOptions = DaemonOptions
{ optShowHelp = False
, optShowVer = False
, optShowComp = False
, optDaemonize = True
, optPort = Nothing
, optDebug = False
, optNoUserChecks = False
, optBindAddress = Nothing
, optSyslogUsage = Nothing
, optForceNode = False
, optNoVoting = False
, optYesDoIt = False
}
instance StandardOptions DaemonOptions where
helpRequested = optShowHelp
verRequested = optShowVer
compRequested = optShowComp
requestHelp o = o { optShowHelp = True }
requestVer o = o { optShowVer = True }
requestComp o = o { optShowComp = True }
-- | Abbreviation for the option type.
type OptType = GenericOptType DaemonOptions
-- | Check function type.
type CheckFn a = DaemonOptions -> IO (Either ExitCode a)
-- | Prepare function type.
type PrepFn a b = DaemonOptions -> a -> IO b
-- | Main execution function type.
type MainFn a b = DaemonOptions -> a -> b -> IO ()
-- * Command line options
oNoDaemonize :: OptType
oNoDaemonize =
(Option "f" ["foreground"]
(NoArg (\ opts -> Ok opts { optDaemonize = False }))
"Don't detach from the current terminal",
OptComplNone)
oDebug :: OptType
oDebug =
(Option "d" ["debug"]
(NoArg (\ opts -> Ok opts { optDebug = True }))
"Enable debug messages",
OptComplNone)
oNoUserChecks :: OptType
oNoUserChecks =
(Option "" ["no-user-checks"]
(NoArg (\ opts -> Ok opts { optNoUserChecks = True }))
"Ignore user checks",
OptComplNone)
oPort :: Int -> OptType
oPort def =
(Option "p" ["port"]
(reqWithConversion (tryRead "reading port")
(\port opts -> Ok opts { optPort = Just port }) "PORT")
("Network port (default: " ++ show def ++ ")"),
OptComplInteger)
oBindAddress :: OptType
oBindAddress =
(Option "b" ["bind"]
(ReqArg (\addr opts -> Ok opts { optBindAddress = Just addr })
"ADDR")
"Bind address (default depends on cluster configuration)",
OptComplInetAddr)
oSyslogUsage :: OptType
oSyslogUsage =
(Option "" ["syslog"]
(reqWithConversion syslogUsageFromRaw
(\su opts -> Ok opts { optSyslogUsage = Just su })
"SYSLOG")
("Enable logging to syslog (except debug \
\messages); one of 'no', 'yes' or 'only' [" ++ C.syslogUsage ++
"]"),
OptComplChoices ["yes", "no", "only"])
oForceNode :: OptType
oForceNode =
(Option "" ["force-node"]
(NoArg (\ opts -> Ok opts { optForceNode = True }))
"Force the daemon to run on a different node than the master",
OptComplNone)
oNoVoting :: OptType
oNoVoting =
(Option "" ["no-voting"]
(NoArg (\ opts -> Ok opts { optNoVoting = True }))
"Skip node agreement check (dangerous)",
OptComplNone)
oYesDoIt :: OptType
oYesDoIt =
(Option "" ["yes-do-it"]
(NoArg (\ opts -> Ok opts { optYesDoIt = True }))
"Force a dangerous operation",
OptComplNone)
-- | Generic options.
genericOpts :: [OptType]
genericOpts = [ oShowHelp
, oShowVer
, oShowComp
]
-- | Annotates and transforms IOErrors into a Result type. This can be
-- used in the error handler argument to 'catch', for example.
ioErrorToResult :: String -> IOError -> IO (Result a)
ioErrorToResult description exc =
return . Bad $ description ++ ": " ++ show exc
-- | Small wrapper over getArgs and 'parseOpts'.
parseArgs :: String -> [OptType] -> IO (DaemonOptions, [String])
parseArgs cmd options = do
cmd_args <- getArgs
parseOpts defaultOptions cmd_args cmd (options ++ genericOpts) []
-- * Daemon-related functions
-- | PID file mode.
pidFileMode :: FileMode
pidFileMode = unionFileModes ownerReadMode ownerWriteMode
-- | PID file open flags.
pidFileFlags :: OpenFileFlags
pidFileFlags = defaultFileFlags { noctty = True, trunc = False }
-- | Writes a PID file and locks it.
writePidFile :: FilePath -> IO Fd
writePidFile path = do
fd <- openFd path ReadWrite (Just pidFileMode) pidFileFlags
setLock fd (WriteLock, AbsoluteSeek, 0, 0)
my_pid <- getProcessID
_ <- fdWrite fd (show my_pid ++ "\n")
return fd
-- | Helper function to ensure a socket doesn't exist. Should only be
-- called once we have locked the pid file successfully.
cleanupSocket :: FilePath -> IO ()
cleanupSocket socketPath =
catchJust (guard . isDoesNotExistError) (removeLink socketPath)
(const $ return ())
-- | Sets up a daemon's environment.
setupDaemonEnv :: FilePath -> FileMode -> IO ()
setupDaemonEnv cwd umask = do
changeWorkingDirectory cwd
_ <- setFileCreationMask umask
_ <- createSession
return ()
-- | Cleanup function, performing all the operations that need to be done prior
-- to shutting down a daemon.
finalCleanup :: FilePath -> IO ()
finalCleanup = removeFile
-- | Signal handler for the termination signal.
handleSigTerm :: ThreadId -> IO ()
handleSigTerm mainTID =
-- Throw termination exception to the main thread, so that the daemon is
-- actually stopped in the proper way, executing all the functions waiting on
-- "finally" statement.
Control.Exception.throwTo mainTID ExitSuccess
-- | Signal handler for reopening log files.
handleSigHup :: FilePath -> IO ()
handleSigHup path = do
setupDaemonFDs (Just path)
logInfo "Reopening log files after receiving SIGHUP"
-- | Sets up a daemon's standard file descriptors.
setupDaemonFDs :: Maybe FilePath -> IO ()
setupDaemonFDs logfile = do
null_in_handle <- openFile devNull ReadMode
null_out_handle <- openFile (fromMaybe devNull logfile) AppendMode
hDuplicateTo null_in_handle stdin
hDuplicateTo null_out_handle stdout
hDuplicateTo null_out_handle stderr
hClose null_in_handle
hClose null_out_handle
-- | Computes the default bind address for a given family.
defaultBindAddr :: Int -- ^ The port we want
-> Socket.Family -- ^ The cluster IP family
-> Result (Socket.Family, Socket.SockAddr)
defaultBindAddr port Socket.AF_INET =
Ok (Socket.AF_INET,
Socket.SockAddrInet (fromIntegral port) Socket.iNADDR_ANY)
defaultBindAddr port Socket.AF_INET6 =
Ok (Socket.AF_INET6,
Socket.SockAddrInet6 (fromIntegral port) 0 Socket.iN6ADDR_ANY 0)
defaultBindAddr _ fam = Bad $ "Unsupported address family: " ++ show fam
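-- For example, @defaultBindAddr 1811 Socket.AF_INET@ yields the IPv4 wildcard
-- address on port 1811, whereas an unsupported family such as
-- @Socket.AF_UNIX@ results in 'Bad'.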
-- | Based on the options, compute the socket address to use for the
-- daemon.
parseAddress :: DaemonOptions -- ^ Command line options
-> Int -- ^ Default port for this daemon
-> IO (Result (Socket.Family, Socket.SockAddr))
parseAddress opts defport = do
let port = maybe defport fromIntegral $ optPort opts
def_family <- Ssconf.getPrimaryIPFamily Nothing
case optBindAddress opts of
Nothing -> return (def_family >>= defaultBindAddr port)
Just saddr -> Control.Exception.catch
(resolveAddr port saddr)
(ioErrorToResult $ "Invalid address " ++ saddr)
-- | Environment variable to override the assumed host name of the
-- current node.
vClusterHostNameEnvVar :: String
vClusterHostNameEnvVar = "GANETI_HOSTNAME"
-- | Get the real full qualified host name.
getFQDN' :: IO String
getFQDN' = do
hostname <- getHostName
addrInfos <- Socket.getAddrInfo Nothing (Just hostname) Nothing
let address = listToMaybe addrInfos >>= (Just . Socket.addrAddress)
case address of
Just a -> do
fqdn <- liftM fst $ Socket.getNameInfo [] True False a
return (fromMaybe hostname fqdn)
Nothing -> return hostname
-- | Return the full qualified host name, honoring the vcluster setup.
getFQDN :: IO String
getFQDN = do
let ioErrorToNothing :: IOError -> IO (Maybe String)
ioErrorToNothing _ = return Nothing
vcluster_node <- Control.Exception.catch
(liftM Just (getEnv vClusterHostNameEnvVar))
ioErrorToNothing
case vcluster_node of
Just node_name -> return node_name
Nothing -> getFQDN'
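-- For example, starting a daemon with @GANETI_HOSTNAME=node1.example@ in the
-- environment makes 'getFQDN' return "node1.example" without querying DNS.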
-- | Returns if the current node is the master node.
isMaster :: IO Bool
isMaster = do
curNode <- getFQDN
masterNode <- Ssconf.getMasterNode Nothing
case masterNode of
Ok n -> return (curNode == n)
Bad _ -> return False
-- | Ensures that the daemon runs on the right node (and exits
-- gracefully if it doesn't).
ensureNode :: GanetiDaemon -> DaemonOptions -> IO ()
ensureNode daemon opts = do
is_master <- isMaster
when (daemonOnlyOnMaster daemon
&& not is_master
&& not (optForceNode opts)) $ do
putStrLn "Not master, exiting."
exitWith (ExitFailure C.exitNotmaster)
-- | Run an I\/O action that might throw an I\/O error, under a
-- handler that will simply annotate and re-throw the exception.
describeError :: String -> Maybe Handle -> Maybe FilePath -> IO a -> IO a
describeError descr hndl fpath =
modifyIOError (\e -> annotateIOError e descr hndl fpath)
-- | Run an I\/O action as a daemon.
--
-- WARNING: this only works in single-threaded mode (either using the
-- single-threaded runtime, or using the multi-threaded one but with
-- only one OS thread, i.e. -N1).
daemonize :: FilePath -> (Maybe Fd -> IO ()) -> IO ()
daemonize logfile action = do
(rpipe, wpipe) <- createPipe
-- first fork
_ <- forkProcess $ do
-- in the child
closeFd rpipe
let wpipe' = Just wpipe
setupDaemonEnv "/" (unionFileModes groupModes otherModes)
setupDaemonFDs (Just logfile) `Control.Exception.catch`
handlePrepErr False wpipe'
-- second fork, launches the actual child code; standard
-- double-fork technique
_ <- forkProcess (action wpipe')
exitImmediately ExitSuccess
closeFd wpipe
hndl <- fdToHandle rpipe
errors <- hGetContents hndl
ecode <- if null errors
then return ExitSuccess
else do
hPutStrLn stderr $ daemonStartupErr errors
return $ ExitFailure C.exitFailure
exitImmediately ecode
-- | Generic daemon startup.
genericMain :: GanetiDaemon -- ^ The daemon we're running
-> [OptType] -- ^ The available options
-> CheckFn a -- ^ Check function
-> PrepFn a b -- ^ Prepare function
-> MainFn a b -- ^ Execution function
-> IO ()
genericMain daemon options check_fn prep_fn exec_fn = do
let progname = daemonName daemon
(opts, args) <- parseArgs progname options
-- Modify handleClient in Ganeti.UDSServer to remove this logging from luxid.
when (optDebug opts && daemon == GanetiLuxid) .
hPutStrLn stderr $
printf C.debugModeConfidentialityWarning (daemonName daemon)
ensureNode daemon opts
exitUnless (null args) "This program doesn't take any arguments"
unless (optNoUserChecks opts) $ do
runtimeEnts <- runResultT getEnts
ents <- exitIfBad "Can't find required user/groups" runtimeEnts
verifyDaemonUser daemon ents
syslog <- case optSyslogUsage opts of
Nothing -> exitIfBad "Invalid cluster syslog setting" $
syslogUsageFromRaw C.syslogUsage
Just v -> return v
log_file <- daemonLogFile daemon
-- run the check function and optionally exit if it returns an exit code
check_result <- check_fn opts
check_result' <- case check_result of
Left code -> exitWith code
Right v -> return v
let processFn = if optDaemonize opts
then daemonize log_file
else \action -> action Nothing
_ <- installHandler lostConnection (Catch (handleSigHup log_file)) Nothing
processFn $ innerMain daemon opts syslog check_result' prep_fn exec_fn
-- | Full prepare function.
--
-- This is executed after daemonization, and sets up both the log
-- files (a generic functionality) and the custom prepare function of
-- the daemon.
fullPrep :: GanetiDaemon -- ^ The daemon we're running
-> DaemonOptions -- ^ The options structure, filled from the cmdline
-> SyslogUsage -- ^ Syslog mode
-> a -- ^ Check results
-> PrepFn a b -- ^ Prepare function
-> IO (FilePath, b)
fullPrep daemon opts syslog check_result prep_fn = do
logfile <- if optDaemonize opts
then return Nothing
else liftM Just $ daemonLogFile daemon
pidfile <- daemonPidFile daemon
let dname = daemonName daemon
setupLogging logfile dname (optDebug opts) True False syslog
_ <- describeError "writing PID file; already locked?"
Nothing (Just pidfile) $ writePidFile pidfile
logNotice $ dname ++ " daemon startup"
prep_res <- prep_fn opts check_result
tid <- myThreadId
_ <- installHandler sigTERM (Catch $ handleSigTerm tid) Nothing
return (pidfile, prep_res)
-- | Inner daemon function.
--
-- This is executed after daemonization.
innerMain :: GanetiDaemon -- ^ The daemon we're running
-> DaemonOptions -- ^ The options structure, filled from the cmdline
-> SyslogUsage -- ^ Syslog mode
-> a -- ^ Check results
-> PrepFn a b -- ^ Prepare function
-> MainFn a b -- ^ Execution function
-> Maybe Fd -- ^ Error reporting function
-> IO ()
innerMain daemon opts syslog check_result prep_fn exec_fn fd = do
(pidFile, prep_result) <- fullPrep daemon opts syslog check_result prep_fn
`Control.Exception.catch` handlePrepErr True fd
-- no error reported, we should now close the fd
maybeCloseFd fd
finally (exec_fn opts check_result prep_result) (finalCleanup pidFile)
-- | Daemon prepare error handling function.
handlePrepErr :: Bool -> Maybe Fd -> IOError -> IO a
handlePrepErr logging_setup fd err = do
let msg = show err
case fd of
-- explicitly writing to the fd directly, since when forking it's
-- better (safer) than trying to convert this into a full handle
Just fd' -> fdWrite fd' msg >> return ()
Nothing -> hPutStrLn stderr (daemonStartupErr msg)
when logging_setup $ logError msg
exitWith $ ExitFailure 1
-- | Close a file descriptor.
maybeCloseFd :: Maybe Fd -> IO ()
maybeCloseFd Nothing = return ()
maybeCloseFd (Just fd) = closeFd fd
| apyrgio/ganeti | src/Ganeti/Daemon.hs | bsd-2-clause | 17,864 | 0 | 15 | 4,043 | 3,718 | 1,940 | 1,778 | 361 | 4 |
module Language.Haskell.GhcMod.Lint where
import Exception (ghandle)
import Control.Exception (SomeException(..))
import Language.Haskell.GhcMod.Logger (checkErrorPrefix)
import Language.Haskell.GhcMod.Convert
import Language.Haskell.GhcMod.Monad
import Language.Haskell.GhcMod.Types
import Language.Haskell.HLint (hlint)
-- | Checking syntax of a target file using hlint.
-- Warnings and errors are returned.
lint :: IOish m
=> FilePath -- ^ A target file.
-> GhcModT m String
lint file = do
opt <- options
ghandle handler . pack =<< liftIO (hlint $ file : "--quiet" : hlintOpts opt)
where
pack = convert' . map (init . show) -- init drops the last \n.
handler (SomeException e) = return $ checkErrorPrefix ++ show e ++ "\n"
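-- A usage sketch (illustrative only; 'runGhcModT' and 'defaultOptions' come
-- from Language.Haskell.GhcMod.Monad/Types, and the runner's exact return
-- type varies between ghc-mod versions):
--
-- > main = runGhcModT defaultOptions (lint "src/Foo.hs") >>= print . fst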
| cabrera/ghc-mod | Language/Haskell/GhcMod/Lint.hs | bsd-3-clause | 754 | 0 | 12 | 128 | 199 | 112 | 87 | 16 | 1 |