code (stringlengths 5 to 1.03M) | repo_name (stringlengths 5 to 90) | path (stringlengths 4 to 158) | license (stringclasses, 15 values) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---|
module PatBindIn1 where
main :: Int
main = foo 3
foo :: Int -> Int
foo x
= (h + t) + (snd tup)
where
tup :: (Int, Int)
h :: Int
t :: Int
tup@(h, t) = head $ (zip [1 .. 10] [3 .. 15])
tup :: (Int, Int)
h :: Int
t :: Int
tup@(h, t) = head $ (zip [1 .. 10] [3 .. 15])
|
mpickering/HaRe
|
old/testing/demote/PatBindIn1AST.hs
|
bsd-3-clause
| 310 | 0 | 10 | 114 | 173 | 101 | 72 | 14 | 1 |
-- From comment:76 in Trac #9858
-- This exploit still works in GHC 7.10.1.
-- By Shachaf Ben-Kiki, Ørjan Johansen and Nathan van Doorn
{-# LANGUAGE Safe #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE ImpredicativeTypes #-}
module T9858a where
import Data.Typeable
type E = (:~:)
type PX = Proxy (((),()) => ())
type PY = Proxy (() -> () -> ())
data family F p a b
newtype instance F a b PX = ID (a -> a)
newtype instance F a b PY = UC (a -> b)
{-# NOINLINE ecast #-}
ecast :: E p q -> f p -> f q
ecast Refl = id
supercast :: F a b PX -> F a b PY
supercast = case cast e of
Just e' -> ecast e'
where
e = Refl
e :: E PX PX
uc :: a -> b
uc = case supercast (ID id) of UC f -> f
|
urbanslug/ghc
|
testsuite/tests/typecheck/should_fail/T9858a.hs
|
bsd-3-clause
| 733 | 19 | 9 | 182 | 268 | 146 | 122 | -1 | -1 |
module A (T,t) where
data T = T
t = T
instance Eq T where
t1 == t2 = True
|
hferreiro/replay
|
testsuite/tests/driver/recomp008/A1.hs
|
bsd-3-clause
| 79 | 0 | 6 | 25 | 43 | 24 | 19 | 5 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE DataKinds #-}
module Control.Monad.Apiary.Filter.Capture
( path, fetch, fetch', anyPath, restPath
) where
import Control.Monad.Apiary.Internal (Filter, Filter', focus)
import Control.Monad.Apiary.Filter.Internal
(Doc(DocPath, DocFetch, DocAny, DocRest))
import GHC.TypeLits.Compat(KnownSymbol, symbolVal)
import Data.Proxy.Compat(Proxy(..))
import Data.Apiary.Param(Path, pathRep, readPathAs)
import Network.Routing.Dict(KV((:=)))
import qualified Network.Routing.Dict as Dict
import qualified Network.Routing as R
import qualified Data.Text as T
import Text.Blaze.Html(Html)
-- | check first path and drill down. since 0.11.0.
path :: Monad actM => T.Text -> Filter' exts actM m
path p = focus (DocPath p) Nothing (R.exact p)
-- | get first path and drill down. since 0.11.0.
fetch' :: (k Dict.</ prms, KnownSymbol k, Path p, Monad actM) => proxy k -> proxy' p -> Maybe Html
-> Filter exts actM m prms (k ':= p ': prms)
fetch' k p h = focus (DocFetch (T.pack $ symbolVal k) (pathRep p) h) Nothing $ R.fetch k (readPathAs p)
fetch :: forall proxy k p exts prms actM m. (k Dict.</ prms, KnownSymbol k, Path p, Monad actM)
=> proxy (k ':= p) -> Maybe Html
-> Filter exts actM m prms (k ':= p ': prms)
fetch _ h = fetch' k p h
where
k = Proxy :: Proxy k
p = Proxy :: Proxy p
anyPath :: (Monad m, Monad actM) => Filter' exts actM m
anyPath = focus DocAny Nothing R.any
restPath :: (k Dict.</ prms, KnownSymbol k, Monad m, Monad actM)
=> proxy k -> Maybe Html
-> Filter exts actM m prms (k ':= [T.Text] ': prms)
restPath k h = focus (DocRest (T.pack $ symbolVal k) h) Nothing (R.rest k)
|
philopon/apiary
|
src/Control/Monad/Apiary/Filter/Capture.hs
|
mit
| 1,884 | 0 | 13 | 358 | 679 | 380 | 299 | -1 | -1 |
{-# htermination readsPrec :: Int -> String -> [(Int,String)] #-}
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/Prelude_readsPrec_5.hs
|
mit
| 66 | 0 | 2 | 10 | 3 | 2 | 1 | 1 | 0 |
{-# htermination (mapM :: (b -> Maybe a) -> (List b) -> Maybe (List a)) #-}
import qualified Prelude
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
data Maybe a = Nothing | Just a ;
map :: (b -> a) -> (List b) -> (List a);
map f Nil = Nil;
map f (Cons x xs) = Cons (f x) (map f xs);
pt :: (c -> b) -> (a -> c) -> a -> b;
pt f g x = f (g x);
gtGtEsMaybe :: Maybe b -> (b -> Maybe c) -> Maybe c
gtGtEsMaybe (Just x) k = k x;
gtGtEsMaybe Nothing k = Nothing;
returnMaybe :: b -> Maybe b
returnMaybe = Just;
sequence0 x xs = returnMaybe (Cons x xs);
sequence1 cs x = gtGtEsMaybe (sequence cs) (sequence0 x);
sequence Nil = returnMaybe Nil;
sequence (Cons c cs) = gtGtEsMaybe c (sequence1 cs);
mapM f = pt sequence (map f);
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/basic_haskell/mapM_1.hs
|
mit
| 783 | 0 | 9 | 208 | 375 | 198 | 177 | 19 | 1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Kafka.IntegrationSpec
where
import Control.Concurrent.MVar (newEmptyMVar, putMVar, takeMVar)
import Control.Monad (forM, forM_)
import Control.Monad.Loops
import Data.Either
import Data.Map (fromList)
import Data.Monoid ((<>))
import Kafka.Consumer
import Kafka.Metadata
import Kafka.Producer
import Kafka.TestEnv
import Test.Hspec
import qualified Data.ByteString as BS
{- HLINT ignore "Redundant do" -}
spec :: Spec
spec = do
describe "Per-message commit" $ do
specWithProducer "Run producer" $ do
it "1. sends 2 messages to test topic" $ \prod -> do
res <- sendMessages (testMessages testTopic) prod
res `shouldBe` Right ()
specWithConsumer "Consumer with per-message commit" consumerProps $ do
it "2. should receive 2 messages" $ \k -> do
res <- receiveMessages k
length <$> res `shouldBe` Right 2
comRes <- forM res . mapM $ commitOffsetMessage OffsetCommit k
comRes `shouldBe` Right [Nothing, Nothing]
specWithProducer "Run producer again" $ do
it "3. sends 2 messages to test topic" $ \prod -> do
res <- sendMessages (testMessages testTopic) prod
res `shouldBe` Right ()
specWithConsumer "Consumer after per-message commit" consumerProps $ do
it "4. should receive 2 messages again" $ \k -> do
res <- receiveMessages k
comRes <- commitAllOffsets OffsetCommit k
length <$> res `shouldBe` Right 2
comRes `shouldBe` Nothing
describe "Store offsets" $ do
specWithProducer "Run producer" $ do
it "1. sends 2 messages to test topic" $ \prod -> do
res <- sendMessages (testMessages testTopic) prod
res `shouldBe` Right ()
specWithConsumer "Consumer with no auto store" consumerPropsNoStore $ do
it "2. should receive 2 messages without storing" $ \k -> do
res <- receiveMessages k
length <$> res `shouldBe` Right 2
comRes <- commitAllOffsets OffsetCommit k
comRes `shouldBe` Just (KafkaResponseError RdKafkaRespErrNoOffset)
specWithProducer "Run producer again" $ do
it "3. sends 2 messages to test topic" $ \prod -> do
res <- sendMessages (testMessages testTopic) prod
res `shouldBe` Right ()
specWithConsumer "Consumer after commit without store" consumerPropsNoStore $ do
it "4. should receive 4 messages and store them" $ \k -> do
res <- receiveMessages k
storeRes <- forM res . mapM $ storeOffsetMessage k
comRes <- commitAllOffsets OffsetCommit k
length <$> storeRes `shouldBe` Right 4
length <$> res `shouldBe` Right 4
comRes `shouldBe` Nothing
specWithProducer "Run producer again" $ do
it "5. sends 2 messages to test topic" $ \prod -> do
res <- sendMessages (testMessages testTopic) prod
res `shouldBe` Right ()
specWithConsumer "Consumer after commit with store" consumerPropsNoStore $ do
it "6. should receive 2 messages" $ \k -> do
res <- receiveMessages k
storeRes <- forM res $ mapM (storeOffsetMessage k)
comRes <- commitAllOffsets OffsetCommit k
length <$> res `shouldBe` Right 2
length <$> storeRes `shouldBe` Right 2
comRes `shouldBe` Nothing
specWithKafka "Part 3 - Consume after committing stored offsets" consumerPropsNoStore $ do
it "5. sends 2 messages to test topic" $ \(_, prod) -> do
res <- sendMessages (testMessages testTopic) prod
res `shouldBe` Right ()
it "6. should receive 2 messages" $ \(k, _) -> do
res <- receiveMessages k
storeRes <- forM res $ mapM (storeOffsetMessage k)
comRes <- commitAllOffsets OffsetCommit k
length <$> res `shouldBe` Right 2
length <$> storeRes `shouldBe` Right 2
comRes `shouldBe` Nothing
describe "Kafka.IntegrationSpec" $ do
specWithProducer "Run producer" $ do
it "sends messages to test topic" $ \prod -> do
res <- sendMessages (testMessages testTopic) prod
res `shouldBe` Right ()
it "sends messages with callback to test topic" $ \prod -> do
var <- newEmptyMVar
let
msg = ProducerRecord
{ prTopic = "callback-topic"
, prPartition = UnassignedPartition
, prKey = Nothing
, prValue = Just "test from producer"
}
res <- produceMessage' prod msg (putMVar var)
res `shouldBe` Right ()
callbackRes <- flushProducer prod *> takeMVar var
callbackRes `shouldSatisfy` \case
DeliverySuccess _ _ -> True
DeliveryFailure _ _ -> False
NoMessageError _ -> False
specWithConsumer "Run consumer with async polling" (consumerProps <> groupId (makeGroupId "async")) runConsumerSpec
specWithConsumer "Run consumer with sync polling" (consumerProps <> groupId (makeGroupId "sync") <> callbackPollMode CallbackPollModeSync) runConsumerSpec
describe "Kafka.Consumer.BatchSpec" $ do
specWithConsumer "Batch consumer" (consumerProps <> groupId "batch-consumer") $ do
it "should consume first batch" $ \k -> do
res <- pollMessageBatch k (Timeout 1000) (BatchSize 5)
length res `shouldBe` 5
forM_ res (`shouldSatisfy` isRight)
it "should consume second batch with not enough messages" $ \k -> do
res <- pollMessageBatch k (Timeout 1000) (BatchSize 50)
let res' = Prelude.filter (/= Left (KafkaResponseError RdKafkaRespErrPartitionEof)) res
length res' `shouldSatisfy` (< 50)
forM_ res' (`shouldSatisfy` isRight)
it "should consume empty batch when there are no messages" $ \k -> do
res <- pollMessageBatch k (Timeout 1000) (BatchSize 50)
length res `shouldBe` 0
----------------------------------------------------------------------------------------------------------------
data ReadState = Skip | Read
receiveMessages :: KafkaConsumer -> IO (Either KafkaError [ConsumerRecord (Maybe BS.ByteString) (Maybe BS.ByteString)])
receiveMessages kafka =
Right . rights <$> allMessages
where
allMessages =
unfoldrM (\s -> do
msg <- pollMessage kafka (Timeout 1000)
case (s, msg) of
(Skip, Left _) -> pure $ Just (msg, Skip)
(_, Right msg') -> pure $ Just (Right msg', Read)
(Read, _) -> pure Nothing
) Skip
testMessages :: TopicName -> [ProducerRecord]
testMessages t =
[ ProducerRecord t UnassignedPartition Nothing (Just "test from producer")
, ProducerRecord t UnassignedPartition (Just "key") (Just "test from producer (with key)")
]
sendMessages :: [ProducerRecord] -> KafkaProducer -> IO (Either KafkaError ())
sendMessages msgs prod =
Right <$> (forM_ msgs (produceMessage prod) >> flushProducer prod)
runConsumerSpec :: SpecWith KafkaConsumer
runConsumerSpec = do
it "should receive messages" $ \k -> do
res <- receiveMessages k
let msgsLen = either (const 0) length res
msgsLen `shouldSatisfy` (> 0)
let timestamps = crTimestamp <$> either (const []) id res
forM_ timestamps $ \ts ->
ts `shouldNotBe` NoTimestamp
comRes <- commitAllOffsets OffsetCommit k
comRes `shouldBe` Nothing
it "should get committed" $ \k -> do
res <- committed k (Timeout 1000) [(testTopic, PartitionId 0)]
res `shouldSatisfy` isRight
it "should get position" $ \k -> do
res <- position k [(testTopic, PartitionId 0)]
res `shouldSatisfy` isRight
it "should get watermark offsets" $ \k -> do
res <- sequence <$> watermarkOffsets k (Timeout 1000) testTopic
res `shouldSatisfy` isRight
length <$> res `shouldBe` (Right 1)
it "should return subscription" $ \k -> do
res <- subscription k
res `shouldSatisfy` isRight
length <$> res `shouldBe` Right 1
it "should return assignment" $ \k -> do
res <- assignment k
res `shouldSatisfy` isRight
res `shouldBe` Right (fromList [(testTopic, [PartitionId 0])])
it "should return all topics metadata" $ \k -> do
res <- allTopicsMetadata k (Timeout 1000)
res `shouldSatisfy` isRight
let filterUserTopics m = m { kmTopics = filter (\t -> topicType (tmTopicName t) == User) (kmTopics m) }
let res' = fmap filterUserTopics res
length . kmBrokers <$> res' `shouldBe` Right 1
let topicsLen = either (const 0) (length . kmTopics) res'
let hasTopic = either (const False) (any (\t -> tmTopicName t == testTopic) . kmTopics) res'
topicsLen `shouldSatisfy` (>0)
hasTopic `shouldBe` True
it "should return topic metadata" $ \k -> do
res <- topicMetadata k (Timeout 1000) testTopic
res `shouldSatisfy` isRight
length . kmBrokers <$> res `shouldBe` Right 1
length . kmTopics <$> res `shouldBe` Right 1
it "should describe all consumer groups" $ \k -> do
res <- allConsumerGroupsInfo k (Timeout 1000)
let groups = either (const []) (fmap giGroup) res
let prefixedGroups = filter isTestGroupId groups
let resLen = length prefixedGroups
resLen `shouldSatisfy` (>0)
-- fmap giGroup <$> res `shouldBe` Right [testGroupId]
it "should describe a given consumer group" $ \k -> do
res <- consumerGroupInfo k (Timeout 1000) testGroupId
fmap giGroup <$> res `shouldBe` Right [testGroupId]
it "should describe non-existent consumer group" $ \k -> do
res <- consumerGroupInfo k (Timeout 1000) "does-not-exist"
res `shouldBe` Right []
it "should read topic offsets for time" $ \k -> do
res <- topicOffsetsForTime k (Timeout 1000) (Millis 1904057189508) testTopic
res `shouldSatisfy` isRight
fmap tpOffset <$> res `shouldBe` Right [PartitionOffsetEnd]
it "should seek and return no error" $ \k -> do
res <- seek k (Timeout 1000) [TopicPartition testTopic (PartitionId 0) (PartitionOffset 1)]
res `shouldBe` Nothing
msg <- pollMessage k (Timeout 1000)
crOffset <$> msg `shouldBe` Right (Offset 1)
it "should seek to the beginning" $ \k -> do
res <- seek k (Timeout 1000) [TopicPartition testTopic (PartitionId 0) PartitionOffsetBeginning]
res `shouldBe` Nothing
msg <- pollMessage k (Timeout 1000)
crOffset <$> msg `shouldBe` Right (Offset 0)
it "should seek to the end" $ \k -> do
res <- seek k (Timeout 1000) [TopicPartition testTopic (PartitionId 0) PartitionOffsetEnd]
res `shouldBe` Nothing
msg <- pollMessage k (Timeout 1000)
crOffset <$> msg `shouldSatisfy` (\x ->
x == Left (KafkaResponseError RdKafkaRespErrPartitionEof)
|| x == Left (KafkaResponseError RdKafkaRespErrTimedOut))
it "should respect out-of-bound offsets (invalid offset)" $ \k -> do
res <- seek k (Timeout 1000) [TopicPartition testTopic (PartitionId 0) PartitionOffsetInvalid]
res `shouldBe` Nothing
msg <- pollMessage k (Timeout 1000)
crOffset <$> msg `shouldBe` Right (Offset 0)
it "should respect out-of-bound offsets (huge offset)" $ \k -> do
res <- seek k (Timeout 1000) [TopicPartition testTopic (PartitionId 0) (PartitionOffset 123456)]
res `shouldBe` Nothing
msg <- pollMessage k (Timeout 1000)
crOffset <$> msg `shouldBe` Right (Offset 0)
|
haskell-works/kafka-client
|
tests-it/Kafka/IntegrationSpec.hs
|
mit
| 12,117 | 0 | 24 | 3,539 | 3,486 | 1,696 | 1,790 | 227 | 3 |
module Handler.AnalogOutSpec (spec) where
import TestImport
spec :: Spec
spec = withApp $ do
describe "getAnalogOutR" $ do
error "Spec not implemented: getAnalogOutR"
|
aufheben/lambda-arduino
|
test/Handler/AnalogOutSpec.hs
|
mit
| 183 | 0 | 11 | 39 | 44 | 23 | 21 | 6 | 1 |
{-# htermination addToFM :: FiniteMap () b -> () -> b -> FiniteMap () b #-}
import FiniteMap
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/FiniteMap_addToFM_2.hs
|
mit
| 94 | 0 | 3 | 19 | 5 | 3 | 2 | 1 | 0 |
-- ex3.1.hs
swapTriple :: (a,b,c) -> (b,c,a)
swapTriple (x,y,z) = (y,z,x)
duplicate :: a -> (a, a)
duplicate x = (x, x)
nothing :: a -> Maybe a
nothing _ = Nothing
index :: [a] -> [ (Int, a) ]
index [] = []
index [x] = [(0,x)]
index (x:xs) = let indexed@((n,_):_) = index xs
in (n+1,x):indexed
maybeA :: [a] -> Char
maybeA [] = 'a'
|
hnfmr/beginning_haskell
|
ex3.1.hs
|
mit
| 352 | 0 | 12 | 87 | 239 | 136 | 103 | 13 | 1 |
module Proteome.Grep.Parse where
import Chiasma.Data.Ident (generateIdent, identText)
import Data.Attoparsec.Text (parseOnly)
import qualified Data.Text as Text (strip, stripPrefix)
import Ribosome.Menu.Data.MenuItem (MenuItem(MenuItem))
import Text.Parser.Char (anyChar, char, noneOf)
import Text.Parser.Combinators (manyTill)
import Text.Parser.Token (TokenParsing, natural)
import Proteome.Data.GrepOutputLine (GrepOutputLine(GrepOutputLine))
import Proteome.Grep.Syntax (lineNumber)
grepParser ::
TokenParsing m =>
m GrepOutputLine
grepParser =
GrepOutputLine <$> path <*> (subtract 1 <$> number) <*> optional number <*> (toText <$> many anyChar)
where
path =
toText <$> manyTill (noneOf ":") (char ':')
number =
(fromInteger <$> natural) <* char ':'
formatGrepLine :: Text -> GrepOutputLine -> Text
formatGrepLine cwd (GrepOutputLine path line col text') =
relativePath <> " " <> lineNumber <> " " <> show line <> ":" <> show (fromMaybe 1 col) <> " " <> Text.strip text'
where
relativePath =
fromMaybe path (Text.stripPrefix (cwd <> "/") path)
parseGrepOutput ::
MonadRibo m =>
Text ->
Text ->
m (Maybe (MenuItem GrepOutputLine))
parseGrepOutput cwd =
item . parseOnly grepParser
where
item (Right a) = do
ident <- identText <$> generateIdent
return (Just (convert ident a))
item (Left err) =
Nothing <$ logDebug ("parsing grep output failed: " <> err)
convert _ file =
MenuItem file text' text'
where
text' =
formatGrepLine cwd file
|
tek/proteome
|
packages/proteome/lib/Proteome/Grep/Parse.hs
|
mit
| 1,558 | 0 | 13 | 314 | 507 | 271 | 236 | 40 | 2 |
module Language.Dash.VM.VMSpec where
import Data.Word
import Language.Dash.Asm.Assembler
import Language.Dash.IR.Opcode
import Language.Dash.IR.Data
import Language.Dash.VM.DataEncoding
import Language.Dash.VM.VM
import Language.Dash.Limits
import Test.Hspec
import Test.QuickCheck
runProg :: [[Opcode]] -> IO Word32
runProg = runProgTbl []
runProgTbl :: [Word32] -> [[Opcode]] -> IO Word32
runProgTbl tbl prog = do
(value, _, _) <- execute asm tbl' []
return value
where
(asm, tbl', _) =
let encProg = map EncodedFunction prog in
let resultOrError = assembleWithEncodedConstTable encProg tbl (fromIntegral.constAddrToInt) [] in
case resultOrError of
Left err -> error $ show err -- TODO do this without an error
Right result -> result
spec :: Spec
spec = do
describe "Virtual Machine" $ do
it "loads a number into a register" $ do
let prog = [[ OpcLoadI 0 55,
OpcRet 0 ]]
result <- runProg prog
decodedResult <- decode result [] []
decodedResult `shouldBe` (VMNumber 55)
it "adds two numbers" $ do
let prog = [[ OpcLoadI 1 5,
OpcLoadI 2 32,
OpcAdd 0 1 2,
OpcRet 0 ]]
result <- runProg prog
decodedResult <- decode result [] []
decodedResult `shouldBe` (VMNumber 37)
it "moves a register" $ do
let prog = [[ OpcLoadI 2 37,
OpcMove 0 2,
OpcRet 0 ]]
result <- runProg prog
decodedResult <- decode result [] []
decodedResult `shouldBe` (VMNumber 37)
it "directly calls a function" $ do
let prog = [[ OpcLoadI 1 15,
OpcLoadI 2 23,
OpcAdd 4 1 2,
OpcLoadF 3 (mkFuncAddr 1),
OpcSetArg 0 4 0,
OpcAp 0 3 1,
OpcRet 0 ], [
OpcFunHeader 1,
OpcLoadI 1 100,
OpcAdd 2 0 1,
OpcRet 2]]
result <- runProg prog
decodedResult <- decode result [] []
decodedResult `shouldBe` (VMNumber 138)
it "calls a closure downwards" $ do
let prog = [[ OpcLoadF 2 (mkFuncAddr 2),
OpcLoadI 3 80,
OpcSetArg 0 3 0,
OpcPartAp 2 2 1,
OpcLoadF 1 (mkFuncAddr 1),
OpcSetArg 0 2 0,
OpcAp 0 1 1,
OpcRet 0 ], [
-- fun1
OpcFunHeader 2,
OpcLoadI 2 115,
OpcLoadI 3 23,
OpcAdd 2 2 3,
OpcSetArg 0 2 0,
OpcGenAp 2 0 1,
OpcRet 2 ], [
-- fun2
-- fun_header 1 1, -- (* 1 closed over value, 1 parameter *)
OpcFunHeader 2,
OpcSub 2 1 0,
OpcRet 2 ]]
result <- runProg prog
decodedResult <- decode result [] []
decodedResult `shouldBe` (VMNumber 58) -- 115 + 23 - 80
it "calls a closure upwards" $ do
let prog = [[ OpcLoadF 1 (mkFuncAddr 1),
OpcAp 1 1 0,
OpcLoadI 2 80,
OpcSetArg 0 2 0,
OpcGenAp 0 1 1,
OpcRet 0 ], [
-- fun 1
OpcFunHeader 1,
OpcLoadF 1 (mkFuncAddr 2),
OpcLoadI 2 24,
OpcSetArg 0 2 0,
OpcPartAp 0 1 1,
OpcRet 0 ], [
-- fun 2
OpcFunHeader 2,
OpcSub 2 1 0,
OpcRet 2 ]]
result <- runProg prog
decodedResult <- decode result [] []
decodedResult `shouldBe` (VMNumber 56) -- 80 - 24
it "modifies a closure" $ do
let prog = [[ OpcLoadF 1 (mkFuncAddr 1),
OpcAp 1 1 0,
OpcLoadI 2 80,
OpcSetArg 0 2 0,
OpcGenAp 0 1 1,
OpcRet 0 ], [
-- fun 1
OpcFunHeader 1,
OpcLoadF 1 (mkFuncAddr 2),
OpcLoadI 2 77,
OpcLoadI 3 55,
OpcSetArg 0 2 1,
OpcPartAp 0 1 2,
OpcLoadI 7 33,
OpcSetClVal 0 7 1,
OpcRet 0 ], [
-- fun 2
OpcFunHeader 3,
OpcSub 3 0 1,
OpcRet 3 ]]
result <- runProg prog
decodedResult <- decode result [] []
decodedResult `shouldBe` (VMNumber 44) -- 77 - 33
{-
it "applies a number tag to a value" $ do
let original = 44
let symbol = make_vm_value original vm_tag_number
(tag_of_vm_value symbol) vm_tag_number,
assert_equal (value_of_vm_value symbol) original
),
it "applies a symbol tag to a value" $ do
let original = 12
let symbol = make_vm_value original vm_tag_symbol
assert_equal (tag_of_vm_value symbol) vm_tag_symbol,
assert_equal (value_of_vm_value symbol) original
),
-}
it "loads a symbol into a register" $ do
let sym = mkSymId 12
let prog = [[ OpcLoadPS 0 sym,
OpcRet 0]]
(runProg prog) `shouldReturn` (encodePlainSymbol sym)
it "loads a compound symbol" $ do
let ctable = [ encodeNumber 1,
encodeCompoundSymbolHeader (mkSymId 5) 1,
encodeNumber 3
]
let prog = [[ OpcLoadCS 0 (mkConstAddr 1),
OpcRet 0 ]]
(runProgTbl ctable prog) `shouldReturn` (encodeCompoundSymbolRef $ mkConstAddr 1)
it "jumps forward" $ do
let prog = [[ OpcLoadI 0 66,
OpcJmp 1,
OpcRet 0,
OpcLoadI 0 70,
OpcRet 0 ]]
result <- runProg prog
decodedResult <- decode result [] []
decodedResult `shouldBe` (VMNumber 70)
it "jumps if condition true" $ do
let prog = [[ OpcLoadI 1 2, -- counter
OpcLoadI 2 5, -- target value
OpcLoadI 5 0, -- accumulator
OpcLoadI 3 1,
OpcEq 4 1 2,
OpcJmpTrue 4 3,
OpcAdd 5 5 1,
OpcAdd 1 1 3,
OpcJmp (-5),
OpcMove 0 5,
OpcRet 0 ]]
result <- runProg prog
decodedResult <- decode result [] []
-- result: 2 + 3 + 4 = 9
decodedResult `shouldBe` (VMNumber 9)
it "matches a number" $ do
let ctable = [ encodeMatchHeader 2,
encodeNumber 11,
encodeNumber 22 ]
let prog = [[ OpcLoadI 0 600,
OpcLoadI 1 22,
OpcLoadAddr 2 (mkConstAddr 0),
OpcMatch 1 2 0,
OpcJmp 1,
OpcJmp 2,
OpcLoadI 0 4,
OpcRet 0,
OpcLoadI 0 300,
OpcRet 0 ]]
result <- runProgTbl ctable prog
decodedResult <- decode result ctable []
decodedResult `shouldBe` (VMNumber 300)
it "matches a symbol" $ do
let ctable = [ encodeMatchHeader 2,
encodePlainSymbol (mkSymId 11),
encodePlainSymbol (mkSymId 22) ]
let prog = [[ OpcLoadI 0 600,
OpcLoadPS 1 (mkSymId 22),
OpcLoadAddr 2 (mkConstAddr 0),
OpcMatch 1 2 0,
OpcJmp 1,
OpcJmp 2,
OpcLoadI 0 4,
OpcRet 0,
OpcLoadI 0 300,
OpcRet 0 ]]
result <- runProgTbl ctable prog
decodedResult <- decode result ctable []
decodedResult `shouldBe` (VMNumber 300)
it "matches a data symbol" $ do
let ctable = [ encodeMatchHeader 2,
encodeCompoundSymbolRef (mkConstAddr 3),
encodeCompoundSymbolRef (mkConstAddr 6),
encodeCompoundSymbolHeader (mkSymId 1) 2,
encodeNumber 55,
encodeNumber 66,
encodeCompoundSymbolHeader (mkSymId 1) 2,
encodeNumber 55,
encodeNumber 77,
encodeCompoundSymbolHeader (mkSymId 1) 2,
encodeNumber 55,
encodeNumber 77 ]
let prog = [[ OpcLoadI 0 600,
OpcLoadCS 1 (mkConstAddr 9),
OpcLoadAddr 2 (mkConstAddr 0),
OpcMatch 1 2 0,
OpcJmp 1,
OpcJmp 2,
OpcLoadI 0 4,
OpcRet 0,
OpcLoadI 0 300,
OpcRet 0 ]]
result <- runProgTbl ctable prog
decodedResult <- decode result ctable []
decodedResult `shouldBe` (VMNumber 300)
it "binds a value in a match" $ do
let ctable = [ encodeMatchHeader 2,
encodeCompoundSymbolRef (mkConstAddr 3),
encodeCompoundSymbolRef (mkConstAddr 6),
encodeCompoundSymbolHeader (mkSymId 1) 2,
encodeNumber 55,
encodeNumber 66,
encodeCompoundSymbolHeader (mkSymId 1) 2,
encodeNumber 55,
encodeMatchVar 1,
encodeCompoundSymbolHeader (mkSymId 1) 2,
encodeNumber 55,
encodeNumber 77 ]
let prog = [[ OpcLoadI 0 600,
OpcLoadI 4 66,
OpcLoadCS 1 (mkConstAddr 9),
OpcLoadAddr 2 (mkConstAddr 0),
OpcMatch 1 2 3,
OpcJmp 1,
OpcJmp 2,
OpcLoadI 0 22,
OpcRet 0,
OpcMove 0 4, -- reg 4 contains match var 1 (see pattern in ctable)
OpcRet 0 ]]
result <- runProgTbl ctable prog
decodedResult <- decode result [] []
decodedResult `shouldBe` (VMNumber 77)
it "loads a symbol on the heap" $ do
let ctable = [ encodeCompoundSymbolHeader (mkSymId 1) 2,
encodeNumber 55,
encodeNumber 66,
encodeCompoundSymbolHeader (mkSymId 3) 2,
encodeNumber 33,
encodeNumber 44 ]
let prog = [[ OpcLoadCS 0 (mkConstAddr 0),
OpcLoadCS 1 (mkConstAddr 3),
OpcCopySym 0 1,
OpcRet 0 ]]
result <- runProgTbl ctable prog
let symNames = ["X", "A", "Y", "B"]
let decodeResult = decode result ctable symNames
decodeResult `shouldReturn` (VMSymbol "B" [VMNumber 33, VMNumber 44])
it "modifies a heap symbol" $ do
let ctable = [ encodeCompoundSymbolHeader (mkSymId 1) 2,
encodeNumber 55,
encodeNumber 66,
encodeCompoundSymbolHeader (mkSymId 3) 2,
encodeNumber 33,
encodeNumber 44 ]
let prog = [[ OpcLoadCS 0 (mkConstAddr 0),
OpcLoadCS 1 (mkConstAddr 3),
OpcCopySym 0 1,
OpcLoadPS 5 (mkSymId 6),
OpcSetSymField 0 5 1,
OpcRet 0 ]]
result <- runProgTbl ctable prog
let symNames = ["X", "A", "Y", "B", "Z", "W", "success"]
let decodeResult = decode result ctable symNames
decodeResult `shouldReturn` (VMSymbol "B" [VMNumber 33, VMSymbol "success" []])
it "loads a string into a register" $ do
let prog = [[ OpcLoadStr 0 (mkConstAddr 55),
OpcRet 0 ]]
(runProg prog) `shouldReturn` (encodeStringRef $ mkConstAddr 55)
it "determines the length of a string" $ do
let ctable = [ encodeStringHeader 5 2,
encodeStringChunk 'd' 'a' 's' 'h',
encodeStringChunk '!' '\0' '\0' '\0' ]
let prog = [[ OpcLoadStr 1 (mkConstAddr 0),
OpcStrLen 0 1,
OpcRet 0 ]]
result <- runProgTbl ctable prog
decodedResult <- decode result ctable []
decodedResult `shouldBe` (VMNumber 5)
it "creates a new string" $ do
let prog = [[ OpcLoadI 1 8,
OpcNewStr 0 1,
OpcRet 0 ]]
result <- runProg prog
decodedResult <- decode result [] []
decodedResult `shouldBe` (VMString "")
it "copies a string" $ do
let loop = (-6);
let end = 4;
let ctable = [ encodeStringHeader 5 2,
encodeStringChunk 'd' 'a' 's' 'h',
encodeStringChunk '!' '\0' '\0' '\0' ]
let prog = [[ OpcLoadStr 6 (mkConstAddr 0),
OpcStrLen 1 6,
OpcLoadI 2 0, -- index
OpcLoadI 5 1,
OpcNewStr 3 1,
-- loop:
OpcEq 7 2 1,
OpcJmpTrue 7 end,
OpcGetChar 4 6 2,
OpcPutChar 4 3 2,
OpcAdd 2 2 5,
OpcJmp loop,
-- end:
OpcMove 0 3,
OpcRet 0 ]]
result <- runProgTbl ctable prog
decodedResult <- decode result ctable []
decodedResult `shouldBe` (VMString "dash!")
it "looks up a value in a module" $ do
let ctable = [ encodeOpaqueSymbolHeader (mkSymId 10) 2
, encodePlainSymbol (mkSymId 0)
, encodePlainSymbol (mkSymId 5)
, encodeNumber 33
]
let prog = [[ OpcLoadOS 1 (mkConstAddr 0)
, OpcLoadPS 2 (mkSymId 5)
, OpcGetField 0 1 2
, OpcRet 0 ]]
result <- runProgTbl ctable prog
decodedResult <- decode result ctable []
decodedResult `shouldBe` (VMNumber 33)
|
arne-schroppe/dash
|
test/Language/Dash/VM/VMSpec.hs
|
mit
| 14,363 | 0 | 20 | 6,563 | 3,861 | 1,907 | 1,954 | 334 | 2 |
-----------------------------------------------------------------------------
-- |
-- Module : Reader.Parser
-- License : MIT (see the LICENSE file)
-- Maintainer : Felix Klein ([email protected])
--
-- Parsing module containing all necessary parsers.
--
-----------------------------------------------------------------------------
module Reader.Parser
( parse
) where
-----------------------------------------------------------------------------
import Data.Enum
( EnumDefinition(..)
)
import Data.Error
( Error
, parseError
)
import Reader.Error
( errEnumConflict
)
import Reader.Parser.Data
( Specification(..)
)
import Reader.Parser.Info
( infoParser
)
import Reader.Parser.Global
( globalParser
)
import Reader.Parser.Component
( componentParser
)
import Text.Parsec
( (<|>)
)
import qualified Text.Parsec as P
( parse
)
import Text.Parsec.String
( Parser
)
-----------------------------------------------------------------------------
-- | @parseSpecification str@ parses a specification from the string @str@.
parse
:: String -> Either Error Specification
parse str =
case P.parse specificationParser "Syntax Error" str of
Left err -> parseError err
Right x -> do
mapM_ checkEnum $ enumerations x
return x
-----------------------------------------------------------------------------
specificationParser
:: Parser Specification
specificationParser = do
(i,d,s,r,a) <- infoParser
(ps,vs,ms) <- globalParser <|> return ([],[],[])
(is,os,es,ss,rs,as,ns,gs) <- componentParser
return Specification
{ title = i
, description = d
, semantics = s
, target = r
, tags = a
, enumerations = ms
, parameters = ps
, definitions = vs
, inputs = is
, outputs = os
, initially = es
, preset = ss
, requirements = rs
, assumptions = as
, invariants = ns
, guarantees = gs
}
-----------------------------------------------------------------------------
checkEnum
:: EnumDefinition String -> Either Error ()
checkEnum e = case eDouble e of
Just ((m,p),(x,_),(y,_),f) -> errEnumConflict m x y (toStr (eSize e) f) p
Nothing -> return ()
where
toStr n f = map (toS . f) [0,1..n-1]
toS (Right ()) = '*'
toS (Left True) = '1'
toS (Left False) = '0'
-----------------------------------------------------------------------------
|
reactive-systems/syfco
|
src/lib/Reader/Parser.hs
|
mit
| 2,459 | 0 | 12 | 519 | 619 | 358 | 261 | 63 | 4 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalytics-application-csvmappingparameters.html
module Stratosphere.ResourceProperties.KinesisAnalyticsApplicationCSVMappingParameters where
import Stratosphere.ResourceImports
-- | Full data type definition for
-- KinesisAnalyticsApplicationCSVMappingParameters. See
-- 'kinesisAnalyticsApplicationCSVMappingParameters' for a more convenient
-- constructor.
data KinesisAnalyticsApplicationCSVMappingParameters =
KinesisAnalyticsApplicationCSVMappingParameters
{ _kinesisAnalyticsApplicationCSVMappingParametersRecordColumnDelimiter :: Val Text
, _kinesisAnalyticsApplicationCSVMappingParametersRecordRowDelimiter :: Val Text
} deriving (Show, Eq)
instance ToJSON KinesisAnalyticsApplicationCSVMappingParameters where
toJSON KinesisAnalyticsApplicationCSVMappingParameters{..} =
object $
catMaybes
[ (Just . ("RecordColumnDelimiter",) . toJSON) _kinesisAnalyticsApplicationCSVMappingParametersRecordColumnDelimiter
, (Just . ("RecordRowDelimiter",) . toJSON) _kinesisAnalyticsApplicationCSVMappingParametersRecordRowDelimiter
]
-- | Constructor for 'KinesisAnalyticsApplicationCSVMappingParameters'
-- containing required fields as arguments.
kinesisAnalyticsApplicationCSVMappingParameters
:: Val Text -- ^ 'kaacsvmpRecordColumnDelimiter'
-> Val Text -- ^ 'kaacsvmpRecordRowDelimiter'
-> KinesisAnalyticsApplicationCSVMappingParameters
kinesisAnalyticsApplicationCSVMappingParameters recordColumnDelimiterarg recordRowDelimiterarg =
KinesisAnalyticsApplicationCSVMappingParameters
{ _kinesisAnalyticsApplicationCSVMappingParametersRecordColumnDelimiter = recordColumnDelimiterarg
, _kinesisAnalyticsApplicationCSVMappingParametersRecordRowDelimiter = recordRowDelimiterarg
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalytics-application-csvmappingparameters.html#cfn-kinesisanalytics-application-csvmappingparameters-recordcolumndelimiter
kaacsvmpRecordColumnDelimiter :: Lens' KinesisAnalyticsApplicationCSVMappingParameters (Val Text)
kaacsvmpRecordColumnDelimiter = lens _kinesisAnalyticsApplicationCSVMappingParametersRecordColumnDelimiter (\s a -> s { _kinesisAnalyticsApplicationCSVMappingParametersRecordColumnDelimiter = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalytics-application-csvmappingparameters.html#cfn-kinesisanalytics-application-csvmappingparameters-recordrowdelimiter
kaacsvmpRecordRowDelimiter :: Lens' KinesisAnalyticsApplicationCSVMappingParameters (Val Text)
kaacsvmpRecordRowDelimiter = lens _kinesisAnalyticsApplicationCSVMappingParametersRecordRowDelimiter (\s a -> s { _kinesisAnalyticsApplicationCSVMappingParametersRecordRowDelimiter = a })
|
frontrowed/stratosphere
|
library-gen/Stratosphere/ResourceProperties/KinesisAnalyticsApplicationCSVMappingParameters.hs
|
mit
| 2,947 | 0 | 13 | 222 | 267 | 153 | 114 | 29 | 1 |
type Fname = String
type Var = String
data Program = Prog [Fundef] Exp deriving Show
data Fundef = Fun String [String] Exp deriving Show
data Exp = I Int | V Var | B Bool | Nil | Fname String | App Exp Exp deriving Show
type Code = [Instn]
data Instn =
PUSH Int
| PUSHINT Int
| PUSHGLOBAL String
| PUSHBOOL Bool
| PUSHNIL
| POP Int
| EVAL
| UNWIND
| MKAP
| UPDATE Int
| RETURN
| LABEL String
| JUMP String
| JFALSE String
| ADD
| SUB
| MUL
| DIV
| CONS
| HEAD
| TAIL
| IF
| EQU
| GLOBSTART String Int
| PRINT
| STOP
instance Show Instn where
show (PUSH i) = "PUSH " ++ show i ++ "\n"
show (PUSHINT i) = "PUSHINT " ++ show i ++ "\n"
show (PUSHGLOBAL str) = "PUSHGLOBAL " ++ show str ++ "\n"
show (PUSHBOOL b) = "PUSHBOOL " ++ show b ++ "\n"
show PUSHNIL = "PUSHNIL " ++ "\n"
show (POP i) = "POP " ++ show i ++ "\n"
show EVAL = "EVAL" ++ "\n"
show UNWIND = "UNWIND" ++ "\n"
show MKAP = "MKAP" ++ "\n"
show RETURN = "RETURN" ++ "\n"
show (UPDATE i) = "UPDATE " ++ show i ++ "\n"
show (LABEL str) = "LABEL " ++ show str ++ "\n"
show (JUMP str) = "JUMP " ++ show str ++ "\n"
show (JFALSE str) = "JFALSE " ++ show str ++ "\n"
show ADD = "ADD" ++ "\n"
show SUB = "SUB" ++ "\n"
show MUL = "MUL" ++ "\n"
show DIV = "DIV" ++ "\n"
show CONS = "CONS" ++ "\n"
show HEAD = "HEAD" ++ "\n"
show TAIL = "TAIL" ++ "\n"
show IF = "IF" ++ "\n"
show EQU = "EQU" ++ "\n"
show (GLOBSTART str i) = "\n GLOBSTART " ++ show str ++ " " ++ show i ++ "\n"
show PRINT = "PRINT" ++ "\n"
show STOP = "STOP" ++ "\n"
start (Prog fdefList term) = (foldr func g fdefList)
where
g = LABEL "MAIN":(expr term (\x -> 999) 999) (EVAL:PRINT:STOP:hardCoded) --here the 999 are arbitrary
hardCoded = concat (map (getCode) builtins)
func (Fun fname argList fdef) codeTillNow = GLOBSTART fname n:(body fdef r n codeTillNow)
where
n = length argList
tupleList = foldr (\x acc -> (x, (length acc)+1):acc) [] argList
r ident = snd$head (filter (\(v,x) -> v==ident) tupleList)
body fbody r d codeTillNow= expr fbody r d ((UPDATE (d+1)):POP d:UNWIND:codeTillNow)
builtins = [("+",ADD), ("-",SUB), ("*",MUL), ("/",DIV), ("cons",CONS), ("head",HEAD), ("car",HEAD), ("cdr",HEAD), ("==",EQU), ("null",HEAD), ("if",IF)]
expr (I x) r d codeTillNow = PUSHINT x:codeTillNow
expr (B x) r d codeTillNow = PUSHBOOL x:codeTillNow
expr (Fname fname) _ _ codeTillNow = PUSHGLOBAL fname:codeTillNow
expr (V v) r d codeTillNow = PUSH (d - (r v)):codeTillNow
expr (App (x) (y)) r d codeTillNow = expr (y) r d (expr (x) r (d+1) (MKAP:codeTillNow))
expr (Nil) r d codeTillNow = PUSHNIL:codeTillNow
getCode (mini,gcode)
|(mini `elem` ["+","-","*","/","=="]) = [GLOBSTART mini 2, PUSH 1, EVAL, PUSH 1, EVAL, gcode, UPDATE 3]
|(mini=="cons") = [GLOBSTART mini 1, gcode, UPDATE 1, RETURN]
|(mini `elem` ["head","car","cdr","null"]) = [GLOBSTART mini 1, EVAL, gcode, EVAL, UPDATE 1, UNWIND]
|(mini=="if") = [GLOBSTART mini 3, PUSH 0, EVAL, JFALSE "l1", PUSH 1, JUMP "l2", LABEL "l1", PUSH 2, LABEL "l2", EVAL, UPDATE 4, POP 3, UNWIND]
|otherwise = []
gencpgm :: Program -> Code
gencpgm p = start p
|
sushantmahajan/programs
|
haskell/assign2.hs
|
cc0-1.0
| 3,308 | 0 | 14 | 857 | 1,551 | 828 | 723 | 83 | 1 |
module Flowskell.SchemeUtils where
import Language.Scheme.Types
import Data.Array
extractFloat (Float n) = realToFrac n
extractFloat (Number n) = realToFrac n
extractFloat (Rational n) = realToFrac n
makeFloatVector lst = Vector $ listArray (0, length lst - 1) (map Float lst)
|
lordi/flowskell
|
src/Flowskell/SchemeUtils.hs
|
gpl-2.0
| 279 | 0 | 9 | 41 | 106 | 54 | 52 | 7 | 1 |
{-# OPTIONS_GHC -O3 -optc-O3 #-}
wheelList = [1, 7, 11, 13, 17, 19, 23, 29]
wheel n=map (m+) wheelList
where m=30*n
candidates =concatMap wheel [1..]
isPrime n=not.(any isDivicible)$ rootCandidates
where
rootCandidates =takeWhile (<=(ceiling.sqrt.fromIntegral$ n)) primes
isDivicible x= n`mod`x==0
primes = [2, 3, 5]++(tail wheelList)++(filter isPrime candidates)
main =print.sum.takeWhile (<2000000)$ primes
|
kumar0ed/euler
|
10.hs
|
gpl-2.0
| 429 | 1 | 12 | 70 | 195 | 109 | 86 | 10 | 1 |
module Tree where
data Tree a = Tree a [Tree a]
deriving (Show)
instance Functor Tree where
fmap fn (Tree x [])= Tree (fn x) []
fmap fn (Tree x ts) = Tree (fn x) (map (fmap fn) ts)
makeTree :: t -> Tree t
makeTree x = Tree x []
--Extend the tip of every branch
grow :: (a -> [a]) -> Tree a -> Tree a
grow fn (Tree a []) = Tree a (map makeTree $ fn a)
grow fn (Tree a ts) = (Tree a (map (grow fn) ts))
--WARNING HIGHLY DANGEROUS - Infinite loop, insufficiently lazy
iterateGrowth :: (a -> [a]) -> (Tree a) -> (Tree a)
iterateGrowth fn tree = iterateGrowth fn (grow fn tree)
iterateToDepth :: (a->[a]) -> Int -> (Tree a) -> (Tree a)
iterateToDepth fn n t =
if n == 0 then t
else iterateToDepth fn (n-1) (grow fn t)
flatten :: Tree a -> [a]
flatten (Tree v ts) = [v] ++ (concat $ map flatten ts)
--Given a function that maps v->t,
--turn it into a function that returns Tree v -> t
makePruner :: (t->Bool) -> (Tree t -> Bool)
makePruner fn = \(Tree t _) -> fn t
--Remove all trees that match the predicate,
--and all their subtrees without inspection
prune :: (a->Bool) -> Tree a -> Tree a
prune fn (Tree v ts) =
Tree v (map (prune fn)
(filter (makePruner fn) ts))
---Now some growth functions
extendBinary :: String -> [String]
extendBinary b = ['0':b, '1':b]
binaryTree = makeTree ""
b2 = (grow extendBinary) $ (grow extendBinary) $ grow extendBinary binaryTree
b3 = iterateToDepth extendBinary 10 b2
b4 = prune (\x -> (length x) < 6) b3
|
CharlesRandles/HaskellUtils
|
tree.hs
|
gpl-3.0
| 1,537 | 0 | 11 | 384 | 686 | 355 | 331 | 31 | 2 |
module DrawingTest where
import Graphics.SpriteKit hiding (Path,Point)
import Codec.Picture
import LineGraphics
import LineIntersect
makeVectroLinePicture :: Colour -> LineVector -> Float -> Picture
makeVectroLinePicture c lineWithVector t =
[(c, linePath) , (c, circle)]
where
linePath = vectorLinePath lineWithVector t
circle = pointMarkerPoint $ endPoint lineWithVector t
makeInterasctionPointPicture :: Colour -> LineVector -> LineVector -> Picture
makeInterasctionPointPicture col line1 line2
= [(col, cir)]
where
point = intersectionPoint line1 line2
cir = pointMarkerPoint point
makeIntersectionPicture :: Picture
makeIntersectionPicture =
(makeVectroLinePicture blue line1 1)
++ (makeVectroLinePicture red line2 1)
++ (makeInterasctionPointPicture green line1 line2)
where
line1 = ((100 , 0) , (300 , 300))
line2 = ((150 , 200) , (200 , 0))
test = drawPicture 2.0 $ makeIntersectionPicture
|
Terry-Weymouth/haskell-line-intersect
|
LineIntersect.hsproj/DrawingTest.hs
|
gpl-3.0
| 977 | 0 | 8 | 194 | 268 | 152 | 116 | 23 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Kevin.Util.Token (
getToken
) where
import Control.Applicative
import Control.Arrow
import Crypto.Random.AESCtr (makeSystem)
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy.Char8 as LB
import Data.List
import Data.Monoid
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8)
import Network.HTTP.Base
import Network.TLS
import Network.TLS.Extra
import Text.Printf
recvUntil :: TLSCtx -> B.ByteString -> IO B.ByteString
recvUntil ctx str = do
line <- recvData ctx
if str `B.isInfixOf` line
then return line
else (line <>) <$> recvUntil ctx str
concatHeaders :: [(String,String)] -> String
concatHeaders = intercalate "\r\n" . map (\(x,y) -> x ++ ": " ++ y)
scrapeFormValue :: String -> B.ByteString -> B.ByteString
scrapeFormValue key bs = B.takeWhile (/='"') . B.drop 7 . head
. filter (\l -> "value=" `B.isPrefixOf` l) . B.words . head
. filter (\l -> B.pack key `B.isInfixOf` l) . B.lines $ bs
scrapeCookies :: B.ByteString -> B.ByteString
scrapeCookies bs = B.intercalate ";" . map snd
. filter ((== "Set-Cookie") . fst)
. map (second (B.drop 2 . B.takeWhile (/=';'))
. B.breakSubstring ": ")
. B.lines $ bs
getToken :: T.Text -> T.Text -> IO (Maybe T.Text)
getToken uname pass = do
let params = defaultParamsClient {
pCiphers = ciphersuite_all
, onCertificatesRecv = certificateChecks
[return . certificateVerifyDomain "chat.deviantart.com"]
}
headers :: [(String, String)]
headers = [("Connection", "closed"), ("Content-Type", "application/x-www-form-urlencoded")]
gen <- makeSystem
ctv <- connectionClient "www.deviantart.com" "443" params gen
handshake ctv
sendData ctv . LB.pack $ "GET /users/login HTTP/1.1\r\nHost: www.deviantart.com\r\n\r\n"
bl <- recvUntil ctv "validate_key"
bye ctv
let payload = urlEncodeVars [ ("username", T.unpack uname)
, ("password", T.unpack pass)
, ("validate_token", B.unpack (scrapeFormValue "validate_token" bl))
, ("validate_key", B.unpack (scrapeFormValue "validate_key" bl))
, ("remember_me","1")
]
ctx <- connectionClient "www.deviantart.com" "443" params gen
handshake ctx
sendData ctx . LB.pack $ printf
"POST /users/login HTTP/1.1\r\n%s\r\ncookie: %s\r\nContent-Length: %d\r\n\r\n%s"
(concatHeaders $ ("Host", "www.deviantart.com"):headers)
(B.unpack (scrapeCookies bl))
(length payload)
payload
bs <- recvData ctx
if "wrong-password" `B.isInfixOf` bs
then return Nothing
else do
let s = printf "GET /chat/Botdom HTTP/1.1\r\n%s\r\ncookie: %s\r\n\r\n"
(concatHeaders [("Host", "chat.deviantart.com")])
(B.unpack (scrapeCookies bs))
sendData ctx $ LB.pack s
bq <- recvUntil ctx "dAmnChat_Init"
return . (Just . decodeUtf8 . B.take 32 . B.tail
. B.dropWhile (/='"') . B.dropWhile (/=',') . snd)
. B.breakSubstring "dAmn_Login" $ bq
|
pikajude/kevin
|
src/Kevin/Util/Token.hs
|
gpl-3.0
| 3,544 | 0 | 20 | 1,134 | 987 | 520 | 467 | 73 | 2 |
-- Copyright (C) 2017 Red Hat, Inc.
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
-- License as published by the Free Software Foundation; either
-- version 2.1 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Lesser General Public License for more details.
--
-- You should have received a copy of the GNU Lesser General Public
-- License along with this library; if not, see <http://www.gnu.org/licenses/>.
--
module BDCS.RPM.SourcesSpec(spec)
where
import Control.Exception(evaluate)
import Test.Hspec
import BDCS.DB(Projects(..))
import BDCS.Exceptions(DBException(..))
import BDCS.RPM.Sources(mkSource)
import RPM.Tags(Tag(..))
import Utils(fakeKey)
spec :: Spec
spec = describe "BDCS.RPM.Sources Tests" $ do
it "No License raises" $
evaluate (mkSource [ Version "" ] fakeKey) `shouldThrow` (== MissingRPMTag "License")
it "No Version raises" $
evaluate (mkSource [ License "" ] fakeKey) `shouldThrow` (== MissingRPMTag "Version")
|
dashea/bdcs
|
importer/tests/BDCS/RPM/SourcesSpec.hs
|
lgpl-2.1
| 1,265 | 0 | 14 | 214 | 200 | 120 | 80 | 14 | 1 |
-- {-# OPTIONS_GHC -fno-warn-unused-imports -fno-warn-unused-binds #-}
import LLVM.General.AST
import qualified LLVM.General.AST as AST
import qualified LLVM.General.AST.IntegerPredicate as IP
import LLVM.General.AST.Type (ptr)
import LLVM.General.Module
import LLVM.General.Context
import Data.Traversable (for)
import Control.Monad.State
import Control.Monad.Except
import Debug.Trace (trace)
import Syntax
import Codegen
-------------------------------------------------------------------------------
malloc :: Int -> Codegen AST.Operand
malloc bytes = call (externf $ Name "malloc") [i32c $ bytes * uintSizeBytes]
codegenFunction :: SymName -> [AST.Type] -> Codegen a
-> [SymName] -> Expr -> LLVM ()
codegenFunction funcname argTys prologue args expr = do
defs <- gets moduleDefinitions
let nxtLambdaIx = length defs
cgst' = cgst nxtLambdaIx
defineFunc uint funcname fnargs (createBlocks cgst')
-- lift up all lambdas
sequence_ (extraFuncs cgst')
where
fnargs = zip argTys $ map Name args
cgst nxtLambdaIx = execCodegen funcname nxtLambdaIx $ do
blk <- addBlock entryBlockName
setBlock blk
-- alloca all arguments
for args $ \a -> do
var <- alloca uint
store var (local (Name a))
assign a var
prologue
val <- codegenExpr expr
ret val
codegenTop :: Expr -> LLVM()
codegenTop = codegenFunction "entryFunc" [] (return ()) []
liftError :: Show b => ExceptT b IO a -> IO a
liftError = runExceptT >=> either (fail . show) return
codegen :: Expr -> IO String
codegen expr = withContext $ \ctx ->
liftError $ withModuleFromLLVMAssembly ctx (File "prelude.ll") $ \prelude ->
liftError $ withModuleFromAST ctx ast $ \m -> do
liftError $ linkModules False m prelude
moduleLLVMAssembly m
where mallocDef = external (ptr i8) "malloc" [(i32, UnName 0)]
ast = runLLVM (emptyModule "entryModule") $ mallocDef >> codegenTop expr
-------------------------------------------------------------------------------
loadEnv :: Type -> [String] -> Codegen [()]
loadEnv envType freeVars = do
let envInt = AST.LocalReference uint $ AST.Name envVarName
envPtr <- inttoptr envInt $ ptr envType
for (zip [0..] freeVars) $ \(ix, var) -> do
varPos <- getelementptr envPtr ix
varPosPtr <- inttoptr varPos $ ptr uint
varInt <- load varPosPtr
varPtr <- inttoptr varInt $ ptr uint
assign var varPtr
genEnv :: Type -> [String] -> Codegen Operand
genEnv envType freeVars = do
envRaw <- malloc $ length freeVars
envInt <- ptrtoint envRaw uint
envPtr <- inttoptr envInt $ ptr envType
for (zip [0..] freeVars) $ \(ix, var) -> do
fvPtr <- getvar var
fvVal <- load fvPtr
varRaw <- malloc 1
varInt <- ptrtoint varRaw uint
varPtr <- inttoptr varInt $ ptr uint
store varPtr fvVal
varPos <- getelementptr envPtr ix
store varPos varInt
modify $ \s -> s { envBindings = (fvPtr, varPtr) : envBindings s }
return envInt
codegenExpr :: Expr -> Codegen AST.Operand
codegenExpr (Num n) = return . constUint $ n
codegenExpr (Var sym) = getvar sym >>= load
codegenExpr (BinOp op e1 e2) = do
val1 <- codegenExpr e1
val2 <- codegenExpr e2
genBinOp op val1 val2
codegenExpr (App func arg) = do
closInt <- codegenExpr func
closPtr <- inttoptr closInt $ ptr closType
first <- getelementptr closPtr 0
envInt <- load first
second <- getelementptr closPtr 1
funcInt <- load second
funcPtr <- inttoptr funcInt $ ptr funcType
operand <- codegenExpr arg
call funcPtr [envInt, operand]
codegenExpr (Ifz cond tr fl) = do
ifthen <- addBlock "if.then"
ifelse <- addBlock "if.else"
ifexit <- addBlock "if.exit"
condVal <- codegenExpr cond
test <- icmp IP.EQ (constUint 0) condVal
cbr test ifthen ifelse -- Branch based on the condition
trval <- branch ifthen tr ifexit
flval <- branch ifelse fl ifexit
setBlock ifexit
phi uint [(trval, ifthen), (flval, ifelse)]
where
branch block expr ifexit = do
setBlock block
val <- codegenExpr expr
br ifexit
return val
codegenExpr (Let sym e1 e2) = do
-- symPtr <- alloca uint
symRaw <- malloc 1
symPtr <- bitcast symRaw $ ptr uint
assign sym symPtr
val <- codegenExpr e1
store symPtr val
-- refill
bindings <- gets envBindings
sequence_ [store valPtr val | (fvPtr, valPtr) <- bindings, fvPtr == symPtr]
codegenExpr e2
codegenExpr e@(Lam sym expr) = do
cgst <- get
let lambdaName = "lambda" ++ show (lambdaCnt cgst)
freeVars = findFreeVars e -- \\ letVars cgst
envType = structType $ replicate (length freeVars) uint
let createFunc =
codegenFunction lambdaName lambdaSig (loadEnv envType freeVars)
[envVarName, sym] expr
put $ cgst { extraFuncs = createFunc : extraFuncs cgst
, lambdaCnt = lambdaCnt cgst + 1 }
closRaw <- malloc 2
closInt <- ptrtoint closRaw uint
closPtr <- inttoptr closInt $ ptr closType
envInt <- genEnv envType freeVars
first <- getelementptr closPtr 0
store first envInt
second <- getelementptr closPtr 1
funcInt <- ptrtoint (funcOpr uint (AST.Name lambdaName) lambdaSig) uint
store second funcInt
return closInt
-------------------------------------------------------------------------------
prog0 :: Expr
prog0 = Num 42
-- 42
prog1 :: Expr
prog1 = App (App (App (Lam "x" (Lam "y" (Lam "z" (Var "y")))) (Num 1)) (Num 2)) (Num 3)
-- 2
prog2 :: Expr
prog2 = Let "x" (BinOp "+" (Num 1) (Num 1))
(BinOp "+" (Var "x") (Num 1))
-- 3
prog3 :: Expr
prog3 = Ifz (Num 1) (Num 0) (Num 1)
-- 1
prog :: Expr
prog = Let "fact" (Lam "x"
(Ifz (Var "x") (Num 1)
(BinOp "*" (Var "x")
(App (Var "fact") (BinOp "-" (Var "x") (Num 1))))))
(App (Var "fact") (Num 5))
-- 120
main :: IO ()
main = putStrLn =<< codegen prog
|
scturtle/turtlelang
|
Main.hs
|
unlicense
| 5,889 | 0 | 18 | 1,351 | 2,176 | 1,040 | 1,136 | 150 | 1 |
{-# LANGUAGE BangPatterns #-}
module Ylang.Primitive
(
addBin,
andBin,
orBin,
xorBin,
notUnary,
adds,
ands,
ors,
xors,
nots
) where
import Ylang.Display
import Ylang.Value
type BinOp a = a -> a -> Either String a
type Variadic a = [a] -> Either String a
variadic :: BinOp Val -> Variadic Val
variadic _ [x] = Right x
variadic f (x1:x2:[]) = f x1 x2
variadic f (x1:x2:xs) = case (f x1 x2) of
Right x -> variadic f (x:xs)
Left e -> Left e
variadicHalt :: BinOp Val -> Val -> Variadic Val
variadicHalt _ _ [x] = Right x
variadicHalt f _ (x1:x2:[]) = f x1 x2
variadicHalt f t (x1:x2:xs) = case (f x1 x2) of
Right x
| x == t -> Right t
| otherwise -> variadic f (x:xs)
Left e -> Left e
undefinedFound :: Either String Val
undefinedFound = Left "Undefined"
unknownImplError :: String -> Val -> Either String Val
unknownImplError fn x = Left $
"Undefined Implement " ++ fn ++ " for " ++ toString (getType x) ++ " type"
typeNotMatch :: Either String Val
typeNotMatch = Left "Type Not Match"
-- |
-- (+ <ylang-value> <ylang-value>)
addBin :: BinOp Val
addBin x y = case (x, y) of
-- Numbers
(ValIntn i, ValIntn j) -> Right $ ValIntn (i + j)
(ValFlon i, ValFlon j) -> Right $ ValFlon (i + j)
(ValRatn i, ValRatn j) -> Right $ ValRatn (i + j)
(ValIntn i, ValFlon j) -> Right $ ValRatn (fromInteger i + toRational j)
(ValIntn i, ValRatn j) -> Right $ ValRatn (fromInteger i + j)
(ValFlon i, ValRatn j) -> Right $ ValRatn (toRational i + j)
(ValFlon _, ValIntn _) -> addBin y x
(ValRatn _, ValIntn _) -> addBin y x
(ValRatn _, ValFlon _) -> addBin y x
(ValChr a, ValChr b) -> Right $ ValStr (a:b:[])
(ValChr a, ValStr b) -> Right $ ValStr (a:b)
(ValStr a, ValChr b) -> Right $ ValStr (a ++ [b])
(ValStr a, ValStr b) -> Right $ ValStr (a ++ b)
(ValBotm, _) -> undefinedFound
(_, ValBotm) -> undefinedFound
(_, _)
| getType x == getType y -> unknownImplError "(+)" x
| otherwise -> typeNotMatch
adds :: Variadic Val
adds = variadic addBin
andBin :: BinOp Val
andBin x y = case (x, y) of
(ValBool i, ValBool j) -> Right $ ValBool (i && j)
(ValBotm, _) -> undefinedFound
(_, ValBotm) -> undefinedFound
(_, _)
| getType x == getType y -> unknownImplError "(&)" x
| otherwise -> typeNotMatch
ands :: Variadic Val
ands = variadicHalt andBin $ ValBool False
orBin :: BinOp Val
orBin x y = case (x, y) of
(ValBool i, ValBool j) -> Right $ ValBool (i || j)
(ValBotm, _) -> undefinedFound
(_, ValBotm) -> undefinedFound
(_, _)
| getType x == getType y -> unknownImplError "(|)" x
| otherwise -> typeNotMatch
ors :: Variadic Val
ors = variadicHalt orBin $ ValBool True
xorBin :: BinOp Val
xorBin x y = case (x, y) of
(ValBool True, ValBool False) -> Right $ ValBool True
(ValBool False, ValBool True) -> Right $ ValBool True
(ValBool _, ValBool _) -> Right $ ValBool False
(ValBotm, _) -> undefinedFound
(_, ValBotm) -> undefinedFound
(_, _)
| getType x == getType y -> unknownImplError "(^)" x
| otherwise -> typeNotMatch
xors :: Variadic Val
xors = variadic xorBin
notUnary :: Val -> Either String Val
notUnary (ValBool i) = Right $ ValBool (not i)
notUnary ValBotm = undefinedFound
notUnary _ = typeNotMatch
nots :: Variadic Val
nots (e:[]) = notUnary e
nots (_:_) = Left "Too Parameter"
|
VoQn/ylang
|
Ylang/Primitive.hs
|
apache-2.0
| 3,350 | 0 | 12 | 784 | 1,572 | 790 | 782 | 99 | 16 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE BangPatterns #-}
module Lib.LiftedIO
(print, putStrLn, newIORef, modifyIORef', readIORef,
writeIORef, IOREF.IORef)
where
------------------------------------------------------------------------------------
import qualified Prelude as P
import Control.Monad.State (MonadIO)
import Control.Monad.IO.Class (liftIO)
import Prelude ((.), Show, ($))
import qualified Data.IORef as IOREF
------------------------------------------------------------------------------------
print :: (Show a, MonadIO m) => a -> m ()
print = liftIO . P.print
putStrLn :: MonadIO m => P.String -> m ()
putStrLn = liftIO . P.putStrLn
newIORef :: MonadIO m => a -> m (IOREF.IORef a)
newIORef = liftIO. IOREF.newIORef
modifyIORef' :: MonadIO m => IOREF.IORef a -> (a -> a) -> m ()
modifyIORef' x y = liftIO $ IOREF.modifyIORef' x y
readIORef :: MonadIO m => IOREF.IORef a -> m a
readIORef = liftIO . IOREF.readIORef
writeIORef :: MonadIO m => IOREF.IORef a -> a -> m ()
writeIORef x !y = liftIO $ IOREF.writeIORef x y
|
kernelim/gitomail
|
src/Lib/LiftedIO.hs
|
apache-2.0
| 1,159 | 0 | 10 | 228 | 348 | 190 | 158 | 23 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Risk where
import Control.Monad.Random
import Control.Monad
import Data.List (sort, foldr)
import Data.Ratio
------------------------------------------------------------
-- Die values
newtype DieValue = DV { unDV :: Int }
deriving (Eq, Ord, Show, Num)
first :: (a -> b) -> (a, c) -> (b, c)
first f (a, c) = (f a, c)
instance Random DieValue where
random = first DV . randomR (1,6)
randomR (low,hi) = first DV . randomR (max 1 (unDV low), min 6 (unDV hi))
die :: Rand StdGen DieValue
die = getRandom
------------------------------------------------------------
-- Risk
type Army = Int
data Battlefield = Battlefield { attackers :: Army, defenders :: Army }
deriving Show
-- Exercise #2:
battle :: Battlefield -> Rand StdGen Battlefield
battle bf = do
atkDice <- roll attackForce
defDice <- roll defenseForce
let lineUp = zip (sort atkDice) (sort defDice)
winners = map (uncurry (>)) lineUp
defenseLost = length $ filter id winners
offenseLost = length $ filter not winners
return (Battlefield (atk - offenseLost) (def - defenseLost))
where
atk = attackers bf
def = defenders bf
attackForce = if atk > 3 then 3 else min (atk - 1) 3
defenseForce = if def > 2 then 2 else def
roll n = replicateM n die
-- Exercise #3:
invade :: Battlefield -> Rand StdGen Battlefield
invade bf = do
newBf <- battle bf
if attackers newBf > 1 && defenders newBf > 0
then invade newBf
else return newBf
-- Exercise #4:
successProb :: Battlefield -> Rand StdGen Double
successProb bf = do
battlefields <- replicateM 1000 (invade bf)
let wins = foldr (\b acc -> acc + if (0 == defenders b) then 1 else 0) 0 battlefields
battles = length battlefields
return $ (wins / fromIntegral battles)
|
parsonsmatt/cis194
|
hw12/Risk.hs
|
apache-2.0
| 1,882 | 0 | 17 | 458 | 655 | 346 | 309 | 44 | 3 |
{-
Copyright 2016, Dominic Orchard, Andrew Rice, Mistral Contrastin, Matthew Danish
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE ImplicitParams #-}
{-# LANGUAGE TupleSections #-}
module Camfort.Specification.Stencils.CheckBackend
(
-- * Classes
SynToAst(..)
-- * Errors
, SynToAstError
, regionNotInScope
-- * Helpers
, checkOffsetsAgainstSpec
) where
import Algebra.Lattice (joins1)
import Control.Arrow (second)
import Data.Function (on)
import Data.Int (Int64)
import Data.List (sort)
import qualified Data.Set as S
import qualified Camfort.Helpers.Vec as V
import qualified Camfort.Specification.Stencils.Consistency as C
import Camfort.Specification.Stencils.Model
import qualified Camfort.Specification.Stencils.Parser.Types as SYN
import Camfort.Specification.Stencils.Syntax
data SynToAstError = RegionNotInScope String
deriving (Eq)
regionNotInScope :: String -> SynToAstError
regionNotInScope = RegionNotInScope
instance Show SynToAstError where
show (RegionNotInScope r) = "Error: region " ++ r ++ " is not in scope."
-- Class for functions converting from Parser parse
-- syntax to the AST representation of the Syntax module
class SynToAst s t | s -> t where
synToAst :: (?renv :: RegionEnv) => s -> Either SynToAstError t
-- Top-level conversion of declarations
instance SynToAst SYN.Specification (Either RegionDecl SpecDecl) where
synToAst (SYN.SpecDec spec vars) = do
spec' <- synToAst spec
return $ Right (vars, spec')
synToAst (SYN.RegionDec rvar region) = do
spec' <- synToAst region
return $ Left (rvar, spec')
-- Convert temporal or spatial specifications
instance SynToAst SYN.SpecInner Specification where
synToAst (SYN.SpecInner spec isStencil) = do
spec' <- synToAst spec
return $ Specification spec' isStencil
instance SynToAst (Multiplicity (Approximation SYN.Region)) (Multiplicity (Approximation Spatial)) where
synToAst (Once a) = fmap Once . synToAst $ a
synToAst (Mult a) = fmap Mult . synToAst $ a
instance SynToAst (Approximation SYN.Region) (Approximation Spatial) where
synToAst (Exact s) = fmap (Exact . Spatial) . synToAst $ s
synToAst (Bound s1 s2) = (Bound `on` (fmap Spatial)) <$> synToAst s1 <*> synToAst s2
instance SynToAst (Maybe SYN.Region) (Maybe RegionSum) where
synToAst Nothing = pure Nothing
synToAst (Just r) = fmap Just . synToAst $ r
-- Convert region definitions into the DNF-form used internally
instance SynToAst SYN.Region RegionSum where
synToAst = dnf
-- Convert a grammar syntax to Disjunctive Normal Form AST
dnf :: (?renv :: RegionEnv) => SYN.Region -> Either SynToAstError RegionSum
dnf (SYN.RegionConst rconst) = pure . Sum $ [Product [rconst]]
-- Distributive law
dnf (SYN.And r1 r2) = do
r1' <- dnf r1
r2' <- dnf r2
return $ Sum $ unSum r1' >>= (\(Product ps1) ->
unSum r2' >>= (\(Product ps2) ->
return $ Product $ ps1 ++ ps2))
-- Coalesce sums
dnf (SYN.Or r1 r2) = do
r1' <- dnf r1
r2' <- dnf r2
return $ Sum $ unSum r1' ++ unSum r2'
-- Region conversion
dnf (SYN.Var v) =
case lookup v ?renv of
Nothing -> Left (RegionNotInScope v)
Just rs -> return rs
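-- An illustrative example of the distributive law above (r1, r2, r3 stand for
-- arbitrary region constants; ?renv may be empty since no variables occur):
--
--   dnf (SYN.And (SYN.Or (SYN.RegionConst r1) (SYN.RegionConst r2)) (SYN.RegionConst r3))
--     == Right (Sum [Product [r1, r3], Product [r2, r3]])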
-- *** Other Helpers
checkOffsetsAgainstSpec :: [(Variable, Multiplicity [[Int]])]
-> [(Variable, Specification)]
-> Bool
checkOffsetsAgainstSpec offsetMaps specMaps =
variablesConsistent && all specConsistent specToVecList
where
variablesConsistent =
let vs1 = sort . fmap fst $ offsetMaps
vs2 = sort . fmap fst $ specMaps
in vs1 == vs2
specConsistent spec =
case spec of
(spec', Once (V.VL vs)) -> spec' `C.consistent` (Once . toUNF) vs == C.Consistent
(spec', Mult (V.VL vs)) -> spec' `C.consistent` (Mult . toUNF) vs == C.Consistent
toUNF :: [ V.Vec n Int64 ] -> UnionNF n Offsets
toUNF = joins1 . map (return . fmap intToSubscript)
-- This function generates the special offsets subspace, subscript,
-- that either had one element or is the whole set.
intToSubscript :: Int64 -> Offsets
intToSubscript i
| fromIntegral i == absoluteRep = SetOfIntegers
| otherwise = Offsets . S.singleton $ i
-- Convert list of list of indices into vectors and wrap them around
-- existential so that we don't have to prove they are all of the same
-- size.
specToVecList :: [ (Specification, Multiplicity (V.VecList Int64)) ]
specToVecList = map (second (fmap V.fromLists)) specToIxs
specToIxs :: [ (Specification, Multiplicity [ [ Int64 ] ]) ]
specToIxs = pairWithFst specMaps (map (second toInt64) offsetMaps)
toInt64 :: Multiplicity [ [ Int ] ] -> Multiplicity [ [ Int64 ] ]
toInt64 = fmap (map (map fromIntegral))
    -- Given two association lists, for each key in the first list produce the
    -- pairs (val, val') where val is that key's value from the first list and
    -- val' is each value stored under the same key in the second list.
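    -- For example (hypothetical values):
    --   pairWithFst [(1,'a'),(2,'b')] [(1,"x"),(1,"y"),(2,"z")]
    --     == [('a',"x"),('a',"y"),('b',"z")]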
pairWithFst :: Eq a => [ (a, b) ] -> [ (a, c) ] -> [ (b, c) ]
pairWithFst [] _ = []
pairWithFst ((key, val):xs) ys =
map ((val,) . snd) (filter ((key ==) . fst) ys) ++ pairWithFst xs ys
-- Local variables:
-- mode: haskell
-- haskell-program-name: "cabal repl"
-- End:
|
dorchard/camfort
|
src/Camfort/Specification/Stencils/CheckBackend.hs
|
apache-2.0
| 5,966 | 0 | 15 | 1,379 | 1,534 | 818 | 716 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# OPTIONS_GHC -fno-warn-partial-type-signatures #-}
module BasicSpec where
import LoadTestCallbacks()
import Test.Hspec
import Test.Hspec.QuickCheck
import Test.QuickCheck.Monadic
import Test.QuickCheck.Property (rejected)
import Bio.Motions.Types
import Bio.Motions.Common
import Bio.Motions.Representation.Class
import Bio.Motions.Representation.Chain.Internal
import Bio.Motions.Representation.Chain.Slow
import Bio.Motions.Callback.Class
import Bio.Motions.Callback.StandardScore
import Bio.Motions.Callback.GyrationRadius
import Bio.Motions.Callback.Parser.TH
import Bio.Motions.Representation.Dump
import Bio.Motions.Utils.Random
import Control.Monad
import Control.Monad.Trans
import Control.Lens
import Data.Maybe
import Data.MonoTraversable
import Data.Proxy
import Linear
shouldAlmostBe :: (Fractional a, Ord a, Show a) => a -> a -> Expectation
x `shouldAlmostBe` y = abs (x - y) `shouldSatisfy` (< 1e-7)
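-- For instance, @1.0 `shouldAlmostBe` (1.0 + 1e-9)@ passes, while a difference
-- of 1e-3 exceeds the @1e-7@ tolerance above and fails.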
instance MonadRandom m => MonadRandom (PropertyM m) where
type Random (PropertyM m) = Random m
getRandom = lift getRandom
getRandomR = lift . getRandomR
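-- | Shared suite run against each representation: loads the fixed 'dump'
-- below, then exercises redumping, inspection, callbacks and move generation.
-- 'maxMoveQd' is the largest allowed squared move length for the
-- representation under test.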
testRepr :: _ => proxy repr -> Int -> Spec
testRepr (_ :: _ repr) maxMoveQd = before (loadDump dump freezePredicate :: IO repr) $ do
context "when redumping" $
beforeWith makeDump testRedump
context "when inspecting the data"
testInspect
context "when computing callbacks"
testCallbacks
context "when generating a move"
testGenerateMove
beforeWith (performMove beadMove) $
context "after making a bead move" $ do
testAfterBeadMove
beforeWith (performMove binderMove) $
context "after making a binder move"
testAfterBinderMove
where
beads = sum . map length $ dumpIndexedChains dump
binders = length $ dumpBinders dump
freezePredicate b = b ^. beadChain == 0
dump = Dump
{ dumpBinders =
[ BinderInfo (V3 0 1 2) bi0
, BinderInfo (V3 0 1 3) bi0
, BinderInfo (V3 5 5 5) bi1
]
, dumpChains =
[ [ DumpBeadInfo (V3 0 1 1) ev0
, DumpBeadInfo (V3 5 6 6) ev1
, DumpBeadInfo (V3 5 5 6) ev0
]
, [ DumpBeadInfo (V3 0 0 2) ev0
, DumpBeadInfo (V3 5 4 5) ev1
]
, [ DumpBeadInfo (V3 7 7 7) ev0
, DumpBeadInfo (V3 7 8 8) ev0
]
]
}
[bi0, bi1] = map BinderType [0, 1]
(ev0, ev1) = ([1, 0], [0, 1000])
complexFunctionResult = 45117.35291086203
beadMove = Move (V3 5 6 6) (V3 0 0 (-1))
binderMove = Move (V3 0 1 2) (V3 1 0 0)
updatedChain = [ BeadInfo (V3 0 1 1) ev0 0 0 0
, BeadInfo (V3 5 6 5) ev1 1 0 1
, BeadInfo (V3 5 5 6) ev0 2 0 2
]
updatedChains = updatedChain : tail (dumpIndexedChains dump)
updatedBinders = [ BinderInfo (V3 1 1 2) bi0
, BinderInfo (V3 0 1 3) bi0
, BinderInfo (V3 5 5 5) bi1
]
testRedump :: SpecWith Dump
testRedump = do
it "yields the same chains" $ \dump' ->
dumpChains dump' `shouldBe` dumpChains dump
it "yields the same binders" $ \dump' ->
dumpBinders dump' `shouldMatchList` dumpBinders dump
testInspect :: SpecWith repr
testInspect = do
it "yields the same number of chains" $
getNumberOfChains >=> (`shouldBe` length (dumpChains dump))
it "yields the same binders" $ \repr -> do
binders <- getBinders repr (pure . otoList)
binders `shouldBe` dumpBinders dump
it "yields the same beads" $ \repr -> do
beads <- forM [0..length (dumpChains dump) - 1] $
\idx -> getChain repr idx (pure . otoList)
beads `shouldBe` dumpIndexedChains dump
context "when using getAtomAt" $ do
it "returns binders" $ \repr ->
forM_ (dumpBinders dump) $ \binder -> do
atom <- getAtomAt (binder ^. position) repr
atom `shouldBe` Just (asAtom binder)
it "returns beads" $ \repr ->
forM_ (concat $ dumpIndexedChains dump) $ \bead -> do
atom <- getAtomAt (bead ^. position) repr
atom `shouldBe` Just (asAtom bead)
it "returns Nothing" $ \repr -> do
atom <- getAtomAt (V3 0 0 0) repr
atom `shouldBe` Nothing
testCallbacks :: SpecWith repr
testCallbacks = do
it "has the correct score" $ \repr -> do
score :: StandardScore <- runCallback repr
score `shouldBe` 1002
it "has the correct score after a bead move" $ \repr -> do
score :: StandardScore <- updateCallback repr 1002 beadMove
score `shouldBe` 2002
it "has the correct score after a binder move" $ \repr -> do
score :: StandardScore <- updateCallback repr 1002 binderMove
score `shouldBe` 1000
it "has the correct gyration radii" $ \repr -> do
GyrationRadius [c1, c2, c3] <- runCallback repr
c1 `shouldAlmostBe` 5.92809748
c2 `shouldAlmostBe` 7.07106781
c3 `shouldAlmostBe` 1.41421356
it "has the same gyration radii afer a binder move" $ \repr -> do
oldRadii :: GyrationRadius <- runCallback repr
newRadii :: GyrationRadius <- updateCallback repr oldRadii $ Move (V3 0 1 2) (V3 1 0 0)
oldRadii `shouldBe` newRadii
it "has the correct gyradion radii afer a bead move" $ \repr -> do
oldRadii :: GyrationRadius <- runCallback repr
GyrationRadius [c1, c2, c3] <- updateCallback repr oldRadii $ Move (V3 0 1 1) (V3 0 0 (-1))
c1 `shouldAlmostBe` 6.34952763
c2 `shouldAlmostBe` 7.07106781
c3 `shouldAlmostBe` 1.41421356
context "when computing the template haskell callbacks" $ do
it "has the correct sum42-beads" $ \repr -> do
res :: THCallback "sum42-beads" <- runCallback repr
res `shouldBe` THCallback (42 * beads)
it "has the correct prod2-all" $ \repr -> do
res :: THCallback "prod2-all" <- runCallback repr
res `shouldBe` THCallback (2 ^ (beads + binders))
it "has the correct list42-binders" $ \repr -> do
res :: THCallback "list42-binders" <- runCallback repr
res `shouldBe` THCallback (replicate binders 42)
it "has the correct prod-binders-beads" $ \repr -> do
res :: THCallback "prod-binders-beads" <- runCallback repr
res `shouldBe` THCallback (binders * beads)
it "has the correct list-11" $ \repr -> do
res :: THCallback "list-11" <- runCallback repr
res `shouldBe` THCallback [sqrt 2, 1]
it "has the correct sum-11" $ \repr -> do
res :: THCallback "sum-11" <- runCallback repr
res `shouldBe` THCallback (1 + sqrt 2)
it "has the correct pairs-dist<2" $ \repr -> do
res :: THCallback "pairs-dist<2" <- runCallback repr
res `shouldBe` THCallback 22
it "has the correct complex-function" $ \repr -> do
res :: THCallback "complex-function" <- runCallback repr
res `shouldBe` complexFunctionResult
it "has the correct count-lamins" $ \repr -> do
res :: THCallback "count-lamins" <- runCallback repr
res `shouldBe` THCallback 2
it "has the correct score" $ \repr -> do
res :: THCallback "score" <- runCallback repr
res `shouldBe` THCallback 1002
testAfterBeadMove :: SpecWith repr
testAfterBeadMove = do
it "reports the old location to be empty" $ \repr -> do
matom <- getAtomAt (V3 5 6 6) repr
matom `shouldBe` Nothing
it "reports the new location to contain the bead" $ \repr -> do
matom <- getAtomAt (V3 5 6 5) repr
matom `shouldBe` Just (asAtom $ BeadInfo (V3 5 6 5) ev1 1 0 1)
it "reports the updated chain" $ \repr -> do
chain <- getChain repr 0 $ pure . otoList
chain `shouldBe` updatedChain
it "reports the binders to be unchanged" $ \repr -> do
binders <- getBinders repr $ pure . otoList
binders `shouldMatchList` dumpBinders dump
context "when dumping" $ beforeWith makeDump $ do
it "reports the updated chain" $ \dump' ->
dumpIndexedChains dump' `shouldBe` updatedChains
it "reports the binders to be unchanged" $ \dump' ->
dumpBinders dump' `shouldMatchList` dumpBinders dump
context "when updating callbacks" $ do
it "has the correct sum-11" $ \repr -> do
res :: THCallback "sum-11" <- updateCallback repr (THCallback (1 + sqrt 2)) beadMove
corrRes <- runCallback repr
res `shouldAlmostBe` corrRes
it "has the correct pairs-dist<2" $ \repr -> do
res :: THCallback "pairs-dist<2" <- updateCallback repr (THCallback 22) beadMove
corrRes <- runCallback repr
res `shouldBe` corrRes
it "has the correct complex-function" $ \repr -> do
res <- updateCallback repr complexFunctionResult beadMove
corrRes <- runCallback repr
res `shouldAlmostBe` corrRes
it "has the correct count-lamins" $ \repr -> do
res :: THCallback "count-lamins" <- updateCallback repr (THCallback 2) beadMove
res `shouldBe` THCallback 2
it "has the correct score" $ \repr -> do
res :: THCallback "score" <- updateCallback repr (THCallback 1002) beadMove
res `shouldBe` THCallback 2002
context "when generating a move"
testGenerateMove
testAfterBinderMove :: SpecWith repr
testAfterBinderMove = do
it "reports the old location to be empty" $ \repr -> do
matom <- getAtomAt (V3 0 1 2) repr
matom `shouldBe` Nothing
it "reports the new location to contain the binder" $ \repr -> do
matom <- getAtomAt (V3 1 1 2) repr
matom `shouldBe` Just (asAtom $ BinderInfo (V3 1 1 2) bi0)
it "reports the updated binders" $ \repr -> do
binders <- getBinders repr $ pure . otoList
binders `shouldMatchList` updatedBinders
context "when dumping" $ beforeWith makeDump $ do
it "reports the beads to be unchanged" $ \dump' ->
dumpIndexedChains dump' `shouldBe` updatedChains
it "reports the updated binders" $ \dump' ->
dumpBinders dump' `shouldMatchList` updatedBinders
context "when generating a move"
testGenerateMove
testGenerateMove :: SpecWith repr
testGenerateMove = modifyMaxSuccess (const 1000) $ do
it "moves an existing atoms" $ \repr -> monadicIO $ do
MoveFromTo from _ <- genMove repr
atom <- getAtomAt from repr
assert $ isJust atom
it "moves an atom into an unoccupied position" $ \repr -> monadicIO $ do
MoveFromTo _ to <- genMove repr
atom <- getAtomAt to repr
assert $ isNothing atom
it "performs only moves with the correct length" $ \repr -> monadicIO $ do
Move _ diff <- genMove repr
assert $ quadrance diff `elem` ([1..maxMoveQd] :: [_])
context "when generating many moves" $
beforeWith prepareMoves $ do
it "fails reasonably rarely" $ \(_, moves) ->
length moves `shouldSatisfy` (> 100)
beforeWith getAtoms $ do
it "moves binders sufficiently often" $ \atoms ->
length [x | Just (Binder x) <- atoms] `shouldSatisfy` (> 50)
it "moves beads sufficiently often" $ \atoms ->
length [x | Just (Bead x) <- atoms] `shouldSatisfy` (> 50)
it "does not move any lamins or frozen beads" $ \repr -> monadicIO $ do
MoveFromTo from _ <- genMove repr
Just atom <- getAtomAt from repr
case atom ^. located of
BinderSig binder -> assert $ binder ^. binderType /= laminType
BeadSig bead -> assert . not . freezePredicate $ bead ^. beadSignature
where
prepareMoves repr = (repr,) . catMaybes <$> replicateM 1000 (runWithRandom $ generateMove repr)
getAtoms (repr, moves) = forM moves $ flip getAtomAt repr . moveFrom
genMove repr = lift (runWithRandom $ generateMove repr) >>= maybe (stop rejected) pure
spec :: Spec
spec = do
context "the pure chain representation" $
testRepr (Proxy :: Proxy PureChainRepresentation) 2
context "the IO chain representation" $
testRepr (Proxy :: Proxy IOChainRepresentation) 2
context "the Slow chain representation" $
testRepr (Proxy :: Proxy (SlowChainRepresentation 4 4)) 4
|
Motions/motions
|
test/BasicSpec.hs
|
apache-2.0
| 13,670 | 0 | 25 | 4,485 | 3,798 | 1,870 | 1,928 | 267 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TypeFamilies #-}
module Web.Twitter.Conduit.Request (
HasParam,
APIRequest (..),
) where
import Data.Aeson
import GHC.TypeLits (Symbol)
import Network.HTTP.Client.MultipartFormData
import qualified Network.HTTP.Types as HT
import Web.Twitter.Conduit.Request.Internal
-- $setup
-- >>> :set -XOverloadedStrings -XDataKinds -XTypeOperators
-- >>> import Control.Lens
-- >>> import Web.Twitter.Conduit.Parameters
-- >>> type SampleId = Integer
-- >>> type SampleApi = '["count" ':= Integer, "max_id" ':= Integer]
-- >>> let sampleApiRequest :: APIRequest SampleApi [SampleId]; sampleApiRequest = APIRequest "GET" "https://api.twitter.com/sample/api.json" []
-- | API request. You should use specific builder functions instead of building this directly.
--
-- For example, if there were a @SampleApi@ type and a builder function named @sampleApiRequest@.
--
-- @
-- type SampleId = 'Integer'
-- sampleApiRequest :: 'APIRequest' SampleApi [SampleId]
-- sampleApiRequest = 'APIRequest' \"GET\" \"https:\/\/api.twitter.com\/sample\/api.json\" []
-- type SampleApi = '[ "count" ':= Integer
-- , "max_id" ':= Integer
-- ]
--
-- @
--
-- We can obtain request params from @'APIRequest' SampleApi [SampleId]@ :
--
-- >>> sampleApiRequest ^. params
-- []
--
-- The first type parameter of the APIRequest represents the allowed parameters for the APIRequest.
-- For example, @sampleApiRequest@ has 2 @Integer@ parameters, that is "count" and "max_id".
-- You can update those parameters by label lenses (@#count@ and @#max_id@ respectively)
--
-- >>> (sampleApiRequest & #count ?~ 100 & #max_id ?~ 1234567890) ^. params
-- [("max_id",PVInteger {unPVInteger = 1234567890}),("count",PVInteger {unPVInteger = 100})]
-- >>> (sampleApiRequest & #count ?~ 100 & #max_id ?~ 1234567890 & #count .~ Nothing) ^. params
-- [("max_id",PVInteger {unPVInteger = 1234567890})]
data APIRequest (supports :: [Param Symbol *]) responseType
= APIRequest
{ _method :: HT.Method
, _url :: String
, _params :: APIQuery
}
| APIRequestMultipart
{ _method :: HT.Method
, _url :: String
, _params :: APIQuery
, _part :: [Part]
}
| APIRequestJSON
{ _method :: HT.Method
, _url :: String
, _params :: APIQuery
, _body :: Value
}
instance Parameters (APIRequest supports responseType) where
type SupportParameters (APIRequest supports responseType) = supports
params f (APIRequest m u pa) = APIRequest m u <$> f pa
params f (APIRequestMultipart m u pa prt) =
(\p -> APIRequestMultipart m u p prt) <$> f pa
params f (APIRequestJSON m u pa body) = (\p -> APIRequestJSON m u p body) <$> f pa
instance Show (APIRequest apiName responseType) where
show (APIRequest m u p) = "APIRequest " ++ show m ++ " " ++ show u ++ " " ++ show (makeSimpleQuery p)
show (APIRequestMultipart m u p _) = "APIRequestMultipart " ++ show m ++ " " ++ show u ++ " " ++ show (makeSimpleQuery p)
show (APIRequestJSON m u p _) = "APIRequestJSON " ++ show m ++ " " ++ show u ++ " " ++ show (makeSimpleQuery p)
|
himura/twitter-conduit
|
src/Web/Twitter/Conduit/Request.hs
|
bsd-2-clause
| 3,255 | 0 | 11 | 675 | 544 | 313 | 231 | 37 | 0 |
{-# LANGUAGE FlexibleInstances #-}
module Util.UnixDiff where
import Data.Algorithm.Diff
import qualified Data.Algorithm.DiffOutput as O
import Language.Clojure.AST
data GroupDiffAction = OMod LineRange LineRange
| OIns LineRange Int
| ODel LineRange Int
deriving (Show)
data DiffAction = Copy (Int, Int)
| Ins Int
| Del Int
deriving Eq
preprocess :: String -> String -> [DiffAction]
preprocess s1 s2 = map processDiff (diff s1 s2)
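-- Illustrative example (the exact ordering comes from Data.Algorithm.Diff):
-- for a one-line change such as @preprocess "a\nb" "a\nc"@ the unchanged first
-- lines are paired and line 2 is marked as deleted and inserted, roughly
-- @[Copy (1,1), Del 2, Ins 2]@.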
preprocessGrouped :: String -> String -> [GroupDiffAction]
preprocessGrouped s1 s2 = map processGroupedDiff (groupedDiff s1 s2)
diff :: String -> String -> [Diff (String, Int)]
diff s1 s2 = getDiffBy eqIgnoringLines (withLineN s1) (withLineN s2)
groupedDiff :: String -> String -> [O.DiffOperation O.LineRange]
groupedDiff f1 f2 = O.diffToLineRanges $ getGroupedDiff (lines f1) (lines f2)
withLineN :: String -> [(String, Int)]
withLineN s = zip (lines s) [1..]
eqIgnoringLines s1 s2 = fst s1 == fst s2
ppPDiff :: [DiffAction] -> String
ppPDiff = foldl (\d a -> d ++ "\n" ++ show a) ""
processDiff :: Diff (String, Int) -> DiffAction
processDiff (Both (_, i1) (_, i2)) = (Copy (i1, i2))
processDiff (First (_, i)) = (Del i)
processDiff (Second (_, i)) = (Ins i)
processGroupedDiff :: O.DiffOperation O.LineRange -> GroupDiffAction
processGroupedDiff (O.Change srcR dstR) = OMod (extractLineRange srcR) (extractLineRange dstR)
processGroupedDiff (O.Addition lr line) = OIns (extractLineRange lr) line
processGroupedDiff (O.Deletion lr line) = ODel (extractLineRange lr) line
extractLineRange :: O.LineRange -> LineRange
extractLineRange lr = Range start end
where
(start, end) = O.lrNumbers lr
instance Show DiffAction where
show (Copy (i1, i2)) = show i1 ++ " % " ++ show i2
show (Ins i) = show i ++ "+"
show (Del i) = show i ++ "-"
|
nazrhom/vcs-clojure
|
src/Util/UnixDiff.hs
|
bsd-3-clause
| 1,872 | 0 | 9 | 379 | 725 | 382 | 343 | 41 | 1 |
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
module MVC.EventHandler where
import Control.Lens
import Control.Monad
import Control.Monad.Reader (MonadReader (..))
import Control.Monad.State (MonadState (..))
import Control.Monad.Trans.Reader (ReaderT)
import qualified Control.Monad.Trans.Reader as R
import Control.Monad.Trans.State.Strict (State, StateT)
import qualified Control.Monad.Trans.State.Strict as S
import Pipes
import MVC.Event (EitherSomeEvent,Event,Msg(..),SomeEvent(..),toEitherSomeEvent)
-----------------------------------------------------------------------------
type ModelP a b s = Pipe a b (State s)
newtype EventHandlerP a b s r =
EventHandlerP (StateT (EventHandler a b s) (ModelP a b s) r)
deriving (Functor,Applicative,Monad,MonadState (EventHandler a b s))
newtype SomeEventHandlerP a b s r =
SomeEventHandlerP (StateT (SomeEventHandler a b s) (ModelP a b s) r)
deriving (Functor,Applicative,Monad,MonadState (SomeEventHandler a b s))
newtype HandleEvent v r =
HandleEvent (ReaderT (Int,AppStateAPI v) (StateT v (State (AppState v))) r)
deriving (Functor,Applicative,Monad,MonadReader (Int,AppStateAPI v))
type HandleEventResult a b v = HandleEvent v [Either a b]
class HandlesEvent v where
type AppState v :: *
type EventIn v :: *
type EventOut v :: *
data AppStateAPI v :: *
handleEvent :: v -> EventIn v -> HandleEventResult (EventIn v) (EventOut v) v
data SomeEventHandler :: * -> * -> * -> * where
SomeEventHandler :: (HandlesEvent v, AppState v ~ s, EventIn v ~ a, EventOut v ~ b) =>
{ _ehId :: Int
, _ehAPI :: AppStateAPI v
, _ehEventIn :: a' -> Maybe a
, _ehEventOut :: Either a b -> Either a' b'
, _ehEventHandler :: v
} -> SomeEventHandler a' b' s
ehId :: Lens' (SomeEventHandler a b v) Int
ehId f (SomeEventHandler i a ein eout s) = (\i' -> SomeEventHandler i' a ein eout s) <$> f i
newtype EventHandler a b s =
EventHandler { _eventHandlers :: [SomeEventHandler a b s] }
deriving (Monoid)
mkEventHandler :: SomeEventHandler a b s -> EventHandler a b s
mkEventHandler = EventHandler . (:[])
eventHandlers :: Lens' (EventHandler a b s) [SomeEventHandler a b s]
eventHandlers f (EventHandler h) = (\h' -> EventHandler h') <$> f h
initialiseEventHandler :: EventHandler a b s -> EventHandler a b s
initialiseEventHandler = over eventHandlers (zipWith (set ehId) [1..])
-----------------------------------------------------------------------------
runRecursiveEventHandler :: EventHandler a b s -> ModelP a b s ()
runRecursiveEventHandler = flip runEventHandlerP recursiveEventHandlerP
runEventHandlerP :: EventHandler a b s -> EventHandlerP a b s r -> ModelP a b s r
runEventHandlerP eventhandler (EventHandlerP eventHandlerP) = S.evalStateT eventHandlerP eventhandler
recursiveEventHandlerP :: EventHandlerP a b s ()
recursiveEventHandlerP = forever $ EventHandlerP (lift await) >>= go
where
go e = do
r <- forEventHandlers $ do
eventHandler <- getEventHandlerP
appState <- getAppStateP
let (r,appSvc',appState') = runEventHandler eventHandler appState e
putEventHandlerP appSvc'
putAppStateP appState'
return r
mapM_ (either go releaseP) r
runEventHandler :: SomeEventHandler a b s -> s -> a -> ([Either a b],SomeEventHandler a b s,s)
runEventHandler eventHandler@SomeEventHandler{..} appstate event =
maybe ignore process (_ehEventIn event)
where
ignore = ([],eventHandler,appstate)
process event' =
let
(HandleEvent handleEvent') = handleEvent _ehEventHandler event'
((events,eventHandler'),appstate') = S.runState (S.runStateT (R.runReaderT handleEvent' (_ehId,_ehAPI)) _ehEventHandler) appstate
in
(map _ehEventOut events,(SomeEventHandler _ehId _ehAPI _ehEventIn _ehEventOut eventHandler'),appstate')
-----------------------------------------------------------------------------
forEventHandlers :: (Monoid r) => SomeEventHandlerP a b s r -> EventHandlerP a b s r
forEventHandlers (SomeEventHandlerP handler) = EventHandlerP $ zoom (eventHandlers . traverse) handler
releaseP :: b -> EventHandlerP a b s ()
releaseP = EventHandlerP . lift . yield
getEventHandlerP :: SomeEventHandlerP a b s (SomeEventHandler a b s)
getEventHandlerP = SomeEventHandlerP S.get
putEventHandlerP :: SomeEventHandler a b s -> SomeEventHandlerP a b s ()
putEventHandlerP = SomeEventHandlerP . S.put
getAppStateP :: SomeEventHandlerP a b s s
getAppStateP = SomeEventHandlerP $ lift $ lift S.get
putAppStateP :: s -> SomeEventHandlerP a b s ()
putAppStateP = SomeEventHandlerP . lift . lift . S.put
-----------------------------------------------------------------------------
getEventHandlerId :: HandleEvent a Int
getEventHandlerId = HandleEvent (R.asks fst)
getAppStateAPI :: HandleEvent a (AppStateAPI a)
getAppStateAPI = HandleEvent (R.asks snd)
getEventHandler :: HandlesEvent a => HandleEvent a a
getEventHandler = HandleEvent $ lift $ S.get
putEventHandler :: HandlesEvent a => a -> HandleEvent a ()
putEventHandler = HandleEvent . lift . S.put
getAppState :: HandleEvent a (AppState a)
getAppState = HandleEvent $ lift $ lift $ S.get
putAppState :: AppState a -> HandleEvent a ()
putAppState = HandleEvent . lift . lift . S.put
getsAppState :: (AppState a -> r) -> HandleEvent a r
getsAppState f = liftM f getAppState
modifyAppState :: (AppState a -> AppState a) -> HandleEvent a ()
modifyAppState = HandleEvent . lift . lift . S.modify
modifyAppState' :: (AppState a -> (r,AppState a)) -> HandleEvent a r
modifyAppState' f = do
s <- getAppState
let (r,s') = f s
putAppState s'
return r
modifyAppState'' :: (AppState a -> Maybe (r,AppState a)) -> HandleEvent a (Maybe r)
modifyAppState'' f = do
s <- getAppState
maybe (return Nothing) (\(r,s') -> putAppState s' >> return (Just r)) (f s)
withAppState :: (AppState a -> HandleEvent a r) -> HandleEvent a r
withAppState f = getAppState >>= f
noEvents :: HandleEvent a [b]
noEvents = return []
propagate :: b -> Either b c
propagate = Left
propagate' :: Event b => b -> EitherSomeEvent
propagate' = propagate . SomeEvent
release :: c -> Either b c
release = Right
release' :: Event c => c -> EitherSomeEvent
release' = release . SomeEvent
-----------------------------------------------------------------------------
data LogEventHandler s = LogEventHandler
instance HandlesEvent (LogEventHandler s) where
type AppState (LogEventHandler s) = s
type EventIn (LogEventHandler s) = String
type EventOut (LogEventHandler s) = Msg
data AppStateAPI (LogEventHandler s) = LogEventHandlerAPI
handleEvent _ = return . (:[]) . release . Msg
newLogEventHandler :: (a -> Maybe String) -> (Either String Msg -> Either a b) -> EventHandler a b s
newLogEventHandler ein eout = EventHandler [SomeEventHandler 0 LogEventHandlerAPI ein eout LogEventHandler]
newLogEventHandler' :: EventHandler SomeEvent SomeEvent s
newLogEventHandler' = newLogEventHandler (Just . show) toEitherSomeEvent
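-- Usage sketch (illustrative): a handler built with 'newLogEventHandler'' can
-- be initialised and run as a model pipe for any application state @s@:
--
--   logModel :: ModelP SomeEvent SomeEvent s ()
--   logModel = runRecursiveEventHandler (initialiseEventHandler newLogEventHandler')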
|
cmahon/mvc-service
|
library/MVC/EventHandler.hs
|
bsd-3-clause
| 7,244 | 0 | 16 | 1,325 | 2,386 | 1,251 | 1,135 | -1 | -1 |
-----------------------------------------------------------------------------
-- |
-- Module : Main
-- Description : Copies all files with the specified filename beginning
-- from the first directory to the second directory
-- Copyright : (c) Artem Tsushko, 2015
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : POSIX
--
-- Copies all files with the specified filename beginning
-- from the first directory to the second directory
-- /Usage/
-- Provide the input directory and the output directory as command line args.
-- Then enter a filename beginning pattern.
-----------------------------------------------------------------------------
module Main (
main
) where
import System.Directory
import System.Environment
import System.FilePath.Posix (combine)
import System.IO
import System.IO.Error
import System.Posix
import Control.Monad
import Data.List
main :: IO ()
main = copyMatchingFiles `catchIOError` handleError
{- | Asks user for filename beginning and then copies all files matching this
filename pattern from the first directory to the second one. Directory
paths are provided as command line arguments
-}
copyMatchingFiles :: IO ()
copyMatchingFiles = do
(inputDir:outputDir:_) <- mapM canonicalizePath =<< getArgs
if inputDir /= outputDir
then do
beginning <- putStr "enter file beginning: " >> hFlush stdout >> getLine
filesToCopy <- filterM (doesFileExist . combine inputDir)
. filter (beginning `isPrefixOf`)
=<< getDirectoryContents inputDir
sizes <- forM filesToCopy (\filename -> do
let inputFile = combine inputDir filename
outputFile = combine outputDir filename
copyFile inputFile outputFile
getFileSize inputFile )
putStrLn $ "Copied " ++ show (sum sizes) ++ " bytes."
else
putStrLn "Both directories are the same"
-- | Takes a path to a file and returns the file's size in bytes
getFileSize :: String -> IO FileOffset
getFileSize path = do
stat <- getFileStatus path
return (fileSize stat)
{- | Handles errors in such cases:
* User didn't pass at least 2 command line arguments
* The specified paths don't point to existing directories
-}
handleError :: IOError -> IO ()
handleError e
    | isUserError e = do -- triggered if user didn't pass at least 2 arguments
progName <- getProgName
putStrLn $ "Usage: " ++ progName ++ " " ++ "<inputDirPath> <outputDirPath>"
| isDoesNotExistError e = case ioeGetFileName e of
Just path -> putStrLn $ "Directory does not exist at: " ++ path
Nothing -> putStrLn "File or directory does not exist"
| otherwise = ioError e
|
artemtsushko/matching-files-copier
|
src/Main.hs
|
bsd-3-clause
| 2,799 | 0 | 19 | 648 | 455 | 233 | 222 | 41 | 2 |
module MultiVersion where
import Data.List
import qualified Data.Map as Map
import Data.Version
import Distribution.Simple.PackageIndex (allPackages)
import Distribution.Package (PackageIdentifier(..), PackageName(..))
import Distribution.InstalledPackageInfo (InstalledPackageInfo(..))
import Config
import InstalledPackages
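-- | Print every package that is installed in more than one version, one
-- @name-version@ line per version, leaving out the newest version of each
-- package.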
main :: Config -> [String] -> IO ()
main config _args = do
pkgs <- getAllPackages config
putStr
. unlines
. concatMap (\(p,vs) -> map (\v -> p ++ "-" ++ showVersion v) vs)
. Map.toList
. Map.filter (not . null)
. fmap removeMaximum
. Map.fromListWith (++)
. map (\(x,y) -> (x,[y]))
. map toPkgTuple
$ allPackages pkgs
toPkgTuple :: InstalledPackageInfo -> (String, Version)
toPkgTuple pkg = (name, currentVersion)
where
pkgId = sourcePackageId pkg
name = unPackageName (pkgName pkgId)
currentVersion = pkgVersion pkgId
removeMaximum :: [Version] -> [Version]
removeMaximum xs = delete (maximum xs) xs
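-- For example, a package installed at versions 1.0, 1.2 and 2.0 keeps only the
-- two older ones:
--   removeMaximum [Version [1,0] [], Version [2,0] [], Version [1,2] []]
--     == [Version [1,0] [], Version [1,2] []]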
|
glguy/GhcPkgUtils
|
MultiVersion.hs
|
bsd-3-clause
| 990 | 0 | 22 | 184 | 346 | 191 | 155 | 29 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Spec.Helpers where
import Base
import Models (true, false)
import Util (toSqlKey)
fromBool :: Bool -> TValueId
fromBool True = true
fromBool _ = false
(==.) :: Int64 -> Bool -> Formula PropertyId
(==.) p v = Atom (toSqlKey p) v
(&&.) :: Formula a -> Formula a -> Formula a
(&&.) f (And sf) = And (f:sf)
(&&.) (And sf) f = And (f:sf)
(&&.) f1 f2 = And [f1, f2]
(||.) :: Formula a -> Formula a -> Formula a
(||.) f (Or sf) = Or (f:sf)
(||.) (Or sf) f = Or (f:sf)
(||.) f1 f2 = Or [f1, f2]
(~.) :: (Int64, Int64) -> Bool -> Trait
(~.) (sid,pid) tf = Trait (toSqlKey sid) (toSqlKey pid) (fromBool tf) "" False
(=>.) :: Formula a -> Formula a -> Implication a
(=>.) a c = Implication a c ""
(|=) :: SpaceId -> PropertyId -> Trait
(|=) s p = Trait s p true "" False
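-- Illustrative usage (hypothetical ids): the operators are meant to read like
-- small formulas inside specs, e.g.
--
--   (1 ==. True) &&. (2 ==. False)   -- conjunction of two atoms
--   (3, 4) ~. True                   -- a Trait: property 4 holds in space 3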
|
jamesdabbs/pi-base-2
|
test/Spec/Helpers.hs
|
bsd-3-clause
| 813 | 0 | 7 | 174 | 446 | 243 | 203 | 24 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Package
-- Copyright : Isaac Jones 2003-2004
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- Defines a package identifier along with a parser and pretty printer for it.
-- 'PackageIdentifier's consist of a name and an exact version. It also defines
-- a 'Dependency' data type. A dependency is a package name and a version
-- range, like @\"foo >= 1.2 && < 2\"@.
{- All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Isaac Jones nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
module Distribution.Package (
-- * Package ids
PackageName(..),
PackageIdentifier(..),
PackageId,
-- * Package dependencies
Dependency(..),
thisPackageVersion,
notThisPackageVersion,
-- * Package classes
Package(..), packageName, packageVersion,
PackageFixedDeps(..),
) where
import Distribution.Version
( Version(..), VersionRange, anyVersion, thisVersion, notThisVersion )
import Distribution.Text (Text(..))
import qualified Distribution.Compat.ReadP as Parse
import Distribution.Compat.ReadP ((<++))
import qualified Text.PrettyPrint as Disp
import Text.PrettyPrint ((<>), (<+>))
import qualified Data.Char as Char ( isDigit, isAlphaNum )
import Data.List ( intersperse )
newtype PackageName = PackageName String
deriving (Read, Show, Eq, Ord)
instance Text PackageName where
disp (PackageName n) = Disp.text n
parse = do
ns <- Parse.sepBy1 component (Parse.char '-')
return (PackageName (concat (intersperse "-" ns)))
where
component = do
cs <- Parse.munch1 Char.isAlphaNum
if all Char.isDigit cs then Parse.pfail else return cs
-- each component must contain an alphabetic character, to avoid
-- ambiguity in identifiers like foo-1 (the 1 is the version number).
-- | Type alias so we can use the shorter name PackageId.
type PackageId = PackageIdentifier
-- | The name and version of a package.
data PackageIdentifier
= PackageIdentifier {
pkgName :: PackageName, -- ^The name of this package, eg. foo
pkgVersion :: Version -- ^the version of this package, eg 1.2
}
deriving (Read, Show, Eq, Ord)
instance Text PackageIdentifier where
disp (PackageIdentifier n v) = case v of
Version [] _ -> disp n -- if no version, don't show version.
_ -> disp n <> Disp.char '-' <> disp v
parse = do
n <- parse
v <- (Parse.char '-' >> parse) <++ return (Version [] [])
return (PackageIdentifier n v)
-- ------------------------------------------------------------
-- * Package dependencies
-- ------------------------------------------------------------
data Dependency = Dependency PackageName VersionRange
deriving (Read, Show, Eq)
instance Text Dependency where
disp (Dependency name ver) =
disp name <+> disp ver
parse = do name <- parse
Parse.skipSpaces
ver <- parse <++ return anyVersion
Parse.skipSpaces
return (Dependency name ver)
thisPackageVersion :: PackageIdentifier -> Dependency
thisPackageVersion (PackageIdentifier n v) =
Dependency n (thisVersion v)
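-- For example, @thisPackageVersion (PackageIdentifier (PackageName "foo") (Version [1,2] []))@
-- is a dependency on exactly version 1.2 of @foo@.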
notThisPackageVersion :: PackageIdentifier -> Dependency
notThisPackageVersion (PackageIdentifier n v) =
Dependency n (notThisVersion v)
-- | Class of things that can be identified by a 'PackageIdentifier'
--
-- Types in this class are all notions of a package. This allows us to have
-- different types for the different phases that packages go though, from
-- simple name\/id, package description, configured or installed packages.
--
class Package pkg where
packageId :: pkg -> PackageIdentifier
packageName :: Package pkg => pkg -> PackageName
packageName = pkgName . packageId
packageVersion :: Package pkg => pkg -> Version
packageVersion = pkgVersion . packageId
instance Package PackageIdentifier where
packageId = id
-- | Subclass of packages that have specific versioned dependencies.
--
-- So for example a not-yet-configured package has dependencies on version
-- ranges, not specific versions. A configured or an already installed package
-- depends on exact versions. Some operations or data structures (like
-- dependency graphs) only make sense on this subclass of package types.
--
class Package pkg => PackageFixedDeps pkg where
depends :: pkg -> [PackageIdentifier]
|
dcreager/cabal
|
Distribution/Package.hs
|
bsd-3-clause
| 5,898 | 0 | 14 | 1,205 | 832 | 464 | 368 | 68 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Data.IHO.S52.SVG.Renderer (renderVectorInstructions) where
import Data.Text (Text)
import Data.Monoid
import Data.Either
import Data.Map (Map)
import qualified Data.Map as Map
import Text.Blaze.Svg (Svg)
import Text.Blaze.Internal ((!))
import qualified Text.Blaze.Internal as SVG
import qualified Text.Blaze.Svg11 as SVG
import qualified Text.Blaze.Svg11.Attributes as A
import Control.Monad.RWS
import Data.IHO.S52.Types
import Data.IHO.S52.SVG.Helper
import Data.Int
data RendererConfig =
RendererConfig {
penFactor :: Integer,
lookupColour :: Char -> Text
}
renderConfig :: Map Char Text -> RendererConfig
renderConfig ctbl = RendererConfig {
penFactor = 30,
lookupColour =
\c -> if (c == '@') then "none"
else maybe (error $ "lookupColor: undefined color: " ++ show c) id $
Map.lookup c ctbl
}
type PolygonBuffer = (Vector2, [Either VectorInstruction (Int16, Int16, Int16)])
data RendererState =
RendererState {
config :: RendererConfig,
penPos :: Vector2,
penWidth :: Integer,
penColour :: Text,
inPolygon :: Bool,
fillTrans :: Float,
lineBuffer :: [VectorInstruction],
polygonBuffers :: [PolygonBuffer]
}
type RenderAction = RWS VectorInstruction Svg RendererState
renderState :: RendererConfig -> RendererState
renderState cfg = RendererState {
config = cfg,
penPos = (0,0),
penWidth = penFactor cfg,
penColour = "none",
inPolygon = False,
fillTrans = 1,
lineBuffer = mempty,
polygonBuffers = mempty
}
renderVectorInstructions :: Map Char Text -> [VectorInstruction] -> Svg
renderVectorInstructions cmap is =
let cfg = renderConfig cmap
s0 = renderState cfg
in SVG.g $ renderVectorInstructions' s0 is
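-- Usage sketch (illustrative; assumes a hypothetical colour table @ctbl@ that
-- maps the token 'A' to an SVG colour name): a short instruction list renders
-- to a single SVG group, e.g.
--
--   renderVectorInstructions ctbl
--     [SetPenColour 'A', SetPenWidth 1, PenUp (0, 0), PenDraw (100, 100)]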
renderVectorInstructions' :: RendererState -> [VectorInstruction] -> Svg
renderVectorInstructions' s0 [] =
snd $ evalRWS renderLineBuffer undefined s0
renderVectorInstructions' s0 (i:is) =
let (s1, w) = execRWS renderVectorInstruction i s0
in w `mappend` (renderVectorInstructions' s1 is)
renderVectorInstruction :: RenderAction ()
renderVectorInstruction = ask >>= evalVectorInstruction
evalVectorInstruction :: VectorInstruction -> RenderAction ()
evalVectorInstruction (SetPenColour c) = do
renderLineBuffer
setPenColour c
evalVectorInstruction (SetPenWidth w) = do
renderLineBuffer
setPenWidth w
evalVectorInstruction (SetPenTransparency t) = setFillTrans t
evalVectorInstruction i@(PenUp v) = do
addBufferInstruction i
modify (\s -> s { penPos = v } )
evalVectorInstruction i@(PenDraw v) = do
addBufferInstruction i
modify (\s -> s { penPos = v } )
evalVectorInstruction i@(Circle r) = do
st <- get
if (inPolygon st)
then addBufferInstruction i
else do renderLineBuffer
let (x, y) = penPos st
strokeWA = A.strokeWidth . SVG.toValue . toInteger . penWidth $ st
classA = A.class_ . SVG.textValue . mconcat $ ["s52 fill_none stroke_"
, penColour st]
tell $ svgCircle r x y ! classA ! strokeWA
evalVectorInstruction (PolygonMode EnterPolygonMode) = do
st <- get
if (inPolygon $ st)
then fail "eval EntePolygonMode: must be in line mode"
else createNewPolygonBuffer
evalVectorInstruction (PolygonMode SubPolygon) = createNewPolygonBuffer
evalVectorInstruction (PolygonMode PolygonDone) = do
st <- get
let v = fst . last . polygonBuffers $ st
put st { penPos = v }
evalVectorInstruction OutlinePolygon =
renderPolygonBuffers False
evalVectorInstruction FillPolygon =
renderPolygonBuffers True
evalVectorInstruction (SymbolCall sy o) = do
(x,y) <- fmap penPos get
tell $ useSymbol x y sy -- TODO: handle orientation o
createNewPolygonBuffer :: RenderAction ()
createNewPolygonBuffer = do
st <- get
let newPBuffers = (penPos st, mempty) : (polygonBuffers st)
put st { polygonBuffers = newPBuffers
, inPolygon = True
}
addBufferInstruction :: VectorInstruction -> RenderAction ()
addBufferInstruction i = do
st <- get
if (inPolygon st)
then let pbuffers = polygonBuffers st
(v0, pb0) = head pbuffers
_i = case i of
(Circle r) -> let (x, y) = penPos st in Right (r, x, y)
inst -> Left inst
newPBuffers = (v0, pb0 ++ [_i]) : (drop 1 pbuffers)
in modify (\s -> s { polygonBuffers = newPBuffers})
else let newLBuffer = (lineBuffer st) ++ [i]
in modify (\s -> s { lineBuffer = newLBuffer})
setPenColour :: Char -> RenderAction ()
setPenColour c =
modify (\st -> st { penColour = (lookupColour . config $ st) c } )
setPenWidth :: Integral i => i -> RenderAction ()
setPenWidth w =
modify (\st -> st { penWidth = toInteger w * (penFactor . config $ st) } )
setFillTrans :: (Show i, Integral i) => i -> RenderAction ()
setFillTrans t
  | ((t < 0) || (t > 4)) = fail $ "setFillTrans: 0 <= t <= 4. t=" ++ show t
  | otherwise = modify (\st -> st { fillTrans = 1.0 - 0.25 * fromIntegral t } ) -- each step removes 25% of the fill opacity
renderPathCmd :: VectorInstruction -> SVG.Path
renderPathCmd (PenUp (x, y)) = SVG.m x y
renderPathCmd (PenDraw (x, y)) = SVG.l x y
renderPathCmd _c = fail $ "undefined Path Command: " ++ show _c
-- | renders and clears the (non empty) Line Buffer
renderLineBuffer :: RenderAction ()
renderLineBuffer = do
st <- get
let lb = lineBuffer st
case lb of
[] -> return ()
_ ->
let _is = map renderPathCmd lb
pathA = A.d . SVG.mkPath $ sequence _is >> return ()
classA = A.class_ . SVG.textValue . mconcat $ ["s52 fill_none stroke_", penColour st]
strokeWA = A.strokeWidth . SVG.toValue . toInteger . penWidth $ st
in do tell $ SVG.path ! classA ! strokeWA ! pathA
put st { lineBuffer = mempty }
return ()
-- | renders and clear the Polygon Buffers
renderPolygonBuffers :: Bool -> RenderAction ()
renderPolygonBuffers fill = do
st <- get
let pBuffers = polygonBuffers st
_ <- sequence $ map (renderPolygonBuffer fill) pBuffers
let v = fst . last . polygonBuffers $ st
put st { penPos = v , polygonBuffers = mempty, inPolygon = False }
renderPolygonBuffer :: Bool -> PolygonBuffer -> RenderAction ()
renderPolygonBuffer fill (p0, xs) =
let _cs = rights xs
_is = (map renderPathCmd ((PenUp p0) : (lefts xs))) ++ [SVG.z]
pathA = A.d . SVG.mkPath $ sequence _is >> return ()
in do
st <- get
let classA = A.class_ . SVG.textValue . mconcat $
if (fill)
then ["s52 stroke_none fill_", penColour st]
else ["s52 fill_none stroke_", penColour st]
fillOA = A.fillOpacity . SVG.toValue . fillTrans $ st
strokeWA = A.strokeWidth . SVG.toValue . toInteger . penWidth $ st
if (fill)
then do tell $ SVG.path ! fillOA ! classA ! pathA
_ <- sequence $ map (\(r, cx, cy) ->
tell $ svgCircle r cx cy ! classA ! fillOA) _cs
return ()
else do tell $ SVG.path ! classA ! strokeWA ! pathA
_ <- sequence $ map (\(r, cx, cy) ->
tell $ svgCircle r cx cy ! classA) _cs
return ()
|
alios/iho-presentation
|
Data/IHO/S52/SVG/Renderer.hs
|
bsd-3-clause
| 7,278 | 0 | 20 | 1,850 | 2,421 | 1,271 | 1,150 | 179 | 3 |
{-
PickSquare.hs (adapted from picksquare.c which is (c) Silicon Graphics, Inc.)
Copyright (c) Sven Panne 2003 <[email protected]>
This file is part of HOpenGL and distributed under a BSD-style license
See the file libraries/GLUT/LICENSE
Use of multiple names and picking are demonstrated. A 3x3 grid of squares is
drawn. When the left mouse button is pressed, all squares under the cursor
position have their color changed.
-}
import Data.Array ( Array, listArray, (!) )
import Data.IORef ( IORef, newIORef, readIORef, modifyIORef )
import System.Exit ( exitWith, ExitCode(ExitSuccess) )
import Graphics.UI.GLUT
type Board = Array (GLint,GLint) (IORef Int)
-- Clear color value for every square on the board
myInit :: IO Board
myInit = do
clearColor $= Color4 0 0 0 0
refs <- sequence . replicate 9 . newIORef $ 0
return $ listArray ((0,0),(2,2)) refs
-- The nine squares are drawn. Each square is given two names: one for the row
-- and the other for the column on the grid. The color of each square is
-- determined by its position on the grid, and the value in the board array.
-- Note: In contrast to the original example, we always give names to
-- squares, regardless of the render mode. This simplifies the code a bit and
-- is even suggested by the Red Book.
drawSquares :: Board -> IO ()
drawSquares board =
flip mapM_ [ 0 .. 2 ] $ \i -> do
loadName (Name (fromIntegral i))
flip mapM_ [ 0 .. 2 ] $ \j ->
withName (Name (fromIntegral j)) $ do
val <- readIORef (board ! (i,j))
-- resolve overloading, not needed in "real" programs
let color3f = color :: Color3 GLfloat -> IO ()
color3f (Color3 (fromIntegral i / 3.0)
(fromIntegral j / 3.0)
(fromIntegral val / 3.0))
rect (Vertex2 i j) (Vertex2 (i + 1) (j + 1))
-- processHits prints the hit records and updates the board array.
processHits :: Maybe[HitRecord] -> Board -> IO ()
processHits Nothing _ = putStrLn "selection buffer overflow"
processHits (Just hitRecords) board = do
putStrLn ("hits = " ++ show (length hitRecords))
mapM_ (\(HitRecord z1 z2 names) -> do
putStrLn (" number of names for this hit = " ++ show (length names))
putStr (" z1 is " ++ show z1)
putStrLn ("; z2 is " ++ show z2)
putStr " names are"
sequence_ [ putStr (" " ++ show n) | Name n <- names ]
putChar '\n'
let [i, j] = [ fromIntegral n | Name n <- names ]
modifyIORef (board ! (i,j)) (\x -> (x + 1) `mod` 3))
hitRecords
-- pickSquares sets up selection mode, name stack, and projection matrix for
-- picking. Then the objects are drawn.
bufSize :: GLsizei
bufSize = 512
pickSquares :: Board -> KeyboardMouseCallback
pickSquares board (MouseButton LeftButton) Down _ (Position x y) = do
vp@(_, (Size _ height)) <- get viewport
(_, maybeHitRecords) <- getHitRecords bufSize $
withName (Name 0) $ do
matrixMode $= Projection
preservingMatrix $ do
loadIdentity
-- create 5x5 pixel picking region near cursor location
pickMatrix (fromIntegral x, fromIntegral height - fromIntegral y) (5, 5) vp
ortho2D 0 3 0 3
drawSquares board
flush
processHits maybeHitRecords board
postRedisplay Nothing
pickSquares _ (Char '\27') Down _ _ = exitWith ExitSuccess
pickSquares _ _ _ _ _ = return ()
display :: Board -> DisplayCallback
display board = do
clear [ ColorBuffer ]
drawSquares board
flush
reshape :: ReshapeCallback
reshape size = do
viewport $= (Position 0 0, size)
matrixMode $= Projection
loadIdentity
ortho2D 0 3 0 3
matrixMode $= Modelview 0
loadIdentity
-- Main Loop
main :: IO ()
main = do
(progName, _args) <- getArgsAndInitialize
initialDisplayMode $= [ SingleBuffered, RGBMode ]
initialWindowSize $= Size 100 100
initialWindowPosition $= Position 100 100
createWindow progName
board <- myInit
reshapeCallback $= Just reshape
displayCallback $= display board
keyboardMouseCallback $= Just (pickSquares board)
mainLoop
|
OS2World/DEV-UTIL-HUGS
|
demos/GLUT/examples/RedBook/PickSquare.hs
|
bsd-3-clause
| 4,192 | 0 | 19 | 1,069 | 1,147 | 568 | 579 | 79 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DataKinds #-}
{- |
Module : Text.Garnett.Completers.BashCompleter
Copyright : Copyright (C) 2014 Julian K. Arni
License : BSD3
Maintainer : Julian K. Arni <[email protected]
Stability : alpha
Convert a GarnettFile to a bash completion script.
-}
module Text.Garnett.Completers.BashCompleter where
import Data.List hiding (group)
import Control.Lens
import Control.Arrow ((&&&))
import Control.Monad.Free
import qualified Data.Map as Map
import qualified Data.Text as T
import Text.Garnett.Definition
import Text.Garnett.Completers.ShellDSL
--------------------------------------------------------------------------
-- BashWriter
--
-- In broad outlines, we do the following: For each subparser, create a
-- bash function that can handle completion for that parser. Then create
-- another function that can pick the right subparser based on completion
-- so far, as well as complete parsers. Add a few helper predefined bash
-- functions.
--------------------------------------------------------------------------
data Bash
instance GarnettWriter Bash where
fmt _ = Fmt "bash"
fromGarnett = toDoc . allBash
--------------------------------------------------------------------------
-- Generate appropriate code
--
--------------------------------------------------------------------------
lkupBash :: Map.Map Fmt a -> Maybe a
lkupBash = lkup (undefined::Bash)
-- | Create a completion function for a given parser.
eachParser :: GParser -> Free ShellF ()
eachParser gp = do
let shorts = foldr (\a b -> b ++ " -" ++ return a) "" $ getShortCompls gp
let longs = " --" ++ (intercalate " --" $ map T.unpack $ getLongCompls gp)
let compgen = shorts ++ longs
define ('_':(T.unpack $ gp ^. parserName)) $ do
stmt "local cur prev words"
stmt "COMPREPLY=()"
stmt "_get_comp_words_by_ref cur prev words"
caseStmt "cur" [( "-*", stmt ("COMPREPLY=( $( compgen -W '"
++ compgen ++ "' -- $cur ))"))
]
-- | A function that checks what subparser we're in, and delegates
-- completion to the corresponding bash function.
delegator :: GarnettFile -> Free ShellF ()
delegator gf = do
let myName = '_':(T.unpack $ gf ^. progName)
let pNames = fmap T.unpack $ gf ^. mainParser . subparsers ^.. folded . parserName
comment $ "Handles completion on subparser names, and delegates "
comment $ "completion to the subparser completion functions."
define myName $ do
stmt "local cur prev words"
stmt "COMPREPLY=()"
stmt "_get_comp_words_by_ref cur prev words"
ifStmt (stmt "${words[@]} -eq 2")
(stmt $ "COMPREPLY=( $( compgen -W '" ++ unwords pNames ++ "' -- $cur ))")
(caseStmt "words[1]" $ fmap (id &&& \x -> stmt $ '_':x ++ "()")
pNames)
stmt "return 0"
-- | The complete bash script.
allBash :: GarnettFile -> Free ShellF ()
allBash gf = do
comment $ "Completion script for " ++ (T.unpack $ gf ^. progName)
comment $ "Generated with Garnett"
mapM_ eachParser (gf ^. mainParser . subparsers)
delegator gf
-- stmt "complete -F
|
jkarni/Garnett
|
src/Text/Garnett/Completers/BashCompleter.hs
|
bsd-3-clause
| 3,256 | 0 | 19 | 741 | 634 | 327 | 307 | -1 | -1 |
{-# LANGUAGE MultiParamTypeClasses #-}
module Data.Binary.Indexed where
import Data.Binary
import Data.Binary.Get
import Data.Binary.Put
import Data.ByteString.Lazy
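-- | Binary (de)serialisation for indexed types: @phi ix@ is a witness that
-- selects the index, and @h ix@ is the value being (de)serialised at that
-- index.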
class HBinary phi h where
hput :: phi ix -> h ix -> Put
hget :: phi ix -> Get (h ix)
hdecode :: HBinary phi h => phi ix -> ByteString -> h ix
hdecode phi = runGet (hget phi)
hencode :: HBinary phi h => phi ix -> h ix -> ByteString
hencode phi = runPut . hput phi
|
sebastiaanvisser/islay
|
src/Data/Binary/Indexed.hs
|
bsd-3-clause
| 438 | 0 | 10 | 86 | 170 | 86 | 84 | 13 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveTraversable #-}
module Rawr.Note where
import GHC.Generics
import Text.Printf
import Data.Aeson
import qualified Data.List as DL
import Data.Maybe
import qualified Data.ByteString.Lazy.Char8 as DB8
data Note = Func String
| Note String
| Call String
| CallEdge String String
| EndFunc
deriving (Generic, Show, Eq, ToJSON)
isFunc (Func _) = True
isFunc _ = False
isCallEdge (CallEdge _ _) = True
isCallEdge _ = False
isEndFunc EndFunc = True
isEndFunc _ = False
isNote (Note _) = True
isNote _ = False
-- what is this for?
notesToString :: [Note] -> String
notesToString notes = DB8.unpack $ encode $ map notesToString' notes
replaceChar oldChar newChar cs = [if c == oldChar then newChar else c | c <- cs]
notesToString' :: Note -> String
notesToString' (Func name) =
let template = "{ \"function\" : \"%s\""
in printf template name
notesToString' EndFunc = "}"
notesToString' (Note note) = note
example = [ Func "f1"
, Note "asdf"
, Note "zxcv"
, Func "f2"
, Note "c"
, EndFunc
, Note "d"
, Func "f3"
, Func "f4"
, EndFunc
, EndFunc
, Note "e"
, EndFunc
]
callCollect [] = []
callCollect (Func caller : EndFunc : rest) = (Call caller : rest)
callCollect (Func caller1 : Func caller2 : rest) = callCollect (Func caller1 : (callCollect (Func caller2 : rest)))
callCollect (Func caller : Call callee : rest) = (CallEdge caller callee) : callCollect (Func caller : rest)
callCollect (Func caller : ce@(CallEdge _ _) : rest) = ce : callCollect (Func caller : rest)
callCollect (EndFunc : rest) = callCollect rest
callCollect xs = [EndFunc]
dropNotes xs = filter (not . isNote) xs
dotGraph :: [Note] -> String
dotGraph notes = let template = unlines [ "digraph {"
, "rankdir=\"LR\";"
, " %s "
, "}"
]
callEdges = DL.nub $ filter isCallEdge (callCollect (dropNotes notes))
nodeTxt = concat [printf "%s -> %s;\n" x y | (CallEdge x y) <- callEdges]
fixedTxt = replaceChar ':' ' '
$ replaceChar '\'' '_'
$ replaceChar '/' '_'
$ replaceChar '.' '_' nodeTxt
in printf template fixedTxt
|
drhodes/jade2hdl
|
rawr/src/Rawr/Note.hs
|
bsd-3-clause
| 2,631 | 0 | 14 | 898 | 782 | 407 | 375 | 67 | 2 |
{-# OPTIONS_GHC -F -pgmF htfpp #-}
module VectorTests.Vector3 where
import Geometry.Space
import Test.Framework
import VectorTests.VectorGenerators ()
prop_vectorProductAnticommutativity :: Vector3 Double -> Vector3 Double -> Bool
prop_vectorProductAnticommutativity a b = cross a b == neg (cross b a)
prop_vectorProductZeroAngle :: Vector3 Double -> Bool
prop_vectorProductZeroAngle a = cross a a == Vector3 0 0 0
prop_vectorProductLength :: Vector3 Double -> Vector3 Double -> Bool
prop_vectorProductLength a b = normL2Squared (cross a b) <= normL2Squared a * normL2Squared b
|
achirkin/fgeom
|
test/VectorTests/Vector3.hs
|
bsd-3-clause
| 600 | 0 | 9 | 99 | 164 | 81 | 83 | 11 | 1 |
module OTPSpec (main, spec) where
import Test.Hspec
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "someFunction" $ do
it "should work fine" $ do
True `shouldBe` False
|
pbogdan/otp-auth
|
test/OTPSpec.hs
|
bsd-3-clause
| 200 | 0 | 13 | 49 | 74 | 39 | 35 | 9 | 1 |
----------------------------------------------------------------------------
-- |
-- Module : H
-- Copyright : (c) Sergey Vinokurov 2016
-- License : BSD3-style (see LICENSE)
-- Maintainer : [email protected]
-- Created : Sunday, 23 October 2016
----------------------------------------------------------------------------
module H where
import G (foo, bar)
data FooTyp = Foo Int
data BarTyp = Bar Double
data BazTyp = Baz String
|
sergv/tags-server
|
test-data/0007resolvable_import_cycle/H.hs
|
bsd-3-clause
| 459 | 0 | 6 | 82 | 50 | 33 | 17 | 5 | 0 |
{-# LANGUAGE OverloadedStrings #-}
import Cataskell.Game
import Cataskell.Serialize
import Control.Monad.Random
import Control.Monad.State
import Control.Lens
import System.Random
import System.Environment (getArgs)
import Control.Exception (assert)
import Data.List (findIndex)
import Data.Aeson
import qualified Data.ByteString.Lazy as B
import System.IO (openBinaryFile, IOMode(WriteMode), hClose)
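-- Simulate a random three-player game from the given seed, returning the seed
-- used, the trace of game states, and whether an End phase was reached.
-- Intended to retry successive seeds up to maxSeed, falling back to the first
-- maxIter states once the seed limit is hit.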
getToEnd :: Int -> Int -> Int -> (Int, [Game], Bool)
getToEnd maxIter maxSeed seed
= let (initialGame, r') = runRand (newGame ["1","2","3"]) (mkStdGen seed)
gs = map fst $ iterate (\(x, r) -> runRand (execStateT randomActGoodInitial x) r) (initialGame, r')
endsAt = findIndex ((== End) . view phase) gs
in if seed < maxSeed
then maybe (getToEnd maxIter maxSeed (seed+1)) (\end -> (seed, take (end+1) gs, True)) endsAt
else (seed, take maxIter gs, False)
main :: IO ()
main = do
args <- getArgs
let (iters, maxSeed) = case args of
[] -> (1000, 1000)
[i] -> (read i, 1000)
(i:s:_) -> (read i, read s)
let (seed, allGs, success) = getToEnd iters maxSeed 0
print (seed, success)
let gs' = encode allGs
h <- openBinaryFile "games.js" WriteMode
B.hPut h "var data="
B.hPut h gs'
B.hPut h ";"
hClose h
|
corajr/cataskell
|
cataskell-example/Main.hs
|
bsd-3-clause
| 1,372 | 0 | 15 | 354 | 537 | 289 | 248 | 36 | 3 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
-- | Database schema versioning and Migration
--
--
module NejlaCommon.Persistence.Migration
( sql
, sqlFile
, migrate
, M
, SchemaVersion
, Migration(..)
-- * Helper functions
, schemaEmptyP
-- ** Re-exports
, gitHash
, P.rawExecute
, P.PersistValue(..)
, P.Single(..)
-- * Internal functions
, setupMetaSchema
, currentSchemaVersion
, registerMigration
) where
import Control.Monad.Logger
import Control.Monad.Reader
import qualified Data.List as List
import Data.Maybe ( fromMaybe )
import Data.Text ( Text )
import qualified Database.Persist.Sql as P
import Development.GitRev
import NejlaCommon.Persistence.Util ( sql, sqlFile )
import System.Exit ( exitFailure )
type M a = ReaderT P.SqlBackend (LoggingT IO) a
type SchemaVersion = Text
-- | Check if a schema is empty (e.g. hasn't been initialized)
-- Postgres-specific
schemaEmptyP :: Text -- ^ Name of the schema
-> M Bool
schemaEmptyP schema = do
res <- P.rawSql [sql|
SELECT relname
FROM pg_class c
INNER JOIN pg_namespace s
ON s.oid = c.relnamespace
WHERE s.nspname=?
|]
[ P.PersistText schema ] :: M [P.Single Text]
return $ List.null res
-- | Set up the metadata schema "_meta" and register an empty migration as a
-- starting point
setupMetaSchema :: M ()
setupMetaSchema = schemaEmptyP "_meta" >>= \case
-- DB versioning not initialized
True -> do
$logInfo "Schema versioning not found. Initializing now."
P.rawExecute $(sqlFile "src/NejlaCommon/Persistence/sql/initialize_versioning.sql")
[]
-- Schema versioning already installed
False -> return ()
-- | Query the current schema version
currentSchemaVersion :: M (Maybe SchemaVersion)
currentSchemaVersion = do
P.rawSql [sql|
SELECT _meta.schema_version();
|]
[] >>= \case
[ Nothing ] -> return Nothing
[ Just (P.Single (P.PersistText i)) ] -> return (Just i)
[ Just (P.Single P.PersistNull) ] -> return Nothing
_ -> error "currentSchemaVersion: wrong number of results"
-- | Register a migration. Shouldn't be used manually
registerMigration
:: Text -- ^ Program revision (e.g. git revision)
-> Maybe SchemaVersion -- ^ Expected schema version before migration
-> SchemaVersion -- ^ Schema version after the migration
-> Text -- ^ Description of the migration changes
-> M ()
registerMigration revision expect to description = do
_ <- P.rawSql [sql| SELECT _meta.add_migration(?, ?, ?, ?);
|]
[ maybe P.PersistNull P.PersistText expect
, P.PersistText to
, P.PersistText description
, P.PersistText revision
] :: M [P.Single P.PersistValue]
return ()
-- | Run a migration
runMigration :: Text -- ^ Program revision
-> Migration
-> M ()
runMigration revision Migration{..} = do
$logInfo $ "Migrating database schema from " <> fromMaybe "<None>" expect
<> " to " <> to <> " (" <> description <> ")"
script
registerMigration revision expect to description
data Migration =
Migration
{ expect :: Maybe SchemaVersion
-- ^ Expected schema version before the migration (Nothing if no migrations exist)
, to :: SchemaVersion
-- ^ Schema version after the migration
, description :: Text
-- ^ Description of the migration
, script :: M ()
}
findMigration :: Text -> Maybe SchemaVersion -> [Migration] -> M ()
findMigration _r (Just v) [ Migration{..} ]
| v == to =
$logInfo $ "Already in schema version " <> v <> "; nothing to do."
-- Already in final schema version
findMigration revision v ms@(Migration{..} : mss)
| v == expect = runMigrations revision v ms
| otherwise = findMigration revision v mss
findMigration _r v _ = do
$logError $ "Unknown schema version " <> fromMaybe "<None>" v
liftIO exitFailure
runMigrations :: Text -> Maybe SchemaVersion -> [Migration] -> M ()
runMigrations _ v [] = do
$logInfo $ "Finished migrations. Final schema: " <> fromMaybe "<None>" v
return ()
runMigrations revision v (m@Migration{..} : ms)
| v == expect = do
runMigration revision m >> runMigrations revision (Just to) ms
| otherwise = do
$logError $
"runMigrations: Unknown schema version " <> fromMaybe "<None>" v
liftIO exitFailure
-- | Finds the current schema version and runs all migrations linearly starting
-- from that version.
--
-- Takes a list of migrations. Each migration should leave the schema in the
-- version the next migration expects (that is, the @to@-field of migration @n@
-- should match the @expect@-field of migration @n+1@)
--
-- The first time this function runs it sets up a metadata schema "_meta" that
-- logs the migrations that have been run in the past. The initial schema
-- version before any migrations are registered is the empty string "", so the
-- first migration should expect this schema.
migrate :: Text -- ^ Program revision (e.g. $(gitHash) from gitrev)
-> [Migration]
-> M ()
migrate _ [] = do
$logError "List of migrations is empty, can't migrate"
liftIO exitFailure
migrate revision migrations = do
setupMetaSchema
sv <- currentSchemaVersion
findMigration revision sv migrations
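-- A minimal usage sketch (added for illustration; not part of the original
-- module). The schema versions and SQL below are hypothetical placeholders;
-- the point is only that each migration's 'to' matches the next one's
-- 'expect', and that the first migration expects the initial empty version ""
-- as described above. It could be run as @migrate $(gitHash) _exampleMigrations@.
_exampleMigrations :: [Migration]
_exampleMigrations =
  [ Migration { expect      = Just ""      -- initial schema version, per the note above
              , to          = "1"
              , description = "create users table"
              , script      = P.rawExecute "CREATE TABLE users (id serial PRIMARY KEY);" []
              }
  , Migration { expect      = Just "1"     -- must equal the 'to' of the previous migration
              , to          = "2"
              , description = "add email column to users"
              , script      = P.rawExecute "ALTER TABLE users ADD COLUMN email text;" []
              }
  ]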
|
nejla/nejla-common
|
src/NejlaCommon/Persistence/Migration.hs
|
bsd-3-clause
| 5,718 | 0 | 17 | 1,470 | 1,143 | 599 | 544 | 112 | 4 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Stack.Init
( initProject
, InitOpts (..)
) where
import Control.Exception (assert)
import Control.Exception.Enclosed (catchAny)
import Control.Monad
import Control.Monad.Catch (MonadMask, throwM)
import Control.Monad.IO.Class
import Control.Monad.Logger
import Control.Monad.Reader (MonadReader, asks)
import Control.Monad.Trans.Control (MonadBaseControl)
import qualified Data.ByteString.Builder as B
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.Lazy as L
import qualified Data.Foldable as F
import Data.Function (on)
import qualified Data.HashMap.Strict as HM
import qualified Data.IntMap as IntMap
import Data.List (intersect, maximumBy)
import Data.List.Extra (nubOrd)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (fromJust)
import Data.Monoid
import qualified Data.Text as T
import qualified Data.Yaml as Yaml
import qualified Distribution.PackageDescription as C
import Network.HTTP.Client.Conduit (HasHttpManager)
import Path
import Path.IO
import Stack.BuildPlan
import Stack.Config (getSnapshots,
makeConcreteResolver)
import Stack.Constants
import Stack.Solver
import Stack.Types
import Stack.Types.Internal (HasLogLevel, HasReExec,
HasTerminal)
import qualified System.FilePath as FP
-- | Generate stack.yaml
initProject
:: ( MonadBaseControl IO m, MonadIO m, MonadLogger m, MonadMask m
, MonadReader env m, HasConfig env , HasGHCVariant env
, HasHttpManager env , HasLogLevel env , HasReExec env
, HasTerminal env)
=> Path Abs Dir
-> InitOpts
-> Maybe AbstractResolver
-> m ()
initProject currDir initOpts mresolver = do
let dest = currDir </> stackDotYaml
reldest <- toFilePath `liftM` makeRelativeToCurrentDir dest
exists <- doesFileExist dest
when (not (forceOverwrite initOpts) && exists) $ do
error ("Stack configuration file " <> reldest <>
" exists, use 'stack solver' to fix the existing config file or \
\'--force' to overwrite it.")
dirs <- mapM (resolveDir' . T.unpack) (searchDirs initOpts)
let noPkgMsg = "In order to init, you should have an existing .cabal \
\file. Please try \"stack new\" instead."
find = findCabalFiles (includeSubDirs initOpts)
dirs' = if null dirs then [currDir] else dirs
cabalfps <- liftM concat $ mapM find dirs'
(bundle, dupPkgs) <- cabalPackagesCheck cabalfps noPkgMsg Nothing
(r, flags, extraDeps, rbundle) <- getDefaultResolver dest initOpts
mresolver bundle
let ignored = Map.difference bundle rbundle
dupPkgMsg
| (dupPkgs /= []) =
"Warning: Some packages were found to have names conflicting \
\with others and have been commented out in the \
\packages section.\n"
| otherwise = ""
missingPkgMsg
| (Map.size ignored > 0) =
"Warning: Some packages were found to be incompatible with \
\the resolver and have been left commented out in the \
\packages section.\n"
| otherwise = ""
extraDepMsg
| (Map.size extraDeps > 0) =
"Warning: Specified resolver could not satisfy all \
\dependencies. Some external packages have been added \
\as dependencies.\n"
| otherwise = ""
makeUserMsg msgs =
let msg = concat msgs
in if msg /= "" then
msg <> "You can suppress this message by removing it from \
\stack.yaml\n"
else ""
userMsg = makeUserMsg [dupPkgMsg, missingPkgMsg, extraDepMsg]
gpds = Map.elems $ fmap snd rbundle
p = Project
{ projectUserMsg = if userMsg == "" then Nothing else Just userMsg
, projectPackages = pkgs
, projectExtraDeps = extraDeps
, projectFlags = removeSrcPkgDefaultFlags gpds flags
, projectResolver = r
, projectCompiler = Nothing
, projectExtraPackageDBs = []
}
makeRelDir dir =
case stripDir currDir dir of
Nothing
| currDir == dir -> "."
| otherwise -> assert False $ toFilePath dir
Just rel -> toFilePath rel
makeRel = fmap toFilePath . makeRelativeToCurrentDir
pkgs = map toPkg $ Map.elems (fmap (parent . fst) rbundle)
toPkg dir = PackageEntry
{ peValidWanted = Nothing
, peExtraDepMaybe = Nothing
, peLocation = PLFilePath $ makeRelDir dir
, peSubdirs = []
}
indent t = T.unlines $ fmap (" " <>) (T.lines t)
$logInfo $ "Initialising configuration using resolver: " <> resolverName r
$logInfo $ "Total number of user packages considered: "
<> (T.pack $ show $ (Map.size bundle + length dupPkgs))
when (dupPkgs /= []) $ do
$logWarn $ "Warning! Ignoring "
<> (T.pack $ show $ length dupPkgs)
<> " duplicate packages:"
rels <- mapM makeRel dupPkgs
$logWarn $ indent $ showItems rels
when (Map.size ignored > 0) $ do
$logWarn $ "Warning! Ignoring "
<> (T.pack $ show $ Map.size ignored)
<> " packages due to dependency conflicts:"
rels <- mapM makeRel (Map.elems (fmap fst ignored))
$logWarn $ indent $ showItems $ rels
when (Map.size extraDeps > 0) $ do
$logWarn $ "Warning! " <> (T.pack $ show $ Map.size extraDeps)
<> " external dependencies were added."
$logInfo $
(if exists then "Overwriting existing configuration file: "
else "Writing configuration to file: ")
<> T.pack reldest
liftIO $ L.writeFile (toFilePath dest)
$ B.toLazyByteString
$ renderStackYaml p
(Map.elems $ fmap (makeRelDir . parent . fst) ignored)
(map (makeRelDir . parent) dupPkgs)
$logInfo "All done."
-- | Render a stack.yaml file with comments, see:
-- https://github.com/commercialhaskell/stack/issues/226
renderStackYaml :: Project -> [FilePath] -> [FilePath] -> B.Builder
renderStackYaml p ignoredPackages dupPackages =
case Yaml.toJSON p of
Yaml.Object o -> renderObject o
_ -> assert False $ B.byteString $ Yaml.encode p
where
renderObject o =
B.byteString "# This file was automatically generated by stack init\n" <>
B.byteString "# For more information, see: http://docs.haskellstack.org/en/stable/yaml_configuration.html\n\n" <>
F.foldMap (goComment o) comments <>
goOthers (o `HM.difference` HM.fromList comments) <>
B.byteString
"# Control whether we use the GHC we find on the path\n\
\# system-ghc: true\n\n\
\# Require a specific version of stack, using version ranges\n\
\# require-stack-version: -any # Default\n\
\# require-stack-version: >= 1.0.0\n\n\
\# Override the architecture used by stack, especially useful on Windows\n\
\# arch: i386\n\
\# arch: x86_64\n\n\
\# Extra directories used by stack for building\n\
\# extra-include-dirs: [/path/to/dir]\n\
\# extra-lib-dirs: [/path/to/dir]\n\n\
\# Allow a newer minor version of GHC than the snapshot specifies\n\
\# compiler-check: newer-minor\n"
comments =
[ ("user-message", "A message to be displayed to the user. Used when autogenerated config ignored some packages or added extra deps.")
, ("resolver", "Specifies the GHC version and set of packages available (e.g., lts-3.5, nightly-2015-09-21, ghc-7.10.2)")
, ("packages", "Local packages, usually specified by relative directory name")
, ("extra-deps", "Packages to be pulled from upstream that are not in the resolver (e.g., acme-missiles-0.3)")
, ("flags", "Override default flag values for local packages and extra-deps")
, ("extra-package-dbs", "Extra package databases containing global packages")
]
commentedPackages =
let ignoredComment = "# The following packages have been ignored \
\due to incompatibility with the resolver compiler or \
\dependency conflicts with other packages"
dupComment = "# The following packages have been ignored due \
\to package name conflict with other packages"
in commentPackages ignoredComment ignoredPackages
<> commentPackages dupComment dupPackages
commentPackages comment pkgs
| pkgs /= [] =
B.byteString (BC.pack $ comment ++ "\n")
<> (B.byteString $ BC.pack $ concat
$ (map (\x -> "#- " ++ x ++ "\n") pkgs) ++ ["\n"])
| otherwise = ""
goComment o (name, comment) =
case HM.lookup name o of
Nothing -> assert (name == "user-message") mempty
Just v ->
B.byteString "# " <>
B.byteString comment <>
B.byteString "\n" <>
B.byteString (Yaml.encode $ Yaml.object [(name, v)]) <>
if (name == "packages") then commentedPackages else "" <>
B.byteString "\n"
goOthers o
| HM.null o = mempty
| otherwise = assert False $ B.byteString $ Yaml.encode o
getSnapshots' :: (MonadIO m, MonadMask m, MonadReader env m, HasConfig env, HasHttpManager env, MonadLogger m, MonadBaseControl IO m)
=> m Snapshots
getSnapshots' =
getSnapshots `catchAny` \e -> do
$logError $
"Unable to download snapshot list, and therefore could " <>
"not generate a stack.yaml file automatically"
$logError $
"This sometimes happens due to missing Certificate Authorities " <>
"on your system. For more information, see:"
$logError ""
$logError " https://github.com/commercialhaskell/stack/issues/234"
$logError ""
$logError "You can try again, or create your stack.yaml file by hand. See:"
$logError ""
$logError " http://docs.haskellstack.org/en/stable/yaml_configuration.html"
$logError ""
$logError $ "Exception was: " <> T.pack (show e)
error ""
-- | Get the default resolver value
getDefaultResolver
:: ( MonadBaseControl IO m, MonadIO m, MonadLogger m, MonadMask m
, MonadReader env m, HasConfig env , HasGHCVariant env
, HasHttpManager env , HasLogLevel env , HasReExec env
, HasTerminal env)
=> Path Abs File -- ^ stack.yaml
-> InitOpts
-> Maybe AbstractResolver
-> Map PackageName (Path Abs File, C.GenericPackageDescription)
-- ^ Src package name: cabal dir, cabal package description
-> m ( Resolver
, Map PackageName (Map FlagName Bool)
, Map PackageName Version
, Map PackageName (Path Abs File, C.GenericPackageDescription))
-- ^ ( Resolver
-- , Flags for src packages and extra deps
-- , Extra dependencies
-- , Src packages actually considered)
getDefaultResolver stackYaml initOpts mresolver bundle =
maybe selectSnapResolver makeConcreteResolver mresolver
>>= getWorkingResolverPlan stackYaml initOpts bundle
where
-- TODO support selecting best across regular and custom snapshots
selectSnapResolver = do
let gpds = Map.elems (fmap snd bundle)
snaps <- getSnapshots' >>= getRecommendedSnapshots
(s, r) <- selectBestSnapshot gpds snaps
case r of
BuildPlanCheckFail {} | not (omitPackages initOpts)
-> throwM (NoMatchingSnapshot snaps)
_ -> return $ ResolverSnapshot s
getWorkingResolverPlan
:: ( MonadBaseControl IO m, MonadIO m, MonadLogger m, MonadMask m
, MonadReader env m, HasConfig env , HasGHCVariant env
, HasHttpManager env , HasLogLevel env , HasReExec env
, HasTerminal env)
=> Path Abs File -- ^ stack.yaml
-> InitOpts
-> Map PackageName (Path Abs File, C.GenericPackageDescription)
-- ^ Src package name: cabal dir, cabal package description
-> Resolver
-> m ( Resolver
, Map PackageName (Map FlagName Bool)
, Map PackageName Version
, Map PackageName (Path Abs File, C.GenericPackageDescription))
-- ^ ( Resolver
-- , Flags for src packages and extra deps
-- , Extra dependencies
-- , Src packages actually considered)
getWorkingResolverPlan stackYaml initOpts bundle resolver = do
$logInfo $ "Selected resolver: " <> resolverName resolver
go bundle
where
go info = do
eres <- checkBundleResolver stackYaml initOpts info resolver
-- if some packages failed try again using the rest
case eres of
Right (f, edeps)-> return (resolver, f, edeps, info)
Left ignored
| Map.null available -> do
$logWarn "*** Could not find a working plan for any of \
\the user packages.\nProceeding to create a \
\config anyway."
return (resolver, Map.empty, Map.empty, Map.empty)
| otherwise -> do
when ((Map.size available) == (Map.size info)) $
error "Bug: No packages to ignore"
if length ignored > 1 then do
$logWarn "*** Ignoring packages:"
$logWarn $ indent $ showItems ignored
else
$logWarn $ "*** Ignoring package: "
<> (T.pack $ packageNameString (head ignored))
go available
where
indent t = T.unlines $ fmap (" " <>) (T.lines t)
isAvailable k _ = not (k `elem` ignored)
available = Map.filterWithKey isAvailable info
checkBundleResolver
:: ( MonadBaseControl IO m, MonadIO m, MonadLogger m, MonadMask m
, MonadReader env m, HasConfig env , HasGHCVariant env
, HasHttpManager env , HasLogLevel env , HasReExec env
, HasTerminal env)
=> Path Abs File -- ^ stack.yaml
-> InitOpts
-> Map PackageName (Path Abs File, C.GenericPackageDescription)
-- ^ Src package name: cabal dir, cabal package description
-> Resolver
-> m (Either [PackageName] ( Map PackageName (Map FlagName Bool)
, Map PackageName Version))
checkBundleResolver stackYaml initOpts bundle resolver = do
result <- checkResolverSpec gpds Nothing resolver
case result of
BuildPlanCheckOk f -> return $ Right (f, Map.empty)
BuildPlanCheckPartial f _
| needSolver resolver initOpts -> do
$logWarn $ "*** Resolver " <> resolverName resolver
<> " will need external packages: "
$logWarn $ indent $ T.pack $ show result
solve f
| otherwise -> throwM $ ResolverPartial resolver (show result)
BuildPlanCheckFail _ e _
| (omitPackages initOpts) -> do
$logWarn $ "*** Resolver compiler mismatch: "
<> resolverName resolver
$logWarn $ indent $ T.pack $ show result
let failed = Map.unions (Map.elems (fmap deNeededBy e))
return $ Left (Map.keys failed)
| otherwise -> throwM $ ResolverMismatch resolver (show result)
where
indent t = T.unlines $ fmap (" " <>) (T.lines t)
gpds = Map.elems (fmap snd bundle)
solve flags = do
let cabalDirs = map parent (Map.elems (fmap fst bundle))
srcConstraints = mergeConstraints (gpdPackages gpds) flags
eresult <- solveResolverSpec stackYaml cabalDirs
(resolver, srcConstraints, Map.empty)
case eresult of
Right (src, ext) ->
return $ Right (fmap snd (Map.union src ext), fmap fst ext)
Left packages
| omitPackages initOpts, srcpkgs /= []-> do
pkg <- findOneIndependent srcpkgs flags
return $ Left [pkg]
| otherwise -> throwM (SolverGiveUp giveUpMsg)
where srcpkgs = intersect (Map.keys bundle) packages
-- among a list of packages find one on which none among the rest of the
-- packages depend. This package is a good candidate to be removed from
-- the list of packages when there is conflict in dependencies among this
-- set of packages.
findOneIndependent packages flags = do
platform <- asks (configPlatform . getConfig)
(compiler, _) <- getResolverConstraints stackYaml resolver
let getGpd pkg = snd (fromJust (Map.lookup pkg bundle))
getFlags pkg = fromJust (Map.lookup pkg flags)
deps pkg = gpdPackageDeps (getGpd pkg) compiler platform
(getFlags pkg)
allDeps = concat $ map (Map.keys . deps) packages
isIndependent pkg = not $ pkg `elem` allDeps
-- prefer to reject packages in deeper directories
path pkg = fst (fromJust (Map.lookup pkg bundle))
pathlen = length . FP.splitPath . toFilePath . path
maxPathlen = maximumBy (compare `on` pathlen)
return $ maxPathlen (filter isIndependent packages)
giveUpMsg = concat
[ " - Use '--omit-packages to exclude conflicting package(s).\n"
, " - Tweak the generated "
, toFilePath stackDotYaml <> " and then run 'stack solver':\n"
, " - Add any missing remote packages.\n"
, " - Add extra dependencies to guide solver.\n"
, " - Update external packages with 'stack update' and try again.\n"
]
needSolver _ (InitOpts {useSolver = True}) = True
needSolver (ResolverCompiler _) _ = True
needSolver _ _ = False
getRecommendedSnapshots :: (MonadIO m, MonadMask m, MonadReader env m, HasConfig env, HasHttpManager env, HasGHCVariant env, MonadLogger m, MonadBaseControl IO m)
=> Snapshots
-> m [SnapName]
getRecommendedSnapshots snapshots = do
-- in order - Latest LTS, Latest Nightly, all LTS most recent first
return $ nubOrd $ concat
[ map (uncurry LTS)
(take 1 $ reverse $ IntMap.toList $ snapshotsLts snapshots)
, [Nightly $ snapshotsNightly snapshots]
, map (uncurry LTS)
(drop 1 $ reverse $ IntMap.toList $ snapshotsLts snapshots)
]
data InitOpts = InitOpts
{ searchDirs :: ![T.Text]
-- ^ List of sub directories to search for .cabal files
, useSolver :: Bool
-- ^ Use solver to determine required external dependencies
, omitPackages :: Bool
-- ^ Exclude conflicting or incompatible user packages
, forceOverwrite :: Bool
-- ^ Overwrite existing stack.yaml
, includeSubDirs :: Bool
-- ^ If True, include all .cabal files found in any sub directories
}
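-- A hedged illustration (added; not from the original source): a plausible set
-- of options roughly corresponding to a plain @stack init --force@ run in the
-- current directory. The field values are assumptions, not stack's actual
-- defaults.
_exampleInitOpts :: InitOpts
_exampleInitOpts = InitOpts
    { searchDirs     = []
    , useSolver      = False
    , omitPackages   = False
    , forceOverwrite = True
    , includeSubDirs = False
    }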
|
harendra-kumar/stack
|
src/Stack/Init.hs
|
bsd-3-clause
| 20,472 | 0 | 21 | 7,008 | 4,315 | 2,183 | 2,132 | 346 | 6 |
{-# LANGUAGE OverloadedStrings #-}
module Dhall.Test.Schemas where
import Data.Text (Text)
import Dhall.Parser (Header (..))
import Prelude hiding (FilePath)
import Test.Tasty (TestTree)
import Turtle (FilePath)
import qualified Data.Text as Text
import qualified Data.Text.IO as Text.IO
import qualified Dhall.Core as Core
import qualified Dhall.Parser as Parser
import qualified Dhall.Pretty as Pretty
import qualified Dhall.Schemas as Schemas
import qualified Dhall.Test.Util as Test.Util
import qualified Prettyprinter as Doc
import qualified Prettyprinter.Render.Text as Doc.Render.Text
import qualified Test.Tasty as Tasty
import qualified Test.Tasty.HUnit as Tasty.HUnit
import qualified Turtle
schemasDirectory :: FilePath
schemasDirectory = "./tests/schemas"
getTests :: IO TestTree
getTests = do
schemasTests <- Test.Util.discover (Turtle.chars <* "A.dhall") schemaTest (Turtle.lstree schemasDirectory)
return (Tasty.testGroup "schemas tests" [ schemasTests ])
format :: Header -> Core.Expr Parser.Src Core.Import -> Text
format (Header header) expr =
let doc = Doc.pretty header
<> Pretty.prettyCharacterSet Pretty.Unicode expr
<> "\n"
docStream = Pretty.layout doc
in
Doc.Render.Text.renderStrict docStream
schemaTest :: Text -> TestTree
schemaTest prefix =
Tasty.HUnit.testCase (Text.unpack prefix) $ do
let inputFile = Text.unpack (prefix <> "A.dhall")
let outputFile = Text.unpack (prefix <> "B.dhall")
inputText <- Text.IO.readFile inputFile
(header, parsedInput) <- Core.throws (Parser.exprAndHeaderFromText mempty inputText)
parsedSchema <- Core.throws (Parser.exprFromText mempty (Test.Util.toDhallPath (prefix <> "Schema.dhall")))
actualExpression <- Schemas.rewriteWithSchemas parsedSchema parsedInput
let actualText = format header actualExpression
expectedText <- Text.IO.readFile outputFile
let message = "The rewritten expression did not match the expected output"
Tasty.HUnit.assertEqual message expectedText actualText
|
Gabriel439/Haskell-Dhall-Library
|
dhall/tests/Dhall/Test/Schemas.hs
|
bsd-3-clause
| 2,250 | 0 | 15 | 525 | 547 | 303 | 244 | 45 | 1 |
{-|
Module : Grammar.Size
Description : Definition of the Size datatype.
Copyright : (c) Davide Mancusi, 2017
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : POSIX
This module exports the 'Size' type synonym.
-}
module Grammar.Size
( Size
, mean
) where
-- system imports
import Data.Foldable
-- | A type alias for sized random expansion
type Size = Double
mean :: (Foldable t, Fractional a) => t a -> a
mean xs = let meanAcc (tot, n) x = (tot+x, n+1)
(total, len) = foldl' meanAcc (0, 0::Int) xs
in total / fromIntegral len
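-- Added note (not in the original source): for example,
-- @mean [1, 2, 3, 4 :: Double]@ accumulates the pair (10, 4) and yields 2.5.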
|
arekfu/grammar-haskell
|
src/Grammar/Size.hs
|
bsd-3-clause
| 603 | 0 | 10 | 143 | 134 | 75 | 59 | 9 | 1 |
{-# LANGUAGE OverloadedStrings, TemplateHaskell #-}
module Web.Slack.Types.Topic where
import Data.Aeson
import Data.Text (Text)
import Control.Applicative
import Control.Lens.TH
import Prelude
type Purpose = Topic
data Topic = Topic
{ _topicValue :: Text
, _topicCreator :: Text
, _topicLastSet :: Int
} deriving (Show)
makeLenses ''Topic
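-- Added note (not in the original source): the parser below expects an object
-- with the keys "value", "creator" and "last_set", e.g. (values illustrative)
-- {"value": "weekly planning", "creator": "U024BE7LH", "last_set": 1369677212}.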
instance FromJSON Topic where
parseJSON = withObject "topic" (\o ->
Topic <$> o .: "value" <*> o .: "creator" <*> o .: "last_set")
|
madjar/slack-api
|
src/Web/Slack/Types/Topic.hs
|
mit
| 540 | 0 | 14 | 136 | 136 | 79 | 57 | 17 | 0 |
module Server where
import Control.Exception (bracket, finally, handleJust, tryJust)
import Control.Monad (guard)
import Data.IORef (IORef, newIORef, readIORef, writeIORef)
import GHC.IO.Exception (IOErrorType(ResourceVanished))
import Network (PortID(UnixSocket), Socket, accept, listenOn, sClose)
import System.Directory (removeFile)
import System.Exit (ExitCode(ExitSuccess))
import System.IO (Handle, hClose, hFlush, hGetLine, hPutStrLn)
import System.IO.Error (ioeGetErrorType, isAlreadyInUseError, isDoesNotExistError)
import CommandLoop (newCommandLoopState, Config, updateConfig, startCommandLoop)
import Types (ClientDirective(..), Command, CommandExtra(..), ServerDirective(..))
import Util (readMaybe)
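-- Added note (not in the original source): 'createListenSocket' listens on the
-- given unix socket path; if the address is already in use it removes the
-- stale socket file and retries once.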
createListenSocket :: FilePath -> IO Socket
createListenSocket socketPath = do
r <- tryJust (guard . isAlreadyInUseError) $ listenOn (UnixSocket socketPath)
case r of
Right socket -> return socket
Left _ -> do
removeFile socketPath
listenOn (UnixSocket socketPath)
startServer :: FilePath -> Maybe Socket -> CommandExtra -> IO ()
startServer socketPath mbSock cmdExtra = do
case mbSock of
Nothing -> bracket (createListenSocket socketPath) cleanup go
Just sock -> (go sock) `finally` (cleanup sock)
where
cleanup :: Socket -> IO ()
cleanup sock = do
sClose sock
removeSocketFile
go :: Socket -> IO ()
go sock = do
state <- newCommandLoopState
currentClient <- newIORef Nothing
configRef <- newIORef Nothing
config <- updateConfig Nothing cmdExtra
startCommandLoop state (clientSend currentClient) (getNextCommand currentClient sock configRef) config Nothing
removeSocketFile :: IO ()
removeSocketFile = do
-- Ignore possible error if socket file does not exist
_ <- tryJust (guard . isDoesNotExistError) $ removeFile socketPath
return ()
clientSend :: IORef (Maybe Handle) -> ClientDirective -> IO ()
clientSend currentClient clientDirective = do
mbH <- readIORef currentClient
case mbH of
Just h -> ignoreEPipe $ do
hPutStrLn h (show clientDirective)
hFlush h
Nothing -> return ()
where
-- EPIPE means that the client is no longer there.
ignoreEPipe = handleJust (guard . isEPipe) (const $ return ())
isEPipe = (==ResourceVanished) . ioeGetErrorType
getNextCommand :: IORef (Maybe Handle) -> Socket -> IORef (Maybe Config) -> IO (Maybe (Command, Config))
getNextCommand currentClient sock config = do
checkCurrent <- readIORef currentClient
case checkCurrent of
Just h -> hClose h
Nothing -> return ()
(h, _, _) <- accept sock
writeIORef currentClient (Just h)
msg <- hGetLine h -- TODO catch exception
let serverDirective = readMaybe msg
case serverDirective of
Nothing -> do
clientSend currentClient $ ClientUnexpectedError $
"The client sent an invalid message to the server: " ++ show msg
getNextCommand currentClient sock config
Just (SrvCommand cmd cmdExtra) -> do
curConfig <- readIORef config
config' <- updateConfig curConfig cmdExtra
writeIORef config (Just config')
return $ Just (cmd, config')
Just SrvStatus -> do
mapM_ (clientSend currentClient) $
[ ClientStdout "Server is running."
, ClientExit ExitSuccess
]
getNextCommand currentClient sock config
Just SrvExit -> do
mapM_ (clientSend currentClient) $
[ ClientStdout "Shutting down server."
, ClientExit ExitSuccess
]
-- Must close the handle here because we are exiting the loop so it
-- won't be closed in the code above
hClose h
return Nothing
|
pacak/hdevtools
|
src/Server.hs
|
mit
| 3,921 | 0 | 15 | 1,044 | 1,078 | 538 | 540 | 82 | 5 |
data A = A Int
| B Float
deriving (Koe)
|
roberth/uu-helium
|
test/parser/DerivingUnknownClass.hs
|
gpl-3.0
| 50 | 2 | 6 | 20 | 27 | 13 | 14 | 3 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.CloudFront.UpdateStreamingDistribution
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Update a streaming distribution.
--
-- /See:/ <http://docs.aws.amazon.com/AmazonCloudFront/latest/APIReference/UpdateStreamingDistribution.html AWS API Reference> for UpdateStreamingDistribution.
module Network.AWS.CloudFront.UpdateStreamingDistribution
(
-- * Creating a Request
updateStreamingDistribution
, UpdateStreamingDistribution
-- * Request Lenses
, usdIfMatch
, usdStreamingDistributionConfig
, usdId
-- * Destructuring the Response
, updateStreamingDistributionResponse
, UpdateStreamingDistributionResponse
-- * Response Lenses
, usdrsETag
, usdrsStreamingDistribution
, usdrsResponseStatus
) where
import Network.AWS.CloudFront.Types
import Network.AWS.CloudFront.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | The request to update a streaming distribution.
--
-- /See:/ 'updateStreamingDistribution' smart constructor.
data UpdateStreamingDistribution = UpdateStreamingDistribution'
{ _usdIfMatch :: !(Maybe Text)
, _usdStreamingDistributionConfig :: !StreamingDistributionConfig
, _usdId :: !Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'UpdateStreamingDistribution' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'usdIfMatch'
--
-- * 'usdStreamingDistributionConfig'
--
-- * 'usdId'
updateStreamingDistribution
:: StreamingDistributionConfig -- ^ 'usdStreamingDistributionConfig'
-> Text -- ^ 'usdId'
-> UpdateStreamingDistribution
updateStreamingDistribution pStreamingDistributionConfig_ pId_ =
UpdateStreamingDistribution'
{ _usdIfMatch = Nothing
, _usdStreamingDistributionConfig = pStreamingDistributionConfig_
, _usdId = pId_
}
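-- Added usage sketch (not part of the generated module; identifiers are
-- illustrative): given some 'StreamingDistributionConfig' value @cfg@ and
-- assuming the lens operators re-exported by "Network.AWS.Prelude", a request
-- carrying the optional If-Match header could be built as
--
-- > updateStreamingDistribution cfg "EDFDVBD6EXAMPLE" & usdIfMatch ?~ "E2QWRUHAPOMQZL"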
-- | The value of the ETag header you received when retrieving the streaming
-- distribution\'s configuration. For example: E2QWRUHAPOMQZL.
usdIfMatch :: Lens' UpdateStreamingDistribution (Maybe Text)
usdIfMatch = lens _usdIfMatch (\ s a -> s{_usdIfMatch = a});
-- | The streaming distribution\'s configuration information.
usdStreamingDistributionConfig :: Lens' UpdateStreamingDistribution StreamingDistributionConfig
usdStreamingDistributionConfig = lens _usdStreamingDistributionConfig (\ s a -> s{_usdStreamingDistributionConfig = a});
-- | The streaming distribution\'s id.
usdId :: Lens' UpdateStreamingDistribution Text
usdId = lens _usdId (\ s a -> s{_usdId = a});
instance AWSRequest UpdateStreamingDistribution where
type Rs UpdateStreamingDistribution =
UpdateStreamingDistributionResponse
request = putXML cloudFront
response
= receiveXML
(\ s h x ->
UpdateStreamingDistributionResponse' <$>
(h .#? "ETag") <*> (parseXML x) <*>
(pure (fromEnum s)))
instance ToElement UpdateStreamingDistribution where
toElement
= mkElement
"{http://cloudfront.amazonaws.com/doc/2015-04-17/}StreamingDistributionConfig"
.
_usdStreamingDistributionConfig
instance ToHeaders UpdateStreamingDistribution where
toHeaders UpdateStreamingDistribution'{..}
= mconcat ["If-Match" =# _usdIfMatch]
instance ToPath UpdateStreamingDistribution where
toPath UpdateStreamingDistribution'{..}
= mconcat
["/2015-04-17/streaming-distribution/", toBS _usdId,
"/config"]
instance ToQuery UpdateStreamingDistribution where
toQuery = const mempty
-- | The returned result of the corresponding request.
--
-- /See:/ 'updateStreamingDistributionResponse' smart constructor.
data UpdateStreamingDistributionResponse = UpdateStreamingDistributionResponse'
{ _usdrsETag :: !(Maybe Text)
, _usdrsStreamingDistribution :: !(Maybe StreamingDistribution)
, _usdrsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'UpdateStreamingDistributionResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'usdrsETag'
--
-- * 'usdrsStreamingDistribution'
--
-- * 'usdrsResponseStatus'
updateStreamingDistributionResponse
:: Int -- ^ 'usdrsResponseStatus'
-> UpdateStreamingDistributionResponse
updateStreamingDistributionResponse pResponseStatus_ =
UpdateStreamingDistributionResponse'
{ _usdrsETag = Nothing
, _usdrsStreamingDistribution = Nothing
, _usdrsResponseStatus = pResponseStatus_
}
-- | The current version of the configuration. For example: E2QWRUHAPOMQZL.
usdrsETag :: Lens' UpdateStreamingDistributionResponse (Maybe Text)
usdrsETag = lens _usdrsETag (\ s a -> s{_usdrsETag = a});
-- | The streaming distribution\'s information.
usdrsStreamingDistribution :: Lens' UpdateStreamingDistributionResponse (Maybe StreamingDistribution)
usdrsStreamingDistribution = lens _usdrsStreamingDistribution (\ s a -> s{_usdrsStreamingDistribution = a});
-- | The response status code.
usdrsResponseStatus :: Lens' UpdateStreamingDistributionResponse Int
usdrsResponseStatus = lens _usdrsResponseStatus (\ s a -> s{_usdrsResponseStatus = a});
|
fmapfmapfmap/amazonka
|
amazonka-cloudfront/gen/Network/AWS/CloudFront/UpdateStreamingDistribution.hs
|
mpl-2.0
| 6,092 | 0 | 13 | 1,165 | 785 | 463 | 322 | 102 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="zh-CN">
<title>Form Handler | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
veggiespam/zap-extensions
|
addOns/formhandler/src/main/javahelp/org/zaproxy/zap/extension/formhandler/resources/help_zh_CN/helpset_zh_CN.hs
|
apache-2.0
| 974 | 85 | 52 | 160 | 398 | 210 | 188 | -1 | -1 |
module Main (main) where
import Network.Info
main = do
ns <- getNetworkInterfaces
mapM (putStrLn . showInterface) ns
showInterface :: NetworkInterface -> String
showInterface n = name n ++ "\n"
++ "IPv4 Address: " ++ show (ipv4 n) ++ "\n"
++ "IPv6 Address: " ++ show (ipv6 n) ++ "\n"
++ "MAC Address: " ++ show (mac n) ++ "\n"
|
olorin/network-info
|
test/src/Main.hs
|
bsd-3-clause
| 397 | 0 | 15 | 129 | 128 | 64 | 64 | 10 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleContexts #-}
#if __GLASGOW_HASKELL__ >= 706
{-# LANGUAGE PolyKinds #-}
#endif
-----------------------------------------------------------------------------
-- |
-- Module : GHC.Generics.Lens
-- Copyright : (C) 2012-16 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability : GHC
--
-- Note: @GHC.Generics@ exports a number of names that collide with @Control.Lens@.
--
-- You can use hiding or imports to mitigate this to an extent, and the following imports,
-- represent a fair compromise for user code:
--
-- > import Control.Lens hiding (Rep)
-- > import GHC.Generics hiding (from, to)
--
-- You can use 'generic' to replace 'GHC.Generics.from' and 'GHC.Generics.to' from @GHC.Generics@,
-- and probably won't be explicitly referencing 'Control.Lens.Representable.Rep' from @Control.Lens@
-- in code that uses generics.
--
-- This module provides compatibility with older GHC versions by using the
-- <http://hackage.haskell.org/package/generic-deriving generic-deriving>
-- package.
----------------------------------------------------------------------------
module GHC.Generics.Lens
(
generic
, generic1
, _V1
, _U1
, _Par1
, _Rec1
, _K1
, _M1
, _L1
, _R1
, _UAddr
, _UChar
, _UDouble
, _UFloat
, _UInt
, _UWord
) where
import Control.Lens
import GHC.Exts (Char(..), Double(..), Float(..),
Int(..), Ptr(..), Word(..))
import qualified GHC.Generics as Generic
import GHC.Generics hiding (from, to)
#if !(MIN_VERSION_base(4,9,0))
import Generics.Deriving.Base hiding (from, to)
#endif
-- $setup
-- >>> :set -XNoOverloadedStrings
-- | Convert from the data type to its representation (or back)
--
-- >>> "hello"^.generic.from generic :: String
-- "hello"
generic :: Generic a => Iso' a (Rep a b)
generic = iso Generic.from Generic.to
{-# INLINE generic #-}
-- | Convert from the data type to its representation (or back)
generic1 :: Generic1 f => Iso (f a) (f b) (Rep1 f a) (Rep1 f b)
generic1 = iso from1 to1
{-# INLINE generic1 #-}
_V1 :: Over p f (V1 s) (V1 t) a b
_V1 _ = absurd where
absurd !_a = undefined
{-# INLINE _V1 #-}
_U1 :: Iso (U1 p) (U1 q) () ()
_U1 = iso (const ()) (const U1)
{-# INLINE _U1 #-}
_Par1 :: Iso (Par1 p) (Par1 q) p q
_Par1 = iso unPar1 Par1
{-# INLINE _Par1 #-}
_Rec1 :: Iso (Rec1 f p) (Rec1 g q) (f p) (g q)
_Rec1 = iso unRec1 Rec1
{-# INLINE _Rec1 #-}
_K1 :: Iso (K1 i c p) (K1 j d q) c d
_K1 = iso unK1 K1
{-# INLINE _K1 #-}
_M1 :: Iso (M1 i c f p) (M1 j d g q) (f p) (g q)
_M1 = iso unM1 M1
{-# INLINE _M1 #-}
_L1 :: Prism' ((f :+: g) a) (f a)
_L1 = prism remitter reviewer
where
remitter = L1
reviewer (L1 l) = Right l
reviewer x = Left x
{-# INLINE _L1 #-}
-- | You can access fields of `data (f :*: g) p` by using its `Field1` and `Field2` instances
_R1 :: Prism' ((f :+: g) a) (g a)
_R1 = prism remitter reviewer
where
remitter = R1
reviewer (R1 l) = Right l
reviewer x = Left x
{-# INLINE _R1 #-}
_UAddr :: Iso (UAddr p) (UAddr q) (Ptr c) (Ptr d)
_UAddr = iso remitter reviewer
where
remitter (UAddr a) = Ptr a
reviewer (Ptr a) = UAddr a
{-# INLINE _UAddr #-}
_UChar :: Iso (UChar p) (UChar q) Char Char
_UChar = iso remitter reviewer
where
remitter (UChar c) = C# c
reviewer (C# c) = UChar c
{-# INLINE _UChar #-}
_UDouble :: Iso (UDouble p) (UDouble q) Double Double
_UDouble = iso remitter reviewer
where
remitter (UDouble d) = D# d
reviewer (D# d) = UDouble d
{-# INLINE _UDouble #-}
_UFloat :: Iso (UFloat p) (UFloat q) Float Float
_UFloat = iso remitter reviewer
where
remitter (UFloat f) = F# f
reviewer (F# f) = UFloat f
{-# INLINE _UFloat #-}
_UInt :: Iso (UInt p) (UInt q) Int Int
_UInt = iso remitter reviewer
where
remitter (UInt i) = I# i
reviewer (I# i) = UInt i
{-# INLINE _UInt #-}
_UWord :: Iso (UWord p) (UWord q) Word Word
_UWord = iso remitter reviewer
where
remitter (UWord w) = W# w
reviewer (W# w) = UWord w
{-# INLINE _UWord #-}
|
ddssff/lens
|
src/GHC/Generics/Lens.hs
|
bsd-3-clause
| 4,250 | 0 | 9 | 921 | 1,193 | 645 | 548 | 97 | 2 |
-- | compute the cyclic tetris graph
-- corresponding to a (partial) PCP solution
module PCP.Paths where
import Autolib.Util.Splits
import Autolib.NFA.Type
import Autolib.NFA.Dot
import Autolib.NFA.Compact
import Autolib.Exp.Type
import Autolib.Informed
import Autolib.Schichten
import Autolib.Util.Sort
import Autolib.Util.Size
import Autolib.ToDoc
import Autolib.Reader
import Autolib.Hash
import PCP.Type
import PCP.Examples
import Control.Monad.State
import Control.Monad.Identity
import Data.Maybe
-- | quick sanity check on the 'simple' example instance
test = check simple $ take 4 ssimple
app p xs =
let top = do x <- xs ; fst $ p !! x
down = do x <- xs ; snd $ p !! x
in ( top, down )
emit fname pcp sol
= writeFile ( fname ++ ".dot" )
$ show $ toDot $ automate $ check pcp sol
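-- Added usage note (not in the original source): for instance,
-- @emit "simple" simple (take 4 ssimple)@ renders the graph for the first four
-- steps of the 'ssimple' solution to the file @simple.dot@.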
check pcp (k : ks) = runIdentity
$ evalStateT ( do
let (l, r) = pcp !! k
p <- next
q <- next
path <- link (p, drop (length r) l, q)
put_border path
put_cut p
spirals ks
)
$ blank 'x' pcp
--------------------------------------------------------------------------
data Config c a = Config
{ separator :: c
, pcp :: PCP c
, store :: [(a, c, a)]
, supply :: [a] -- ^ lazy infinite
, used :: [a]
, from :: a, to :: a
, border :: [(a, c, a)]
, glue :: [(a, a)] -- ^ states should be identified
, cut :: a
, time :: Int
}
instance ToDoc [c] => ToDoc ( Config c a ) where
toDoc = toDoc . wesen
wesen :: Config c a -> (Int, [c])
wesen conf = ( time conf, strip $ border conf )
instance ( Eq c ) => Eq ( Config c a ) where
c == d = wesen c == wesen d
instance ( Ord c ) => Ord ( Config c a ) where
c `compare` d = wesen c `compare` wesen d
spirals :: ( Eq a, Monad m, Eq c )
=> [Int]
-> StateT (Config c a) m (Config c a)
spirals ks = do
sequence_ $ map spiral ks
get
-- | apply one pcp pair
spiral :: (Eq a, Monad m, Eq c)
=> Int
-> StateT (Config c a) m ()
spiral k = do
p <- gets pcp
let (l, r) = p !! k
s <- gets separator
bore <- gets border
c <- gets cut
jack <- if c `elem` states_on ( take (length r) bore )
then do -- start next level
jack <- next
put_cut $ jack
add_glue (jack, end bore)
when ( c == begin bore ) $ do
link (c, [s], jack)
return ()
return jack
else do -- continue old level
return $ end bore
-- add to border (on the right)
ext <- extend (jack, l)
let (pre, post) = splitAt (length r) (bore ++ ext)
if strip pre == r
then return ()
else error "PCP.Paths.spiral: does not match"
put_border post
link (end pre, [s], end ext)
return ()
states_on path = do
(p, c, q) <- path
return p
blank :: c -- ^ separator
-> PCP c -- ^ instance
-> Config c Int
blank s p = Config { separator = s, pcp = p
, store = []
, supply = [0 ..]
, used = []
-- questionable
, from = 0 , to = 0, cut = 0
, border = []
, glue = []
, time = 0
}
-- | get next item from supply
next :: Monad m
=> StateT (Config c a) m a
next = do
conf <- get
let (x : xs) = supply conf
put $ conf { supply = xs
, used = x : used conf
}
return x
-- | create new path into nothing
extend :: Monad m
=> (a, [c])
-> StateT (Config c a) m [(a,c,a)]
extend (p, w) = do
q <- next
link (p, w, q)
-- | create new path between existing states
-- (and add to store, but not to border)
link :: Monad m
=> (a, [c], a)
-> StateT (Config c a) m [(a,c,a)]
link (p, w, q) = do
this <- sequence $ replicate (pred $ length w) next
let pqs = p : this ++ [q]
let path = zip3 pqs w (tail pqs)
add_store path
add_glue g = do
conf <- get
put $ conf { glue = g : glue conf }
add_store :: Monad m
=> [(a, c, a)]
-> StateT (Config c a) m [(a,c,a)]
add_store path = do
conf <- get
put $ conf { store = path ++ store conf }
return path
put_cut c = do
conf <- get
put $ conf { cut = c }
put_border :: Monad m
=> [(a, c, a)]
-> StateT (Config c a) m ()
put_border path = do
conf <- get
put $ conf { border = path }
start :: Monad m
=> [c]
-> StateT (Config c a) m ()
start w = do
p <- next
q <- next
path <- link (p, w, q)
put_border path
conf <- get
put $ conf { from = p, to = q }
------------------------------------------------------------------------
-- state monad stuff
------------------------------------------------------------------------
-- | output all states (level order)
trace :: Ord s
=> StateT s [] a
-> s
-> [[s]]
trace f s = map setToList
$ schichten (mkSet . execStateT f) s
-- | execute computations in parallel
fork :: [ StateT s [] a ] -> StateT s [] a
fork fs = StateT $ \ s -> do
f <- fs
runStateT f s
--------------------------------------------------------------------------
type Rule c = ( [c], [c] )
type SRS c = [ Rule c ]
type Step c a = ( a, c, a )
type Path c a = [ Step c a ]
strip :: Path c a -> [c]
strip = map ( \ (_,b,_) -> b)
begin :: Path c a -> a
begin path = case path of (a, _, _) : _ -> a
end :: Path c a -> a
end path = case last path of (_, _, c) -> c
exec :: ( Monad m )
=> StateT s m Bool
-> StateT s m a
-> StateT s m a
exec step final = do
cont <- step
if cont
then exec step final
else final
-- | execute one leftmost step
-- return success flag (False = reached normal form)
leftmost :: ( Monad m, Eq c )
=> Apps c a
-> StateT (Config c a) m Bool
leftmost apps = StateT $ \ s ->
case runStateT (results apps) s of
[] -> return ( False, s )
( _, t ) : _ -> return ( True , t )
-- | this function is questionable
everywhere :: ( Eq c )
=> Apps c a
-> StateT (Config c a) [] Bool
everywhere apps = StateT $ \ s ->
case runStateT (results apps) s of
[] -> return ( False, s )
things -> do
(_, t) <- things
return ( True , t )
-- | execute one step (all possible ways)
results :: ( Eq c )
=> Apps c a
-> StateT (Config c a) [] ()
results apps = do
bore <- gets border
fork $ do
(pre, mid, r, post) <- apps bore
return $ do
path <- link (begin mid, r, end mid)
put_border $ pre ++ path ++ post
type Apps c a = Path c a -> [(Path c a, Path c a, [c], Path c a)]
-- | find all possible rule applications (left to right)
applicables :: Eq c
=> SRS c
-> Apps c a
applicables srs w = do
( pre, midpost ) <- splits w
(l, r) <- srs
let (mid, post) = splitAt (length l) midpost
guard $ strip mid == l
return (pre, mid, r, post)
--------------------------------------------------------------------------
data Net c a = Net { net :: NFA c (Label a) }
instance NFAC c a => ToDoc (Net c a) where
toDoc = toDoc . net
instance NFAC c a => Show (Net c a) where
show = render . toDoc
instance NFAC c a => ToDot (Net c a) where
toDot = toDot . net
toDotProgram _ = Neato
toDotOptions _ = unwords
$ [ "-Nshape=point", "-Nfixedsize=true"
-- , "-Gsize=9,14", "-Gratio=fill"
, "-Gstart=10"
]
data Label a = Label { it :: a, tag :: Doc }
instance Eq a => Eq (Label a) where
x == y = it x == it y
instance Ord a => Ord (Label a) where
compare x y = compare (it x) (it y)
instance Hash a => Hash (Label a) where
hash = hash . it
instance ToDoc (Label a) where toDoc = tag
instance Reader (Label a) -- dummy
{-
automate :: ( NFAC c Int )
=> Config c Int
-> Net (RX c) (Label Int)
-}
automate conf =
let trs = store conf
fm = addListToFM_C (error "PCP.Paths.automate") emptyFM $ do
(k, (x, y)) <- zip [0 :: Int ..] $ glue conf
[ (x, toDoc k), (y, toDoc k) ]
label q = Label
{ it = q
, tag = lookupWithDefaultFM fm empty q
}
in Net $ Autolib.NFA.Compact.make
$ statemap label
$ NFA { nfa_info = funni "automate" [ ]
, states = mkSet $ used conf
, alphabet = mkSet $ strip trs
, starts = unitSet $ from conf
-- so that all glue points remain visible when compacting
, finals = mkSet $ keysFM fm
, trans = collect trs
}
|
Erdwolf/autotool-bonn
|
src/PCP/Paths.hs
|
gpl-2.0
| 8,442 | 50 | 20 | 2,726 | 3,467 | 1,824 | 1,643 | -1 | -1 |
-- |
-- Module : React
-- Copyright : (C) 2014-15 Joel Burget
-- License : MIT
-- Maintainer : Joel Burget <[email protected]>
-- Stability : experimental
-- Portability : non-portable
--
-- Usage:
--
-- This tutorial assumes only a basic understanding of React, the DOM, and
-- browser events. I recomment at least skimming the [official React
-- tutorial](https://facebook.github.io/react/docs/tutorial.html).
--
-- Let's start with a basic example:
--
-- @
-- page_ :: ReactNode Void
-- page_ =
-- let cls = smartClass
-- { name = "page"
--
-- -- initially the input is empty
-- , initialState = ""
--
-- -- always transition to the input's new value
-- , transition = \(_, value) -> (value, Nothing)
--
-- , renderFn = \_ str -> div_ [ class_ "container" ] $ do
-- input_ [ value_ str, onChange (Just . value . target) ]
-- }
-- in classLeaf cls ()
--
-- main :: IO ()
-- main = do
-- Just doc <- currentDocument
-- Just elem <- documentGetElementById doc ("elem" :: JSString)
-- render page_ elem
-- @
--
-- In this example we defined a React class with 'Text' state, but taking only
-- @()@ as a prop. It's possible to use anything for props and state --
-- numbers, JSON, even React classes.
--
-- In the example the input always contains the state from the class, and the
-- state is updated on every input change event -- effectively, every
-- keystroke.
module React
(
-- * Classes
ReactClass()
, ClassConfig(..)
, ClassCtx
, smartClass
, dumbClass
-- * Rendering
, render
, debugRender
-- * React Nodes
, ReactNode
-- * Events
, module React.Events
-- * Local
, module React.Local
-- XXX(joel)
, AttrOrHandler()
-- TODO - create React.Internal module for these?
-- * Attributes
, module React.Attrs
-- * Creating Elements
, module React.Elements
-- * JS Interop
, ImportedClass
-- * PropTypes
, PropRequired(IsRequired, IsntRequired)
, PropType(..)
, PropTypable
) where
-- TODO
-- restricted monads
-- store elem in monad
-- escaping / dangerouslySetInnerHTML
import React.Class
import React.Imports
import React.Local
import React.Render
import React.Types
import React.Attrs
import React.Elements
import React.Events
import React.Rebindable
import React.PropTypes
|
joelburget/react-haskell
|
src/React.hs
|
mit
| 2,455 | 0 | 5 | 644 | 201 | 153 | 48 | 35 | 0 |
module ListImplicitUsed where
import Data.List
foo = sort
|
serokell/importify
|
test/test-data/base@basic/30-ListImplicitUsed.hs
|
mit
| 70 | 0 | 4 | 20 | 14 | 9 | 5 | 3 | 1 |
-- | Data.Graph is sorely lacking in several ways; this module just tries to fill in
-- some holes and provide a more convenient interface
{-# LANGUAGE RecursiveDo #-}
module Util.Graph(
Graph(),
Util.Graph.components,
Util.Graph.dff,
Util.Graph.reachable,
Util.Graph.scc,
Util.Graph.topSort,
cyclicNodes,
findLoopBreakers,
fromGraph,
fromScc,
getBindGroups,
groupOverlapping,
mapGraph,
newGraph',
newGraph,
newGraphReachable,
reachableFrom,
restitchGraph,
sccForest,
easySCC,
sccGroups,
toDag,
transitiveClosure,
transitiveReduction
) where
import Control.Monad
import Control.Monad.ST
import Data.Array.IArray
import Data.Array.ST hiding(unsafeFreeze)
import Data.Array.Unsafe (unsafeFreeze)
import Data.Graph hiding(Graph)
import Data.Maybe
import Data.STRef
import GenUtil
import Data.List(sort,sortBy,group,delete)
import Util.UnionFindST
import qualified Data.Graph as G
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.Traversable as A
data Graph n = Graph G.Graph (Table n)
instance Show n => Show (Graph n) where
showsPrec n g = showsPrec n (Util.Graph.scc g)
-- simple scc interface
easySCC :: Ord name => [node] -> (node -> name) -> (node -> [name]) -> [[node]]
easySCC ns fn fd = map f $ stronglyConnComp [ (n, fn n, fd n) | n <- ns] where
f (AcyclicSCC x) = [x]
f (CyclicSCC xs) = xs
fromGraph :: Graph n -> [(n,[n])]
fromGraph (Graph g lv) = [ (lv!v,map (lv!) vs) | (v,vs) <- assocs g ]
newGraph :: Ord k => [n] -> (n -> k) -> (n -> [k]) -> (Graph n)
newGraph ns a b = snd $ newGraph' ns a b
newGraphReachable :: Ord k => [n] -> (n -> k) -> (n -> [k]) -> ([k] -> [n],Graph n)
newGraphReachable ns fn fd = (rable,ng) where
(vmap,ng) = newGraph' ns fn fd
rable ks = Util.Graph.reachable ng [ v | Just v <- map (flip Map.lookup vmap) ks ]
reachableFrom :: Ord k => (n -> k) -> (n -> [k]) -> [n] -> [k] -> [n]
reachableFrom fn fd ns = fst $ newGraphReachable ns fn fd
-- | Build a graph from a list of nodes uniquely identified by keys,
-- with a list of keys of nodes this node should have edges to.
-- The out-list may contain keys that don't correspond to
-- nodes of the graph; they are ignored.
newGraph' :: Ord k => [n] -> (n -> k) -> (n -> [k]) -> (Map.Map k Vertex,Graph n)
newGraph' ns fn fd = (kmap,Graph graph nr) where
nr = listArray bounds0 ns
max_v = length ns - 1
bounds0 = (0,max_v) :: (Vertex, Vertex)
kmap = Map.fromList [ (fn n,i) | (i,n) <- zip [0 ..] ns ]
graph = listArray bounds0 [mapMaybe (flip Map.lookup kmap) (snub $ fd n) | n <- ns]
fromScc (Left n) = [n]
fromScc (Right n) = n
-- | determine a set of loopbreakers subject to a fitness function
-- loopbreakers have a minimum of their incoming edges ignored.
findLoopBreakers
:: (n -> Int) -- ^ fitness function, greater numbers mean more likely to be a loopbreaker
-> (n -> Bool) -- ^ whether a node is suitable at all for a choice as loopbreaker
-> Graph n -- ^ the graph
-> ([n],[n]) -- ^ (loop breakers,dependency ordered nodes after loopbreaking)
findLoopBreakers func ex (Graph g ln) = ans where
scc = G.scc g
ans = f g scc [] [] where
f g (Node v []:sccs) fs lb
| v `elem` g ! v = let ng = (fmap (Data.List.delete v) g) in f ng (G.scc ng) [] (v:lb)
| otherwise = f g sccs (v:fs) lb
f g (n:_) fs lb = f ng (G.scc ng) [] (mv:lb) where
mv = case sortBy (\ a b -> compare (snd b) (snd a)) [ (v,func (ln!v)) | v <- ns, ex (ln!v) ] of
((mv,_):_) -> mv
[] -> error "findLoopBreakers: no valid loopbreakers"
ns = dec n []
ng = fmap (Data.List.delete mv) g
f _ [] xs lb = (map ((ln!) . head) (group $ sort lb),reverse $ map (ln!) xs)
dec (Node v ts) vs = v:foldr dec vs ts
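-- A small hedged example (added; not in the original module): the graph and
-- weights below are hypothetical. Nodes "a" and "b" form a cycle and "b" has
-- the larger fitness value, so it should be picked as the loop breaker; "c"
-- only depends on "a" and ends up in the dependency-ordered remainder.
_exampleLoopBreakers :: ([(String, Int, [String])], [(String, Int, [String])])
_exampleLoopBreakers = findLoopBreakers weight (const True) g where
    weight (_, w, _)  = w
    name   (n, _, _)  = n
    deps   (_, _, ds) = ds
    g = newGraph [ ("a", 1, ["b"]), ("b", 2, ["a"]), ("c", 0, ["a"]) ] name deps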
reachable :: Graph n -> [Vertex] -> [n]
reachable (Graph g ln) vs = map (ln!) $ snub $ concatMap (G.reachable g) vs
sccGroups :: Graph n -> [[n]]
sccGroups g = map fromScc (Util.Graph.scc g)
scc :: Graph n -> [Either n [n]]
scc (Graph g ln) = map decode forest where
forest = G.scc g
decode (Node v [])
| v `elem` g ! v = Right [ln!v]
| otherwise = Left (ln!v)
decode other = Right (dec other [])
dec (Node v ts) vs = ln!v:foldr dec vs ts
sccForest :: Graph n -> Forest n
sccForest (Graph g ln) = map (fmap (ln!)) forest where
forest = G.scc g
dff :: Graph n -> Forest n
dff (Graph g ln) = map (fmap (ln!)) forest where
forest = G.dff g
components :: Graph n -> [[n]]
components (Graph g ln) = map decode forest where
forest = G.components g
decode n = dec n []
dec (Node v ts) vs = ln!v:foldr dec vs ts
topSort :: Graph n -> [n]
topSort (Graph g ln) = map (ln!) $ G.topSort g
cyclicNodes :: Graph n -> [n]
cyclicNodes g = concat [ xs | Right xs <- Util.Graph.scc g]
toDag :: Graph n -> Graph [n]
toDag (Graph g lv) = Graph g' ns' where
ns' = listArray (0,max_v) [ map (lv!) ns | ns <- nss ]
g' = listArray (0,max_v) [ snub [ v | n <- ns, v <- g!n ] | ns <- nss ]
max_v = length nss - 1
nss = map (flip f []) (G.scc g) where
f (Node v ts) rs = v:foldr f rs ts
type AdjacencyMatrix s = STArray s (Vertex,Vertex) Bool
type IAdjacencyMatrix = Array (Vertex,Vertex) Bool
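-- Added note (not in the original source): 'transitiveClosureAM' below is
-- Warshall's algorithm run in place on the mutable adjacency matrix; afterwards
-- the (i,j) entry is True iff j is reachable from i by a path of one or more edges.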
transitiveClosureAM :: AdjacencyMatrix s -> ST s ()
transitiveClosureAM arr = do
bnds@(_,(max_v,_)) <- getBounds arr
forM_ [0 .. max_v] $ \k -> do
forM_ (range bnds) $ \ (i,j) -> do
dij <- readArray arr (i,j)
dik <- readArray arr (i,k)
dkj <- readArray arr (k,j)
writeArray arr (i,j) (dij || (dik && dkj))
transitiveReductionAM :: AdjacencyMatrix s -> ST s ()
transitiveReductionAM arr = do
bnds@(_,(max_v,_)) <- getBounds arr
transitiveClosureAM arr
(farr :: IAdjacencyMatrix) <- freeze arr
forM_ [0 .. max_v] $ \k -> do
forM_ (range bnds) $ \ (i,j) -> do
if farr!(k,i) && farr!(i,j) then
writeArray arr (k,j) False
else return ()
toAdjacencyMatrix :: G.Graph -> ST s (AdjacencyMatrix s)
toAdjacencyMatrix g = do
let (0,max_v) = bounds g
arr <- newArray ((0,0),(max_v,max_v)) False :: ST s (STArray s (Vertex,Vertex) Bool)
sequence_ [ writeArray arr (v,u) True | (v,vs) <- assocs g, u <- vs ]
return arr
fromAdjacencyMatrix :: AdjacencyMatrix s -> ST s G.Graph
fromAdjacencyMatrix arr = do
bnds@(_,(max_v,_)) <- getBounds arr
rs <- getAssocs arr
let rs' = [ x | (x,True) <- rs ]
return (listArray (0,max_v) [ [ v | (n',v) <- rs', n == n' ] | n <- [ 0 .. max_v] ])
transitiveClosure :: Graph n -> Graph n
transitiveClosure (Graph g ns) = let g' = runST (tc g) in (Graph g' ns) where
tc g = do
a <- toAdjacencyMatrix g
transitiveClosureAM a
fromAdjacencyMatrix a
transitiveReduction :: Graph n -> Graph n
transitiveReduction (Graph g ns) = let g' = runST (tc g) in (Graph g' ns) where
tc g = do
a <- toAdjacencyMatrix g
transitiveReductionAM a
fromAdjacencyMatrix a
instance Functor Graph where
fmap f (Graph g n) = Graph g (fmap f n)
--mapT :: (Vertex -> a -> b) -> Table a -> Table b
--mapT f t = listArray (bounds t) [ (f v (t!v)) | v <- indices t ]
restitchGraph :: Ord k => (n -> k) -> (n -> [k]) -> Graph n -> Graph n
restitchGraph fn fd (Graph g nr) = Graph g' nr where
kmap = Map.fromList [ (fn n,i) | (i,n) <- assocs nr ]
g' = listArray (bounds g) [mapMaybe (flip Map.lookup kmap) (snub $ fd n) | n <- elems nr]
mapGraph :: forall a b . (a -> [b] -> b) -> Graph a -> Graph b
mapGraph f (Graph gr nr) = runST $ do
mnr <- thaw nr :: ST s (STArray s Vertex a)
mnr <- mapArray Left mnr
let g i = readArray mnr i >>= \v -> case v of
Right m -> return m
Left l -> mdo
writeArray mnr i (Right r)
rs <- mapM g (gr!i)
let r = f l rs
return r
mapM_ g (range $ bounds nr)
mnr <- mapArray fromRight mnr
mnr <- unsafeFreeze mnr
return (Graph gr mnr)
-- this uses a very efficient union-find algorithm.
groupOverlapping :: Ord b => (a -> [b]) -> [a] -> (Map.Map b Int,[(Int,[a])])
groupOverlapping fn xs = runUF $ do
es <- forM xs $ \x -> new_ x
mref <- liftST (newSTRef Map.empty)
forM_ es $ \e -> do
let bs = fn $ fromElement e
cmap <- liftST $ readSTRef mref
sequence_ [ union_ x e | b <- bs, Just x <- [Map.lookup b cmap]]
liftST $ modifySTRef mref (Map.union (Map.fromList [ (x,e) | x <- bs ]))
cmap <- liftST $ readSTRef mref
cmap' <- A.mapM getUnique cmap
es <- (Set.toList . Set.fromList) `liftM` mapM find es
es' <- forM es $ \e -> do
u <- getUnique e
es <- getElements e
return (u,map fromElement es)
return (cmap',es')
-- Given a list of nodes, a function to convert nodes to a list of their names,
-- and a function to convert nodes to a list of names on which the node is
-- dependent, getBindGroups will return a list of bind groups generated from the
-- list of nodes given. Nodes with matching keys are placed in the same binding
-- groups.
getBindGroups :: Ord name =>
[node] -> -- List of nodes
(node -> [name]) -> -- Function to convert nodes to unique names
(node -> [name]) -> -- Function to return dependencies of this node
[Either [node] [[node]]] -- binding groups, collecting nodes with overlapping names together
getBindGroups ns fn fd = ans where
(mp,ns') = (groupOverlapping fn ns)
ans = map f $ stronglyConnComp
[(xs,Just n,map (`Map.lookup` mp) (concatMap fd xs)) | (n,xs) <- ns']
f (AcyclicSCC x) = Left x
f (CyclicSCC xs) = Right xs
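-- A rough usage sketch ('Decl' here is a hypothetical node type, not part of
-- this module):
--
--   data Decl = Decl { declName :: String, declDeps :: [String] }
--   bgs = getBindGroups decls (\d -> [declName d]) declDeps
--
-- Each 'Left' is one non-recursive group of nodes sharing names; each 'Right'
-- is a list of such groups that are mutually recursive.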
|
hvr/jhc
|
src/Util/Graph.hs
|
mit
| 9,869 | 9 | 25 | 2,665 | 4,290 | 2,226 | 2,064 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sr-CS">
<title>Directory List v2.3 LC</title>
<maps>
<homeID>directorylistv2_3_lc</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/directorylistv2_3_lc/src/main/javahelp/help_sr_CS/helpset_sr_CS.hs
|
apache-2.0
| 984 | 78 | 66 | 158 | 414 | 210 | 204 | -1 | -1 |
{-|
This modules defines the 'QueryArr' arrow, which is an arrow that represents
selecting data from a database, and composing multiple queries together.
-}
module Opaleye.SQLite.QueryArr (QueryArr, Query) where
import Opaleye.SQLite.Internal.QueryArr (QueryArr, Query)
|
bergmark/haskell-opaleye
|
opaleye-sqlite/src/Opaleye/SQLite/QueryArr.hs
|
bsd-3-clause
| 283 | 0 | 5 | 46 | 31 | 21 | 10 | 2 | 0 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : Text.Read.Lex
-- Copyright : (c) The University of Glasgow 2002
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : non-portable (uses Text.ParserCombinators.ReadP)
--
-- The cut-down Haskell lexer, used by Text.Read
--
-----------------------------------------------------------------------------
module Text.Read.Lex
-- lexing types
( Lexeme(..) -- :: *; Show, Eq
, Lexeme'(..)
, numberToInteger, numberToRangedRational
-- lexer
, lex -- :: ReadP Lexeme Skips leading spaces
, lex' -- :: ReadP Lexeme Skips leading spaces
, hsLex -- :: ReadP String
, lexChar -- :: ReadP Char Reads just one char, with H98 escapes
, readIntP -- :: Num a => a -> (Char -> Bool) -> (Char -> Int) -> ReadP a
, readOctP -- :: Num a => ReadP a
, readDecP -- :: Num a => ReadP a
, readHexP -- :: Num a => ReadP a
)
where
import Text.ParserCombinators.ReadP
#ifdef __GLASGOW_HASKELL__
import GHC.Base
import GHC.Num( Num(..), Integer )
import GHC.Show( Show(..) )
#ifndef __HADDOCK__
import {-# SOURCE #-} GHC.Unicode ( isSpace, isAlpha, isAlphaNum )
#endif
import GHC.Real( Integral, Rational, (%), fromIntegral,
toInteger, (^), infinity, notANumber )
import GHC.List
import GHC.Enum( maxBound )
#else
import Prelude hiding ( lex )
import Data.Char( chr, ord, isSpace, isAlpha, isAlphaNum )
import Data.Ratio( Ratio, (%) )
#endif
#ifdef __HUGS__
import Hugs.Prelude( Ratio(..) )
#endif
import Data.Maybe
import Control.Monad
-- -----------------------------------------------------------------------------
-- Lexing types
-- | Haskell lexemes.
data Lexeme
= Char Char -- ^ Character literal
| String String -- ^ String literal, with escapes interpreted
| Punc String -- ^ Punctuation or reserved symbol, e.g. @(@, @::@
| Ident String -- ^ Haskell identifier, e.g. @foo@, @Baz@
| Symbol String -- ^ Haskell symbol, e.g. @>>@, @:%@
| Int Integer -- ^ Integer literal
| Rat Rational -- ^ Floating point literal
| EOF
deriving (Eq, Show)
data Lexeme' = Ident' String
| Punc' String
| Symbol' String
| Number Number
deriving (Eq, Show)
data Number = MkNumber Int -- Base
Digits -- Integral part
| MkDecimal Digits -- Integral part
(Maybe Digits) -- Fractional part
(Maybe Integer) -- Exponent
deriving (Eq, Show)
numberToInteger :: Number -> Maybe Integer
numberToInteger (MkNumber base iPart) = Just (val (fromIntegral base) 0 iPart)
numberToInteger (MkDecimal iPart Nothing Nothing) = Just (val 10 0 iPart)
numberToInteger _ = Nothing
numberToRangedRational :: (Int, Int) -> Number
-> Maybe Rational -- Nothing = Inf
numberToRangedRational (neg, pos) n@(MkDecimal iPart mFPart (Just exp))
= let mFirstDigit = case dropWhile (0 ==) iPart of
iPart'@(_ : _) -> Just (length iPart')
[] -> case mFPart of
Nothing -> Nothing
Just fPart ->
case span (0 ==) fPart of
(_, []) -> Nothing
(zeroes, _) ->
Just (negate (length zeroes))
in case mFirstDigit of
Nothing -> Just 0
Just firstDigit ->
let firstDigit' = firstDigit + fromInteger exp
in if firstDigit' > (pos + 3)
then Nothing
else if firstDigit' < (neg - 3)
then Just 0
else Just (numberToRational n)
numberToRangedRational _ n = Just (numberToRational n)
numberToRational :: Number -> Rational
numberToRational (MkNumber base iPart) = val (fromIntegral base) 0 iPart % 1
numberToRational (MkDecimal iPart mFPart mExp)
= let i = val 10 0 iPart
in case (mFPart, mExp) of
(Nothing, Nothing) -> i % 1
(Nothing, Just exp)
| exp >= 0 -> (i * (10 ^ exp)) % 1
| otherwise -> i % (10 ^ (- exp))
(Just fPart, Nothing) -> fracExp 0 i fPart
(Just fPart, Just exp) -> fracExp exp i fPart
-- -----------------------------------------------------------------------------
-- Lexing
lex :: ReadP Lexeme
lex = skipSpaces >> lexToken
lex' :: ReadP Lexeme'
lex' = skipSpaces >> lexToken'
hsLex :: ReadP String
-- ^ Haskell lexer: returns the lexed string, rather than the lexeme
hsLex = do skipSpaces
(s,_) <- gather lexToken
return s
lexToken :: ReadP Lexeme
lexToken = lexEOF +++
lexLitChar +++
lexString +++
lexPunc +++
lexSymbol +++
lexId +++
lexNumber
lexToken' :: ReadP Lexeme'
lexToken' = lexSymbol' +++
lexId' +++
fmap Number lexNumber'
-- ----------------------------------------------------------------------
-- End of file
lexEOF :: ReadP Lexeme
lexEOF = do s <- look
guard (null s)
return EOF
-- ---------------------------------------------------------------------------
-- Single character lexemes
lexPunc :: ReadP Lexeme
lexPunc =
do c <- satisfy isPuncChar
return (Punc [c])
where
isPuncChar c = c `elem` ",;()[]{}`"
-- ----------------------------------------------------------------------
-- Symbols
lexSymbol :: ReadP Lexeme
lexSymbol =
do s <- munch1 isSymbolChar
if s `elem` reserved_ops then
return (Punc s) -- Reserved-ops count as punctuation
else
return (Symbol s)
where
isSymbolChar c = c `elem` "!@#$%&*+./<=>?\\^|:-~"
reserved_ops = ["..", "::", "=", "\\", "|", "<-", "->", "@", "~", "=>"]
lexSymbol' :: ReadP Lexeme'
lexSymbol' =
do s <- munch1 isSymbolChar
if s `elem` reserved_ops then
return (Punc' s) -- Reserved-ops count as punctuation
else
return (Symbol' s)
where
isSymbolChar c = c `elem` "!@#$%&*+./<=>?\\^|:-~"
reserved_ops = ["..", "::", "=", "\\", "|", "<-", "->", "@", "~", "=>"]
-- ----------------------------------------------------------------------
-- identifiers
lexId :: ReadP Lexeme
lexId = lex_nan <++ lex_id
where
-- NaN and Infinity look like identifiers, so
-- we parse them first.
lex_nan = (string "NaN" >> return (Rat notANumber)) +++
(string "Infinity" >> return (Rat infinity))
lex_id = do c <- satisfy isIdsChar
s <- munch isIdfChar
return (Ident (c:s))
-- Identifiers can start with a '_'
isIdsChar c = isAlpha c || c == '_'
isIdfChar c = isAlphaNum c || c `elem` "_'"
lexId' :: ReadP Lexeme'
lexId' = do c <- satisfy isIdsChar
s <- munch isIdfChar
return (Ident' (c:s))
where
-- Identifiers can start with a '_'
isIdsChar c = isAlpha c || c == '_'
isIdfChar c = isAlphaNum c || c `elem` "_'"
#ifndef __GLASGOW_HASKELL__
infinity, notANumber :: Rational
infinity = 1 :% 0
notANumber = 0 :% 0
#endif
-- ---------------------------------------------------------------------------
-- Lexing character literals
lexLitChar :: ReadP Lexeme
lexLitChar =
do _ <- char '\''
(c,esc) <- lexCharE
guard (esc || c /= '\'') -- Eliminate '' possibility
_ <- char '\''
return (Char c)
lexChar :: ReadP Char
lexChar = do { (c,_) <- lexCharE; return c }
lexCharE :: ReadP (Char, Bool) -- "escaped or not"?
lexCharE =
do c1 <- get
if c1 == '\\'
then do c2 <- lexEsc; return (c2, True)
else do return (c1, False)
where
lexEsc =
lexEscChar
+++ lexNumeric
+++ lexCntrlChar
+++ lexAscii
lexEscChar =
do c <- get
case c of
'a' -> return '\a'
'b' -> return '\b'
'f' -> return '\f'
'n' -> return '\n'
'r' -> return '\r'
't' -> return '\t'
'v' -> return '\v'
'\\' -> return '\\'
'\"' -> return '\"'
'\'' -> return '\''
_ -> pfail
lexNumeric =
do base <- lexBaseChar <++ return 10
n <- lexInteger base
guard (n <= toInteger (ord maxBound))
return (chr (fromInteger n))
lexCntrlChar =
do _ <- char '^'
c <- get
case c of
'@' -> return '\^@'
'A' -> return '\^A'
'B' -> return '\^B'
'C' -> return '\^C'
'D' -> return '\^D'
'E' -> return '\^E'
'F' -> return '\^F'
'G' -> return '\^G'
'H' -> return '\^H'
'I' -> return '\^I'
'J' -> return '\^J'
'K' -> return '\^K'
'L' -> return '\^L'
'M' -> return '\^M'
'N' -> return '\^N'
'O' -> return '\^O'
'P' -> return '\^P'
'Q' -> return '\^Q'
'R' -> return '\^R'
'S' -> return '\^S'
'T' -> return '\^T'
'U' -> return '\^U'
'V' -> return '\^V'
'W' -> return '\^W'
'X' -> return '\^X'
'Y' -> return '\^Y'
'Z' -> return '\^Z'
'[' -> return '\^['
'\\' -> return '\^\'
']' -> return '\^]'
'^' -> return '\^^'
'_' -> return '\^_'
_ -> pfail
lexAscii =
do choice
[ (string "SOH" >> return '\SOH') <++
(string "SO" >> return '\SO')
-- \SO and \SOH need maximal-munch treatment
-- See the Haskell report Sect 2.6
, string "NUL" >> return '\NUL'
, string "STX" >> return '\STX'
, string "ETX" >> return '\ETX'
, string "EOT" >> return '\EOT'
, string "ENQ" >> return '\ENQ'
, string "ACK" >> return '\ACK'
, string "BEL" >> return '\BEL'
, string "BS" >> return '\BS'
, string "HT" >> return '\HT'
, string "LF" >> return '\LF'
, string "VT" >> return '\VT'
, string "FF" >> return '\FF'
, string "CR" >> return '\CR'
, string "SI" >> return '\SI'
, string "DLE" >> return '\DLE'
, string "DC1" >> return '\DC1'
, string "DC2" >> return '\DC2'
, string "DC3" >> return '\DC3'
, string "DC4" >> return '\DC4'
, string "NAK" >> return '\NAK'
, string "SYN" >> return '\SYN'
, string "ETB" >> return '\ETB'
, string "CAN" >> return '\CAN'
, string "EM" >> return '\EM'
, string "SUB" >> return '\SUB'
, string "ESC" >> return '\ESC'
, string "FS" >> return '\FS'
, string "GS" >> return '\GS'
, string "RS" >> return '\RS'
, string "US" >> return '\US'
, string "SP" >> return '\SP'
, string "DEL" >> return '\DEL'
]
-- ---------------------------------------------------------------------------
-- string literal
lexString :: ReadP Lexeme
lexString =
do _ <- char '"'
body id
where
body f =
do (c,esc) <- lexStrItem
if c /= '"' || esc
then body (f.(c:))
else let s = f "" in
return (String s)
lexStrItem = (lexEmpty >> lexStrItem)
+++ lexCharE
lexEmpty =
do _ <- char '\\'
c <- get
case c of
'&' -> do return ()
_ | isSpace c -> do skipSpaces; _ <- char '\\'; return ()
_ -> do pfail
-- ---------------------------------------------------------------------------
-- Lexing numbers
type Base = Int
type Digits = [Int]
lexNumber :: ReadP Lexeme
lexNumber
= lexHexOct <++ -- First try for hex or octal 0x, 0o etc
-- If that fails, try for a decimal number
lexDecNumber -- Start with ordinary digits
lexNumber' :: ReadP Number
lexNumber'
= lexHexOct' <++ -- First try for hex or octal 0x, 0o etc
-- If that fails, try for a decimal number
lexDecNumber'
lexHexOct :: ReadP Lexeme
lexHexOct
= do _ <- char '0'
base <- lexBaseChar
digits <- lexDigits base
return (Int (val (fromIntegral base) 0 digits))
lexHexOct' :: ReadP Number
lexHexOct'
= do _ <- char '0'
base <- lexBaseChar
digits <- lexDigits base
return (MkNumber base digits)
lexBaseChar :: ReadP Int
-- Lex a single character indicating the base; fail if not there
lexBaseChar = do { c <- get;
case c of
'o' -> return 8
'O' -> return 8
'x' -> return 16
'X' -> return 16
_ -> pfail }
lexDecNumber :: ReadP Lexeme
lexDecNumber =
do xs <- lexDigits 10
mFrac <- lexFrac <++ return Nothing
mExp <- lexExp <++ return Nothing
return (value xs mFrac mExp)
where
value xs mFrac mExp = valueFracExp (val 10 0 xs) mFrac mExp
valueFracExp :: Integer -> Maybe Digits -> Maybe Integer
-> Lexeme
valueFracExp a Nothing Nothing
= Int a -- 43
valueFracExp a Nothing (Just exp)
| exp >= 0 = Int (a * (10 ^ exp)) -- 43e7
| otherwise = Rat (a % (10 ^ (-exp))) -- 43e-7
valueFracExp a (Just fs) mExp -- 4.3[e2]
= Rat (fracExp (fromMaybe 0 mExp) a fs)
-- Be a bit more efficient in calculating the Rational.
-- Instead of calculating the fractional part alone, then
-- adding the integral part and finally multiplying with
-- 10 ^ exp if an exponent was given, do it all at once.
lexDecNumber' :: ReadP Number
lexDecNumber' =
do xs <- lexDigits 10
mFrac <- lexFrac <++ return Nothing
mExp <- lexExp <++ return Nothing
return (MkDecimal xs mFrac mExp)
lexFrac :: ReadP (Maybe Digits)
-- Read the fractional part; fail if it doesn't
-- start ".d" where d is a digit
lexFrac = do _ <- char '.'
fraction <- lexDigits 10
return (Just fraction)
lexExp :: ReadP (Maybe Integer)
lexExp = do _ <- char 'e' +++ char 'E'
exp <- signedExp +++ lexInteger 10
return (Just exp)
where
signedExp
= do c <- char '-' +++ char '+'
n <- lexInteger 10
return (if c == '-' then -n else n)
lexDigits :: Int -> ReadP Digits
-- Lex a non-empty sequence of digits in specified base
lexDigits base =
do s <- look
xs <- scan s id
guard (not (null xs))
return xs
where
scan (c:cs) f = case valDig base c of
Just n -> do _ <- get; scan cs (f.(n:))
Nothing -> do return (f [])
scan [] f = do return (f [])
lexInteger :: Base -> ReadP Integer
lexInteger base =
do xs <- lexDigits base
return (val (fromIntegral base) 0 xs)
val :: Num a => a -> a -> Digits -> a
-- val base y [d1,..,dn] = y ++ [d1,..,dn], as it were
val _ y [] = y
val base y (x:xs) = y' `seq` val base y' xs
where
y' = y * base + fromIntegral x
-- Calculate a Rational from the exponent [of 10 to multiply with],
-- the integral part of the mantissa and the digits of the fractional
-- part. Leaving the calculation of the power of 10 until the end,
-- when we know the effective exponent, saves multiplications.
-- More importantly, this way we need at most one gcd instead of three.
--
-- frac was never used with anything but Integer and base 10, so
-- those are hardcoded now (trivial to change if necessary).
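--
-- For example (illustrative values):
--   fracExp 0 4 [3]  ==  43 % 10     -- i.e. "4.3"
--   fracExp 2 4 [3]  ==  430 % 1     -- i.e. "4.3e2"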
fracExp :: Integer -> Integer -> Digits -> Rational
fracExp exp mant []
| exp < 0 = mant % (10 ^ (-exp))
| otherwise = fromInteger (mant * 10 ^ exp)
fracExp exp mant (d:ds) = exp' `seq` mant' `seq` fracExp exp' mant' ds
where
exp' = exp - 1
mant' = mant * 10 + fromIntegral d
valDig :: (Eq a, Num a) => a -> Char -> Maybe Int
valDig 8 c
| '0' <= c && c <= '7' = Just (ord c - ord '0')
| otherwise = Nothing
valDig 10 c = valDecDig c
valDig 16 c
| '0' <= c && c <= '9' = Just (ord c - ord '0')
| 'a' <= c && c <= 'f' = Just (ord c - ord 'a' + 10)
| 'A' <= c && c <= 'F' = Just (ord c - ord 'A' + 10)
| otherwise = Nothing
valDig _ _ = error "valDig: Bad base"
valDecDig :: Char -> Maybe Int
valDecDig c
| '0' <= c && c <= '9' = Just (ord c - ord '0')
| otherwise = Nothing
-- ----------------------------------------------------------------------
-- other numeric lexing functions
readIntP :: Num a => a -> (Char -> Bool) -> (Char -> Int) -> ReadP a
readIntP base isDigit valDigit =
do s <- munch1 isDigit
return (val base 0 (map valDigit s))
readIntP' :: (Eq a, Num a) => a -> ReadP a
readIntP' base = readIntP base isDigit valDigit
where
isDigit c = maybe False (const True) (valDig base c)
valDigit c = maybe 0 id (valDig base c)
readOctP, readDecP, readHexP :: (Eq a, Num a) => ReadP a
readOctP = readIntP' 8
readDecP = readIntP' 10
readHexP = readIntP' 16
|
mightymoose/liquidhaskell
|
benchmarks/base-4.5.1.0/Text/Read/Lex.hs
|
bsd-3-clause
| 17,364 | 153 | 22 | 5,629 | 4,678 | 2,428 | 2,250 | 388 | 44 |
{-# LANGUAGE CPP #-}
module Examples.Hello where
import Options.Applicative
#if __GLASGOW_HASKELL__ <= 702
import Data.Monoid
(<>) :: Monoid a => a -> a -> a
(<>) = mappend
#endif
data Sample = Sample
{ hello :: String
, quiet :: Bool }
deriving Show
sample :: Parser Sample
sample = Sample
<$> strOption
( long "hello"
<> metavar "TARGET"
<> help "Target for the greeting" )
<*> switch
( long "quiet"
<> help "Whether to be quiet" )
greet :: Sample -> IO ()
greet (Sample h False) = putStrLn $ "Hello, " ++ h
greet _ = return ()
main :: IO ()
main = execParser opts >>= greet
opts :: ParserInfo Sample
opts = info (sample <**> helper)
( fullDesc
<> progDesc "Print a greeting for TARGET"
<> header "hello - a test for optparse-applicative" )
|
begriffs/optparse-applicative
|
tests/Examples/Hello.hs
|
bsd-3-clause
| 812 | 0 | 11 | 207 | 247 | 130 | 117 | 29 | 1 |
module PatternMatch7 where
g = (\(y:ys) -> (case y of
p | p == 45 -> 12
_ -> 52))
f x = (\(p:ps) -> (case p of
l | x == 45 -> 12
_ -> 52))
|
kmate/HaRe
|
old/testing/foldDef/PatternMatch7_TokOut.hs
|
bsd-3-clause
| 205 | 0 | 14 | 104 | 105 | 56 | 49 | 7 | 2 |
{-
- This should give CORRECT on the default problem 'hello'.
-
- @EXPECTED_RESULTS@: CORRECT
-}
import System.IO
main = do putStr "Hello world!\n"
|
Lekssays/brackets
|
web/tests/test-hello.hs
|
gpl-3.0
| 153 | 0 | 7 | 29 | 18 | 9 | 9 | 2 | 1 |
module Main where
main :: IO ()
main = putStrLn "This is foo from has-exe-foo-too"
|
AndreasPK/stack
|
test/integration/tests/1198-multiple-exes-with-same-name/files/has-exe-foo-too/app/Main.hs
|
bsd-3-clause
| 84 | 0 | 6 | 16 | 22 | 12 | 10 | 3 | 1 |
-- |
-- Module: FizzWrapper
-- Copyright: Copyright (C) 2016 Michael Litchard
-- License: LGPL-3
-- Maintainer: Michael Litchard <[email protected]>
-- Stability: experimental
-- Portability: portable
-- Driver function
module FizzWrapper (fizzWrapper) where
import BasicPrelude
import FizzBuzz ( fizzBuzzFib )
import FizzTypes ( prettyPrintFizzError )
import FizzInput
-- | The wrapper is the only thing exposed to Main,
-- and the cleaning of the input
-- needs to be separated from the code doing
-- the computation.
fizzWrapper :: [Text] -> Text
fizzWrapper input =
either prettyPrintFizzError unlines $
Right <$> fizzBuzzFib =<<
convertToPInt =<<
mustHaveOne input
|
mlitchard/swiftfizz
|
src/FizzWrapper.hs
|
isc
| 716 | 0 | 9 | 148 | 87 | 54 | 33 | 11 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
#if !MIN_VERSION_base(4,8,0)
{-# LANGUAGE DeriveDataTypeable #-}
import Control.Applicative ((<$>), (<*>), (*>))
#endif
import Control.DeepSeq
import Criterion.Main
import qualified Data.ByteString as BS
import Data.Int
import qualified Data.IntMap.Strict as IntMap
import qualified Data.IntSet as IntSet
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import Data.Store
import Data.Typeable
import qualified Data.Vector as V
import qualified Data.Vector.Storable as SV
import Data.Word
import GHC.Generics
-- TODO: add packer
#if COMPARISON_BENCH
import qualified Data.Binary as Binary
import qualified Data.Serialize as Cereal
import qualified Data.ByteString.Lazy as BL
import Data.Vector.Serialize ()
#endif
data SomeData = SomeData !Int64 !Word8 !Double
deriving (Eq, Show, Generic, Typeable)
instance NFData SomeData where
rnf x = x `seq` ()
instance Store SomeData
#if COMPARISON_BENCH
instance Cereal.Serialize SomeData
instance Binary.Binary SomeData
#endif
main :: IO ()
main = do
#if SMALL_BENCH
let is = 0::Int
sds = SomeData 1 1 1
smallprods = (SmallProduct 0 1 2 3)
smallmanualprods = (SmallProductManual 0 1 2 3)
sss = [SS1 1, SS2 2, SS3 3, SS4 4]
ssms = [SSM1 1, SSM2 2, SSM3 3, SSM4 4]
nestedTuples = ((1,2),(3,4)) :: ((Int,Int),(Int,Int))
#else
let is = V.enumFromTo 1 100 :: V.Vector Int
sds = (\i -> SomeData i (fromIntegral i) (fromIntegral i))
<$> V.enumFromTo 1 100
smallprods = (\ i -> SmallProduct i (i+1) (i+2) (i+3))
<$> V.enumFromTo 1 100
smallmanualprods = (\ i -> SmallProductManual i (i+1) (i+2) (i+3))
<$> V.enumFromTo 1 100
sss = (\i -> case i `mod` 4 of
0 -> SS1 (fromIntegral i)
1 -> SS2 (fromIntegral i)
2 -> SS3 (fromIntegral i)
3 -> SS4 (fromIntegral i)
_ -> error "This does not compute."
) <$> V.enumFromTo 1 (100 :: Int)
ssms = (\i -> case i `mod` 4 of
0 -> SSM1 (fromIntegral i)
1 -> SSM2 (fromIntegral i)
2 -> SSM3 (fromIntegral i)
3 -> SSM4 (fromIntegral i)
_ -> error "This does not compute."
) <$> V.enumFromTo 1 (100 :: Int)
nestedTuples = (\i -> ((i,i+1),(i+2,i+3))) <$> V.enumFromTo (1::Int) 100
ints = [1..100] :: [Int]
pairs = map (\x -> (x, x)) ints
strings = show <$> ints
intsSet = Set.fromDistinctAscList ints
intSet = IntSet.fromDistinctAscList ints
intsMap = Map.fromDistinctAscList pairs
intMap = IntMap.fromDistinctAscList pairs
stringsSet = Set.fromList strings
stringsMap = Map.fromList (zip strings ints)
#endif
defaultMain
[ bgroup "encode"
[ benchEncode is
#if !SMALL_BENCH
, benchEncode' "1kb storable" (SV.fromList ([1..256] :: [Int32]))
, benchEncode' "10kb storable" (SV.fromList ([1..(256 * 10)] :: [Int32]))
, benchEncode' "1kb normal" (V.fromList ([1..256] :: [Int32]))
, benchEncode' "10kb normal" (V.fromList ([1..(256 * 10)] :: [Int32]))
, benchEncode intsSet
, benchEncode intSet
, benchEncode intsMap
, benchEncode intMap
, benchEncode stringsSet
, benchEncode stringsMap
#endif
, benchEncode smallprods
, benchEncode smallmanualprods
, benchEncode sss
, benchEncode ssms
, benchEncode nestedTuples
, benchEncode sds
]
, bgroup "decode"
[ benchDecode is
#if !SMALL_BENCH
, benchDecode' "1kb storable" (SV.fromList ([1..256] :: [Int32]))
, benchDecode' "10kb storable" (SV.fromList ([1..(256 * 10)] :: [Int32]))
, benchDecode' "1kb normal" (V.fromList ([1..256] :: [Int32]))
, benchDecode' "10kb normal" (V.fromList ([1..(256 * 10)] :: [Int32]))
, benchDecode intsSet
, benchDecode intSet
, benchDecode intsMap
, benchDecode intMap
, benchDecode stringsSet
, benchDecode stringsMap
#endif
, benchDecode smallprods
, benchDecode smallmanualprods
, benchDecode sss
, benchDecode ssms
, benchDecode nestedTuples
, benchDecode sds
]
]
type Ctx a =
( Store a, Typeable a, NFData a
#if COMPARISON_BENCH
, Binary.Binary a
, Cereal.Serialize a
#endif
)
benchEncode :: Ctx a => a -> Benchmark
benchEncode = benchEncode' ""
benchEncode' :: Ctx a => String -> a -> Benchmark
benchEncode' msg x0 =
env (return x0) $ \x ->
let label = msg ++ " (" ++ show (typeOf x0) ++ ")"
benchStore name = bench name (nf encode x) in
#if COMPARISON_BENCH
bgroup label
[ benchStore "store"
, bench "cereal" (nf Cereal.encode x)
, bench "binary" (nf Binary.encode x)
]
#else
benchStore label
#endif
benchDecode :: Ctx a => a -> Benchmark
benchDecode = benchDecode' ""
-- TODO: comparison bench for decode
benchDecode' :: forall a. Ctx a => String -> a -> Benchmark
#if COMPARISON_BENCH
benchDecode' prefix x0 =
bgroup label
[ env (return (encode x0)) $ \x -> bench "store" (nf (decodeEx :: BS.ByteString -> a) x)
, env (return (Cereal.encode x0)) $ \x -> bench "cereal" (nf ((ensureRight . Cereal.decode) :: BS.ByteString -> a) x)
, env (return (Binary.encode x0)) $ \x -> bench "binary" (nf (Binary.decode :: BL.ByteString -> a) x)
]
where
label = prefix ++ " (" ++ show (typeOf x0) ++ ")"
ensureRight (Left x) = error "left!"
ensureRight (Right x) = x
#else
benchDecode' prefix x0 =
env (return (encode x0)) $ \x ->
bench (prefix ++ " (" ++ show (typeOf x0) ++ ")") (nf (decodeEx :: BS.ByteString -> a) x)
#endif
------------------------------------------------------------------------
-- Serialized datatypes
data SmallProduct = SmallProduct Int32 Int32 Int32 Int32
deriving (Generic, Show, Typeable)
instance NFData SmallProduct
instance Store SmallProduct
data SmallProductManual = SmallProductManual Int32 Int32 Int32 Int32
deriving (Generic, Show, Typeable)
instance NFData SmallProductManual
instance Store SmallProductManual where
size = ConstSize 16
peek = SmallProductManual <$> peek <*> peek <*> peek <*> peek
poke (SmallProductManual a b c d) = poke a *> poke b *> poke c *> poke d
data SmallSum
= SS1 Int8
| SS2 Int32
| SS3 Int64
| SS4 Word32
deriving (Generic, Show, Typeable)
instance NFData SmallSum
instance Store SmallSum
data SmallSumManual
= SSM1 Int8
| SSM2 Int32
| SSM3 Int64
| SSM4 Word32
deriving (Generic, Show, Typeable)
instance NFData SmallSumManual
instance Store SmallSumManual where
size = VarSize $ \x -> 1 + case x of
SSM1{} -> 1
SSM2{} -> 4
SSM3{} -> 8
SSM4{} -> 4
peek = do
tag <- peek
case tag :: Word8 of
0 -> SSM1 <$> peek
1 -> SSM2 <$> peek
2 -> SSM3 <$> peek
3 -> SSM4 <$> peek
_ -> fail "Invalid tag"
poke (SSM1 x) = poke (0 :: Word8) >> poke x
poke (SSM2 x) = poke (1 :: Word8) >> poke x
poke (SSM3 x) = poke (2 :: Word8) >> poke x
poke (SSM4 x) = poke (3 :: Word8) >> poke x
-- TODO: add TH generation of the above, and add LargeSum / LargeProduct cases
#if COMPARISON_BENCH
instance Binary.Binary SmallProduct
instance Binary.Binary SmallSum
instance Cereal.Serialize SmallProduct
instance Cereal.Serialize SmallSum
instance Binary.Binary SmallProductManual where
get = SmallProductManual <$> Binary.get <*> Binary.get <*> Binary.get <*> Binary.get
put (SmallProductManual a b c d) = Binary.put a *> Binary.put b *> Binary.put c *> Binary.put d
instance Binary.Binary SmallSumManual where
get = do
tag <- Binary.get
case tag :: Word8 of
0 -> SSM1 <$> Binary.get
1 -> SSM2 <$> Binary.get
2 -> SSM3 <$> Binary.get
3 -> SSM4 <$> Binary.get
_ -> fail "Invalid tag"
put (SSM1 x) = Binary.put (0 :: Word8) *> Binary.put x
put (SSM2 x) = Binary.put (1 :: Word8) *> Binary.put x
put (SSM3 x) = Binary.put (2 :: Word8) *> Binary.put x
put (SSM4 x) = Binary.put (3 :: Word8) *> Binary.put x
instance Cereal.Serialize SmallProductManual where
get = SmallProductManual <$> Cereal.get <*> Cereal.get <*> Cereal.get <*> Cereal.get
put (SmallProductManual a b c d) = Cereal.put a *> Cereal.put b *> Cereal.put c *> Cereal.put d
instance Cereal.Serialize SmallSumManual where
get = do
tag <- Cereal.get
case tag :: Word8 of
0 -> SSM1 <$> Cereal.get
1 -> SSM2 <$> Cereal.get
2 -> SSM3 <$> Cereal.get
3 -> SSM4 <$> Cereal.get
_ -> fail "Invalid tag"
put (SSM1 x) = Cereal.put (0 :: Word8) *> Cereal.put x
put (SSM2 x) = Cereal.put (1 :: Word8) *> Cereal.put x
put (SSM3 x) = Cereal.put (2 :: Word8) *> Cereal.put x
put (SSM4 x) = Cereal.put (3 :: Word8) *> Cereal.put x
#endif
|
fpco/store
|
bench/Bench.hs
|
mit
| 9,639 | 0 | 17 | 2,898 | 2,591 | 1,369 | 1,222 | 162 | 9 |
module Game.Render.Core.Error
( initLogging
, logGL
)
where
import System.Log.Logger
import Control.Monad
import System.Log.Handler.Simple
import System.Log.Handler (setFormatter)
import System.Log.Formatter
import qualified Graphics.Rendering.OpenGL as GL
initLogging :: IO ()
initLogging = do
h <- fileHandler "opengl.log" ERROR >>= \lh -> return $
setFormatter lh (simpleLogFormatter "[$time : $loggername : $prio] $msg")
updateGlobalLogger "OpenGL" (addHandler h >> setLevel DEBUG)
--updateGlobalLogger "OpenGL" (setLevel DEBUG)
logGL :: String -> IO ()
logGL msg = do
errors <- GL.get GL.errors
unless (null errors) $
errorM "OpenGL" $ msg ++ " with: " ++ show errors
|
mfpi/q-inqu
|
Game/Render/Core/Error.hs
|
mit
| 730 | 0 | 13 | 147 | 203 | 108 | 95 | 19 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE PackageImports #-}
module Blocks where
import Control.Concurrent.STM (TQueue, atomically, newTQueueIO, tryReadTQueue, writeTQueue)
import Control.Applicative
import Control.Monad
import Control.Lens ((+~), (^.), contains)
import Data.Foldable (foldMap, traverse_)
import Data.Vinyl
import Data.Set (Set)
import Data.Vinyl.Universe ((:::), SField(..))
import Graphics.GLUtil
import qualified Graphics.UI.GLFW as GLFW
import Graphics.GLUtil.Camera2D
import Graphics.Rendering.OpenGL
import "GLFW-b" Graphics.UI.GLFW as GLFW
import Graphics.VinylGL
import Data.Vector.Storable (fromList)
import Linear (V1(..), V2(..), _x, M33)
import System.FilePath ((</>))
import FRP.Elerea.Simple
import Data.List (transpose)
import Data.Maybe
import Control.Concurrent (threadDelay)
import Control.Monad.RWS.Strict (RWST, ask, asks, evalRWST, get, liftIO, modify, put)
import Data.Bool.Extras
import System.Random
import Graphics.Renderer.FontAtlas
import Data.Char
import Window
--------------------------------------------------------------------------------
-- Utils
--------------------------------------------------------------------------------
toDigits :: Int -> [Int]
toDigits x = let d = x `div` 10
m = x `mod` 10
in if d == 0
then [m]
else toDigits d ++ [m]
--------------------------------------------------------------------------------
-- Board
--------------------------------------------------------------------------------
type BlockID = Int
noBlockID = 7 :: BlockID
blockIDI = 6 :: BlockID
blockIDO = 5 :: BlockID
blockIDS = 4 :: BlockID
blockIDZ = 3 :: BlockID
blockIDT = 2 :: BlockID
blockIDJ = 1 :: BlockID
blockIDL = 0 :: BlockID
chooseBlock :: IO BlockID
chooseBlock = do
bid <- getStdRandom (randomR (0, 6))
return bid
-- The board starts with noBlockID in every cell
type Row = [BlockID]
type Board = [Row]
printBoard :: Board -> IO ()
printBoard = mapM_ (\row -> printRow row >> putStr "\n")
where printRow = mapM_ (\v -> putStr (show v) >> putStr " ")
emptyRow :: Row
emptyRow = replicate 10 noBlockID
emptyBoard ::Board
emptyBoard = replicate 20 emptyRow
applyBlock :: Board -> Block -> Board
applyBlock = error "applyBlock: not implemented"
rowFull :: Row -> Bool
rowFull = all isBlock
isBlock :: BlockID -> Bool
isBlock = not . noBlock
noBlock :: BlockID -> Bool
noBlock = (noBlockID==)
-- Checks for full rows and removes them, prepending empty rows at the top.
-- Returns the updated board and the number of lines deleted
updateRows :: Board -> (Board, Int)
updateRows board = (replicate n emptyRow ++ b, n)
where (b, n) = foldr (\row (board, n) -> if rowFull row
then (board, n+1)
else (row : board, n) ) ([], 0) board
-- Adopt the original Nintendo scoring system:
--   Number of lines:  1                 2                  3                  4
--   Points:           40 * (level + 1)  100 * (level + 1)  300 * (level + 1)  1200 * (level + 1)
--
-- Returns the number of points given the number of completed lines and the level
calPoints :: Int -> Int -> Int
calPoints lines level = ([40, 100, 300, 1200] !! (lines-1)) * (level + 1)
levelUp :: Int -> Int
levelUp linesCompleted = bool (bool (1 + ((linesCompleted - 1) `div` 10)) 10 (linesCompleted > 90))
1 (linesCompleted == 0)
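-- Some illustrative values (derived from the formulas above):
--   calPoints 1 0 == 40       -- a single line at level 0
--   calPoints 4 9 == 12000    -- a "tetris" at level 9
--   levelUp 0 == 1, levelUp 25 == 3, levelUp 95 == 10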
type Block = [[BlockID]]
{-
As we know the size of each of our blocks, we should really define them using
dependently typed matrices and then well-typed transforms. This would lead to a
bit more work in the apply-to-board functions, as blocks have different sizes.
We can come back to look at this later...
-}
--------------
pivit :: Pos
pivit = (2,1)
blockI0 = [ [noBlockID, noBlockID, noBlockID, noBlockID],
[blockIDI, blockIDI, blockIDI, blockIDI],
[noBlockID, noBlockID, noBlockID, noBlockID],
[noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockI1 = [ [noBlockID, noBlockID, blockIDI, noBlockID],
[noBlockID, noBlockID, blockIDI, noBlockID],
[noBlockID, noBlockID, blockIDI, noBlockID],
[noBlockID, noBlockID, blockIDI, noBlockID] ] :: Block
blockJ0 = [ [ noBlockID, noBlockID, noBlockID, noBlockID],
[ noBlockID, blockIDJ, blockIDJ, blockIDJ],
[ noBlockID, noBlockID, noBlockID, blockIDJ],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockJ1 = [ [ noBlockID, noBlockID, blockIDJ, blockIDJ],
[ noBlockID, noBlockID, blockIDJ, noBlockID],
[ noBlockID, noBlockID, blockIDJ, noBlockID],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockJ2 = [ [ noBlockID, blockIDJ, noBlockID, noBlockID],
[ noBlockID, blockIDJ, blockIDJ, blockIDJ],
[ noBlockID, noBlockID, noBlockID, noBlockID],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockJ3 = [ [ noBlockID, noBlockID, blockIDJ, noBlockID],
[ noBlockID, noBlockID, blockIDJ, noBlockID],
[ noBlockID, blockIDJ, blockIDJ, noBlockID],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockL0 = [ [ noBlockID, noBlockID, noBlockID, noBlockID],
[ noBlockID, blockIDL, blockIDL, blockIDL],
[ noBlockID, blockIDL, noBlockID, noBlockID],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockL1 = [ [ noBlockID, noBlockID, blockIDL, noBlockID],
[ noBlockID, noBlockID, blockIDL, noBlockID],
[ noBlockID, noBlockID, blockIDL, blockIDL],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockL2 = [ [ noBlockID, noBlockID, noBlockID, blockIDL],
[ noBlockID, blockIDL, blockIDL, blockIDL],
[ noBlockID, noBlockID, noBlockID, noBlockID],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockL3 = [ [ noBlockID, blockIDL, blockIDL, noBlockID],
[ noBlockID, noBlockID, blockIDL, noBlockID],
[ noBlockID, noBlockID, blockIDL, noBlockID],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockO0 = [ [ noBlockID, noBlockID, noBlockID, noBlockID],
[ noBlockID, blockIDO, blockIDO, noBlockID],
[ noBlockID, blockIDO, blockIDO, noBlockID],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockS0 = [ [ noBlockID, noBlockID, noBlockID, noBlockID],
[ noBlockID, noBlockID, blockIDS, blockIDS],
[ noBlockID, blockIDS, blockIDS, noBlockID],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockS1 = [ [ noBlockID, noBlockID, blockIDS, noBlockID],
[ noBlockID, noBlockID, blockIDS, blockIDS],
[ noBlockID, noBlockID, noBlockID, blockIDS],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockT0 = [ [ noBlockID, noBlockID, noBlockID, noBlockID],
[ noBlockID, blockIDT, blockIDT, blockIDT],
[ noBlockID, noBlockID, blockIDT, noBlockID],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockT1 = [ [ noBlockID, noBlockID, blockIDT, noBlockID],
[ noBlockID, noBlockID, blockIDT, blockIDT],
[ noBlockID, noBlockID, blockIDT, noBlockID],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockT2 = [ [ noBlockID, noBlockID, blockIDT, noBlockID],
[ noBlockID, blockIDT, blockIDT, blockIDT],
[ noBlockID, noBlockID, noBlockID, noBlockID],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockT3 = [ [ noBlockID, noBlockID, blockIDT, noBlockID],
[ noBlockID, blockIDT, blockIDT, noBlockID],
[ noBlockID, noBlockID, blockIDT, noBlockID],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockZ0 = [ [ noBlockID, noBlockID, noBlockID, noBlockID],
[ noBlockID, blockIDZ, blockIDZ, noBlockID ],
[ noBlockID, noBlockID, blockIDZ, blockIDZ ],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockZ1 = [ [ noBlockID, noBlockID, noBlockID, blockIDZ],
[ noBlockID, noBlockID, blockIDZ, blockIDZ ],
[ noBlockID, noBlockID, blockIDZ, noBlockID ],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
-- The next two functions calculate the number of empty columns to the left/right
-- and empty rows above/below a block within its 4x4 grid (used for bounds checking)
leftRight :: Block -> (Int, Int)
leftRight b = (minimum $ map (length . takeWhile noBlock . take 2) b,
minimum $ map (length . dropWhile (not . noBlock) . drop 3) b)
upDown :: Block -> (Int, Int)
upDown = (\b -> (length $ takeWhile (==True) b,
length $ dropWhile (==False) $ dropWhile (==True) b)) . map (all noBlock)
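-- For instance, with the block encodings above:
--   leftRight blockI0 == (0,0) and upDown blockI0 == (1,2)
--   leftRight blockI1 == (2,1) and upDown blockI1 == (0,0)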
isBlockOnBoard :: Pos -> Block -> Bool
isBlockOnBoard (x,y) b = let (l,r) = leftRight b
(u,d) = upDown b
(px,py) = pivit
in (y - (1 - u) >= 0) && (y + (2 - d) <= 19) &&
(x - (2 - l) >= 0) && (x + (1 - r) <= 9)
-- Precompute all blocks and their corresponding rotations
blocks :: [ [Block] ]
blocks = [ [ blockL0, blockL1, blockL2, blockL3 ],
[ blockJ0, blockJ1, blockJ2, blockJ3 ],
[ blockT0, blockT1, blockT2, blockT3 ],
[ blockZ0, blockZ1, blockZ0, blockZ1 ],
[ blockS0, blockS1, blockS0, blockS1 ],
[ blockO0, blockO0, blockO0, blockO0 ],
[ blockI0, blockI1, blockI0, blockI1 ] ]
rotatedBlock :: BlockID -> Int -> Block
rotatedBlock blockID rotation = (blocks !! blockID) !! rotation
-- Place a block on an empty board at position (x,y).
-- If the block cannot be placed at the specified position, fail (return Nothing)
placeBlockEmptyBoard' p@(x,y) id row =
let (l,r) = leftRight block
block = rotatedBlock id row
(u,d) = upDown block
b = replicate ((y - 1) + u) emptyRow
a = replicate ((19 - y - 2) + d) emptyRow
bs = map (\row -> replicate ((x - 2) + l) noBlockID ++
take ((4-l) - r) (drop l row) ++
replicate ((9 - x - 1) + r) noBlockID) (drop u (take (4 - d) block))
in if isBlockOnBoard p block
then Just (b ++ bs ++ a)
else Nothing
data Update = UPlace | UReplace
deriving (Eq, Show)
-- Combine two boards.
-- The idea is that we have a board with a single (i.e. the current) block placed,
-- and then we simply try to map that board over the current playing board.
-- Returns a merged board if the current piece can be placed correctly; otherwise
-- it fails to construct a board.
-- We actually have two modes, place and replace
overlayBoard :: Update -> Maybe Board -> Maybe Board -> Maybe Board
overlayBoard replace (Just b) (Just b') = zipWithM (zipWithM (f replace)) b b'
where
f UPlace v v' = if noBlock v && not (noBlock v')
then Just v'
else if isBlock v'
then Nothing
else Just v
f UReplace v v' = if isBlock v'
then Just noBlockID
else Just v
overlayBoard _ _ _ = Nothing
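-- A tiny illustration using 1x2 "boards" (real boards are 20 rows of 10):
--   overlayBoard UPlace (Just [[noBlockID, blockIDI]]) (Just [[blockIDO, noBlockID]])
--     == Just [[blockIDO, blockIDI]]
--   overlayBoard UPlace (Just [[blockIDI]]) (Just [[blockIDO]]) == Nothing   -- collision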
data Move = MoveL | MoveR | MoveD
deriving (Show, Eq)
type Pos = (Int, Int)
inBoard :: Pos -> Bool
inBoard (x,y) = (0 <= x && x < 10) && (0 <= y && y < 20)
initialPosition :: BlockID -> Pos
initialPosition id = (5,0)
iterationDelay :: Int -> Double
iterationDelay level = (11.0 - (fromIntegral level)) * 0.05
block = SField :: SField ("block" ::: BlockID) -- current block
nblock = SField :: SField ("nblock" ::: BlockID) -- next block
rotation = SField :: SField ("rotation" ::: Int) -- current rotation
pos = SField :: SField ("position" ::: Pos) -- current position
board = SField :: SField ("board" ::: Board) -- current board
score = SField :: SField ("score" ::: Int) -- current score
nlines = SField :: SField ("nlines" ::: Int) -- number of complete lines
idelay = SField :: SField ("idelay" ::: Double) -- delay between drop
ticks = SField :: SField ("ticks" ::: Double) -- ticks since start
frames = SField :: SField ("frames" ::: Double) -- number of frames processed
type World' = ["block" ::: BlockID,
"nblock" ::: BlockID,
"rotation" ::: Int,
"position" ::: Pos,
"board" ::: Board,
"score" ::: Int,
"nlines" ::: Int,
"idelay" ::: Double,
"ticks" ::: Double,
"frames" ::: Double]
type World = PlainFieldRec World'
mkWorld :: BlockID ->
BlockID ->
Int ->
Pos ->
Board ->
Int ->
Int ->
Double ->
Double ->
Double ->
World
mkWorld id nid r p b s l d t f =
block =: id <+> nblock =: nid <+> rotation =: r <+> pos =: p <+>
board =: b <+> score =: s <+> nlines =: l <+>
idelay =: d <+> ticks =: t <+> frames =: f
initialWorld :: BlockID -> BlockID -> World
initialWorld bid nbid = mkWorld bid nbid 0 (initialPosition bid)
(fromJust $ overlayBoard UPlace (Just emptyBoard) $
placeBlockEmptyBoard' (initialPosition bid) bid 0) 0 0
(iterationDelay 1) 0.0 0.0
--------------------------------------------------------------------------------
-- Graphics Stuff
--------------------------------------------------------------------------------
type Point2D = V2 GLfloat
type UV = Point2D
type VPos = "vertexCoord" ::: Point2D
type Tex = "texCoord" ::: Point2D
vpos :: SField VPos
vpos = SField
tex :: SField Tex
tex = SField
tex_width = (1 / 8) :: GLfloat
tex_height = 64 :: GLfloat
calcTex offset =
[[V2 (offset * tex_width) 1, V2 (offset') 1, V2 (offset * tex_width) 0],
[V2 (offset * tex_width) 0, V2 (offset') 0, V2 (offset') 1]]
where
offset' = tex_width + offset * tex_width
noBlockTex = calcTex 7 :: [[UV]]
blockTexI = calcTex 6 :: [[UV]]
blockTexO = calcTex 5 :: [[UV]]
blockTexS = calcTex 4 :: [[UV]]
blockTexZ = calcTex 3 :: [[UV]]
blockTexT = calcTex 2 :: [[UV]]
blockTexJ = calcTex 1 :: [[UV]]
blockTexL = calcTex 0 :: [[UV]]
blockTex = [ blockTexI, blockTexO, blockTexS, blockTexZ,
blockTexT, blockTexJ, blockTexL, noBlockTex]
-- this should move to a library as it can be part of the 2D engine
square :: GLfloat -> GLfloat -> [[Point2D]]
square x y = [[V2 (x * cell_width) (y * cell_height + cell_height),
V2 (x * cell_width + cell_width) (y * cell_height + cell_height),
V2 (x * cell_width) (y * cell_height)],
[V2 (x * cell_width) (y * cell_height),
V2 (x * cell_width + cell_width) (y * cell_height),
V2 (x * cell_width + cell_width) (y * cell_height + cell_height)
]
]
where
cell_width :: GLfloat
cell_width = 1.0 / 16
cell_height :: GLfloat
cell_height = 1.0 / 20
createScore :: Int -> [Char]
createScore s = let digits = take 5 $ toDigits s
in (take (5 - (length digits)) ['0','0','0','0','0']) ++
(map (\c -> toEnum $ c + 48) digits)
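-- For example, createScore 123 == "00123"; scores wider than five digits are
-- truncated to their first (most significant) five digits.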
-- As we only want to print the minimum number of digits we
-- generate verts least digit first and then we simply draw the required
-- number of triangles. Score limited to 5 digits!!
scoreText :: GLfloat ->
GLfloat ->
CharInfo ->
[Char] ->
IO (BufferedVertices [VPos,Tex])
scoreText x y offsets digits = bufferVertices $ scoreText' x y offsets digits
scoreText' :: GLfloat ->
GLfloat ->
CharInfo ->
[Char] ->
[PlainFieldRec [VPos,Tex]]
scoreText' x y offsets (d1:d2:d3:d4:d5:_) = vt
where (o1,o1',h1) = charToOffsetWidthHeight offsets d1
(o2,o2',h2) = charToOffsetWidthHeight offsets d2
(o3,o3',h3) = charToOffsetWidthHeight offsets d3
(o4,o4',h4) = charToOffsetWidthHeight offsets d4
(o5,o5',h5) = charToOffsetWidthHeight offsets d5
vt :: [PlainFieldRec [VPos,Tex]]
vt = concat $ concat ps
f (sq, t) = zipWith (zipWith (\pt uv -> vpos =: pt <+> tex =: uv)) sq t
ps = map f [
(square x y, [[V2 o1 0, V2 o1' 0, V2 o1 h1],
[V2 o1 h1, V2 o1' h1, V2 o1' 0]]),
(square (x+1) y, [[V2 o2 0, V2 o2' 0, V2 o2 h2],
[V2 o2 h1, V2 o2' h1, V2 o2' 0]]),
(square (x+2) y, [[V2 o3 0, V2 o3' 0, V2 o3 h3],
[V2 o3 h1, V2 o3' h1, V2 o3' 0]]),
(square (x+3) y, [[V2 o4 0, V2 o4' 0, V2 o4 h4],
[V2 o4 h1, V2 o4' h4, V2 o4' 0]]),
(square (x+4) y, [[V2 o5 0, V2 o5' 0, V2 o5 h5],
[V2 o5 h5, V2 o5' h5, V2 o5' 0]]) ]
-- Generate the triangles for the board; this is done just once.
-- If we used an SoA VAO, then we could upload this once and never again
graphicsBoard :: [[Point2D]]
graphicsBoard = concat $ concat $ b
where b = map (\y -> map (\x -> square x y) [0..9])
[19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0]
initialGraphicsBoard :: Board -> [PlainFieldRec [VPos,Tex]]
initialGraphicsBoard =
concat . zipWith (zipWith (\pt uv -> vpos =: pt <+> tex =: uv)) graphicsBoard . boardToTex
--boardToTex :: Board -> [[ [[UV]] ]]
boardToTex = concat . map (foldr (\a b -> (blockTex!!a) ++ b) [])
boardVerts :: Board -> IO (BufferedVertices [VPos,Tex])
boardVerts = bufferVertices . initialGraphicsBoard
type GLInfo = PlainFieldRec '["cam" ::: M33 GLfloat]
loadTextures :: [FilePath] -> IO [TextureObject]
loadTextures = fmap (either error id . sequence) . mapM aux
where aux f = do img <- readTexture ("resources" </> f)
traverse_ (const texFilter) img
return img
texFilter = do textureFilter Texture2D $= ((Nearest, Nothing), Nearest)
texture2DWrap $= (Repeated, ClampToEdge)
isPress :: GLFW.KeyState -> Bool
isPress GLFW.KeyState'Pressed = True
isPress GLFW.KeyState'Repeating = True
isPress _ = False
isKeyPressed :: TQueue EventKey -> IO (Bool, Bool, Bool, Bool, Bool)
isKeyPressed kq = do
k <- atomically $ tryReadTQueue kq
maybe (return (False, False, False, False, False))
(\(EventKey win k scancode ks mk) ->
if isPress ks
then case k of
Key'Left -> return (True, False, False, False, False)
Key'Right -> return (False, True, False, False, False)
Key'Up -> return (False, False, True, False, False)
Key'Down -> return (False, False, False, True, False)
Key'Q -> return (False, False, False, False, True)
Key'Escape -> return (False, False, False, False, True)
_ -> return (False, False, False, False, False)
else return (False, False, False, False, False)) k
calPos :: Bool -> Bool -> Pos -> Pos
calPos l r (x,y) = let x' = bool x (x-1) l
in (bool x' (x'+1) r, y)
calRot :: Int -> Bool -> Int
calRot r = bool r ((r + 1) `mod` 4)
{- Game logic is pretty simple:
   1. Check for quit; if so, clean up and exit
   2. If the game is not over:
      1. Check whether the Left, Right, or Rotate keys are pressed; if so,
         see if the movement is valid (including the move down from gravity) and update the board if so.
         Otherwise, check whether the piece can move down and update the board if so.
         Otherwise, introduce a new piece.
      2. Check whether any rows have been completed; remove them and shuffle down, adding new rows at the top, if necessary
      3. Generate updated verts and textures and upload to GL
   3. Render the frame (this is not done in the play function)
   4. Continue (loop) to the next frame
-}
-- TODO: add drop key support
-- Returns Nothing on QUIT (and, for the moment, on game over)
play :: BufferedVertices [VPos, Tex] -> UI -> World -> IO (Maybe World)
play verts ui world = do
(l,r,u,d, q) <- isKeyPressed (keys ui) --isKeyPressed' ui
let d = (world ^. rLens idelay) - (timeStep ui)
yadd = bool 0 1 (d <= 0)
if q -- QUIT
then return Nothing
else do let (x,y) = world ^. rLens pos
(x',y') = calPos l r (x,y)
y'' = y'+ yadd
rot = world ^. rLens rotation
rot' = calRot rot u
bid = world ^. rLens block
nbid = world ^. rLens nblock
-- try and place updated piece (including any user movement)
b = overlayBoard UReplace (Just $ world ^. rLens board) $
placeBlockEmptyBoard' (x, y) bid rot
pb = placeBlockEmptyBoard' (x',y'') bid rot'
ub = overlayBoard UPlace b pb
nd = bool d (iterationDelay (levelUp (world ^. rLens nlines))) (d <= 0)
if isJust ub -- can piece be placed, including user movement
then do reloadVertices verts (fromList $ initialGraphicsBoard $ fromJust ub)
return $ Just $ mkWorld bid nbid rot' (x',y'') (fromJust ub)
(world ^. rLens score) (world ^. rLens nlines)
nd
(world ^. rLens ticks) ((world ^. rLens frames) + 1)
else do let pb' = placeBlockEmptyBoard' (x, y + yadd) bid rot
ub' = overlayBoard UPlace b pb'
if isJust ub'
then do reloadVertices verts (fromList $ initialGraphicsBoard $ fromJust ub')
return $ Just $ mkWorld bid nbid rot' (x, y + yadd) (fromJust ub')
(world ^. rLens score) (world ^. rLens nlines)
nd
(world ^. rLens ticks) ((world ^. rLens frames) + 1)
else do let (upb, nls) = updateRows $ world ^. rLens board
s = world ^. rLens score
nl = (world ^. rLens nlines) + nls
s' <- if nls > 0
then do let l = levelUp nl
ss = (s + calPoints nls l)
return ss
else return s
nbid' <- chooseBlock
let (nx,ny) = initialPosition nbid
npb = placeBlockEmptyBoard' (nx,ny) nbid 0
nb = overlayBoard UPlace (Just $ upb) npb
if isJust nb
then do reloadVertices verts (fromList $ initialGraphicsBoard $ fromJust nb)
return $ Just $ mkWorld nbid nbid' 0 (nx,ny) (fromJust nb)
s' nl
nd
(world ^. rLens ticks) ((world ^. rLens frames) + 1)
else return Nothing -- Gameover
renderer :: World -> IO (GLInfo -> World -> UI -> IO (Maybe World))
renderer iworld = do
ts <- simpleShaderProgram ("shaders"</>"text.vert") ("shaders"</>"text.frag")
s <- simpleShaderProgram ("shaders"</>"piece.vert") ("shaders"</>"piece.frag")
[blocks] <- loadTextures ["blocks.png"]
putStrLn "Loaded shaders"
setUniforms s (texSampler =: 0)
nbid <- chooseBlock
verts <- boardVerts (iworld ^. rLens board)
indices <- bufferIndices [0..(2 * 10 * 20 * 3)]
vao <- makeVAO $ do
enableVertices' s verts
bindVertices verts
bindBuffer ElementArrayBuffer $= Just indices
(chars, offsets) <- createAtlas ("resources"</>"ArcadeClassic.ttf") 48 1
setUniforms ts (texSampler =: 1)
tverts <- scoreText 11 18 offsets $ createScore 0
tindices <- bufferIndices [0 .. 2 * 3 * 5]
tvao <- makeVAO $ do
enableVertices' ts tverts
bindVertices tverts
bindBuffer ElementArrayBuffer $= Just tindices
return $ \i world ui -> do
w <- play verts ui world
if isJust w
then do
currentProgram $= Just (program s)
setUniforms s i
withVAO vao . withTextures2D [blocks] $ drawIndexedTris (2 * 10 * 20)
currentProgram $= Just (program ts)
setUniforms ts i
-- TODO: we should really only upload new score verts when it changes
-- this needs to be moved into play
reloadVertices tverts
(fromList $ scoreText' 11 18 offsets $ createScore $
((fromJust w) ^. rLens score))
withVAO tvao . withTextures2D [chars] $ drawIndexedTris (2*5)
return w
else return w
where
texSampler = SField :: SField ("tex" ::: GLint)
loop :: IO UI -> World -> IO ()
loop tick world = do
clearColor $= Color4 0.00 0.1 0.1 1
r <- Blocks.renderer world
go camera2D world r
where go :: Camera GLfloat -> World -> (GLInfo -> World -> UI -> IO (Maybe World)) -> IO ()
go c world draw = do
ui <- tick
clear [ColorBuffer, DepthBuffer]
let mCam = camMatrix c
info = SField =: mCam
cells = [0,0,0]
world' <- draw info world ui
if isNothing world'
then return ()
else do --let world'' = (ticks `rPut` (((fromJust world') ^. rLens ticks) + timeStep ui) ) (fromJust world')
--fps = (world'' ^. rLens frames) / ((world'' ^. rLens ticks))
--print ("FPS: " ++ show fps)
swapBuffers (window ui) >> go c (fromJust world') draw
main :: IO ()
main = do
let width = 580
height = 960
tick <- initGL "ABC or Another Blocks Clone" width height
bid <- chooseBlock
nbid <- chooseBlock
loop tick $ initialWorld bid nbid
return ()
|
bgaster/blocks
|
Blocks.hs
|
mit
| 26,497 | 1 | 29 | 8,506 | 8,000 | 4,471 | 3,529 | 481 | 8 |
module Game ( Game
, Unfinished
, Position
, Coordinate
, start
, move
, isFinished
, bounds
, openPositions
, marks
) where
import Game.Internal
|
amar47shah/NICTA-TicTacToe
|
src/Game.hs
|
mit
| 267 | 0 | 4 | 147 | 41 | 27 | 14 | 11 | 0 |
-- |
-- Module : Main
-- Description :
-- Copyright : (c) Jonatan H Sundqvist, 2015
-- License : MIT
-- Maintainer : Jonatan H Sundqvist
-- Stability : experimental|stable
-- Portability : POSIX (not sure)
--
-- Created December 21 2015
-- TODO | -
-- -
-- SPEC | -
-- -
--------------------------------------------------------------------------------------------------------------------------------------------
-- GHC Pragmas
--------------------------------------------------------------------------------------------------------------------------------------------
--------------------------------------------------------------------------------------------------------------------------------------------
-- API
--------------------------------------------------------------------------------------------------------------------------------------------
module Main where
--------------------------------------------------------------------------------------------------------------------------------------------
-- We'll need these
--------------------------------------------------------------------------------------------------------------------------------------------
import System.Environment (getArgs)
import Control.Concurrent
import qualified Elrond.Core as Core
import qualified Elrond.Server as Server
import qualified Elrond.Client as Client
--------------------------------------------------------------------------------------------------------------------------------------------
-- Entry point
--------------------------------------------------------------------------------------------------------------------------------------------
-- |
main :: IO ()
main = do
putStrLn "Elvish nonsense and dwarvish tenacity."
args <- getArgs
case take 1 args of
["server"] -> Server.start
["client"] -> Client.start
_ -> putStrLn "Put up your umbrellas, folks. Shit just hit the fan."
threadDelay $ round (5.0 * 10^6)
putStrLn "Shutting down..."
error "TODO: Figure out a better way to stop the server."
|
SwiftsNamesake/Elrond
|
app/Main.hs
|
mit
| 2,092 | 0 | 11 | 240 | 174 | 105 | 69 | 17 | 3 |
{-# LANGUAGE QuasiQuotes #-}
module TestLedgerProcess (ledgerProcessSpec) where
import Test.Hspec (Spec, describe, it, shouldReturn)
import Data.String.Interpolate (i)
import Data.String.Interpolate.Util (unindent)
import Text.RE.Replace
import Text.RE.TDFA.String
import qualified Data.Expenses.Ledger.Process as LP
{-|
Removes @version=".*"@, @id=".*"@, @ref=".*"@
attributes from the output of the @ledger xml@ command,
since these are non-deterministically generated.
|-}
stripUnstableAttributes :: String -> String
stripUnstableAttributes xml =
let
mtch = xml *=~ [re| (id|ref|version)="[^"]*">|]
in
replaceAll ">" mtch
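-- For example, stripUnstableAttributes "<transaction id=\"T42\">" yields
-- "<transaction>": the whole attribute chunk up to the closing '>' is
-- collapsed to ">".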
-- | Replace CRLF with LF.
normalizeNewlines :: String -> String
normalizeNewlines s =
let
mtch = s *=~ [re|\r\n|]
in
replaceAll "\n" mtch
ledgerProcessSpec :: Spec
ledgerProcessSpec =
describe "Data.Expenses.Ledger.Process" $
describe "xmlOfString" $
it "should return digits for simple input" $
stripUnstableAttributes . normalizeNewlines <$> LP.xmlOfString simpleLedgerJournal
`shouldReturn` simpleLedgerXml
where
simpleLedgerJournal = unindent [i|
bucket Assets:SGD
2018/01/01 food
Expenses:Food 5 SGD
|]
simpleLedgerXml = unindent [i|
<?xml version="1.0" encoding="utf-8"?>
<ledger>
<commodities>
<commodity flags="S">
<symbol>SGD</symbol>
</commodity>
</commodities>
<accounts>
<account>
<name/>
<fullname/>
<account-total>
<amount>
<commodity flags="S">
<symbol>SGD</symbol>
</commodity>
<quantity>0</quantity>
</amount>
</account-total>
<account>
<name>Assets</name>
<fullname>Assets</fullname>
<account-total>
<amount>
<commodity flags="S">
<symbol>SGD</symbol>
</commodity>
<quantity>-5</quantity>
</amount>
</account-total>
<account>
<name>SGD</name>
<fullname>Assets:SGD</fullname>
<account-amount>
<amount>
<commodity flags="S">
<symbol>SGD</symbol>
</commodity>
<quantity>-5</quantity>
</amount>
</account-amount>
<account-total>
<amount>
<commodity flags="S">
<symbol>SGD</symbol>
</commodity>
<quantity>-5</quantity>
</amount>
</account-total>
</account>
</account>
<account>
<name>Expenses</name>
<fullname>Expenses</fullname>
<account-total>
<amount>
<commodity flags="S">
<symbol>SGD</symbol>
</commodity>
<quantity>5</quantity>
</amount>
</account-total>
<account>
<name>Food</name>
<fullname>Expenses:Food</fullname>
<account-amount>
<amount>
<commodity flags="S">
<symbol>SGD</symbol>
</commodity>
<quantity>5</quantity>
</amount>
</account-amount>
<account-total>
<amount>
<commodity flags="S">
<symbol>SGD</symbol>
</commodity>
<quantity>5</quantity>
</amount>
</account-total>
</account>
</account>
</account>
</accounts>
<transactions>
<transaction>
<date>2018/01/01</date>
<payee>food</payee>
<postings>
<posting>
<account>
<name>Expenses:Food</name>
</account>
<post-amount>
<amount>
<commodity flags="S">
<symbol>SGD</symbol>
</commodity>
<quantity>5</quantity>
</amount>
</post-amount>
<total>
<amount>
<commodity flags="S">
<symbol>SGD</symbol>
</commodity>
<quantity>5</quantity>
</amount>
</total>
</posting>
<posting generated="true">
<account>
<name>Assets:SGD</name>
</account>
<post-amount>
<amount>
<commodity flags="S">
<symbol>SGD</symbol>
</commodity>
<quantity>-5</quantity>
</amount>
</post-amount>
<total>
<amount>
<commodity flags="S">
<symbol>SGD</symbol>
</commodity>
<quantity>0</quantity>
</amount>
</total>
</posting>
</postings>
</transaction>
</transactions>
</ledger>
|]
|
rgoulter/expenses-csv-utils
|
test/TestLedgerProcess.hs
|
mit
| 5,361 | 0 | 11 | 2,300 | 235 | 139 | 96 | 27 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE ViewPatterns #-}
module Data.Neural.FeedForward where
import Control.Applicative
import Control.DeepSeq
import Data.Bifunctor
import Data.List
import Data.Reflection
import Data.Neural.Types
import Data.Neural.Utility
import Data.Proxy
import GHC.TypeLits
import Linear
import Linear.V
import Numeric.AD.Rank1.Forward
import System.Random
import Text.Printf
import qualified Data.Binary as B
import qualified Data.List as P
import qualified Data.Vector as V
data Network :: Nat -> [Nat] -> Nat -> *
-> * where
NetOL :: !(FLayer i o a) -> Network i '[] o a
NetIL :: KnownNat j => !(FLayer i j a) -> !(Network j hs o a) -> Network i (j ': hs) o a
infixr 5 `NetIL`
data SomeNet :: * -> * where
SomeNet :: (KnownNat i, KnownNat o) => Network i hs o a -> SomeNet a
data OpaqueNet :: Nat -> Nat -> * -> * where
OpaqueNet :: (KnownNat i, KnownNat o) => Network i hs o a -> OpaqueNet i o a
runNetwork :: forall i hs o a. (KnownNat i, Num a) => (a -> a) -> (a -> a) -> Network i hs o a -> V i a -> V o a
runNetwork f g = go
where
go :: forall i' hs' o'. KnownNat i' => Network i' hs' o' a -> V i' a -> V o' a
go n v = case n of
NetOL l -> g <$> runFLayer l v
NetIL l n' -> go n' (f <$> runFLayer l v)
{-# INLINE runNetwork #-}
trainSample :: forall i o a hs. (KnownNat i, KnownNat o, Num a)
=> a -> (Forward a -> Forward a) -> (Forward a -> Forward a)
-> V i a -> V o a
-> Network i hs o a
-> Network i hs o a
trainSample step f g x0 y n0 = snd $ go x0 n0
where
-- x: input
-- y: target
-- d: x * w
-- o: f d
go :: forall j hs'. KnownNat j => V j a -> Network j hs' o a -> (V j a, Network j hs' o a)
go x n =
case n of
NetOL l@(FLayer ln) ->
let d :: V o a
d = runFLayer l x
delta :: V o a
ln' :: V o (Node j a)
(delta, ln') = unzipV $ liftA3 (adjustOutput xb) ln y d
-- drop contrib from bias term
deltaws :: V j a
-- deltaws = delta *! (nodeWeights <$> ln')
deltaws = delta *! (nodeWeights <$> ln)
l' :: FLayer j o a
l' = FLayer ln'
in (deltaws, NetOL l')
NetIL l@(FLayer ln :: FLayer j k a) (n' :: Network k ks o a) ->
let d :: V k a
d = runFLayer l x
o :: V k a
o = fst . diff' f <$> d
deltaos :: V k a
n'' :: Network k ks o a
(deltaos, n'') = go o n'
delta :: V k a
ln' :: V k (Node j a)
(delta, ln') = unzipV $ liftA3 (adjustHidden xb) ln deltaos d
deltaws :: V j a
-- deltaws = delta *! (nodeWeights <$> ln')
deltaws = delta *! (nodeWeights <$> ln)
l' :: FLayer j k a
l' = FLayer ln'
in (deltaws, l' `NetIL` n'')
where
xb = Node 1 x
-- {-# INLINE go #-}
-- per neuron/node traversal
-- every neuron has a delta
adjustOutput :: KnownNat j => Node j a -> Node j a -> a -> a -> (a, Node j a)
adjustOutput xb node y' d = (delta, adjustWeights delta xb node)
where
delta = let (o, o') = diff' g d
in (o - y') * o'
-- delta = (f d - y) * f' d
{-# INLINE adjustOutput #-}
-- delta = d - y
adjustHidden :: KnownNat j => Node j a -> Node j a -> a -> a -> (a, Node j a)
adjustHidden xb node deltao d = (delta, adjustWeights delta xb node)
where
-- instead of (o - target), use deltao, weighted average of errors
delta = deltao * diff f d
{-# INLINE adjustHidden #-}
-- delta = deltao
-- per weight traversal
adjustWeights :: KnownNat j => a -> Node j a -> Node j a -> Node j a
adjustWeights delta = liftA2 (\w n -> n - step * delta * w)
{-# INLINE adjustWeights #-}
{-# INLINE trainSample #-}
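-- A minimal training-loop sketch (not part of the original module): fold
-- 'trainSample' over a list of (input, target) pairs, using a logistic
-- activation for both the hidden and the output layers.  'trainEpoch' is an
-- illustrative name, not an existing API.
trainEpoch :: (KnownNat i, KnownNat o, Floating a)
           => a -> [(V i a, V o a)] -> Network i hs o a -> Network i hs o a
trainEpoch step samples n0 =
    P.foldl' (\n (x, y) -> trainSample step logistic logistic x y n) n0 samples
  where
    logistic z = 1 / (1 + exp (negate z))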
networkHeatmap :: (KnownNat i, Num a) => (a -> a) -> (a -> a) -> Network i hs o a -> V i a -> [[a]]
networkHeatmap f g n v =
vToList v : case n of
NetOL l -> [vToList (g <$> runFLayer l v)]
NetIL l n' -> networkHeatmap f g n' $ f <$> runFLayer l v
where
vToList = V.toList . toVector
drawHeatmap :: KnownNat i => (Double -> Double) -> (Double -> Double) -> Network i hs o Double -> V i Double -> String
drawHeatmap f g n = unlines
. map (intercalate "\t")
. P.transpose
. map (padLists ' ')
. padLists ""
. map (padLists ' ' . map (printf "% .3f"))
. networkHeatmap f g n
where
padLists :: forall a. a -> [[a]] -> [[a]]
padLists p xss = flip map xss $ \xs ->
let d = (maxlen - length xs) `div` 2
in take maxlen $ replicate d p ++ xs ++ repeat p
where
maxlen = maximum (map length xss)
drawNetwork :: forall i hs o. Dim i => Network i hs o Double -> String
drawNetwork = unlines
. map (intercalate "\t")
. P.transpose
. map (padLists ' ')
. padLists ""
. map (intercalate [""])
. doublePad ""
. ([]:)
. (replicate (reflectDim (Proxy :: Proxy i)) ["o"] :)
. addDot
. (map . map . map) (printf "% .3f")
. networkToList
where
addDot :: [[[String]]] -> [[[String]]]
addDot = concatMap $ \xs -> [xs, replicate (length xs) ["o"]]
-- bracketize :: String -> String
-- bracketize str = '[' : str ++ "]"
padLists :: forall a. a -> [[a]] -> [[a]]
padLists p xss = flip map xss $ \xs ->
let d = (maxlen - length xs) `div` 2
in take maxlen $ replicate d p ++ xs ++ repeat p
where
maxlen = maximum (map length xss)
doublePad :: forall a. a -> [[[a]]] -> [[[a]]]
doublePad p xsss = flip (map . map) xsss $ \xs ->
let d = (maxlen - length xs) `div` 2
in take maxlen $ replicate d p ++ xs ++ repeat p
where
maxlen = maximum (concatMap (map length) xsss)
nodeToList :: forall j a. Node j a -> [a]
nodeToList (Node b (V w)) = b : V.toList w
layerToList :: forall i' o' a. FLayer i' o' a -> [[a]]
layerToList (FLayer (V l)) = nodeToList <$> V.toList l
networkToList :: forall i' hs' o' a. Network i' hs' o' a -> [[[a]]]
networkToList n' = case n' of
NetOL l -> [layerToList l]
NetIL l n'' -> layerToList l : networkToList n''
randomNetwork :: (RandomGen g, Random (Network i hs o a), Num a)
=> g
-> (Network i hs o a, g)
randomNetwork g = (first . fmap) (subtract 1 . (*2)) $ random g
randomNetworkIO :: (Random (Network i hs o a), Num a) => IO (Network i hs o a)
randomNetworkIO = fmap (subtract 1 . (*2)) <$> randomIO
networkStructure :: forall i hs o a. (KnownNat i, KnownNat o) => Network i hs o a -> (Int, [Int], Int)
networkStructure (NetOL _) = (reflectDim (Proxy :: Proxy i), [], reflectDim (Proxy :: Proxy o))
networkStructure (NetIL _ n') = (reflectDim (Proxy :: Proxy i), j : hs, o)
where
(j, hs, o) = networkStructure n'
-- induceOutput :: forall i hs o a. (KnownNat i, KnownNat o, Floating a, Ord a) => a -> a -> (a, a) -> (a -> a) -> Network i hs o a -> V o a -> V i a -> V i a
-- induceOutput nudge step (mn,mx) f n y x0@(V x0v) = V . fst $ foldl' g (x0v, errFrom x0) [0..V.length x0v - 1]
-- where
-- errFrom = qd y . runNetwork f n
-- g (x, e) i = let x' = V.modify (\v -> VM.write v i . clamp . (+ nudge) =<< VM.read v i) x
-- e' = errFrom (V x')
-- x'' = V.modify (\v -> VM.write v i . clamp . subtract (nudge*step/e') =<< VM.read v i) x
-- e'' = errFrom (V x'')
-- in (x'', e'')
-- clamp = min mx . max mn
-- | Boilerplate instances
instance Functor (Network i hs o) where
fmap f n = case n of
NetOL l -> NetOL (fmap f l)
NetIL l n' -> fmap f l `NetIL` fmap f n'
{-# INLINE fmap #-}
instance (KnownNat i, KnownNat o) => Applicative (Network i '[] o) where
pure = NetOL . pure
{-# INLINE pure #-}
NetOL f <*> NetOL x = NetOL (f <*> x)
{-# INLINE (<*>) #-}
instance (KnownNat i, KnownNat j, Applicative (Network j hs o)) => Applicative (Network i (j ': hs) o) where
pure x = pure x `NetIL` pure x
{-# INLINE pure #-}
NetIL fi fr <*> NetIL xi xr = NetIL (fi <*> xi) (fr <*> xr)
{-# INLINE (<*>) #-}
instance (KnownNat i, KnownNat o, Random a) => Random (Network i '[] o a) where
random = first NetOL . random
randomR (NetOL rmn, NetOL rmx) = first NetOL . randomR (rmn, rmx)
instance (KnownNat i, KnownNat j, Random a, Random (Network j hs o a)) => Random (Network i (j ': hs) o a) where
random g = let (l, g') = random g
in first (l `NetIL`) (random g')
randomR (NetIL lmn nmn, NetIL lmx nmx) g =
let (l , g') = randomR (lmn, lmx) g
in first (l `NetIL`) (randomR (nmn, nmx) g')
instance (KnownNat i, KnownNat o, B.Binary a) => B.Binary (Network i '[] o a) where
put (NetOL l) = B.put l
get = NetOL <$> B.get
-- instance (KnownNat i, KnownNat o, KnownNat j, B.Binary a, B.Binary (Network j hs o a)) => B.Binary (Network i (j ': hs) o a) where
instance (KnownNat i, KnownNat j, B.Binary a, B.Binary (Network j hs o a)) => B.Binary (Network i (j ': hs) o a) where
put (NetIL l n') = B.put l *> B.put n'
get = NetIL <$> B.get <*> B.get
instance NFData a => NFData (Network i hs o a) where
rnf (NetOL (force -> !_)) = ()
rnf (NetIL (force -> !_) (force -> !_)) = ()
deriving instance Show a => Show (Network i hs o a)
deriving instance Foldable (Network i hs o)
deriving instance Traversable (Network i hs o)
deriving instance Show a => Show (SomeNet a)
deriving instance Functor SomeNet
deriving instance Foldable SomeNet
deriving instance Traversable SomeNet
instance B.Binary a => B.Binary (SomeNet a) where
put sn = case sn of
SomeNet (n :: Network i hs o a) -> do
B.put $ natVal (Proxy :: Proxy i)
B.put $ natVal (Proxy :: Proxy o)
B.put $ OpaqueNet n
get = do
i <- B.get
o <- B.get
reifyNat i $ \(Proxy :: Proxy i) ->
reifyNat o $ \(Proxy :: Proxy o) -> do
oqn <- B.get :: B.Get (OpaqueNet i o a)
return $ case oqn of
OpaqueNet n -> SomeNet n
deriving instance Show a => Show (OpaqueNet i o a)
deriving instance Functor (OpaqueNet i o)
deriving instance Foldable (OpaqueNet i o)
deriving instance Traversable (OpaqueNet i o)
instance (KnownNat i, KnownNat o, B.Binary a) => B.Binary (OpaqueNet i o a) where
put oqn = case oqn of
OpaqueNet n -> do
case n of
NetOL l -> do
B.put True
B.put l
NetIL (l :: FLayer i j a) (n' :: Network j js o a) -> do
B.put False
B.put $ natVal (Proxy :: Proxy j)
B.put l
B.put (OpaqueNet n')
get = do
isOL <- B.get
if isOL
then do
OpaqueNet . NetOL <$> B.get
else do
j <- B.get
reifyNat j $ \(Proxy :: Proxy j) -> do
l <- B.get :: B.Get (FLayer i j a)
nqo <- B.get :: B.Get (OpaqueNet j o a)
return $ case nqo of
OpaqueNet n -> OpaqueNet $ l `NetIL` n
asOpaqueNet :: SomeNet a
-> (forall i o. (KnownNat i, KnownNat o) => OpaqueNet i o a -> r)
-> r
asOpaqueNet sn f = case sn of
SomeNet n -> f (OpaqueNet n)
|
mstksg/neural
|
src/Data/Neural/FeedForward.hs
|
mit
| 12,834 | 0 | 20 | 4,654 | 4,655 | 2,397 | 2,258 | 256 | 2 |
module Typing.Expr (i_expr) where
import Typing.Constraint
import Typing.Env
import Typing.Substitution
import Typing.Subtyping
import Typing.TypeError
import Typing.Types
import Typing.Util
import Absyn.Base
import Absyn.Meta
import qualified Absyn.Untyped as U
import qualified Absyn.Typed as T
import Util.Error
import Control.Monad (when, zipWithM)
import Data.List (union)
i_expr :: U.Expr -> Tc (T.Expr, Type)
i_expr (meta :< Literal lit) =
return (meta :< Literal lit, i_lit lit)
i_expr (meta :< Ident [i]) = do
ty <- lookupValue i
return (meta :< Ident [i], ty)
-- TODO: Clear this up - should be handled by the renamer now
i_expr (_ :< Ident (_:_)) = undefined
i_expr (_ :< Ident []) = undefined
i_expr (meta :< ParenthesizedExpr expr) = do
(expr', ty) <- i_expr expr
return (meta :< ParenthesizedExpr expr', ty)
i_expr (meta :< BinOp _ _ lhs op rhs) = do
tyOp@(Fun gen _ _) <- lookupValue op
(lhs', lhsTy) <- i_expr lhs
(rhs', rhsTy) <- i_expr rhs
(retType', typeArgs) <- inferTyArgs [lhsTy, rhsTy] tyOp
constraintArgs <- inferConstraintArgs gen [] typeArgs
return (meta :< BinOp constraintArgs [] lhs' (op, tyOp) rhs', retType')
i_expr (meta :< Match expr cases) = do
(expr', ty) <- i_expr expr
(cases', casesTy) <- unzip <$> mapM (i_case ty) cases
let retTy = case casesTy of
[] -> void
x:xs -> foldl (\/) x xs
return (meta :< Match expr' cases', retTy)
i_expr (meta :< Call fn constraintArgs types []) =
i_expr (meta :< Call fn constraintArgs types [emptySpanFromPos (spanEnd meta) :< VoidExpr])
where
emptySpanFromPos pos = SourceSpan { spanStart = pos, spanEnd = pos }
i_expr (meta :< Call fn _ types args) = do
(fn', tyFn) <- i_expr fn
let tyFn' = normalizeFnType tyFn
(tyFn''@(Fun gen _ retType), skippedVars) <- adjustFnType (null types) args tyFn'
(retType', args', typeArgs) <-
case (tyFn'', types) of
(Fun (_:_) params _, []) -> do
(_, argsTy) <- unzip <$> mapM i_expr args
(retType, typeArgs) <- inferTyArgs argsTy tyFn''
let s = zipSubst (map fst gen) typeArgs
let params' = map (applySubst s) params
args' <- zipWithM instSubtype args params'
return (retType, args', typeArgs)
(Fun gen params _, _) -> do
types' <- mapM resolveType types
let s = zipSubst (map fst gen) types'
let params' = map (applySubst s) params
args' <- zipWithM instSubtype args params'
return (applySubst s retType, args', types')
_ -> undefined
constraintArgs <- inferConstraintArgs gen skippedVars typeArgs
return (meta :< Call fn' constraintArgs types args', retType')
i_expr (meta :< Record fields) = do
(exprs, types) <- mapM (i_expr . snd) fields >>= return . unzip
let labels = map fst fields
let fieldsTy = zip labels types
let recordTy = Rec fieldsTy
let record = Record (zip labels exprs)
return (meta :< record, recordTy)
i_expr (meta :< FieldAccess expr field) = do
(expr', ty) <- i_expr expr
let
aux :: Type -> [(String, Type)] -> Tc (T.Expr, Type)
aux ty r = case lookup field r of
Nothing -> throwError $ UnknownField ty field
Just t -> return (meta :< FieldAccess expr' field, t)
case ty of
Rec r -> aux ty r
Cls _ -> do
vars <- lookupInstanceVars ty
aux ty vars
_ -> throwError . GenericError $ "Expected a record, but found value of type " ++ show ty
i_expr (meta :< If ifCond ifBody elseBody) = do
(ifCond', ty) <- i_expr ifCond
ty <:! bool
(ifBody', ifTy) <- i_body ifBody
(elseBody', elseTy) <- i_body elseBody
return (meta :< If ifCond' ifBody' elseBody', ifTy \/ elseTy)
i_expr (meta :< List _ items) = do
(items', itemsTy) <- unzip <$> mapM i_expr items
(ty, itemTy) <- case itemsTy of
[] -> do
nilTy <- lookupValue "Nil"
return (nilTy, Bot)
x:xs ->
let ty = foldl (\/) x xs
in return (list ty, ty)
return (meta :< List (Just itemTy) items', ty)
i_expr (meta :< FnExpr fn) = do
(fn', ty) <- i_fn fn
return $ (meta :< FnExpr fn', ty)
i_expr (meta :< Negate _ expr) = do
(expr', ty) <- i_expr expr
intf <- lookupInterface "Std.Number"
constrArgs <- boundsCheck ty intf
return (meta :< Negate constrArgs expr', ty)
-- Expressions generated during type checking
i_expr (meta :< VoidExpr) = return (meta :< VoidExpr, void)
i_expr (_ :< TypeCall {}) = undefined
instSubtype :: U.Expr -> Type -> Tc T.Expr
instSubtype arg@(meta :< _) ty = do
(arg', argTy) <- i_expr arg
arg'' <- case (argTy, ty) of
(Fun gen@(_:_) _ _, Fun [] _ _) -> do
typeArgs <- inferTyAbs argTy ty
constraintArgs <- inferConstraintArgs gen [] typeArgs
return $ meta :< TypeCall arg' constraintArgs
_ -> do
argTy <:! ty
return arg'
return arg''
inferConstraintArgs :: [BoundVar] -> [BoundVar] -> [Type] -> Tc [ConstraintArg]
inferConstraintArgs gen skippedVars typeArgs = do
concat <$> zipWithM findConstrArgs gen typeArgs'
where
typeArgs' = zipWith findHoles gen typeArgs
findHoles var ty =
if var `elem` skippedVars
then mkHole var
else ty
findConstrArgs (_, []) tyArg = do
return [CAType tyArg]
findConstrArgs (_, bounds) tyArg = do
concat <$> mapM (boundsCheck tyArg) bounds
-- Checks if `t1` implements the interface in `t2`
-- returns a list with a single ConstraintArg indicating
-- how to satisfy the bounds. See `ConstraintArg` for
-- an explanation on the kinds of ConstraintArgs
boundsCheck :: Type -> Intf -> Tc [ConstraintArg]
boundsCheck t1 t2@(Intf name _ _) = do
args <- boundsCheck' t1 t2
if null args
then throwError $ MissingImplementation name t1
else return args
boundsCheck' :: Type -> Intf -> Tc [ConstraintArg]
boundsCheck' v@(Var _ bounds) intf = do
return $ if intf `elem` bounds
then [CABound v intf]
else []
boundsCheck' (TyApp ty args) intf@(Intf name _ _) = do
implementations <- lookupImplementations name
case lookup ty implementations of
Nothing -> return []
Just vars -> do
let aux arg (_, bounds) =
concat <$> mapM (boundsCheck arg) bounds
args <- concat <$> zipWithM aux args vars
return [CAPoly ty intf args]
boundsCheck' (Forall params ty) intf =
boundsCheck' (params \\ ty) intf
boundsCheck' (TyAbs params ty) intf =
boundsCheck' (params \\ ty) intf
boundsCheck' Bot intf =
return [CABound Bot intf]
boundsCheck' ty intf@(Intf name _ _) = do
implementations <- lookupImplementations name
case lookup ty implementations of
Just [] -> return [CABound ty intf]
_ -> return []
normalizeFnType :: Type -> Type
normalizeFnType (Fun gen params (Fun [] params' retTy)) =
normalizeFnType (Fun gen (params ++ params') retTy)
normalizeFnType ty = ty
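-- For illustration (comment added, not from the original source): nested
-- monomorphic arrows are flattened into a single parameter list, e.g.
--   normalizeFnType (Fun gen [a] (Fun [] [b] ret))  ==  Fun gen [a, b] ret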
adjustFnType :: Bool -> [a] -> Type -> Tc (Type, [BoundVar])
adjustFnType allowHoles args fn@(Fun gen params retType) = do
let lArgs = length args
case compare lArgs (length params) of
EQ -> return (fn, [])
LT ->
let headArgs = take lArgs params
tailArgs = drop lArgs params
skippedGen = filter aux gen
aux (v, _) = allowHoles && v `elem` (fv $ Fun [] tailArgs retType) && v `notElem` (foldl union [] $ map fv headArgs)
in return (Fun gen headArgs $ Fun skippedGen tailArgs retType, skippedGen)
GT -> throwError ArityMismatch
adjustFnType _ _ ty = throwError . GenericError $ "Expected a function, found " ++ show ty
i_lit :: Literal -> Type
i_lit (Integer _) = int
i_lit (Float _) = float
i_lit (Char _) = char
i_lit (String _) = string
i_case :: Type -> U.Case -> Tc (T.Case, Type)
i_case ty (meta :< Case pattern caseBody) = do
m <- startMarker
pattern' <- c_pattern ty pattern
endMarker m
(caseBody', ty) <- i_body caseBody
clearMarker m
return (meta :< Case pattern' caseBody', ty)
c_pattern :: Type -> U.Pattern -> Tc T.Pattern
c_pattern _ (meta :< PatDefault) =
return $ meta :< PatDefault
c_pattern ty (meta :< PatLiteral l) = do
let litTy = i_lit l
litTy <:! ty
return $ meta :< PatLiteral l
c_pattern ty (meta :< PatVar v) = do
insertValue v ty
return $ meta :< PatVar v
c_pattern ty@(Rec tyFields) (meta :< PatRecord fields) = do
fields' <- mapM aux fields
return $ meta :< PatRecord fields'
where
aux (key, pat) = do
case lookup key tyFields of
Just ty -> do
pat' <- c_pattern ty pat
return (key, pat')
Nothing ->
throwError . GenericError $ "Matching against field `" ++ key ++ "`, which is not included in the type of the value being matched, `" ++ show ty ++ "`"
c_pattern ty (_ :< PatRecord _) = do
throwError . GenericError $ "Using a record pattern, but value being matched has type `" ++ show ty ++ "`"
c_pattern ty (meta :< PatList pats rest) = do
itemTy <- getItemTy ty
pats' <- mapM (c_pattern itemTy) pats
rest' <- case rest of
NoRest -> return NoRest
DiscardRest -> return DiscardRest
NamedRest n -> do
insertValue n ty
return (NamedRest n)
return $ meta :< PatList pats' rest'
where
getItemTy (Forall _ (TyApp (Con "List") _)) =
return Top
getItemTy (TyApp (Con "List") [ty]) =
return ty
getItemTy _ =
throwError . GenericError $ "Using a list pattern, but value being matched has type `" ++ show ty ++ "`"
c_pattern ty (meta :< PatCtor name vars) = do
ctorTy <- lookupValue name
let (fnTy, params, retTy) = case ctorTy of
fn@(Fun [] params retTy) -> (fn, params, retTy)
fn@(Fun gen params retTy) -> (fn, params, Forall (map fst gen) retTy)
t -> (Fun [] [] t, [], t)
when (length vars /= length params) (throwError ArityMismatch)
retTy <:! ty
let substs = case (retTy, ty) of
(Forall gen _, TyApp _ args) -> zipSubst gen args
_ -> emptySubst
let params' = map (applySubst substs) params
vars' <- zipWithM c_pattern params' vars
return $ meta :< PatCtor (name, fnTy) vars'
|
tadeuzagallo/verve-lang
|
src/Typing/Expr.hs
|
mit
| 10,404 | 0 | 20 | 2,825 | 4,022 | 1,987 | 2,035 | 248 | 9 |
{-# LANGUAGE Arrows #-}
module Game.Client.Objects.Network where
import FRP.Yampa as Yampa
import FRP.Yampa.Geometry
import Graphics.UI.SDL as SDL
import Graphics.UI.SDL.Events as SDL.Events
import Graphics.UI.SDL.Keysym as SDL.Keysym
import Game.Shared.Types
import Game.Shared.Networking
import Game.Shared.Object
import Game.Shared.Arrows
import Game.Client.Objects.Towers
import Game.Client.Objects.Input
import Game.Client.Objects.Minions
import Game.Client.Objects.Base
import Game.Client.Components.BasicComponents
import Game.Client.Components.Projectiles
import Game.Client.Object
import Game.Client.Resources
import Game.Client.Input
import Game.Client.Graphics
import Game.Client.Networking
-- |Manager that handles object creation events, generating
-- the game object and submitting a spawn request for it
netManager :: Object
netManager = proc objInput -> do
-- Connection events
connSuccEvent <- connectionSuccess -< oiNetwork objInput
connFailedEvent <- connectionFailed -< oiNetwork objInput
-- Object creation
newPlayerObjectEvent <- createObject Player -< oiNetwork objInput
newPlayerProjectile1ObjectEvent <- createObject (PlayerProjectile 1) -< oiNetwork objInput
newPlayerProjectile2ObjectEvent <- createObject (PlayerProjectile 2) -< oiNetwork objInput
newTurretObjectEvent <- createObject Turret -< oiNetwork objInput
newTurretProjectileObjectEvent <- createObject TurretProjectile -< oiNetwork objInput
newMinionObjectEvent <- createObject Minion -< oiNetwork objInput
newMinionProjectileObjectEvent <- createObject MinionProjectile -< oiNetwork objInput
newNexusObjectEvent <- createObject Nexus -< oiNetwork objInput
-- Return state
returnA -< (defaultObjOutput objInput) {
ooSpawnRequests = foldl (mergeBy (++)) noEvent [connSuccEvent `tag` [playerObject],
newPlayerObjectEvent `tagUsing` map networkPlayer,
newPlayerProjectile1ObjectEvent `tagUsing` map networkPlayerProjectile,
newPlayerProjectile2ObjectEvent `tagUsing` map networkPlayerProjectile,
newTurretObjectEvent `tagUsing` map turretObject,
newTurretProjectileObjectEvent `tagUsing` map towerProjectile,
newMinionObjectEvent `tagUsing` map minionObject,
newMinionProjectileObjectEvent `tagUsing` map minionProjectile,
newNexusObjectEvent `tagUsing` map nexusObject]
}
-- |Game object that draws the map background
mapBackground :: Object
mapBackground = proc objInput -> do
-- Return state
returnA -< (defaultObjOutput objInput) {
ooGraphic = draw backgroundImage (Mask Nothing 0 0),
ooGraphicLayer = GameLayer 0,
ooGameObject = (defaultGameObject objInput) {
goPos = zeroVector,
goSize = vector2 3072 3072
}
}
-- |Game object for a player not managed by this client
networkPlayer :: GameObject -- ^The representation of this player that constructed this game object
-> Object
networkPlayer obj = proc objInput -> do
-- Components
basicComponent <- basicObject obj -< BasicObjectInput {
boiNetwork = oiNetwork objInput
}
statsComponent <- objectStats obj -< ObjectStatsInput {
osiNetwork = oiNetwork objInput
}
let stats = osoStats statsComponent
position = booPosition basicComponent
-- Components
healthbarComponent <- healthbarDisplay 1.5 (vector2 (-6) (-12)) 44 6 -< HealthbarDisplayInput {
hdiHealthChangedEvent = osoHealthChanged statsComponent,
hdiObjectPosition = position,
hdiCurrentHealth = stHealth stats,
hdiCurrentMaxHealth = stMaxHealth stats
}
-- Return state
let (x, y) = vectorRoundedComponents position
team = goTeam obj
returnA -< (defaultObjOutput objInput) {
ooKillRequest = booObjectDestroyed basicComponent,
ooGraphic = drawAll [draw (playerImage team) (Mask Nothing x y),
hdoHealthbarGraphic healthbarComponent],
ooGraphicLayer = GameLayer 10,
ooGameObject = obj {
goPos = position,
goStats = Just stats
}
}
-- |Projectile fired by a networked player
networkPlayerProjectile :: GameObject -- ^Game object representation at creation time of this object
-> Object
networkPlayerProjectile obj = proc objInput -> do
-- Components
basicComponent <- basicObject obj -< BasicObjectInput {
boiNetwork = oiNetwork objInput
}
statsComponent <- objectStats obj -< ObjectStatsInput {
osiNetwork = oiNetwork objInput
}
let stats = osoStats statsComponent
rec projectileComponent <- directionalProjectile (enemyTeam (goTeam obj)) obj 5 -< DirectionalProjectileInput {
dpiAllCollisions = oiCollidingWith objInput,
dpiAllObjects = oiAllObjects objInput,
dpiSpeed = fromIntegral (stSpeed stats),
dpiCurrPos = position
}
position <- (^+^ (goPos obj)) ^<< integral -< dpoMoveDelta projectileComponent
-- Return state
let (x, y) = vectorRoundedComponents position
returnA -< (defaultObjOutput objInput) {
ooKillRequest = lMerge (booObjectDestroyed basicComponent) (dpoHitTargetEvent projectileComponent),
ooGraphic = draw turretProjectileImage (Mask Nothing x y),
ooGraphicLayer = GameLayer 6,
ooGameObject = obj {
goPos = position
}
}
|
Mattiemus/LaneWars
|
Game/Client/Objects/Network.hs
|
mit
| 6,036 | 4 | 16 | 1,759 | 1,172 | 630 | 542 | 97 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module SoOSiM.Components.Thread.Types where
import Control.Lens
import Control.Concurrent.STM.TQueue
import SoOSiM.Components.Common
import SoOSiM.Components.ResourceDescriptor
import SoOSiM.Components.SoOSApplicationGraph
data ThreadState = Blocked | Waiting | Executing | Killed
deriving Eq
data Deadline = Infinity | Exact Int
deriving (Eq,Show)
instance Ord Deadline where
compare Infinity Infinity = EQ
compare (Exact i) Infinity = LT
compare Infinity (Exact i) = GT
  compare (Exact i) (Exact j) = compare i j
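-- Illustrative consequences of the ordering above (comment only):
--   compare (Exact 5) (Exact 9) == LT and compare (Exact 9) Infinity == LT,
-- i.e. every finite deadline is considered earlier than Infinity.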
data Thread
= Thread
{ -- | The thread unique id
_threadId :: ThreadId
-- | number of incoming \"ports\", each in-port has an id from 0 to (n_in - 1)
, _n_in :: Int
      -- | number of outgoing \"ports\", each out-port has an id from 0 to (n_out - 1)
, _n_out :: Int
-- | incoming ports: ntokens per port
, _in_ports :: [TQueue (Int,Int)]
-- | outgoing links
--
-- contains the pair (thread_dest_id, in_port_id) of the destination threads
, _out_ports :: [(ThreadId,TQueue (Int,Int))]
-- | Number of (simulation) cycles needed to complete one instance of the thread
, _exec_cycles :: Int
-- | resource requirements
, _rr :: ResourceDescriptor
      -- | an enumeration: 'Blocked', 'Waiting', 'Executing' or 'Killed' (see 'ThreadState')
, _execution_state :: ThreadState
-- | The id of the resource where this thread is executing
, _res_id :: ResourceId
-- | It is the SimTime when the current instance has been activated
-- this is update by the Scheduler.wake_up_threads when the thres is
-- moved from blocked to ready.
-- it can be needed to sort ready thread in FIFO order
, _activation_time :: Int
, _program :: [AppCommand]
, _localMem :: (Int,Int)
, _relativeDeadlineOut :: Deadline
, _relativeDeadlineIn :: Deadline
}
instance Show Thread where
show = show . _threadId
makeLenses ''Thread
|
christiaanb/SoOSiM-components
|
src/SoOSiM/Components/Thread/Types.hs
|
mit
| 1,994 | 0 | 12 | 484 | 330 | 202 | 128 | 35 | 0 |
-------------------------------------------------------------------------------
-- |
-- Module : System.Hardware.Arduino.SamplePrograms.SevenSegment
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Control a single seven-segment display, echoing user's key presses
-- on it verbatim. We use a shift-register to reduce the number of
-- pins we need on the Arduino to control the display.
-------------------------------------------------------------------------------
module System.Hardware.Arduino.SamplePrograms.SevenSegment where
import Control.Monad (forever)
import Control.Monad.Trans (liftIO)
import Data.Bits (testBit)
import Data.Word (Word8)
import System.IO (hSetBuffering, stdin, BufferMode(NoBuffering))
import System.Hardware.Arduino
import System.Hardware.Arduino.Parts.ShiftRegisters
import System.Hardware.Arduino.Parts.SevenSegmentCodes
-- | Connections for the Texas Instruments 74HC595 shift-register. Datasheet: <http://www.ti.com/lit/ds/symlink/sn74hc595.pdf>.
-- In our circuit, we merely use pins 8 thru 12 on the Arduino to control the 'serial', 'enable', 'rClock', 'sClock', and 'nClear'
-- lines, respectively. Since we do not need to read the output of the shift-register, we leave the 'bits' field unconnected.
sr :: SR_74HC595
sr = SR_74HC595 { serial = digital 8
, nEnable = digital 9
, rClock = digital 10
, sClock = digital 11
, nClear = digital 12
, mbBits = Nothing
}
-- | Seven-segment display demo. For each key-press, we display an equivalent pattern
-- on the connected 7-segment-display. Note that most characters are not-mappable, so
-- we use approximations if available. We use a shift-register to reduce the pin
-- requirements on the Arduino, setting the bits serially.
--
-- Parts:
--
-- * The seven-segment digit we use is a common-cathode single-digit display, such as
-- TDSG5150 (<http://www.vishay.com/docs/83126/83126.pdf>), or Microvity's IS121,
-- but almost any such digit would do. Just pay attention to the line-connections,
-- and do not forget the limiting resistors: 220 ohm's should do nicely.
--
-- * The shift-register is Texas-Instruments 74HC595: <http://www.ti.com/lit/ds/symlink/sn74hc595.pdf>.
-- Make sure to connect the register output lines to the seven-segment displays with the corresponding
-- letters. That is, shift-registers @Q_A@ (Chip-pin 15) should connect to segment @A@; @Q_B@ (Chip-pin 1)
-- to segment @B@, and so on. We do not use the shift-register @Q_H'@ (Chip-pin 9) in this design.
--
-- <<http://github.com/LeventErkok/hArduino/raw/master/System/Hardware/Arduino/SamplePrograms/Schematics/SevenSegment.png>>
sevenSegment :: IO ()
sevenSegment = withArduino False "/dev/cu.usbmodemfd131" $ do
initialize sr
liftIO $ do hSetBuffering stdin NoBuffering
putStrLn "Seven-Segment-Display demo."
putStrLn "For each key-press, we will try to display it as a 7-segment character."
putStrLn "If there is no good mapping (which is common), we'll just display a dot."
putStrLn ""
putStrLn "Press-keys to be shown on the display, Ctrl-C to quit.."
forever repl
where pushWord w = do mapM_ (push sr) [w `testBit` i | i <- [0..7]]
store sr
repl = do c <- liftIO getChar
case char2SS c of
Just w -> pushWord w
Nothing -> pushWord (0x01::Word8) -- the dot, which also nicely covers the '.'
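-- A small helper sketch (not part of the original program), assuming the same
-- wiring as 'sr': shift a single character's 7-segment pattern out to the
-- register and latch it, falling back to the dot for unmappable characters.
-- The name 'showChar7' is illustrative.
showChar7 :: Char -> Arduino ()
showChar7 c = do
    let w = maybe (0x01 :: Word8) id (char2SS c)
    mapM_ (push sr) [w `testBit` i | i <- [0..7]]
    store sr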
|
aufheben/lambda-arduino
|
packages/hArduino-0.9/System/Hardware/Arduino/SamplePrograms/SevenSegment.hs
|
mit
| 3,812 | 0 | 13 | 934 | 374 | 213 | 161 | 32 | 2 |
module Graphics.Rendering.OpenGL.Extensions where
import Data.List as List
import Graphics.Rendering.OpenGL as OpenGL
-- | Set up a perspective frustum of the given width and height, with the given near and far planes, centred on the origin.
originFrustum :: Rational -> Rational -> Rational -> Rational -> IO()
originFrustum = \width height near far -> do
let x_radius = ((/) width 2)
let y_radius = ((/) height 2)
let bounds = [(-) 0 x_radius, (+) 0 x_radius, (-) 0 y_radius, (+) 0 y_radius, near, far]
let [left, right, bottom, top, neard, fard] = (List.map fromRational bounds)
(OpenGL.frustum left right bottom top neard fard)
-- | Set up an orthographic volume of the given width, height and depth, centred on (x, y, z).
centeredOrtho :: Rational -> Rational -> Rational -> Rational -> Rational -> Rational -> IO()
centeredOrtho = \x y z width height depth -> do
let x_radius = ((/) width 2)
let y_radius = ((/) height 2)
let z_radius = ((/) depth 2)
let bounds = [(-) x x_radius, (+) x x_radius, (-) y y_radius, (+) y y_radius, (-) z z_radius, (+) z z_radius]
let [x0, x1, y0, y1, z0, z1] = (List.map fromRational bounds)
(OpenGL.ortho x0 x1 y0 y1 z0 z1)
-- | Orthographic volume of the given width, height and depth, centred on the origin.
originOrtho :: Rational -> Rational -> Rational -> IO()
originOrtho = (centeredOrtho 0 0 0)
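-- Usage sketch (not part of the original module): install an origin-centred
-- frustum on the projection matrix; the 4 x 3 size and 1/100 clip planes are
-- purely illustrative values.
setupProjection :: IO()
setupProjection = do
    (OpenGL.matrixMode OpenGL.$= OpenGL.Projection)
    OpenGL.loadIdentity
    (originFrustum 4 3 1 100)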
|
stevedonnelly/haskell
|
code/Graphics/Rendering/OpenGL/Extensions.hs
|
mit
| 1,028 | 0 | 13 | 214 | 471 | 259 | 212 | 19 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE OverloadedStrings #-}
-----------------------------------------------------------------------------
--
-- Module : IDE.Core.Data
-- Copyright : (c) Juergen Nicklisch-Franken, Hamish Mackenzie
-- License : GNU-GPL
--
-- Maintainer : <maintainer at leksah.org>
-- Stability : provisional
-- Portability : portable
--
-- | The core state of the IDE. This module is imported from every other module,
-- | and all data structures of the state are declared here, to avoid circular
-- | module dependencies.
--
-------------------------------------------------------------------------------
module IDE.Core.Types (
IDE(..)
, IDEState(..)
, IDERef
, IDEM
, IDEEventM
, IDEAction
, IDEEvent(..)
, MonadIDE
, liftIDE
, (?>>=)
, WorkspaceM
, WorkspaceAction
, runWorkspace
, PackageM
, PackageAction
, runPackage
, DebugM
, DebugAction
, runDebug
, IDEPackage(..)
, ipdPackageDir
, ipdAllDirs
, ipdLib
, ipdPackageName
, Workspace(..)
, wsAllPackages
, VCSConf
, ActionDescr(..)
, ActionString
, KeyString
, Prefs(..)
, candyState
, cabalCommand
, EditorStyle(..)
, editorStyle
, LogRefType(..)
, LogRef(..)
, logRefRootPath
, logRefFilePath
, logRefFullFilePath
, isError
, isBreakpoint
, displaySrcSpan
, colorHexString
, SearchHint(..)
, CandyTable(..)
, CandyTableForth
, CandyTableBack
, KeymapI(..)
, SpecialKeyTable
, SpecialKeyCons
, PackageDescrCache
, ModuleDescrCache
, CompletionWindow(..)
, LogLaunch(..)
, LogLaunchData(..)
, LogTag(..)
, GUIHistory
, GUIHistory'(..)
, SensitivityMask(..)
, SearchMode(..)
, StatusbarCompartment(..)
) where
import qualified IDE.YiConfig as Yi
import Graphics.UI.Gtk
(TextBuffer, MenuItem, Window(..), KeyVal(..), Color(..), Menu(..),
TreeView(..), ListStore(..), Toolbar(..))
import Data.Unique (newUnique, Unique(..))
import Graphics.UI.Frame.Panes
import Distribution.Package
(PackageName(..), PackageIdentifier(..), Dependency(..))
import Distribution.PackageDescription (BuildInfo)
import Data.Map (Map(..))
import Data.Set (Set(..))
import Data.List (nubBy)
import Control.Concurrent (MVar)
import Distribution.ModuleName (ModuleName(..))
import Graphics.UI.Gtk.Gdk.EventM (Modifier(..))
import Graphics.UI.Gtk.ActionMenuToolbar.UIManager(MergeId)
import System.Time (ClockTime(..))
import Distribution.Simple (Extension(..))
import IDE.Utils.Tool (ToolState(..), ProcessHandle)
import Data.IORef (writeIORef, readIORef, IORef(..))
import Numeric (showHex)
import Control.Event
(EventSelector(..), EventSource(..), Event(..))
import System.FilePath (dropFileName, (</>))
import IDE.Core.CTypes
import IDE.StrippedPrefs(RetrieveStrategy)
import System.IO (Handle)
import Distribution.Text (display, disp)
import Text.PrettyPrint (render)
import Control.Monad.Trans.Class (lift)
import Control.Monad.IO.Class (liftIO, MonadIO)
import Control.Monad.Trans.Reader (ReaderT(..))
import Data.Time (UTCTime(..))
import qualified VCSWrapper.Common as VCS
import qualified VCSGui.Common as VCSGUI
import qualified Data.Map as Map (Map)
import Data.Typeable (Typeable)
import Foreign (Ptr)
import Control.Monad.Reader.Class (MonadReader(..))
import Data.Text (Text)
import qualified Data.Text as T (pack, unpack)
import Language.Haskell.HLint3 (Idea(..))
import Data.Function (on)
import Control.Concurrent.STM.TVar (TVar)
import Data.Sequence (Seq)
import Data.Maybe (maybeToList)
-- ---------------------------------------------------------------------
-- IDE State
--
--
-- | The IDE state
--
data IDE = IDE {
frameState :: FrameState IDEM -- ^ state of the windows framework
, recentPanes :: [PaneName] -- ^ a list of panes which were selected last
, specialKeys :: SpecialKeyTable IDERef -- ^ a structure for emacs like keystrokes
, specialKey :: SpecialKeyCons IDERef -- ^ the first of a double keystroke
, candy :: CandyTable -- ^ table for source candy
, prefs :: Prefs -- ^ configuration preferences
, workspace :: Maybe Workspace -- ^ may be a workspace (set of packages)
, activePack :: Maybe IDEPackage
, activeExe :: Maybe Text
, bufferProjCache :: Map FilePath [IDEPackage] -- ^ cache the associated packages for a file
, allLogRefs :: Seq LogRef
, currentEBC :: (Maybe LogRef, Maybe LogRef, Maybe LogRef)
, currentHist :: Int
, systemInfo :: Maybe GenScope -- ^ the system scope
, packageInfo :: Maybe (GenScope, GenScope) -- ^ the second are the imports
, workspaceInfo :: Maybe (GenScope, GenScope) -- ^ the second are the imports
, workspInfoCache :: PackageDescrCache
, handlers :: Map Text [(Unique, IDEEvent -> IDEM IDEEvent)] -- ^ event handling table
, currentState :: IDEState
, guiHistory :: (Bool,[GUIHistory],Int)
, findbar :: (Bool,Maybe (Toolbar,ListStore Text))
, toolbar :: (Bool,Maybe Toolbar)
, recentFiles :: [FilePath]
, recentWorkspaces :: [FilePath]
, runningTool :: Maybe ProcessHandle
, debugState :: Maybe (IDEPackage, ToolState)
, completion :: ((Int, Int), Maybe CompletionWindow)
, yiControl :: Yi.Control
, serverQueue :: Maybe (MVar (ServerCommand, ServerAnswer -> IDEM ()))
, server :: Maybe Handle
, hlintQueue :: Maybe (TVar [Either FilePath FilePath])
, vcsData :: (Map FilePath MenuItem, Maybe (Maybe Text)) -- menus for packages, password
, logLaunches :: Map.Map Text LogLaunchData
, autoCommand :: IDEAction
, autoURI :: Maybe Text
} --deriving Show
--
-- | A mutable reference to the IDE state
--
type IDERef = IORef IDE
--
-- | The IDE Monad
--
type IDEM = ReaderT IDERef IO
--
-- | A shorthand for a reader monad for a mutable reference to the IDE state
-- which does not return a value
--
type IDEAction = IDEM ()
data IDEState =
-- | Leksah is in startup mode
IsStartingUp
-- | Leksah is about to go down
| IsShuttingDown
-- | Leksah is running
| IsRunning
-- | The flipper is used to switch between sources
| IsFlipping TreeView
-- | The completion feature is used
| IsCompleting Connections
class (Functor m, Monad m, MonadIO m) => MonadIDE m where
liftIDE :: IDEM a -> m a
instance MonadIDE IDEM where
liftIDE = id
instance MonadIDE WorkspaceM where
liftIDE = lift
(?>>=) :: Monad m => m (Maybe a) -> (a -> m ()) -> m ()
a ?>>= b = do
mA <- a
case mA of
Just v -> b v
Nothing -> return ()
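-- Usage sketch (illustrative, not from the original source): run the handler only
-- when the query yields a value, e.g.
--   (return (Just "hello") :: IDEM (Maybe Text)) ?>>= (liftIO . putStrLn . T.unpack)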
-- ---------------------------------------------------------------------
-- Monad for Gtk events (use onIDE instead of on)
--
type IDEEventM t = ReaderT IDERef (ReaderT (Ptr t) IO)
instance MonadIDE (IDEEventM t) where
liftIDE f = do
ideR <- ask
liftIO $ runReaderT f ideR
-- ---------------------------------------------------------------------
-- Monad for functions that need an open workspace
--
type WorkspaceM = ReaderT Workspace IDEM
type WorkspaceAction = WorkspaceM ()
runWorkspace :: WorkspaceM a -> Workspace -> IDEM a
runWorkspace = runReaderT
-- ---------------------------------------------------------------------
-- Monad for functions that need an active package
--
type PackageM = ReaderT IDEPackage WorkspaceM
type PackageAction = PackageM ()
instance MonadIDE PackageM where
liftIDE = lift . lift
runPackage :: PackageM a -> IDEPackage -> WorkspaceM a
runPackage = runReaderT
-- ---------------------------------------------------------------------
-- Monad for functions that need to use the GHCi debugger
--
type DebugM = ReaderT (IDEPackage, ToolState) IDEM
type DebugAction = DebugM ()
runDebug :: DebugM a -> (IDEPackage, ToolState) -> IDEM a
runDebug = runReaderT
-- ---------------------------------------------------------------------
-- Events which can be signalled and handled
--
data IDEEvent =
        InfoChanged Bool -- True if this is the initial info change, False otherwise
| UpdateWorkspaceInfo
| SelectInfo Text Bool -- navigate to source (== True)
| SelectIdent Descr
| LogMessage Text LogTag
| RecordHistory GUIHistory
| Sensitivity [(SensitivityMask,Bool)]
| SearchMeta Text
| StartFindInitial
| GotoDefinition Descr
| LoadSession FilePath
| SaveSession FilePath
| UpdateRecent
| VariablesChanged
| ErrorChanged Bool
| ErrorAdded Bool Int LogRef
| CurrentErrorChanged (Maybe LogRef)
| BreakpointChanged
| CurrentBreakChanged (Maybe LogRef)
| TraceChanged
| GetTextPopup (Maybe (IDERef -> Menu -> IO ()))
| StatusbarChanged [StatusbarCompartment]
| WorkspaceChanged Bool Bool -- ^ showPane updateFileCache
| SelectSrcSpan (Maybe SrcSpan)
| SavedFile FilePath
instance Event IDEEvent Text where
getSelector (InfoChanged _) = "InfoChanged"
getSelector UpdateWorkspaceInfo = "UpdateWorkspaceInfo"
getSelector (LogMessage _ _) = "LogMessage"
getSelector (SelectInfo _ _) = "SelectInfo"
getSelector (SelectIdent _) = "SelectIdent"
getSelector (RecordHistory _) = "RecordHistory"
getSelector (Sensitivity _) = "Sensitivity"
getSelector (SearchMeta _) = "SearchMeta"
getSelector (StartFindInitial) = "StartFindInitial"
getSelector (GotoDefinition _) = "GotoDefinition"
getSelector (LoadSession _) = "LoadSession"
getSelector (SaveSession _) = "SaveSession"
getSelector UpdateRecent = "UpdateRecent"
getSelector VariablesChanged = "VariablesChanged"
getSelector (ErrorChanged _) = "ErrorChanged"
getSelector (ErrorAdded _ _ _) = "ErrorAdded"
getSelector (CurrentErrorChanged _) = "CurrentErrorChanged"
getSelector BreakpointChanged = "BreakpointChanged"
getSelector (CurrentBreakChanged _) = "CurrentBreakChanged"
getSelector TraceChanged = "TraceChanged"
getSelector (GetTextPopup _) = "GetTextPopup"
getSelector (StatusbarChanged _) = "StatusbarChanged"
getSelector (WorkspaceChanged _ _) = "WorkspaceChanged"
getSelector (SelectSrcSpan _) = "SelectSrcSpan"
getSelector (SavedFile _) = "SavedFile"
instance EventSource IDERef IDEEvent IDEM Text where
canTriggerEvent _ "InfoChanged" = True
canTriggerEvent _ "UpdateWorkspaceInfo" = True
canTriggerEvent _ "LogMessage" = True
canTriggerEvent _ "SelectInfo" = True
canTriggerEvent _ "SelectIdent" = True
canTriggerEvent _ "RecordHistory" = True
canTriggerEvent _ "Sensitivity" = True
canTriggerEvent _ "DescrChoice" = True
canTriggerEvent _ "SearchMeta" = True
canTriggerEvent _ "StartFindInitial" = True
canTriggerEvent _ "SearchSymbolDialog" = True
canTriggerEvent _ "GotoDefinition" = True
canTriggerEvent _ "LoadSession" = True
canTriggerEvent _ "SaveSession" = True
canTriggerEvent _ "UpdateRecent" = True
canTriggerEvent _ "VariablesChanged" = True
canTriggerEvent _ "ErrorChanged" = True
canTriggerEvent _ "ErrorAdded" = True
canTriggerEvent _ "CurrentErrorChanged" = True
canTriggerEvent _ "BreakpointChanged" = True
canTriggerEvent _ "CurrentBreakChanged" = True
canTriggerEvent _ "TraceChanged" = True
canTriggerEvent _ "GetTextPopup" = True
canTriggerEvent _ "StatusbarChanged" = True
canTriggerEvent _ "WorkspaceChanged" = True
canTriggerEvent _ "SelectSrcSpan" = True
canTriggerEvent _ "SavedFile" = True
canTriggerEvent _ _ = False
getHandlers ideRef = do
ide <- liftIO $ readIORef ideRef
return (handlers ide)
setHandlers ideRef nh = do
ide <- liftIO $ readIORef ideRef
liftIO $ writeIORef ideRef (ide {handlers= nh})
myUnique _ =
liftIO newUnique
instance EventSelector Text
-- ---------------------------------------------------------------------
-- IDEPackages
--
data IDEPackage = IDEPackage {
ipdPackageId :: PackageIdentifier
, ipdCabalFile :: FilePath
, ipdDepends :: [Dependency]
, ipdModules :: Map ModuleName BuildInfo
, ipdHasLibs :: Bool
, ipdExes :: [Text]
, ipdTests :: [Text]
, ipdBenchmarks :: [Text]
, ipdMain :: [(FilePath, BuildInfo, Bool)]
, ipdExtraSrcs :: Set FilePath
, ipdSrcDirs :: [FilePath] -- ^ Relative paths to the source directories
, ipdExtensions :: [Extension]
, ipdConfigFlags :: [Text]
, ipdBuildFlags :: [Text]
, ipdTestFlags :: [Text]
, ipdHaddockFlags :: [Text]
, ipdExeFlags :: [Text]
, ipdInstallFlags :: [Text]
, ipdRegisterFlags :: [Text]
, ipdUnregisterFlags :: [Text]
, ipdSdistFlags :: [Text]
, ipdSandboxSources :: [IDEPackage]
}
deriving (Eq)
instance Show IDEPackage where
show p = show "IDEPackage for " ++ (render . disp) (ipdPackageId p)
instance Ord IDEPackage where
compare x y = compare (ipdPackageId x) (ipdPackageId y)
-- | The directory of the cabal file
ipdPackageDir :: IDEPackage -> FilePath
ipdPackageDir = dropFileName . ipdCabalFile
-- | Gets the package name
ipdPackageName :: IDEPackage -> Text
ipdPackageName = T.pack . unPackageName . pkgName . ipdPackageId
-- | Gets the library name if the package has a library component
ipdLib :: IDEPackage -> Maybe Text
ipdLib pkg = if ipdHasLibs pkg then Just (ipdPackageName pkg) else Nothing
-- | All directories of the package and those of all its source dependencies
ipdAllDirs :: IDEPackage -> [FilePath]
ipdAllDirs p = ipdPackageDir p : (ipdSandboxSources p >>= ipdAllDirs)
-- ---------------------------------------------------------------------
-- Workspace
--
data Workspace = Workspace {
wsVersion :: Int
, wsSaveTime :: Text
, wsName :: Text
, wsFile :: FilePath
, wsPackages :: [IDEPackage]
, wsPackagesFiles :: [FilePath]
, wsActivePackFile:: Maybe FilePath
, wsActiveExe :: Maybe Text
, wsNobuildPack :: [IDEPackage]
, packageVcsConf :: Map FilePath VCSConf -- ^ (FilePath to package, Version-Control-System Configuration)
} deriving Show
-- | Includes sandbox sources
wsAllPackages :: Workspace -> [IDEPackage]
wsAllPackages w = nubBy ((==) `on` ipdCabalFile) $ wsPackages w ++ (wsPackages w >>= ipdSandboxSources)
-- ---------------------------------------------------------------------
-- Other data structures which are used in the state
--
--
-- | ActionDescr is a data structure from which GtkActions are built, which are used for
-- menus, toolbars, and accelerator keystrokes
--
data ActionDescr alpha = AD {
name :: ActionString
, label :: Text
, tooltip :: Maybe Text
, stockID :: Maybe Text
, action :: ReaderT alpha IO ()
, accelerator :: [KeyString]
, isToggle :: Bool
}
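-- An illustrative value (comment only; 'openFileAction' is a hypothetical action):
--   AD "FileOpen" "_Open" Nothing (Just "gtk-open") openFileAction ["<ctrl>o"] False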
type ActionString = Text
type KeyString = Text
--
-- | Preferences is a data structure to hold configuration data
--
data Prefs = Prefs {
prefsFormat :: Int
, prefsSaveTime :: Text
, showLineNumbers :: Bool
, rightMargin :: (Bool, Int)
, tabWidth :: Int
, wrapLines :: Bool
, sourceCandy :: (Bool,Text)
, darkUserInterface :: Bool
, saveSessionOnClose :: Bool
, keymapName :: Text
, forceLineEnds :: Bool
, removeTBlanks :: Bool
, textviewFont :: Maybe Text
, sourceStyle :: (Bool, Text)
, foundBackgroundLight :: Color
, matchBackgroundLight :: Color
, contextBackgroundLight :: Color
, breakpointBackgroundLight :: Color
, lintBackgroundLight :: Color
, foundBackgroundDark :: Color
, matchBackgroundDark :: Color
, contextBackgroundDark :: Color
, breakpointBackgroundDark :: Color
, lintBackgroundDark :: Color
, autoLoad :: Bool
, textEditorType :: Text
, logviewFont :: Maybe Text
, defaultSize :: (Int,Int)
, browser :: Text
, pathForCategory :: [(Text, PanePath)]
, defaultPath :: PanePath
, categoryForPane :: [(Text, Text)]
, packageBlacklist :: [Dependency]
, collectAtStart :: Bool
, useCtrlTabFlipping :: Bool
, docuSearchURL :: Text
, completeRestricted :: Bool
, saveAllBeforeBuild :: Bool
, jumpToWarnings :: Bool
, useVado :: Bool
, useCabalDev :: Bool
, backgroundBuild :: Bool
, runUnitTests :: Bool
, makeMode :: Bool
, singleBuildWithoutLinking :: Bool
, dontInstallLast :: Bool
, printEvldWithShow :: Bool
, breakOnException :: Bool
, breakOnError :: Bool
, printBindResult :: Bool
, serverIP :: Text
, showHiddenFiles :: Bool
, showWorkspaceIcons :: Bool
    -- Also used by the server
, serverPort :: Int
, sourceDirectories :: [FilePath]
, unpackDirectory :: Maybe FilePath
, retrieveURL :: Text
, retrieveStrategy :: RetrieveStrategy
, endWithLastConn :: Bool
} deriving(Eq,Show)
cabalCommand :: Prefs -> FilePath
cabalCommand p = if useCabalDev p then "cabal-dev" else "cabal"
candyState :: Prefs -> Bool
candyState = fst . sourceCandy
data EditorStyle = EditorStyle { styleName :: Maybe Text
, preferDark :: Bool
, foundBG :: (Color, Color)
, matchBG :: (Color, Color)
, contextBG :: (Color, Color)
, breakpointBG :: (Color, Color)
, lintBG :: (Color, Color)
}
editorStyle :: Bool -> Prefs -> EditorStyle
editorStyle preferDark prefs = EditorStyle { styleName = case sourceStyle prefs of
(False,_) -> Nothing
(True,v) -> Just v
, preferDark = preferDark
, foundBG = (foundBackgroundDark prefs, foundBackgroundLight prefs)
, matchBG = (matchBackgroundDark prefs, matchBackgroundLight prefs)
, contextBG = (contextBackgroundDark prefs, contextBackgroundLight prefs)
, breakpointBG = (breakpointBackgroundDark prefs, breakpointBackgroundLight prefs)
, lintBG = (lintBackgroundDark prefs, lintBackgroundLight prefs)
}
data SearchHint = Forward | Backward | Insert | Delete | Initial
deriving (Eq)
#ifndef LEKSAH_WITH_YI
instance Ord Modifier
where compare a b = compare (fromEnum a) (fromEnum b)
#endif
-- Version-Control-System Configuration
type VCSConf = (VCS.VCSType, VCS.Config, Maybe VCSGUI.MergeTool)
--
-- | Other types
--
data LogLaunchData = LogLaunchData {
logLaunch :: LogLaunch
, mbPid :: Maybe ProcessHandle
}
data LogLaunch = LogLaunch {
logBuffer :: TextBuffer
} deriving Typeable
-- Order determines priority of the icons in the gutter
data LogRefType = ContextRef | BreakpointRef | ErrorRef | TestFailureRef | WarningRef | LintRef
deriving (Eq, Ord, Show, Enum, Bounded)
-- | Represents a message about a part of the source code
data LogRef = LogRef {
logRefSrcSpan :: SrcSpan
, logRefPackage :: IDEPackage
, refDescription :: Text
, logRefIdea :: Maybe (Text, Idea)
, logLines :: Maybe (Int, Int)
, logRefType :: LogRefType
} deriving (Eq)
instance Show LogRef where
show lr = T.unpack (refDescription lr) ++ displaySrcSpan (logRefSrcSpan lr)
displaySrcSpan :: SrcSpan -> String
displaySrcSpan s = srcSpanFilename s ++ ":" ++
if srcSpanStartLine s == srcSpanEndLine s
then show (srcSpanStartLine s) ++ ":" ++
if srcSpanStartColumn s == srcSpanEndColumn s
then show (srcSpanStartColumn s)
else show (srcSpanStartColumn s) ++ "-" ++ show (srcSpanEndColumn s)
else show (srcSpanStartLine s) ++ ":" ++
show (srcSpanStartColumn s) ++ "-" ++ show (srcSpanEndColumn s)
logRefRootPath :: LogRef -> FilePath
logRefRootPath = ipdPackageDir . logRefPackage
logRefFilePath :: LogRef -> FilePath
logRefFilePath = srcSpanFilename . logRefSrcSpan
logRefFullFilePath :: LogRef -- ^ The log ref
-> FilePath -- ^ the result
logRefFullFilePath lr = logRefRootPath lr </> logRefFilePath lr
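-- e.g. (illustrative paths) a ref whose package directory is "/home/u/proj/" and whose
-- span file is "src/Main.hs" resolves to "/home/u/proj/src/Main.hs".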
isError :: LogRef -> Bool
isError = (== ErrorRef) . logRefType
isBreakpoint :: LogRef -> Bool
isBreakpoint = (== BreakpointRef) . logRefType
isContext :: LogRef -> Bool
isContext = (== ContextRef) . logRefType
-- This should probably be in Gtk2Hs along with a suitable parser
colorHexString :: Color -> String
colorHexString (Color r g b) = '#' : pad (showHex r "")
++ pad (showHex g "")
++ pad (showHex b "")
where pad s = replicate (4 - length s) '0' ++ s
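-- e.g. colorHexString (Color 65535 0 0) == "#ffff00000000" (pure red, 16 bits per channel)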
newtype CandyTable = CT (CandyTableForth,CandyTableBack)
type CandyTableForth = [(Bool,Text,Text)]
type CandyTableBack = [(Text,Text,Int)]
newtype KeymapI = KM (Map ActionString
[(Maybe (Either KeyString (KeyString,KeyString)), Maybe Text)])
type SpecialKeyTable alpha = Map (KeyVal,[Modifier]) (Map (KeyVal,[Modifier]) (ActionDescr alpha))
type SpecialKeyCons alpha = Maybe (Map (KeyVal, [Modifier]) (ActionDescr alpha), Text)
data LogTag = LogTag | ErrorTag | FrameTag | InputTag | InfoTag
-- | the first one is the new and the second the old state
type GUIHistory = (GUIHistory', GUIHistory')
data GUIHistory' =
ModuleSelected {
moduleS :: Maybe ModuleName
, facetS :: Maybe Text}
| ScopeSelected {
scope :: Scope
, blacklist :: Bool}
| InfoElementSelected {
mbInfo :: Maybe Descr}
| PaneSelected {
paneN :: Maybe Text}
deriving (Eq, Ord, Show)
data SensitivityMask =
SensitivityForwardHist
| SensitivityBackwardHist
| SensitivityProjectActive
| SensitivityWorkspaceOpen
| SensitivityError
| SensitivityEditor
| SensitivityInterpreting
deriving (Eq, Ord, Show)
data SearchMode = Exact {caseSense :: Bool} | Prefix {caseSense :: Bool}
| Regex {caseSense :: Bool}
deriving (Eq,Ord,Read,Show)
data CompletionWindow = CompletionWindow {
cwWindow :: Window,
cwTreeView :: TreeView,
cwListStore :: ListStore Text}
data StatusbarCompartment =
CompartmentCommand Text
| CompartmentPane (Maybe (IDEPane IDEM))
| CompartmentPackage Text
| CompartmentState Text
| CompartmentOverlay Bool
| CompartmentBufferPos (Int,Int)
| CompartmentBuild Bool
| CompartmentCollect Bool
type PackageDescrCache = Map PackageIdentifier ModuleDescrCache
type ModuleDescrCache = Map ModuleName (UTCTime, Maybe FilePath, ModuleDescr)
|
ChrisLane/leksah
|
src/IDE/Core/Types.hs
|
gpl-2.0
| 24,398 | 0 | 15 | 6,873 | 5,340 | 3,143 | 2,197 | 519 | 3 |
-- OmegaGB Copyright 2007 Bit Connor
-- This program is distributed under the terms of the GNU General Public License
-----------------------------------------------------------------------------
-- |
-- Module : GuiTest02
-- Copyright : (c) Bit Connor 2007 <[email protected]>
-- License : GPL
-- Maintainer : [email protected]
-- Stability : in-progress
--
-- OmegaGB
-- Game Boy Emulator
--
-- This module runs a gtk+ application that emulates a ROM, including a
-- display and push buttons for joypad control.
--
-----------------------------------------------------------------------------
module GuiTest02 where
import qualified Control.Exception as C
import Maybe(fromJust)
import Data.IORef
import Data.Word
import Data.Array.IArray
import Data.Array.MArray
import Text.Printf
import Graphics.UI.Gtk
import Graphics.UI.Gtk.Glade
import RomImage
import GuiDrawUtil
import Machine
import Joypad
import MachineIO
gladeFile = "guis/test03/test03.glade"
test02 :: IO ()
test02 = do
initGUI
windowXml <- C.catch
((xmlNew gladeFile) >>= return . fromJust)
(\e -> putStrLn ("Error Loading " ++ gladeFile) >> C.throwIO e)
let bindWidget x y = xmlGetWidget windowXml x y
main_window <- bindWidget castToWindow "main_window"
menu_open <- bindWidget castToMenuItem "menu_open"
menu_quit <- bindWidget castToMenuItem "menu_quit"
menu_about <- bindWidget castToMenuItem "menu_about"
display <- bindWidget castToDrawingArea "display"
joypad_right <- bindWidget castToToggleButton "joypad_right"
joypad_left <- bindWidget castToToggleButton "joypad_left"
joypad_up <- bindWidget castToToggleButton "joypad_up"
joypad_down <- bindWidget castToToggleButton "joypad_down"
joypad_a <- bindWidget castToToggleButton "joypad_a"
joypad_b <- bindWidget castToToggleButton "joypad_b"
joypad_select <- bindWidget castToToggleButton "joypad_select"
joypad_start <- bindWidget castToToggleButton "joypad_start"
displayPixBuf <- pixbufNew ColorspaceRgb False 8 160 144
pbData <- (pixbufGetPixels displayPixBuf :: IO (PixbufData Int Word8))
row <- pixbufGetRowstride displayPixBuf
chan <- pixbufGetNChannels displayPixBuf
bits <- pixbufGetBitsPerSample displayPixBuf
state <- newIORef Nothing
-- for video capture, counts the current frame number
--n <- newIORef (0::Int)
let
------------------------------------------------------------------------
refreshDisplay d = do
-- draw into the Pixbuf
doFromTo 0 143 $ \y ->
doFromTo 0 159 $ \x -> do
let color = d!(y, x)
colorByte = (fromIntegral color) * 85
writeArray pbData (x*chan+y*row) colorByte
writeArray pbData (1+x*chan+y*row) colorByte
writeArray pbData (2+x*chan+y*row) colorByte
widgetQueueDraw display
------------------------------------------------------------------------
step = do
s <- readIORef state
case s of
Nothing -> return ()
Just s' -> do
right <- toggleButtonGetActive joypad_right
left <- toggleButtonGetActive joypad_left
up <- toggleButtonGetActive joypad_up
down <- toggleButtonGetActive joypad_down
a <- toggleButtonGetActive joypad_a
b <- toggleButtonGetActive joypad_b
select <- toggleButtonGetActive joypad_select
start <- toggleButtonGetActive joypad_start
let jp = initJoypadKeyStates right left up down a b select start
let (d, s'') = updateMachineDisplayFrame jp s'
writeIORef state (Just s'')
refreshDisplay d
--- for video capture, dump current frame to png file
--num <- readIORef n
--pixbufSave displayPixBuf ("tmp/f" ++ (printf "%04d" num) ++ ".png") "png" []
--modifyIORef n (+1)
---
return True
------------------------------------------------------------------------
------------------------------------------------------------------------
open = do
fileSelect <- fileChooserDialogNew
(Just "Open Game Boy ROM")
(Just main_window)
FileChooserActionOpen
[("gtk-open", ResponseOk), ("gtk-cancel", ResponseDeleteEvent)]
response <- dialogRun fileSelect
case response of
ResponseOk -> do
romFile <- fileChooserGetFilename fileSelect
romImage <- loadRomImage (fromJust romFile)
writeIORef state $ Just (initialMachineState romImage)
ResponseDeleteEvent -> do
return ()
widgetDestroy fileSelect
-- register Idle action
------------------------------------------------------------------------
quit = widgetDestroy main_window >> mainQuit
------------------------------------------------------------------------
menu_quit `onActivateLeaf` quit
main_window `onDestroy` quit
menu_open `onActivateLeaf` open
menu_about `onActivateLeaf` do
dia <- aboutDialogNew
aboutDialogSetName dia "OmegaGB test01"
aboutDialogSetComments dia "Game Boy Emulator Development Test"
dialogRun dia
widgetDestroy dia
display `onSizeRequest` return (Requisition 160 144)
display `onExpose` updateCanvas display displayPixBuf
idleAdd step priorityDefaultIdle
mainGUI
return ()
|
bitc/omegagb
|
src/GuiTest02.hs
|
gpl-2.0
| 5,550 | 0 | 23 | 1,363 | 1,087 | 525 | 562 | 100 | 3 |
import Distribution.Simple
import Distribution.Simple.Setup(InstallFlags(..))
import Distribution.Simple.LocalBuildInfo (LocalBuildInfo(..))
import Distribution.PackageDescription (PackageDescription(..))
import System.Environment
import System.FilePath.Posix
import System.Directory
import System.Process
import System.FilePath
import System.Posix.User
import System.Exit
import Control.Monad
includeDir = "/usr/local/include/ass"
rcfile = ".assrc"
main = defaultMainWithHooks $
simpleUserHooks
{
            -- Sequencing the hooks with (>>) directly would use the function
            -- (reader) monad and silently drop installRC, so apply both explicitly.
            postInst = \args flags pkg lbi -> installRC args flags pkg lbi
                                           >> installHeaders args flags pkg lbi
}
installHeaders :: Args -> InstallFlags -> PackageDescription -> LocalBuildInfo -> IO ()
installHeaders _ _ _ _ = do
putStrLn "Copying headers..."
createDirectoryIfMissing True includeDir
copyFile "includes/ass.hpp" (includeDir </> "ass.hpp")
copyFile "includes/ass-boost.hpp" (includeDir </> "ass-boost.hpp")
copyFile "includes/ass-cat.hpp" (includeDir </> "ass-cat.hpp")
putStrLn "Done."
installRC :: Args -> InstallFlags -> PackageDescription -> LocalBuildInfo -> IO ()
installRC _ _ _ _ = do
putStrLn $ "Copying " ++ rcfile ++ "..."
dest <- liftM (</> rcfile) getHomeDirectory
doesFileExist dest >>= \n ->
if n then putStrLn $ "~/" ++ rcfile ++ " already installed."
else copyFile rcfile dest
|
awgn/ass
|
Setup.hs
|
gpl-2.0
| 1,395 | 0 | 12 | 300 | 339 | 179 | 160 | 32 | 2 |
{-|
Hledger.Cli re-exports the options, utilities and commands provided by
the hledger command-line program. This module also aggregates the
built-in unit tests defined throughout hledger and hledger-lib, and
adds some more which are easier to define here.
-}
{-# LANGUAGE OverloadedStrings #-}
module Hledger.Cli (
module Hledger.Cli.Accounts,
module Hledger.Cli.Add,
module Hledger.Cli.Balance,
module Hledger.Cli.Balancesheet,
module Hledger.Cli.Cashflow,
module Hledger.Cli.Help,
module Hledger.Cli.Histogram,
module Hledger.Cli.Incomestatement,
module Hledger.Cli.Info,
module Hledger.Cli.Man,
module Hledger.Cli.Print,
module Hledger.Cli.Register,
module Hledger.Cli.Stats,
module Hledger.Cli.CliOptions,
module Hledger.Cli.DocFiles,
module Hledger.Cli.Utils,
module Hledger.Cli.Version,
tests_Hledger_Cli,
module Hledger,
module System.Console.CmdArgs.Explicit
)
where
import Data.Monoid ((<>))
import Data.Text (Text)
import qualified Data.Text as T
import Data.Time.Calendar
import System.Console.CmdArgs.Explicit hiding (Name) -- don't clash with hledger-ui
import Test.HUnit
import Hledger
import Hledger.Cli.Accounts
import Hledger.Cli.Add
import Hledger.Cli.Balance
import Hledger.Cli.Balancesheet
import Hledger.Cli.Cashflow
import Hledger.Cli.Histogram
import Hledger.Cli.Help
import Hledger.Cli.Incomestatement
import Hledger.Cli.Info
import Hledger.Cli.Man
import Hledger.Cli.Print
import Hledger.Cli.Register
import Hledger.Cli.Stats
import Hledger.Cli.CliOptions
import Hledger.Cli.DocFiles
import Hledger.Cli.Utils
import Hledger.Cli.Version
tests_Hledger_Cli :: Test
tests_Hledger_Cli = TestList
[
tests_Hledger
-- ,tests_Hledger_Cli_Add
,tests_Hledger_Cli_Balance
,tests_Hledger_Cli_Balancesheet
,tests_Hledger_Cli_Cashflow
-- ,tests_Hledger_Cli_Histogram
,tests_Hledger_Cli_Incomestatement
,tests_Hledger_Cli_CliOptions
-- ,tests_Hledger_Cli_Print
,tests_Hledger_Cli_Register
-- ,tests_Hledger_Cli_Stats
,"apply account directive" ~:
let ignoresourcepos j = j{jtxns=map (\t -> t{tsourcepos=nullsourcepos}) (jtxns j)} in
let sameParse str1 str2 = do j1 <- readJournal Nothing Nothing True Nothing str1 >>= either error' (return . ignoresourcepos)
j2 <- readJournal Nothing Nothing True Nothing str2 >>= either error' (return . ignoresourcepos)
j1 `is` j2{jlastreadtime=jlastreadtime j1, jfiles=jfiles j1} --, jparsestate=jparsestate j1}
in sameParse
("2008/12/07 One\n alpha $-1\n beta $1\n" <>
"apply account outer\n2008/12/07 Two\n aigh $-2\n bee $2\n" <>
"apply account inner\n2008/12/07 Three\n gamma $-3\n delta $3\n" <>
"end apply account\n2008/12/07 Four\n why $-4\n zed $4\n" <>
"end apply account\n2008/12/07 Five\n foo $-5\n bar $5\n"
)
("2008/12/07 One\n alpha $-1\n beta $1\n" <>
"2008/12/07 Two\n outer:aigh $-2\n outer:bee $2\n" <>
"2008/12/07 Three\n outer:inner:gamma $-3\n outer:inner:delta $3\n" <>
"2008/12/07 Four\n outer:why $-4\n outer:zed $4\n" <>
"2008/12/07 Five\n foo $-5\n bar $5\n"
)
,"apply account directive should preserve \"virtual\" posting type" ~: do
j <- readJournal Nothing Nothing True Nothing "apply account test\n2008/12/07 One\n (from) $-1\n (to) $1\n" >>= either error' return
let p = head $ tpostings $ head $ jtxns j
assertBool "" $ paccount p == "test:from"
assertBool "" $ ptype p == VirtualPosting
,"account aliases" ~: do
j <- readJournal Nothing Nothing True Nothing "!alias expenses = equity:draw:personal\n1/1\n (expenses:food) 1\n" >>= either error' return
let p = head $ tpostings $ head $ jtxns j
assertBool "" $ paccount p == "equity:draw:personal:food"
,"ledgerAccountNames" ~:
ledgerAccountNames ledger7 `is`
["assets","assets:cash","assets:checking","assets:saving","equity","equity:opening balances",
"expenses","expenses:food","expenses:food:dining","expenses:phone","expenses:vacation",
"liabilities","liabilities:credit cards","liabilities:credit cards:discover"]
-- ,"journalCanonicaliseAmounts" ~:
-- "use the greatest precision" ~:
-- (map asprecision $ journalAmountAndPriceCommodities $ journalCanonicaliseAmounts $ journalWithAmounts ["1","2.00"]) `is` [2,2]
-- don't know what this should do
-- ,"elideAccountName" ~: do
-- (elideAccountName 50 "aaaaaaaaaaaaaaaaaaaa:aaaaaaaaaaaaaaaaaaaa:aaaaaaaaaaaaaaaaaaaa"
-- `is` "aa:aaaaaaaaaaaaaaaaaaaa:aaaaaaaaaaaaaaaaaaaa")
-- (elideAccountName 20 "aaaaaaaaaaaaaaaaaaaa:aaaaaaaaaaaaaaaaaaaa:aaaaaaaaaaaaaaaaaaaa"
-- `is` "aa:aa:aaaaaaaaaaaaaa")
,"default year" ~: do
j <- readJournal Nothing Nothing True Nothing defaultyear_journal_txt >>= either error' return
tdate (head $ jtxns j) `is` fromGregorian 2009 1 1
return ()
,"show dollars" ~: showAmount (usd 1) ~?= "$1.00"
,"show hours" ~: showAmount (hrs 1) ~?= "1.00h"
]
-- fixtures/test data
-- date1 = parsedate "2008/11/26"
-- t1 = LocalTime date1 midday
{-
samplejournal = readJournal' sample_journal_str
sample_journal_str = unlines
["; A sample journal file."
,";"
,"; Sets up this account tree:"
,"; assets"
,"; bank"
,"; checking"
,"; saving"
,"; cash"
,"; expenses"
,"; food"
,"; supplies"
,"; income"
,"; gifts"
,"; salary"
,"; liabilities"
,"; debts"
,""
,"2008/01/01 income"
," assets:bank:checking $1"
," income:salary"
,""
,"2008/06/01 gift"
," assets:bank:checking $1"
," income:gifts"
,""
,"2008/06/02 save"
," assets:bank:saving $1"
," assets:bank:checking"
,""
,"2008/06/03 * eat & shop"
," expenses:food $1"
," expenses:supplies $1"
," assets:cash"
,""
,"2008/12/31 * pay off"
," liabilities:debts $1"
," assets:bank:checking"
,""
,""
,";final comment"
]
-}
defaultyear_journal_txt :: Text
defaultyear_journal_txt = T.unlines
["Y2009"
,""
,"01/01 A"
," a $1"
," b"
]
-- write_sample_journal = writeFile "sample.journal" sample_journal_str
-- entry2_str = unlines
-- ["2007/01/27 * joes diner"
-- ," expenses:food:dining $10.00"
-- ," expenses:gifts $10.00"
-- ," assets:checking $-20.00"
-- ,""
-- ]
-- entry3_str = unlines
-- ["2007/01/01 * opening balance"
-- ," assets:cash $4.82"
-- ," equity:opening balances"
-- ,""
-- ,"2007/01/01 * opening balance"
-- ," assets:cash $4.82"
-- ," equity:opening balances"
-- ,""
-- ,"2007/01/28 coopportunity"
-- ," expenses:food:groceries $47.18"
-- ," assets:checking"
-- ,""
-- ]
-- periodic_entry1_str = unlines
-- ["~ monthly from 2007/2/2"
-- ," assets:saving $200.00"
-- ," assets:checking"
-- ,""
-- ]
-- periodic_entry2_str = unlines
-- ["~ monthly from 2007/2/2"
-- ," assets:saving $200.00 ;auto savings"
-- ," assets:checking"
-- ,""
-- ]
-- periodic_entry3_str = unlines
-- ["~ monthly from 2007/01/01"
-- ," assets:cash $4.82"
-- ," equity:opening balances"
-- ,""
-- ,"~ monthly from 2007/01/01"
-- ," assets:cash $4.82"
-- ," equity:opening balances"
-- ,""
-- ]
-- journal1_str = unlines
-- [""
-- ,"2007/01/27 * joes diner"
-- ," expenses:food:dining $10.00"
-- ," expenses:gifts $10.00"
-- ," assets:checking $-20.00"
-- ,""
-- ,""
-- ,"2007/01/28 coopportunity"
-- ," expenses:food:groceries $47.18"
-- ," assets:checking $-47.18"
-- ,""
-- ,""
-- ]
-- journal2_str = unlines
-- [";comment"
-- ,"2007/01/27 * joes diner"
-- ," expenses:food:dining $10.00"
-- ," assets:checking $-47.18"
-- ,""
-- ]
-- journal3_str = unlines
-- ["2007/01/27 * joes diner"
-- ," expenses:food:dining $10.00"
-- ,";intra-entry comment"
-- ," assets:checking $-47.18"
-- ,""
-- ]
-- journal4_str = unlines
-- ["!include \"somefile\""
-- ,"2007/01/27 * joes diner"
-- ," expenses:food:dining $10.00"
-- ," assets:checking $-47.18"
-- ,""
-- ]
-- journal5_str = ""
-- journal6_str = unlines
-- ["~ monthly from 2007/1/21"
-- ," expenses:entertainment $16.23 ;netflix"
-- ," assets:checking"
-- ,""
-- ,"; 2007/01/01 * opening balance"
-- ,"; assets:saving $200.04"
-- ,"; equity:opening balances "
-- ,""
-- ]
-- journal7_str = unlines
-- ["2007/01/01 * opening balance"
-- ," assets:cash $4.82"
-- ," equity:opening balances "
-- ,""
-- ,"2007/01/01 * opening balance"
-- ," income:interest $-4.82"
-- ," equity:opening balances "
-- ,""
-- ,"2007/01/02 * ayres suites"
-- ," expenses:vacation $179.92"
-- ," assets:checking "
-- ,""
-- ,"2007/01/02 * auto transfer to savings"
-- ," assets:saving $200.00"
-- ," assets:checking "
-- ,""
-- ,"2007/01/03 * poquito mas"
-- ," expenses:food:dining $4.82"
-- ," assets:cash "
-- ,""
-- ,"2007/01/03 * verizon"
-- ," expenses:phone $95.11"
-- ," assets:checking "
-- ,""
-- ,"2007/01/03 * discover"
-- ," liabilities:credit cards:discover $80.00"
-- ," assets:checking "
-- ,""
-- ,"2007/01/04 * blue cross"
-- ," expenses:health:insurance $90.00"
-- ," assets:checking "
-- ,""
-- ,"2007/01/05 * village market liquor"
-- ," expenses:food:dining $6.48"
-- ," assets:checking "
-- ,""
-- ]
journal7 :: Journal
journal7 = nulljournal {jtxns =
[
txnTieKnot Transaction {
tindex=0,
tsourcepos=nullsourcepos,
tdate=parsedate "2007/01/01",
tdate2=Nothing,
tstatus=Unmarked,
tcode="*",
tdescription="opening balance",
tcomment="",
ttags=[],
tpostings=
["assets:cash" `post` usd 4.82
,"equity:opening balances" `post` usd (-4.82)
],
tpreceding_comment_lines=""
}
,
txnTieKnot Transaction {
tindex=0,
tsourcepos=nullsourcepos,
tdate=parsedate "2007/02/01",
tdate2=Nothing,
tstatus=Unmarked,
tcode="*",
tdescription="ayres suites",
tcomment="",
ttags=[],
tpostings=
["expenses:vacation" `post` usd 179.92
,"assets:checking" `post` usd (-179.92)
],
tpreceding_comment_lines=""
}
,
txnTieKnot Transaction {
tindex=0,
tsourcepos=nullsourcepos,
tdate=parsedate "2007/01/02",
tdate2=Nothing,
tstatus=Unmarked,
tcode="*",
tdescription="auto transfer to savings",
tcomment="",
ttags=[],
tpostings=
["assets:saving" `post` usd 200
,"assets:checking" `post` usd (-200)
],
tpreceding_comment_lines=""
}
,
txnTieKnot Transaction {
tindex=0,
tsourcepos=nullsourcepos,
tdate=parsedate "2007/01/03",
tdate2=Nothing,
tstatus=Unmarked,
tcode="*",
tdescription="poquito mas",
tcomment="",
ttags=[],
tpostings=
["expenses:food:dining" `post` usd 4.82
,"assets:cash" `post` usd (-4.82)
],
tpreceding_comment_lines=""
}
,
txnTieKnot Transaction {
tindex=0,
tsourcepos=nullsourcepos,
tdate=parsedate "2007/01/03",
tdate2=Nothing,
tstatus=Unmarked,
tcode="*",
tdescription="verizon",
tcomment="",
ttags=[],
tpostings=
["expenses:phone" `post` usd 95.11
,"assets:checking" `post` usd (-95.11)
],
tpreceding_comment_lines=""
}
,
txnTieKnot Transaction {
tindex=0,
tsourcepos=nullsourcepos,
tdate=parsedate "2007/01/03",
tdate2=Nothing,
tstatus=Unmarked,
tcode="*",
tdescription="discover",
tcomment="",
ttags=[],
tpostings=
["liabilities:credit cards:discover" `post` usd 80
,"assets:checking" `post` usd (-80)
],
tpreceding_comment_lines=""
}
]
}
ledger7 :: Ledger
ledger7 = ledgerFromJournal Any journal7
|
mstksg/hledger
|
hledger/Hledger/Cli.hs
|
gpl-3.0
| 14,410 | 0 | 20 | 5,118 | 1,743 | 1,088 | 655 | 188 | 1 |
{-
Copyright 2012-2015 Vidar Holen
This file is part of ShellCheck.
http://www.vidarholen.net/contents/shellcheck
ShellCheck is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
ShellCheck is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
module ShellCheck.Interface where
import ShellCheck.AST
import Control.Monad.Identity
import qualified Data.Map as Map
data SystemInterface m = SystemInterface {
-- Read a file by filename, or return an error
siReadFile :: String -> m (Either ErrorMessage String)
}
-- ShellCheck input and output
data CheckSpec = CheckSpec {
csFilename :: String,
csScript :: String,
csExcludedWarnings :: [Integer],
csShellTypeOverride :: Maybe Shell
} deriving (Show, Eq)
data CheckResult = CheckResult {
crFilename :: String,
crComments :: [PositionedComment]
} deriving (Show, Eq)
emptyCheckSpec = CheckSpec {
csFilename = "",
csScript = "",
csExcludedWarnings = [],
csShellTypeOverride = Nothing
}
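-- An illustrative sketch (not part of the original interface): a spec for an
-- inline script is typically built by record-updating the defaults above.
-- The script text and shell override below are made-up example values.
exampleCheckSpec :: CheckSpec
exampleCheckSpec =
    emptyCheckSpec { csScript = "echo $1", csShellTypeOverride = Just Bash }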
-- Parser input and output
data ParseSpec = ParseSpec {
psFilename :: String,
psScript :: String
} deriving (Show, Eq)
data ParseResult = ParseResult {
prComments :: [PositionedComment],
prTokenPositions :: Map.Map Id Position,
prRoot :: Maybe Token
} deriving (Show, Eq)
-- Analyzer input and output
data AnalysisSpec = AnalysisSpec {
asScript :: Token,
asShellType :: Maybe Shell,
asExecutionMode :: ExecutionMode
}
data AnalysisResult = AnalysisResult {
arComments :: [TokenComment]
}
-- Supporting data types
data Shell = Ksh | Sh | Bash | Dash deriving (Show, Eq)
data ExecutionMode = Executed | Sourced deriving (Show, Eq)
type ErrorMessage = String
type Code = Integer
data Severity = ErrorC | WarningC | InfoC | StyleC deriving (Show, Eq, Ord)
data Position = Position {
posFile :: String, -- Filename
posLine :: Integer, -- 1 based source line
posColumn :: Integer -- 1 based source column, where tabs are 8
} deriving (Show, Eq)
data Comment = Comment Severity Code String deriving (Show, Eq)
data PositionedComment = PositionedComment Position Comment deriving (Show, Eq)
data TokenComment = TokenComment Id Comment deriving (Show, Eq)
-- For testing
mockedSystemInterface :: [(String, String)] -> SystemInterface Identity
mockedSystemInterface files = SystemInterface {
siReadFile = rf
}
where
rf file =
case filter ((== file) . fst) files of
[] -> return $ Left "File not included in mock."
[(_, contents)] -> return $ Right contents
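-- Hypothetical usage sketch (not from the original source): reading a file
-- through the mocked interface runs in Identity, so the result can be
-- unwrapped directly with runIdentity from Control.Monad.Identity.
exampleMockRead :: Either ErrorMessage String
exampleMockRead =
    runIdentity $ siReadFile (mockedSystemInterface [("lib.sh", "echo ok")]) "lib.sh"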
|
david-caro/shellcheck
|
ShellCheck/Interface.hs
|
gpl-3.0
| 3,073 | 0 | 12 | 653 | 616 | 363 | 253 | 56 | 2 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 [email protected]
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
module Game.Run.Scene.Plain.ShadeCorners
(
shadeCorners2D,
shadeCorners3D,
shadeCornersWrite2D,
) where
import MyPrelude
import Game
import Game.Run.RunData
import OpenGL
import OpenGL.Helpers
shadeCorners3D :: ShadeCorners -> Float -> Mat4 ->
Float -> Float -> Float ->
Float -> Float -> Float -> Float ->
Float -> Float -> Float -> Float -> IO ()
shadeCorners3D sh alpha projmodv p0 p1 p2
sizeX x0 x1 x2
sizeY y0 y1 y2 = do
glUseProgram $ shadeCornersPrg sh
uniformMat4 (shadeCornersUniProjModvMatrix sh) projmodv
glUniform1f (shadeCornersUniAlpha sh) $ rTF alpha
-- draw
glActiveTexture gl_TEXTURE0
glBindTexture gl_TEXTURE_2D $ shadeCornersTex sh
glBindVertexArrayOES $ shadeCornersVAO3D sh
-- populate pos
shadeCornersWrite3D sh p0 p1 p2 sizeX x0 x1 x2 sizeY y0 y1 y2
glDrawArrays gl_TRIANGLE_STRIP 0 14
shadeCorners2D :: ShadeCorners -> Float -> Mat4 -> IO ()
shadeCorners2D sh alpha projmodv = do
glUseProgram $ shadeCornersPrg sh
uniformMat4 (shadeCornersUniProjModvMatrix sh) projmodv
glUniform1f (shadeCornersUniAlpha sh) $ rTF alpha
-- draw
glDisable gl_CULL_FACE
glActiveTexture gl_TEXTURE0
glBindTexture gl_TEXTURE_2D $ shadeCornersTex sh
glBindVertexArrayOES $ shadeCornersVAO2D sh
glDrawArrays gl_TRIANGLE_STRIP 0 14
--------------------------------------------------------------------------------
-- write
-- | write corners for new size
shadeCornersWrite2D :: ShadeCorners -> Float -> Float -> IO ()
shadeCornersWrite2D sh wth hth = do
let sizeX = hth * valueSceneCornerSize
sizeY = wth * valueSceneCornerSize
a1x = rTF $ sizeX * wth
a2x = rTF $ (1 - sizeX) * wth
a3x = rTF $ wth
b1y = rTF $ sizeY * hth
b2y = rTF $ (1 - sizeY) * hth
b3y = rTF $ hth
glBindBuffer gl_ARRAY_BUFFER $ shadeCornersVBO2D sh
writeBuf gl_ARRAY_BUFFER $ \ptr -> do
pokeByteOff ptr (0 + 0) (0.0 :: GLfloat)
pokeByteOff ptr (0 + 4) (b1y :: GLfloat)
pokeByteOff ptr (16 + 0) (0.0 :: GLfloat)
pokeByteOff ptr (16 + 4) (0.0 :: GLfloat)
pokeByteOff ptr (32 + 0) (a1x :: GLfloat)
pokeByteOff ptr (32 + 4) (0.0 :: GLfloat)
pokeByteOff ptr (48 + 0) (a3x :: GLfloat)
pokeByteOff ptr (48 + 4) (0.0 :: GLfloat)
pokeByteOff ptr (64 + 0) (a2x :: GLfloat)
pokeByteOff ptr (64 + 4) (0.0 :: GLfloat)
pokeByteOff ptr (80 + 0) (a3x :: GLfloat)
pokeByteOff ptr (80 + 4) (b1y :: GLfloat)
-- begin
pokeByteOff ptr (96 + 0) (a3x :: GLfloat)
pokeByteOff ptr (96 + 4) (b1y :: GLfloat)
pokeByteOff ptr (112+ 0) (a3x :: GLfloat)
pokeByteOff ptr (112+ 4) (b2y :: GLfloat)
-- end
pokeByteOff ptr (128+ 0) (a3x :: GLfloat)
pokeByteOff ptr (128+ 4) (b2y :: GLfloat)
pokeByteOff ptr (144+ 0) (a3x :: GLfloat)
pokeByteOff ptr (144+ 4) (b3y :: GLfloat)
pokeByteOff ptr (160+ 0) (a2x :: GLfloat)
pokeByteOff ptr (160+ 4) (b3y :: GLfloat)
pokeByteOff ptr (176+ 0) (0.0 :: GLfloat)
pokeByteOff ptr (176+ 4) (b3y :: GLfloat)
pokeByteOff ptr (192+ 0) (a1x :: GLfloat)
pokeByteOff ptr (192+ 4) (b3y :: GLfloat)
pokeByteOff ptr (208+ 0) (0.0 :: GLfloat)
pokeByteOff ptr (208+ 4) (b2y :: GLfloat)
shadeCornersWrite3D :: ShadeCorners -> Float -> Float -> Float ->
Float -> Float -> Float -> Float ->
Float -> Float -> Float -> Float -> IO ()
shadeCornersWrite3D sh p0 p1 p2
sizeX x0 x1 x2
sizeY y0 y1 y2 = do
let a1x = sizeX * x0
a1y = sizeX * x1
a1z = sizeX * x2
a2x = (1 - sizeX) * x0
a2y = (1 - sizeX) * x1
a2z = (1 - sizeX) * x2
a3x = x0
a3y = x1
a3z = x2
b1x = sizeY * y0
b1y = sizeY * y1
b1z = sizeY * y2
b2x = (1 - sizeY) * y0
b2y = (1 - sizeY) * y1
b2z = (1 - sizeY) * y2
b3x = y0
b3y = y1
b3z = y2
glBindBuffer gl_ARRAY_BUFFER $ shadeCornersVBOPos3D sh
glBufferData gl_ARRAY_BUFFER 168 nullPtr gl_STREAM_DRAW
writeBuf gl_ARRAY_BUFFER $ \ptr -> do
pokeByteOff ptr (0 + 0) (rTF $ p0 + b1x :: GLfloat)
pokeByteOff ptr (0 + 4) (rTF $ p1 + b1y :: GLfloat)
pokeByteOff ptr (0 + 8) (rTF $ p2 + b1z :: GLfloat)
pokeByteOff ptr (12 + 0) (rTF $ p0 + 0 :: GLfloat)
pokeByteOff ptr (12 + 4) (rTF $ p1 + 0 :: GLfloat)
pokeByteOff ptr (12 + 8) (rTF $ p2 + 0 :: GLfloat)
pokeByteOff ptr (24 + 0) (rTF $ p0 + a1x :: GLfloat)
pokeByteOff ptr (24 + 4) (rTF $ p1 + a1y :: GLfloat)
pokeByteOff ptr (24 + 8) (rTF $ p2 + a1z :: GLfloat)
pokeByteOff ptr (36 + 0) (rTF $ p0 + a3x :: GLfloat)
pokeByteOff ptr (36 + 4) (rTF $ p1 + a3y :: GLfloat)
pokeByteOff ptr (36 + 8) (rTF $ p2 + a3z :: GLfloat)
pokeByteOff ptr (48 + 0) (rTF $ p0 + a2x :: GLfloat)
pokeByteOff ptr (48 + 4) (rTF $ p1 + a2y :: GLfloat)
pokeByteOff ptr (48 + 8) (rTF $ p2 + a2z :: GLfloat)
pokeByteOff ptr (60 + 0) (rTF $ p0 + a3x + b1x :: GLfloat)
pokeByteOff ptr (60 + 4) (rTF $ p1 + a3y + b1y :: GLfloat)
pokeByteOff ptr (60 + 8) (rTF $ p2 + a3z + b1z :: GLfloat)
-- begin
pokeByteOff ptr (72 + 0) (rTF $ p0 + a3x + b1x :: GLfloat)
pokeByteOff ptr (72 + 4) (rTF $ p1 + a3y + b1y :: GLfloat)
pokeByteOff ptr (72 + 8) (rTF $ p2 + a3z + b1z :: GLfloat)
pokeByteOff ptr (84 + 0) (rTF $ p0 + a3x + b2x :: GLfloat)
pokeByteOff ptr (84 + 4) (rTF $ p1 + a3y + b2y :: GLfloat)
pokeByteOff ptr (84 + 8) (rTF $ p2 + a3z + b2z :: GLfloat)
-- end
pokeByteOff ptr (96 + 0) (rTF $ p0 + a3x + b2x :: GLfloat)
pokeByteOff ptr (96 + 4) (rTF $ p1 + a3y + b2y :: GLfloat)
pokeByteOff ptr (96 + 8) (rTF $ p2 + a3z + b2z :: GLfloat)
pokeByteOff ptr (108+ 0) (rTF $ p0 + a3x + b3x :: GLfloat)
pokeByteOff ptr (108+ 4) (rTF $ p1 + a3y + b3y :: GLfloat)
pokeByteOff ptr (108+ 8) (rTF $ p2 + a3z + b3z :: GLfloat)
pokeByteOff ptr (120+ 0) (rTF $ p0 + a2x + b3x :: GLfloat)
pokeByteOff ptr (120+ 4) (rTF $ p1 + a2y + b3y :: GLfloat)
pokeByteOff ptr (120+ 8) (rTF $ p2 + a2z + b3z :: GLfloat)
pokeByteOff ptr (132+ 0) (rTF $ p0 + b3x :: GLfloat)
pokeByteOff ptr (132+ 4) (rTF $ p1 + b3y :: GLfloat)
pokeByteOff ptr (132+ 8) (rTF $ p2 + b3z :: GLfloat)
pokeByteOff ptr (144+ 0) (rTF $ p0 + a1x + b3x :: GLfloat)
pokeByteOff ptr (144+ 4) (rTF $ p1 + a1y + b3y :: GLfloat)
pokeByteOff ptr (144+ 8) (rTF $ p2 + a1z + b3z :: GLfloat)
pokeByteOff ptr (156+ 0) (rTF $ p0 + b2x :: GLfloat)
pokeByteOff ptr (156+ 4) (rTF $ p1 + b2y :: GLfloat)
pokeByteOff ptr (156+ 8) (rTF $ p2 + b2z :: GLfloat)
|
karamellpelle/grid
|
source/Game/Run/Scene/Plain/ShadeCorners.hs
|
gpl-3.0
| 8,013 | 8 | 20 | 2,584 | 2,915 | 1,504 | 1,411 | 144 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Network.Plan9.Consts where
import Data.Word
import Data.Text (Text)
import qualified Data.ByteString.Lazy as L
{-
-- * Qid - Server side data type for path tracking (<http://9p.cat-v.org> for details)
, Qid(..)
, emptyQid
-- * Stat - Namespace metadata /(somewhat like a unix fstat)/
, Stat(..)
, emptyStat
-- ** MsgTyp - A message payload type
, MsgTyp(..)
, typInt
-- ** NineMsg - An envelope encapsulating the various 9P2000 messages
, NineMsg(..)
, NinePkt
-}
-------------- Declare the data types !!! ------------------
-- | A variable message type that encapsulates the valid kinds of messages in a 9P2000 payload
-- see http://man.cat-v.org/plan_9/5/intro
-- and http://ericvh.github.com/9p-rfc/rfc9p2000.u.html
data NineMsg =
Tversion { tv_msize :: Word32, -- | An agreed-upon max. message size.
tv_version :: Text
}
| Rversion { rv_msize :: Word32,
rv_version :: Text
}
| Tauth { tau_afid :: Word32, -- | The auth protocol is obsolete,
tau_uname :: Text, -- so we always use NOFID, and "", ""
tau_aname :: Text
}
| Rauth { ra_aqid :: Qid }
| Rerror { re_ename :: Text
-- re_errno :: Word32 -- 9p2000.u extension
}
| Tflush { tf_oldtag :: Word16 } -- | Interrupt a pending op.
| Rflush
| Tattach { tat_fid :: Word32, -- | Establish the root fid.
tat_afid :: Word32,
tat_uname :: Text,
tat_aname :: Text
}
| Rattach { rat_qid :: Qid }
| Twalk { tw_fid :: Word32, -- | Create a fid from a fid.
tw_newfid :: Word32,
tw_wnames :: [Text]
}
| Rwalk { rwr_wqid :: [Qid] }
| Topen { to_fid :: Word32, -- | Start an IO session.
to_mode :: Word8
}
| Ropen { ro_qid :: Qid,
ro_iounit :: Word32
}
| Tcreate { tcr_fid :: Word32, -- | Create and start IO.
tcr_name :: Text,
tcr_perm :: Word32,
tcr_mode :: Word8
}
| Rcreate { rcr_qid :: Qid,
rcr_iounit :: Word32
}
| Tread { trd_fid :: Word32, -- | The quintessential IO op.
trd_offset :: Word64,
trd_count :: Word32
}
| Rread { rrd_dat :: L.ByteString }
| Twrite { twr_fid :: Word32, -- | And its little brother.
twr_offset :: Word64,
twr_dat :: L.ByteString
}
| Rwrite { rw_count :: Word32 }
| Tclunk { tcl_fid :: Word32 } -- | This means close, but for any fid.
| Rclunk
| Tremove { trm_fid :: Word32 } -- | Sayonara.
| Rremove
| Tstat { ts_fid :: Word32 } -- | Read a stat.
| Rstat { rs_stat :: Stat }
| Twstat { tws_fid :: Word32, -- | Write a stat.
tws_stat :: Stat
}
| Rwstat
deriving (Show, Eq)
-- | The auth protocol is obsolete,
-- so we always use emptyAuth.
emptyAuth = Tauth nofid "" ""
type NinePkt = (Word16,NineMsg)
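-- Illustrative sketch (not part of the original module): a session normally
-- opens with an untagged Tversion packet; the tag value notag and the default
-- message size p9_msize are defined further down in this file.
exampleVersionPkt :: NinePkt
exampleVersionPkt = (notag, Tversion (fromIntegral p9_msize) "9P2000")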
-- | A type that enumerates all the valid /(and one invalid)/ message type in 9P2000
-- The invalid type would be Terror, but the client initiates all the requests,
-- so sending it doesn't make sense. We mapped it to TFail here to denote parse
-- errors and the like.
data MsgTyp = TTversion | TRversion | TTauth | TRauth | TTattach | TRattach
| TFail | TRerror | TTflush | TRflush
| TTwalk | TRwalk | TTopen | TRopen
| TTcreate | TRcreate | TTread | TRread | TTwrite | TRwrite
| TTclunk | TRclunk | TTremove | TRremove | TTstat | TRstat
| TTwstat | TRwstat
deriving (Show, Eq, Ord, Enum)
-- | A Plan 9 Qid type. See http://9p.cat-v.org for more information
data Qid = Qid {
qid_typ :: Word8, -- qid.type[1] the type of the file (directory, etc.),
-- represented as a bit vector corresponding to the high
-- 8 bits of the file's mode word.
qid_vers :: Word32, -- qid.vers[4] version number for given path
qid_path :: Word64 -- qid.path[8] the file server's unique identification
-- for the file
} deriving (Eq, Ord, Show)
emptyQid :: Qid
emptyQid = Qid {
qid_typ = 0,
qid_vers = 0,
qid_path = 0
}
-- | Provides information on a path entry at a 9P2000 server
-- reading a directory returns a list of these.
data Stat = Stat {
st_typ :: Word16, -- type[2] used by the kernel to store the device type (~major #)
st_dev :: Word32, -- dev[4] used by the kernel to index the particular dev (~minor #)
st_qid :: Qid, -- @Qid@
st_mode :: Word32, -- mode[4] permissions and flags
st_atime :: Word32, -- atime[4] last access time
st_mtime :: Word32, -- mtime[4] last modification time
st_length :: Word64, -- length[8] length of file in bytes
st_name :: Text, -- name[ s ] file name; must be / if the file is the root directory of the server
st_uid :: Text, -- uid[ s ] owner name
st_gid :: Text, -- gid[ s ] group name
st_muid :: Text -- muid[ s ] name of the user who last modified the file
-- Extensions:
{- For dot u extensions:
There are four new fields in the stat structure supporting 9P2000 extensions - as well as new qid.type bits and mode bits.
The n_uid, n_gid, and n_muid are numeric hints that clients may use to map
numeric ids when a string to numeric id mapping facility is not available.
extension[s] is a string describing special files, depending on the mode
bit. For DSYMLINK files, the string is the target of the link. For DMDEVICE
files, the string is "b 1 2" for a block device with major 1, minor 2.
For normal files, this string is empty.
    st_extension :: Text, -- extension[ s ] For use by the UNIX extension to store data about special files (links, devices, pipes, etc.)
st_n_uid :: Word32, -- n_uid[4] numeric id of the user who owns the file
st_n_gid :: Word32, -- n_gid[4] numeric id of the group associated with the file
st_n_muid :: Word32 -- n_muid[4] numeric id of the user who last modified the file
-}
} deriving (Eq, Ord, Show)
emptyStat :: Stat
emptyStat = Stat {
st_typ = 0,
st_dev = 0,
st_qid = emptyQid,
st_mode = 0,
st_atime = 0,
st_mtime = 0,
st_length = 0,
st_name = "",
st_uid = "",
st_gid = "",
st_muid = ""
{- 9P2000.u extensions:
st_extension = "",
st_n_uid = 0,
st_n_gid = 0,
st_n_muid = 0 -}
}
-- Flags for the mode field in Topen and Tcreate messages
oread = 0 :: Word8 -- open read-only
owrite = 1 :: Word8 -- open write-only
ordwr = 2 :: Word8 -- open read-write
oexec = 3 :: Word8 -- execute (== read but check execute permission)
otrunc = 16 :: Word8 -- or'ed in (except for exec), truncate file first
ocexec = 32 :: Word8 -- or'ed in, close on exec
orclose = 64 :: Word8 -- or'ed in, remove on close
--oexcl = 0x1000 -- or'ed in, exclusive use (create only)
-- | qid.types (high 8 bits of the st_mode)
qtdir = 0x80 :: Word8
qtappend = 0x40 :: Word8
qtexcl = 0x20 :: Word8
qtmount = 0x10 :: Word8
qtauth = 0x08 :: Word8
qttmp = 0x04 :: Word8
qtlink = 0x02 :: Word8
qtfile = 0x00 :: Word8
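-- Illustrative sketch (not from the original source): the qid type bits above
-- slot directly into the qid_typ field, e.g. a directory entry could be
-- described as follows (the path value 42 is made up).
exampleDirQid :: Qid
exampleDirQid = emptyQid { qid_typ = qtdir, qid_path = 42 }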
-- | st_mode flags
dmdir = 0x80000000 :: Word32
dmappend = 0x40000000 :: Word32 -- append-only file (offset is ignored in writes)
dmexcl = 0x20000000 :: Word32 -- exclusive-use (only one client may have it open at a time)
dmmount = 0x10000000 :: Word32 -- (Bit 28 is skipped for historical reasons.)
dmauth = 0x08000000 :: Word32 -- an authentication file established by auth messages
dmtmp = 0x04000000 :: Word32 -- the contents of the file (or directory) are not included in nightly archives
-- | 9p2000.u extensions for legacy *?NIX
dmsymlink = 0x02000000 :: Word32
dmlink = 0x01000000 :: Word32 -- mode bit for hard link (Unix, 9P2000.u)
dmdevice = 0x00800000 :: Word32
dmnamedpipe = 0x00200000 :: Word32
dmsocket = 0x00100000 :: Word32
dmsetuid = 0x00080000 :: Word32
dmsetgid = 0x00040000 :: Word32
-- | The permission bits
dmread = 0x4 :: Word8 -- mode bit for read permission
dmwrite = 0x2 :: Word8 -- mode bit for write permission
dmexec = 0x1 :: Word8 -- mode bit for execute permission
{- | The most significant change of 9P2000.u (not implemented here)
- to the create operation is the new permission
- modes which allow for creation of special files. In addition to creating
- directories with DMDIR, 9P2000.u allows the creation of symlinks (DMSYMLINK),
- devices (DMDEVICE), named pipes (DMNAMEPIPE), and sockets (DMSOCKET).
- extension[s] is a string describing special files [in the create message
- and Stat structures], depending on the mode bit.
- For DSYMLINK files, the string is the target of the link. For DMDEVICE files,
- the string is "b 1 2" for a block device with major 1, minor 2. For normal
- files, this string is empty.
-}
-- | Special fields specifying blank-ness.
-- In a Wstat these fields won't modify the stat.
notag = 0xFFFF :: Word16 -- no tag specified
nofid = 0xFFFFFFFF :: Word32 -- no fid specified
nouid = 0xFFFFFFFF :: Word32 -- no uid specified (9P2000.u extension)
-- | Some Error Numbers from 9P2000.u
{-
econnreset = syscall.ECONNRESET
eexist = syscall.EEXIST
einval = syscall.EINVAL
eio = syscall.EIO
enotdir = syscall.ENOTDIR
enoent = syscall.ENOENT
enosys = syscall.ENOSYS
eperm = syscall.EPERM
-}
-- | Some protocol flags:
p9_msize = 8192 + p9_iohdrsz -- default message size (8192+iohdrsz)
p9_iohdrsz = 24 -- the non-data size of the Twrite messages
p9_port = 564 -- default port for 9P file servers
-- | Mount flags, if anyone implements a re-arrangeable namespace.
morder = 0x0003 -- mask for bits defining order of mounting
mrepl = 0x0000 -- mount replaces object
mbefore = 0x0001 -- mount goes before others in union directory
mafter = 0x0002 -- mount goes after others in union directory
mcreate = 0x0004 -- permit creation in mounted directory
mcache = 0x0010 -- cache some data
mmask = 0x0017 -- all bits on
|
frobnitzem/hack9p
|
src/Network/Plan9/Consts.hs
|
gpl-3.0
| 10,361 | 32 | 9 | 2,907 | 1,262 | 828 | 434 | 143 | 1 |
-- | Commad line interface helpers
module VSim.Runtime.CLI where
import Control.Applicative
import Control.Monad
import Control.Monad.Trans
import Text.Printf
import VSim.Runtime.Monad
import VSim.Runtime.Waveform
import VSim.Runtime.Time
import VSim.Runtime.Ptr
printWaveform :: (Show a) => Waveform a -> String
printWaveform (Waveform cs) = concat $ map pc cs where
pc (Change t c)
| t < maxBound = printf "< %s until %d >" (show c) ((watch t) - 1)
| otherwise = printf "< %s until inf >" (show c)
printSignalM :: (MonadIO m) => AnyPrimitiveSignal -> m String
printSignalM (AnyPrimitiveSignal v) = derefM (vr v) >>= return . printSignal (vn v)
printSignal :: (Show t) => String -> SigR t -> String
printSignal n s = printf "signal %s wave %s" n (printWaveform (swave s))
printSignalsM :: (MonadIO m) => Memory -> m ()
printSignalsM m = liftIO $ (mapM printSignalM >=> mapM_ putStrLn) $ (allSignals m)
printProcessesM :: (MonadIO m) => Memory -> m ()
printProcessesM m = do
forM_ (mprocesses m) $ \r -> do
p <- derefM r
liftIO $ printf "proc %s active %s\n" (pname p) (show $ pawake p)
|
grwlf/vsim
|
src_r/VSim/Runtime/CLI.hs
|
gpl-3.0
| 1,169 | 0 | 15 | 263 | 436 | 222 | 214 | 25 | 1 |
{-
This is the bootstrapping compiler for the Bzo programming language.
Copyright (C) 2020 Charles Rosenbauer
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.-}
{-# LANGUAGE OverloadedStrings #-}
module Builtins where
import Data.Int
import qualified Data.Text as T
import qualified Data.Map.Strict as M
isBuiltinType :: T.Text -> Int64
--Integers
isBuiltinType "#Int8" = 1
isBuiltinType "#Int16" = 2
isBuiltinType "#Int32" = 3
isBuiltinType "#Int64" = 4
isBuiltinType "#Unt8" = 5
isBuiltinType "#Unt16" = 6
isBuiltinType "#Unt32" = 7
isBuiltinType "#Unt64" = 8
--Floating Point
isBuiltinType "#Flt16" = 9
isBuiltinType "#Flt32" = 10
isBuiltinType "#Flt64" = 11
--Primitives
isBuiltinType "#Int" = 12
isBuiltinType "#Flt" = 13
isBuiltinType "#True" = 14
isBuiltinType "#False" = 15
isBuiltinType "#Bool" = 16
--Text
isBuiltinType "#Str" = 17
isBuiltinType "#Char" = 18
--Regular Expressions
isBuiltinType "#Regex" = 28
--Internal/Unsafe/FFI Types
isBuiltinType "#Ptr" = 10001
isBuiltinType "#UntPtr" = 10002
isBuiltinType "#Unique" = 10003
isBuiltinType "#Effect" = 10004
isBuiltinType "#Any" = 10005
isBuiltinType "#GenArr" = 10006
isBuiltinType _ = 0
topBuiltinType :: Int
topBuiltinType = 20000
isBuiltinFunc :: T.Text -> Int64
--Arithmetic / Bitwise
isBuiltinFunc "#add-binop" = 20001
isBuiltinFunc "#sub-binop" = 20002
isBuiltinFunc "#mul-binop" = 20003
isBuiltinFunc "#div-binop" = 20004
isBuiltinFunc "#mod-binop" = 20005
isBuiltinFunc "#neg-op" = 20006
isBuiltinFunc "#and-binop" = 20007
isBuiltinFunc "#or-binop" = 20008
isBuiltinFunc "#xor-binop" = 20009
isBuiltinFunc "#not-op" = 20010
isBuiltinFunc "#rshift-binop" = 20011
isBuiltinFunc "#lshift-binop" = 20012
isBuiltinFunc "#ctlz-op" = 20013
isBuiltinFunc "#cttz-op" = 20014
isBuiltinFunc "#popcount-op" = 20015
isBuiltinFunc "#toBits" = 20016
isBuiltinFunc "#trunc" = 20017
isBuiltinFunc "#floatcast" = 20018
isBuiltinFunc "#positcast" = 20019
isBuiltinFunc "#shadeInsert" = 20020
isBuiltinFunc "#shadeRemove" = 20021
isBuiltinFunc "#shadeRead" = 20022
isBuiltinFunc "#shadeComp" = 20023
isBuiltinFunc "#shadeUnion" = 20024
isBuiltinFunc "#shadeIntersection" = 20025
isBuiltinFunc "#shadeDifference" = 20026
isBuiltinFunc "#shadeInverse" = 20027
isBuiltinFunc "#rrotate-binop" = 20028
isBuiltinFunc "#lrotate-binop" = 20029
--Comparison
isBuiltinFunc "#gtr-binop" = 30001
isBuiltinFunc "#lss-binop" = 30002
isBuiltinFunc "#geq-binop" = 30003
isBuiltinFunc "#leq-binop" = 30004
isBuiltinFunc "#eql-binop" = 30005
isBuiltinFunc "#neq-binop" = 30006
--Higher Order Functions
isBuiltinFunc "#map" = 40001
isBuiltinFunc "#pfold" = 40002
isBuiltinFunc "#sfold" = 40003
isBuiltinFunc "#pscan" = 40004
isBuiltinFunc "#sscan" = 40005
isBuiltinFunc "#zip" = 40006
isBuiltinFunc "#unzip" = 40007
isBuiltinFunc "#toMap" = 40008
isBuiltinFunc "#toArr" = 40009
isBuiltinFunc "#toSet" = 40010
isBuiltinFunc "#reverse" = 40011
isBuiltinFunc "#take" = 40012
isBuiltinFunc "#drop" = 40013
isBuiltinFunc "#intersperse" = 40014
isBuiltinFunc "#sort" = 40015
isBuiltinFunc "#sortBy" = 40016
isBuiltinFunc "#nub" = 40017
isBuiltinFunc "#nubBy" = 40018
isBuiltinFunc "#subcycle" = 40019
isBuiltinFunc "#head" = 40020
isBuiltinFunc "#tail" = 40021
isBuiltinFunc "#filter" = 40022
isBuiltinFunc "#concat" = 40023
isBuiltinFunc "#concatMap" = 40024
isBuiltinFunc "#rotate" = 40025
isBuiltinFunc "#square" = 40026
isBuiltinFunc "#cube" = 40027
isBuiltinFunc "#tesseract" = 40028
isBuiltinFunc "#any" = 40029
isBuiltinFunc "#all" = 40030
isBuiltinFunc "#none" = 40031
isBuiltinFunc "#iter" = 40032
isBuiltinFunc "#project" = 40033
isBuiltinFunc "#ife" = 40034
isBuiltinFunc "#guard" = 40035
isBuiltinFunc "#isPrefix" = 40036
isBuiltinFunc "#isSuffix" = 40037
isBuiltinFunc "#arrLength" = 40038
isBuiltinFunc "#getIndex" = 40039
isBuiltinFunc "#setIndex" = 40040
isBuiltinFunc "#cons" = 40041
isBuiltinFunc "#zipWith" = 40042
isBuiltinFunc "#replace" = 40043
isBuiltinFunc "#adjust" = 40044
--String
isBuiltinFunc "#to-uppercase" = 50001
isBuiltinFunc "#to-lowercase" = 50002
isBuiltinFunc "#str-compare" = 50003
isBuiltinFunc "#compileRegex" = 50004
isBuiltinFunc "#matchRegex" = 50005
--Math
isBuiltinFunc "#exp-binop" = 60001
isBuiltinFunc "#surd-binop" = 60002
isBuiltinFunc "#log-binop" = 60003
isBuiltinFunc "#ln-op" = 60004
isBuiltinFunc "#sine" = 60005
isBuiltinFunc "#cosine" = 60006
isBuiltinFunc "#tangent" = 60007
isBuiltinFunc "#arcsine" = 60008
isBuiltinFunc "#arccosine" = 60009
isBuiltinFunc "#arctangent" = 60010
isBuiltinFunc "#hsine" = 60011
isBuiltinFunc "#hcosine" = 60012
isBuiltinFunc "#htangent" = 60013
isBuiltinFunc "#arc-hsine" = 60014
isBuiltinFunc "#arc-hcosine" = 60015
isBuiltinFunc "#arc-htangent" = 60016
isBuiltinFunc "#floor" = 60017
isBuiltinFunc "#ciel" = 60018
isBuiltinFunc "#round" = 60019
isBuiltinFunc "#factorial" = 60020
isBuiltinFunc "#gamma" = 60021
isBuiltinFunc "#nCr" = 60022
isBuiltinFunc "#nPr" = 60023
isBuiltinFunc "#sqrt" = 60024
isBuiltinFunc "#cbrt" = 60025
isBuiltinFunc "#log2" = 60026
isBuiltinFunc "#log10" = 60027
isBuiltinFunc "#exp-unop" = 60028
--Effect / FFI / Unsafe
isBuiltinFunc "#print" = 70001
isBuiltinFunc "#printf" = 70002
isBuiltinFunc "#println" = 70003
isBuiltinFunc "#ffi" = 70004
isBuiltinFunc "#coerce" = 70005
isBuiltinFunc "#fail!" = 70006
isBuiltinFunc "#errLog" = 70007
--Map Functions
isBuiltinFunc "#insertMap" = 80001
isBuiltinFunc "#removeMap" = 80002
isBuiltinFunc "#adjustMap" = 80003
isBuiltinFunc "#assocsMap" = 80004
isBuiltinFunc "#createMap" = 80005
isBuiltinFunc "#memberMap" = 80006
isBuiltinFunc "#lookupMap" = 80007
isBuiltinFunc _ = 0
topBuiltinFunc :: Int
topBuiltinFunc = 80000
topBuiltin :: Int
topBuiltin = 100000
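-- A quick illustrative check (not from the original source): known names map
-- to their non-zero codes, and anything unrecognised falls through to 0, e.g.
--
-- > isBuiltinType "#Int32" == 3
-- > isBuiltinFunc "#map" == 40001
-- > isBuiltinFunc "#no-such-op" == 0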
|
charlesrosenbauer/Bzo-Compiler
|
src/Builtins.hs
|
gpl-3.0
| 6,924 | 0 | 6 | 1,510 | 1,317 | 676 | 641 | 166 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Fonts.Types.Product
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.Fonts.Types.Product where
import Network.Google.Fonts.Types.Sum
import Network.Google.Prelude
-- | Metadata describing a family of fonts.
--
-- /See:/ 'webfont' smart constructor.
data Webfont =
Webfont'
{ _wVariants :: !(Maybe [Text])
, _wKind :: !(Maybe Text)
, _wCategory :: !(Maybe Text)
, _wFamily :: !(Maybe Text)
, _wVersion :: !(Maybe Text)
, _wFiles :: !(Maybe WebfontFiles)
, _wSubSets :: !(Maybe [Text])
, _wLastModified :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Webfont' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'wVariants'
--
-- * 'wKind'
--
-- * 'wCategory'
--
-- * 'wFamily'
--
-- * 'wVersion'
--
-- * 'wFiles'
--
-- * 'wSubSets'
--
-- * 'wLastModified'
webfont
:: Webfont
webfont =
Webfont'
{ _wVariants = Nothing
, _wKind = Nothing
, _wCategory = Nothing
, _wFamily = Nothing
, _wVersion = Nothing
, _wFiles = Nothing
, _wSubSets = Nothing
, _wLastModified = Nothing
}
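-- Hypothetical usage sketch (not generated code): fields are normally set
-- through the lenses below, e.g. using the (&), (?~) and (.~) operators
-- (assumed to be in scope from Control.Lens or a re-exporting prelude):
--
-- > webfont & wFamily ?~ "Roboto" & wVariants .~ ["regular", "italic"]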
-- | The available variants for the font.
wVariants :: Lens' Webfont [Text]
wVariants
= lens _wVariants (\ s a -> s{_wVariants = a}) .
_Default
. _Coerce
-- | This kind represents a webfont object in the webfonts service.
wKind :: Lens' Webfont (Maybe Text)
wKind = lens _wKind (\ s a -> s{_wKind = a})
-- | The category of the font.
wCategory :: Lens' Webfont (Maybe Text)
wCategory
= lens _wCategory (\ s a -> s{_wCategory = a})
-- | The name of the font.
wFamily :: Lens' Webfont (Maybe Text)
wFamily = lens _wFamily (\ s a -> s{_wFamily = a})
-- | The font version.
wVersion :: Lens' Webfont (Maybe Text)
wVersion = lens _wVersion (\ s a -> s{_wVersion = a})
-- | The font files (with all supported scripts) for each one of the
-- available variants, as a key : value map.
wFiles :: Lens' Webfont (Maybe WebfontFiles)
wFiles = lens _wFiles (\ s a -> s{_wFiles = a})
-- | The scripts supported by the font.
wSubSets :: Lens' Webfont [Text]
wSubSets
= lens _wSubSets (\ s a -> s{_wSubSets = a}) .
_Default
. _Coerce
-- | The date (format \"yyyy-MM-dd\") the font was last modified.
wLastModified :: Lens' Webfont (Maybe Text)
wLastModified
= lens _wLastModified
(\ s a -> s{_wLastModified = a})
instance FromJSON Webfont where
parseJSON
= withObject "Webfont"
(\ o ->
Webfont' <$>
(o .:? "variants" .!= mempty) <*> (o .:? "kind") <*>
(o .:? "category")
<*> (o .:? "family")
<*> (o .:? "version")
<*> (o .:? "files")
<*> (o .:? "subsets" .!= mempty)
<*> (o .:? "lastModified"))
instance ToJSON Webfont where
toJSON Webfont'{..}
= object
(catMaybes
[("variants" .=) <$> _wVariants,
("kind" .=) <$> _wKind,
("category" .=) <$> _wCategory,
("family" .=) <$> _wFamily,
("version" .=) <$> _wVersion,
("files" .=) <$> _wFiles,
("subsets" .=) <$> _wSubSets,
("lastModified" .=) <$> _wLastModified])
-- | Response containing the list of fonts currently served by the Google
-- Fonts API.
--
-- /See:/ 'webfontList' smart constructor.
data WebfontList =
WebfontList'
{ _wlKind :: !(Maybe Text)
, _wlItems :: !(Maybe [Webfont])
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'WebfontList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'wlKind'
--
-- * 'wlItems'
webfontList
:: WebfontList
webfontList = WebfontList' {_wlKind = Nothing, _wlItems = Nothing}
-- | This kind represents a list of webfont objects in the webfonts service.
wlKind :: Lens' WebfontList (Maybe Text)
wlKind = lens _wlKind (\ s a -> s{_wlKind = a})
-- | The list of fonts currently served by the Google Fonts API.
wlItems :: Lens' WebfontList [Webfont]
wlItems
= lens _wlItems (\ s a -> s{_wlItems = a}) . _Default
. _Coerce
instance FromJSON WebfontList where
parseJSON
= withObject "WebfontList"
(\ o ->
WebfontList' <$>
(o .:? "kind") <*> (o .:? "items" .!= mempty))
instance ToJSON WebfontList where
toJSON WebfontList'{..}
= object
(catMaybes
[("kind" .=) <$> _wlKind, ("items" .=) <$> _wlItems])
-- | The font files (with all supported scripts) for each one of the
-- available variants, as a key : value map.
--
-- /See:/ 'webfontFiles' smart constructor.
newtype WebfontFiles =
WebfontFiles'
{ _wfAddtional :: HashMap Text Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'WebfontFiles' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'wfAddtional'
webfontFiles
:: HashMap Text Text -- ^ 'wfAddtional'
-> WebfontFiles
webfontFiles pWfAddtional_ =
WebfontFiles' {_wfAddtional = _Coerce # pWfAddtional_}
wfAddtional :: Lens' WebfontFiles (HashMap Text Text)
wfAddtional
= lens _wfAddtional (\ s a -> s{_wfAddtional = a}) .
_Coerce
instance FromJSON WebfontFiles where
parseJSON
= withObject "WebfontFiles"
(\ o -> WebfontFiles' <$> (parseJSONObject o))
instance ToJSON WebfontFiles where
toJSON = toJSON . _wfAddtional
|
brendanhay/gogol
|
gogol-fonts/gen/Network/Google/Fonts/Types/Product.hs
|
mpl-2.0
| 6,267 | 0 | 19 | 1,656 | 1,359 | 790 | 569 | 146 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Genomics.Variants.Merge
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Merges the given variants with existing variants. For the definitions of
-- variants and other genomics resources, see [Fundamentals of Google
-- Genomics](https:\/\/cloud.google.com\/genomics\/fundamentals-of-google-genomics)
-- Each variant will be merged with an existing variant that matches its
-- reference sequence, start, end, reference bases, and alternative bases.
-- If no such variant exists, a new one will be created. When variants are
-- merged, the call information from the new variant is added to the
-- existing variant. Variant info fields are merged as specified in the
-- infoMergeConfig field of the MergeVariantsRequest. Please exercise
-- caution when using this method! It is easy to introduce mistakes in
-- existing variants and difficult to back out of them. For example,
-- suppose you were trying to merge a new variant with an existing one and
-- both variants contain calls that belong to callsets with the same
-- callset ID. \/\/ Existing variant - irrelevant fields trimmed for
-- clarity { \"variantSetId\": \"10473108253681171589\", \"referenceName\":
-- \"1\", \"start\": \"10582\", \"referenceBases\": \"G\",
-- \"alternateBases\": [ \"A\" ], \"calls\": [ { \"callSetId\":
-- \"10473108253681171589-0\", \"callSetName\": \"CALLSET0\", \"genotype\":
-- [ 0, 1 ], } ] } \/\/ New variant with conflicting call information {
-- \"variantSetId\": \"10473108253681171589\", \"referenceName\": \"1\",
-- \"start\": \"10582\", \"referenceBases\": \"G\", \"alternateBases\": [
-- \"A\" ], \"calls\": [ { \"callSetId\": \"10473108253681171589-0\",
-- \"callSetName\": \"CALLSET0\", \"genotype\": [ 1, 1 ], } ] } The
-- resulting merged variant would overwrite the existing calls with those
-- from the new variant: { \"variantSetId\": \"10473108253681171589\",
-- \"referenceName\": \"1\", \"start\": \"10582\", \"referenceBases\":
-- \"G\", \"alternateBases\": [ \"A\" ], \"calls\": [ { \"callSetId\":
-- \"10473108253681171589-0\", \"callSetName\": \"CALLSET0\", \"genotype\":
-- [ 1, 1 ], } ] } This may be the desired outcome, but it is up to the
-- user to determine if that is indeed the case.
--
-- /See:/ <https://cloud.google.com/genomics Genomics API Reference> for @genomics.variants.merge@.
module Network.Google.Resource.Genomics.Variants.Merge
(
-- * REST Resource
VariantsMergeResource
-- * Creating a Request
, variantsMerge
, VariantsMerge
-- * Request Lenses
, vmXgafv
, vmUploadProtocol
, vmPp
, vmAccessToken
, vmUploadType
, vmPayload
, vmBearerToken
, vmCallback
) where
import Network.Google.Genomics.Types
import Network.Google.Prelude
-- | A resource alias for @genomics.variants.merge@ method which the
-- 'VariantsMerge' request conforms to.
type VariantsMergeResource =
"v1" :>
"variants:merge" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "pp" Bool :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "bearer_token" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] MergeVariantsRequest :>
Post '[JSON] Empty
-- | Merges the given variants with existing variants. For the definitions of
-- variants and other genomics resources, see [Fundamentals of Google
-- Genomics](https:\/\/cloud.google.com\/genomics\/fundamentals-of-google-genomics)
-- Each variant will be merged with an existing variant that matches its
-- reference sequence, start, end, reference bases, and alternative bases.
-- If no such variant exists, a new one will be created. When variants are
-- merged, the call information from the new variant is added to the
-- existing variant. Variant info fields are merged as specified in the
-- infoMergeConfig field of the MergeVariantsRequest. Please exercise
-- caution when using this method! It is easy to introduce mistakes in
-- existing variants and difficult to back out of them. For example,
-- suppose you were trying to merge a new variant with an existing one and
-- both variants contain calls that belong to callsets with the same
-- callset ID. \/\/ Existing variant - irrelevant fields trimmed for
-- clarity { \"variantSetId\": \"10473108253681171589\", \"referenceName\":
-- \"1\", \"start\": \"10582\", \"referenceBases\": \"G\",
-- \"alternateBases\": [ \"A\" ], \"calls\": [ { \"callSetId\":
-- \"10473108253681171589-0\", \"callSetName\": \"CALLSET0\", \"genotype\":
-- [ 0, 1 ], } ] } \/\/ New variant with conflicting call information {
-- \"variantSetId\": \"10473108253681171589\", \"referenceName\": \"1\",
-- \"start\": \"10582\", \"referenceBases\": \"G\", \"alternateBases\": [
-- \"A\" ], \"calls\": [ { \"callSetId\": \"10473108253681171589-0\",
-- \"callSetName\": \"CALLSET0\", \"genotype\": [ 1, 1 ], } ] } The
-- resulting merged variant would overwrite the existing calls with those
-- from the new variant: { \"variantSetId\": \"10473108253681171589\",
-- \"referenceName\": \"1\", \"start\": \"10582\", \"referenceBases\":
-- \"G\", \"alternateBases\": [ \"A\" ], \"calls\": [ { \"callSetId\":
-- \"10473108253681171589-0\", \"callSetName\": \"CALLSET0\", \"genotype\":
-- [ 1, 1 ], } ] } This may be the desired outcome, but it is up to the
-- user to determine if that is indeed the case.
--
-- /See:/ 'variantsMerge' smart constructor.
data VariantsMerge = VariantsMerge'
{ _vmXgafv :: !(Maybe Xgafv)
, _vmUploadProtocol :: !(Maybe Text)
, _vmPp :: !Bool
, _vmAccessToken :: !(Maybe Text)
, _vmUploadType :: !(Maybe Text)
, _vmPayload :: !MergeVariantsRequest
, _vmBearerToken :: !(Maybe Text)
, _vmCallback :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'VariantsMerge' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'vmXgafv'
--
-- * 'vmUploadProtocol'
--
-- * 'vmPp'
--
-- * 'vmAccessToken'
--
-- * 'vmUploadType'
--
-- * 'vmPayload'
--
-- * 'vmBearerToken'
--
-- * 'vmCallback'
variantsMerge
:: MergeVariantsRequest -- ^ 'vmPayload'
-> VariantsMerge
variantsMerge pVmPayload_ =
VariantsMerge'
{ _vmXgafv = Nothing
, _vmUploadProtocol = Nothing
, _vmPp = True
, _vmAccessToken = Nothing
, _vmUploadType = Nothing
, _vmPayload = pVmPayload_
, _vmBearerToken = Nothing
, _vmCallback = Nothing
}
-- | V1 error format.
vmXgafv :: Lens' VariantsMerge (Maybe Xgafv)
vmXgafv = lens _vmXgafv (\ s a -> s{_vmXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
vmUploadProtocol :: Lens' VariantsMerge (Maybe Text)
vmUploadProtocol
= lens _vmUploadProtocol
(\ s a -> s{_vmUploadProtocol = a})
-- | Pretty-print response.
vmPp :: Lens' VariantsMerge Bool
vmPp = lens _vmPp (\ s a -> s{_vmPp = a})
-- | OAuth access token.
vmAccessToken :: Lens' VariantsMerge (Maybe Text)
vmAccessToken
= lens _vmAccessToken
(\ s a -> s{_vmAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
vmUploadType :: Lens' VariantsMerge (Maybe Text)
vmUploadType
= lens _vmUploadType (\ s a -> s{_vmUploadType = a})
-- | Multipart request metadata.
vmPayload :: Lens' VariantsMerge MergeVariantsRequest
vmPayload
= lens _vmPayload (\ s a -> s{_vmPayload = a})
-- | OAuth bearer token.
vmBearerToken :: Lens' VariantsMerge (Maybe Text)
vmBearerToken
= lens _vmBearerToken
(\ s a -> s{_vmBearerToken = a})
-- | JSONP
vmCallback :: Lens' VariantsMerge (Maybe Text)
vmCallback
= lens _vmCallback (\ s a -> s{_vmCallback = a})
instance GoogleRequest VariantsMerge where
type Rs VariantsMerge = Empty
type Scopes VariantsMerge =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/genomics"]
requestClient VariantsMerge'{..}
= go _vmXgafv _vmUploadProtocol (Just _vmPp)
_vmAccessToken
_vmUploadType
_vmBearerToken
_vmCallback
(Just AltJSON)
_vmPayload
genomicsService
where go
= buildClient (Proxy :: Proxy VariantsMergeResource)
mempty
|
rueshyna/gogol
|
gogol-genomics/gen/Network/Google/Resource/Genomics/Variants/Merge.hs
|
mpl-2.0
| 9,261 | 0 | 18 | 1,900 | 918 | 557 | 361 | 121 | 1 |
{-# LANGUAGE ExtendedDefaultRules #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
-- Module : Main
-- Copyright : (c) 2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
module Main (main) where
import Control.Applicative
import Control.Concurrent
import Control.Monad
import Control.Monad.IO.Class
import Data.ByteString (ByteString)
import qualified Data.Map.Strict as Map
import Data.Monoid hiding (Any)
import qualified Data.Set as Set
import qualified Data.Text as Text
import Network.AWS
import Options.Applicative
import System.APT.IO
import qualified System.APT.Index as Index
import System.APT.Log
import System.APT.Options
import qualified System.APT.Package as Pkg
import System.APT.Store (ToKey(..), Store)
import qualified System.APT.Store as Store
import System.APT.Types
import System.Environment
import System.Exit
default (ByteString)
data Options = Options
{ optFrom :: !Bucket
, optTo :: !Bucket
, optTemp :: !FilePath
, optAddress :: Maybe String
, optVersions :: !Int
, optSemantic :: !Bool
, optDebug :: !Bool
} deriving (Eq, Show)
options :: Parser Options
options = Options
<$> bucketOption
( long "from"
<> metavar "BUCKET/PREFIX"
<> help "Source S3 bucket and optional prefix to traverse for packages. [required]"
)
<*> bucketOption
( long "to"
<> metavar "BUCKET/PREFIX"
<> help "Destination S3 bucket and optional prefix to store packages. [required]"
)
<*> strOption
( long "tmp"
<> short 't'
<> metavar "PATH"
<> help "Temporary directory for unpacking Debian control files. [default: /tmp]"
<> value "/tmp"
)
<*> optional (strOption
$ long "addr"
<> short 'a'
<> metavar "ADDR"
<> help "Server to notify with new package descriptions. [default: none]"
)
<*> option
( long "versions"
<> short 'v'
<> metavar "INT"
<> help "Maximum number of most recent package versions to retain. [default: 3]"
<> value 3
)
<*> switch
( long "semantic"
<> short 's'
<> help "Whether to use S3 object versions or flat, semantically named keys."
)
<*> switch
( long "debug"
<> short 'd'
<> help "Print debug output."
)
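-- Hypothetical invocation sketch (bucket names, notify address and the
-- executable name are made-up example values; the real executable name
-- depends on how the package is built):
--
-- > apteryx-copy --from src-bucket/pool --to dst-bucket/pool --versions 3 --addr localhost:8080 --semantic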
data Any where
AE :: ToKey (Entry a) => Entry a -> Any
instance ToKey Any where
objectKey b (AE e) = objectKey b e
main :: IO ()
main = do
Options{..} <- parseOptions options
n <- getProgName
e <- getAWSEnv optDebug
-- FIXME: Check destination bucket has versioning turned on
say n "Looking for entries in {}..." [optFrom]
r <- Store.run optVersions e $ do
xs <- if optSemantic
then cat (Store.semantic optFrom) id
else cat (Store.versioned optFrom) Map.elems
mapM_ (say n "Discovered {}" . Only . objectKey optFrom) (concat xs)
say n "Copying to {}..." [optTo]
void $ Store.parMapM (worker optTemp optFrom optTo) xs
case r of
Left x -> say n "Error: {}" (Only $ Shown x) >> exitFailure
Right _ -> say_ n "Done." >> trigger optAddress
where
cat s f = map (map AE . Set.toDescList) . f <$> s
trigger Nothing = return ()
trigger (Just x) =
say "server" "Triggering rebuild of {}" [x] >> Index.rebuild x
worker :: FilePath -> Bucket -> Bucket -> [Any] -> Store ()
worker tmp bf bt xs = mapM_ (\x -> tid >>= go x) xs
where
go kf n = do
let k = objectKey bf kf
say n "Downloading {}..." [k]
m <- Store.get bf kf (liftEitherT . Pkg.fromFile tmp)
case m of
Nothing -> say n "Unable to retrieve package from {}" [k]
Just kt -> do
say n "Retrieved package description from {}" [k]
r <- Store.monotonic bt kt (Store.copy bf kf bt kt)
maybe (say n "{} already exists, skipping" [k])
(const $ say n "Copied {} from {}" [build k, build bf])
r
tid = (Text.drop 9 . Text.pack . show) `liftM` liftIO myThreadId
|
brendanhay/apteryx
|
apteryx-copy/Main.hs
|
mpl-2.0
| 5,050 | 0 | 19 | 1,686 | 1,176 | 604 | 572 | 125 | 4 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.People.ContactGroups.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Delete an existing contact group owned by the authenticated user by
-- specifying a contact group resource name.
--
-- /See:/ <https://developers.google.com/people/ People API Reference> for @people.contactGroups.delete@.
module Network.Google.Resource.People.ContactGroups.Delete
(
-- * REST Resource
ContactGroupsDeleteResource
-- * Creating a Request
, contactGroupsDelete
, ContactGroupsDelete
-- * Request Lenses
, cgdXgafv
, cgdUploadProtocol
, cgdDeleteContacts
, cgdResourceName
, cgdAccessToken
, cgdUploadType
, cgdCallback
) where
import Network.Google.People.Types
import Network.Google.Prelude
-- | A resource alias for @people.contactGroups.delete@ method which the
-- 'ContactGroupsDelete' request conforms to.
type ContactGroupsDeleteResource =
"v1" :>
Capture "resourceName" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "deleteContacts" Bool :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] Empty
-- | Delete an existing contact group owned by the authenticated user by
-- specifying a contact group resource name.
--
-- /See:/ 'contactGroupsDelete' smart constructor.
data ContactGroupsDelete =
ContactGroupsDelete'
{ _cgdXgafv :: !(Maybe Xgafv)
, _cgdUploadProtocol :: !(Maybe Text)
, _cgdDeleteContacts :: !(Maybe Bool)
, _cgdResourceName :: !Text
, _cgdAccessToken :: !(Maybe Text)
, _cgdUploadType :: !(Maybe Text)
, _cgdCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ContactGroupsDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cgdXgafv'
--
-- * 'cgdUploadProtocol'
--
-- * 'cgdDeleteContacts'
--
-- * 'cgdResourceName'
--
-- * 'cgdAccessToken'
--
-- * 'cgdUploadType'
--
-- * 'cgdCallback'
contactGroupsDelete
:: Text -- ^ 'cgdResourceName'
-> ContactGroupsDelete
contactGroupsDelete pCgdResourceName_ =
ContactGroupsDelete'
{ _cgdXgafv = Nothing
, _cgdUploadProtocol = Nothing
, _cgdDeleteContacts = Nothing
, _cgdResourceName = pCgdResourceName_
, _cgdAccessToken = Nothing
, _cgdUploadType = Nothing
, _cgdCallback = Nothing
}
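-- Illustrative usage (added; not part of the generated module): building a
-- request that also removes the group's members, assuming the lens
-- operators re-exported by 'Network.Google.Prelude' are in scope and that
-- the resource name shown is purely a placeholder.
--
-- > contactGroupsDelete "contactGroups/myContactGroup"
-- >   & cgdDeleteContacts ?~ True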
-- | V1 error format.
cgdXgafv :: Lens' ContactGroupsDelete (Maybe Xgafv)
cgdXgafv = lens _cgdXgafv (\ s a -> s{_cgdXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
cgdUploadProtocol :: Lens' ContactGroupsDelete (Maybe Text)
cgdUploadProtocol
= lens _cgdUploadProtocol
(\ s a -> s{_cgdUploadProtocol = a})
-- | Optional. Set to true to also delete the contacts in the specified
-- group.
cgdDeleteContacts :: Lens' ContactGroupsDelete (Maybe Bool)
cgdDeleteContacts
= lens _cgdDeleteContacts
(\ s a -> s{_cgdDeleteContacts = a})
-- | Required. The resource name of the contact group to delete.
cgdResourceName :: Lens' ContactGroupsDelete Text
cgdResourceName
= lens _cgdResourceName
(\ s a -> s{_cgdResourceName = a})
-- | OAuth access token.
cgdAccessToken :: Lens' ContactGroupsDelete (Maybe Text)
cgdAccessToken
= lens _cgdAccessToken
(\ s a -> s{_cgdAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
cgdUploadType :: Lens' ContactGroupsDelete (Maybe Text)
cgdUploadType
= lens _cgdUploadType
(\ s a -> s{_cgdUploadType = a})
-- | JSONP
cgdCallback :: Lens' ContactGroupsDelete (Maybe Text)
cgdCallback
= lens _cgdCallback (\ s a -> s{_cgdCallback = a})
instance GoogleRequest ContactGroupsDelete where
type Rs ContactGroupsDelete = Empty
type Scopes ContactGroupsDelete =
'["https://www.googleapis.com/auth/contacts"]
requestClient ContactGroupsDelete'{..}
= go _cgdResourceName _cgdXgafv _cgdUploadProtocol
_cgdDeleteContacts
_cgdAccessToken
_cgdUploadType
_cgdCallback
(Just AltJSON)
peopleService
where go
= buildClient
(Proxy :: Proxy ContactGroupsDeleteResource)
mempty
|
brendanhay/gogol
|
gogol-people/gen/Network/Google/Resource/People/ContactGroups/Delete.hs
|
mpl-2.0
| 5,171 | 0 | 16 | 1,168 | 777 | 453 | 324 | 113 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.KMS.GetKeyRotationStatus
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Retrieves a Boolean value that indicates whether key rotation is enabled for
-- the specified key.
--
-- <http://docs.aws.amazon.com/kms/latest/APIReference/API_GetKeyRotationStatus.html>
module Network.AWS.KMS.GetKeyRotationStatus
(
-- * Request
GetKeyRotationStatus
-- ** Request constructor
, getKeyRotationStatus
-- ** Request lenses
, gkrsKeyId
-- * Response
, GetKeyRotationStatusResponse
-- ** Response constructor
, getKeyRotationStatusResponse
-- ** Response lenses
, gkrsrKeyRotationEnabled
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.KMS.Types
import qualified GHC.Exts
newtype GetKeyRotationStatus = GetKeyRotationStatus
{ _gkrsKeyId :: Text
} deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'GetKeyRotationStatus' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'gkrsKeyId' @::@ 'Text'
--
getKeyRotationStatus :: Text -- ^ 'gkrsKeyId'
-> GetKeyRotationStatus
getKeyRotationStatus p1 = GetKeyRotationStatus
{ _gkrsKeyId = p1
}
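-- Illustrative usage (added; not part of the module): build a request for a
-- specific key, then read the flag off the response once it has been sent
-- with the usual amazonka machinery. The key id below is a placeholder.
--
-- > rq = getKeyRotationStatus "12345678-1234-1234-1234-123456789012"
-- > -- after sending: rs ^. gkrsrKeyRotationEnabled :: Maybe Bool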
-- | A unique identifier for the customer master key. This value can be a globally
-- unique identifier or the fully specified ARN to a key. Key ARN Example -
-- arn:aws:kms:us-east-1:123456789012:key/12345678-1234-1234-1234-123456789012 Globally Unique Key ID Example - 12345678-1234-1234-1234-123456789012
--
gkrsKeyId :: Lens' GetKeyRotationStatus Text
gkrsKeyId = lens _gkrsKeyId (\s a -> s { _gkrsKeyId = a })
newtype GetKeyRotationStatusResponse = GetKeyRotationStatusResponse
{ _gkrsrKeyRotationEnabled :: Maybe Bool
} deriving (Eq, Ord, Read, Show)
-- | 'GetKeyRotationStatusResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'gkrsrKeyRotationEnabled' @::@ 'Maybe' 'Bool'
--
getKeyRotationStatusResponse :: GetKeyRotationStatusResponse
getKeyRotationStatusResponse = GetKeyRotationStatusResponse
{ _gkrsrKeyRotationEnabled = Nothing
}
-- | A Boolean value that specifies whether key rotation is enabled.
gkrsrKeyRotationEnabled :: Lens' GetKeyRotationStatusResponse (Maybe Bool)
gkrsrKeyRotationEnabled =
lens _gkrsrKeyRotationEnabled (\s a -> s { _gkrsrKeyRotationEnabled = a })
instance ToPath GetKeyRotationStatus where
toPath = const "/"
instance ToQuery GetKeyRotationStatus where
toQuery = const mempty
instance ToHeaders GetKeyRotationStatus
instance ToJSON GetKeyRotationStatus where
toJSON GetKeyRotationStatus{..} = object
[ "KeyId" .= _gkrsKeyId
]
instance AWSRequest GetKeyRotationStatus where
type Sv GetKeyRotationStatus = KMS
type Rs GetKeyRotationStatus = GetKeyRotationStatusResponse
request = post "GetKeyRotationStatus"
response = jsonResponse
instance FromJSON GetKeyRotationStatusResponse where
parseJSON = withObject "GetKeyRotationStatusResponse" $ \o -> GetKeyRotationStatusResponse
<$> o .:? "KeyRotationEnabled"
|
dysinger/amazonka
|
amazonka-kms/gen/Network/AWS/KMS/GetKeyRotationStatus.hs
|
mpl-2.0
| 4,035 | 0 | 9 | 801 | 446 | 272 | 174 | 56 | 1 |
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
import Yesod
import Yesod.Core.Json
import Data.ByteString.Lazy
data HelloWorld = HelloWorld
mkYesod "HelloWorld" [parseRoutes|
/location LocUpdate POST
|]
instance Yesod HelloWorld
--getHomeR :: Handler Html
--getHomeR = defaultLayout [whamlet|Hello World!|]
postLocUpdate :: Handler Value
postLocUpdate = undefined
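-- A minimal sketch of what this stub could do (added; an assumption, not the
-- author's implementation): decode the posted JSON body and echo it back
-- using 'requireJsonBody' and 'returnJson' from yesod-core.
--
-- > postLocUpdate = do
-- >   loc <- requireJsonBody :: Handler Value
-- >   returnJson loc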
main :: IO ()
main = warp 3000 HelloWorld
|
igraves/relay-server
|
cruft/yesodtest.hs
|
agpl-3.0
| 515 | 0 | 6 | 121 | 80 | 46 | 34 | 13 | 1 |
module Kata where
findMissingLetter :: [Char] -> Char
findMissingLetter (c1 : cs@(c2 : _))
|succ c1 == c2 = findMissingLetter cs
|otherwise = succ c1
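-- Worked example (added illustration): for "abce" the scan accepts 'a'/'b'
-- and 'b'/'c', then sees 'c'/'e' and returns succ 'c', i.e. 'd'.
--
-- > findMissingLetter "abce" == 'd'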
--
|
ice1000/OI-codes
|
codewars/1-100/find-the-missing-letter.hs
|
agpl-3.0
| 163 | 0 | 10 | 36 | 70 | 36 | 34 | 5 | 1 |
module ObjD.Link.Pointer (
linkPointerCall, linkPointerStatic
)where
import ObjD.Link.Struct
import ObjD.Link.Env
import ObjD.Link.DataType
import ObjD.Link.Call
import Ex.String
import qualified ObjD.Struct as D
linkPointerCall :: Env -> (D.Exp, Exp) -> D.Exp -> Maybe Exp
linkPointerCall env (_, leftExp) (D.Call "cast" Nothing [tp]) = Just $ Cast (case dataType env tp of
p@TPPointer{} -> p
p -> TPUnknown $ "Cast Pointer to non-pointer: " ++ show p) leftExp
linkPointerCall _ (_, leftExp) (D.Call "get" Nothing _) = Just $ Deferencing leftExp
linkPointerCall env (_, leftExp) (D.Call "get" (Just [(_, rel)]) _) = Just $ Deferencing $ MathOp Plus leftExp (envExprCompile env rel)
linkPointerCall env (_, leftExp) (D.Call "set" (Just [(_, value)]) _) = Just $ Set Nothing (Deferencing $ leftExp) (envExprCompile env value)
linkPointerCall env (_, leftExp) (D.Call "set" (Just [(_, rel), (_, value)]) _) =
Just $ Set Nothing (Deferencing $ MathOp Plus leftExp (envExprCompile env rel)) (envExprCompile env value)
linkPointerCall env (_, leftExp) c@(D.Call "free" Nothing _) = Just $ Dot leftExp $ exprCall env (Just $ exprDataType leftExp) c
linkPointerCall env (_, leftExp) c@(D.Call "copy" (Just [_]) _) = Just $ Dot leftExp $ exprCall env (Just $ exprDataType leftExp) c
linkPointerCall _ _ _ = Nothing
linkPointerStatic :: Env -> D.Exp -> Maybe Exp
linkPointerStatic env (D.Call "null" Nothing [tp]) = Just $ Null $ dataType env tp
linkPointerStatic _ _ = Nothing
|
antonzherdev/objd
|
src/ObjD/Link/Pointer.hs
|
lgpl-3.0
| 1,512 | 12 | 12 | 267 | 658 | 350 | 308 | 23 | 2 |
#!/usr/bin/env runhaskell
main = putStrLn "Hello World!"
|
pwittchen/learning-haskell
|
tasks/00_hello_world.hs
|
apache-2.0
| 57 | 0 | 5 | 7 | 10 | 5 | 5 | 1 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Hack3.Lens where
import Hack3
import Control.Lens
makeLenses ''Env
makeLenses ''Response
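-- Added note: 'makeLenses' generates a lens for each record field whose name
-- begins with an underscore, naming the lens after the field minus the
-- underscore (a field '_body' would yield a lens 'body'); whether 'Env' and
-- 'Response' follow that convention is assumed here rather than shown.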
|
nfjinjing/hack3-lens
|
src/Hack3/Lens.hs
|
apache-2.0
| 133 | 0 | 6 | 19 | 31 | 16 | 15 | 6 | 0 |
{-# LANGUAGE CPP, TupleSections, BangPatterns, LambdaCase #-}
{-# OPTIONS_GHC -Wwarn #-}
-----------------------------------------------------------------------------
-- |
-- Module : Haddock.Interface.Create
-- Copyright : (c) Simon Marlow 2003-2006,
-- David Waern 2006-2009,
-- Mateusz Kowalczyk 2013
-- License : BSD-like
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
-----------------------------------------------------------------------------
module Haddock.Interface.Create (createInterface) where
import Documentation.Haddock.Doc (metaDocAppend)
import Haddock.Types
import Haddock.Options
import Haddock.GhcUtils
import Haddock.Utils
import Haddock.Convert
import Haddock.Interface.LexParseRn
import qualified Data.Map as M
import Data.Map (Map)
import Data.List
import Data.Maybe
import Data.Monoid
import Data.Ord
import Control.Applicative
import Control.Arrow (second)
import Control.DeepSeq
import Control.Monad
import Data.Function (on)
import qualified Data.Foldable as F
import qualified Packages
import qualified Module
import qualified SrcLoc
import GHC
import HscTypes
import Name
import Bag
import RdrName
import TcRnTypes
import FastString (concatFS)
import qualified Outputable as O
-- | Use a 'TypecheckedModule' to produce an 'Interface'.
-- To do this, we need access to already processed modules in the topological
-- sort. That's what's in the 'IfaceMap'.
createInterface :: TypecheckedModule -> [Flag] -> IfaceMap -> InstIfaceMap -> ErrMsgGhc Interface
createInterface tm flags modMap instIfaceMap = do
let ms = pm_mod_summary . tm_parsed_module $ tm
mi = moduleInfo tm
L _ hsm = parsedSource tm
!safety = modInfoSafe mi
mdl = ms_mod ms
dflags = ms_hspp_opts ms
!instances = modInfoInstances mi
!fam_instances = md_fam_insts md
!exportedNames = modInfoExports mi
(TcGblEnv {tcg_rdr_env = gre, tcg_warns = warnings}, md) = tm_internals_ tm
-- The renamed source should always be available to us, but it's best
-- to be on the safe side.
(group_, mayExports, mayDocHeader) <-
case renamedSource tm of
Nothing -> do
liftErrMsg $ tell [ "Warning: Renamed source is not available." ]
return (emptyRnGroup, Nothing, Nothing)
Just (x, _, y, z) -> return (x, y, z)
opts0 <- liftErrMsg $ mkDocOpts (haddockOptions dflags) flags mdl
let opts
| Flag_IgnoreAllExports `elem` flags = OptIgnoreExports : opts0
| otherwise = opts0
(!info, mbDoc) <- liftErrMsg $ processModuleHeader dflags gre safety mayDocHeader
let declsWithDocs = topDecls group_
fixMap = mkFixMap group_
(decls, _) = unzip declsWithDocs
localInsts = filter (nameIsLocalOrFrom mdl) $ map getName instances
++ map getName fam_instances
-- Locations of all TH splices
splices = [ l | L l (SpliceD _) <- hsmodDecls hsm ]
maps@(!docMap, !argMap, !subMap, !declMap, _) =
mkMaps dflags gre localInsts declsWithDocs
let exports0 = fmap (reverse . map unLoc) mayExports
exports
| OptIgnoreExports `elem` opts = Nothing
| otherwise = exports0
warningMap = mkWarningMap dflags warnings gre exportedNames
let allWarnings = M.unions (warningMap : map ifaceWarningMap (M.elems modMap))
exportItems <- mkExportItems modMap mdl allWarnings gre exportedNames decls
maps fixMap splices exports instIfaceMap dflags
let !visibleNames = mkVisibleNames maps exportItems opts
-- Measure haddock documentation coverage.
let prunedExportItems0 = pruneExportItems exportItems
!haddockable = 1 + length exportItems -- module + exports
!haddocked = (if isJust mbDoc then 1 else 0) + length prunedExportItems0
!coverage = (haddockable, haddocked)
-- Prune the export list to just those declarations that have
-- documentation, if the 'prune' option is on.
let prunedExportItems'
| OptPrune `elem` opts = prunedExportItems0
| otherwise = exportItems
!prunedExportItems = seqList prunedExportItems' `seq` prunedExportItems'
let !aliases =
mkAliasMap dflags $ tm_renamed_source tm
modWarn = moduleWarning dflags gre warnings
return $! Interface {
ifaceMod = mdl
, ifaceOrigFilename = msHsFilePath ms
, ifaceInfo = info
, ifaceDoc = Documentation mbDoc modWarn
, ifaceRnDoc = Documentation Nothing Nothing
, ifaceOptions = opts
, ifaceDocMap = docMap
, ifaceArgMap = argMap
, ifaceRnDocMap = M.empty
, ifaceRnArgMap = M.empty
, ifaceExportItems = prunedExportItems
, ifaceRnExportItems = []
, ifaceExports = exportedNames
, ifaceVisibleExports = visibleNames
, ifaceDeclMap = declMap
, ifaceSubMap = subMap
, ifaceFixMap = fixMap
, ifaceModuleAliases = aliases
, ifaceInstances = instances
, ifaceFamInstances = fam_instances
, ifaceHaddockCoverage = coverage
, ifaceWarningMap = warningMap
}
mkAliasMap :: DynFlags -> Maybe RenamedSource -> M.Map Module ModuleName
mkAliasMap dflags mRenamedSource =
case mRenamedSource of
Nothing -> M.empty
Just (_,impDecls,_,_) ->
M.fromList $
mapMaybe (\(SrcLoc.L _ impDecl) -> do
alias <- ideclAs impDecl
return $
(lookupModuleDyn dflags
(fmap Module.fsToPackageKey $
fmap snd $ ideclPkgQual impDecl)
(case ideclName impDecl of SrcLoc.L _ name -> name),
alias))
impDecls
-- similar to GHC.lookupModule
lookupModuleDyn ::
DynFlags -> Maybe PackageKey -> ModuleName -> Module
lookupModuleDyn _ (Just pkgId) mdlName =
Module.mkModule pkgId mdlName
lookupModuleDyn dflags Nothing mdlName =
case Packages.lookupModuleInAllPackages dflags mdlName of
(m,_):_ -> m
[] -> Module.mkModule Module.mainPackageKey mdlName
-------------------------------------------------------------------------------
-- Warnings
-------------------------------------------------------------------------------
mkWarningMap :: DynFlags -> Warnings -> GlobalRdrEnv -> [Name] -> WarningMap
mkWarningMap dflags warnings gre exps = case warnings of
NoWarnings -> M.empty
WarnAll _ -> M.empty
WarnSome ws ->
let ws' = [ (n, w) | (occ, w) <- ws, elt <- lookupGlobalRdrEnv gre occ
, let n = gre_name elt, n `elem` exps ]
in M.fromList $ map (second $ parseWarning dflags gre) ws'
moduleWarning :: DynFlags -> GlobalRdrEnv -> Warnings -> Maybe (Doc Name)
moduleWarning _ _ NoWarnings = Nothing
moduleWarning _ _ (WarnSome _) = Nothing
moduleWarning dflags gre (WarnAll w) = Just $ parseWarning dflags gre w
parseWarning :: DynFlags -> GlobalRdrEnv -> WarningTxt -> Doc Name
parseWarning dflags gre w = force $ case w of
DeprecatedTxt _ msg -> format "Deprecated: " (concatFS $ map (snd . unLoc) msg)
WarningTxt _ msg -> format "Warning: " (concatFS $ map (snd . unLoc) msg)
where
format x xs = DocWarning . DocParagraph . DocAppend (DocString x)
. processDocString dflags gre $ HsDocString xs
-------------------------------------------------------------------------------
-- Doc options
--
-- Haddock options that are embedded in the source file
-------------------------------------------------------------------------------
mkDocOpts :: Maybe String -> [Flag] -> Module -> ErrMsgM [DocOption]
mkDocOpts mbOpts flags mdl = do
opts <- case mbOpts of
Just opts -> case words $ replace ',' ' ' opts of
[] -> tell ["No option supplied to DOC_OPTION/doc_option"] >> return []
xs -> liftM catMaybes (mapM parseOption xs)
Nothing -> return []
hm <- if Flag_HideModule (moduleString mdl) `elem` flags
then return $ OptHide : opts
else return opts
if Flag_ShowExtensions (moduleString mdl) `elem` flags
then return $ OptShowExtensions : hm
else return hm
parseOption :: String -> ErrMsgM (Maybe DocOption)
parseOption "hide" = return (Just OptHide)
parseOption "prune" = return (Just OptPrune)
parseOption "ignore-exports" = return (Just OptIgnoreExports)
parseOption "not-home" = return (Just OptNotHome)
parseOption "show-extensions" = return (Just OptShowExtensions)
parseOption other = tell ["Unrecognised option: " ++ other] >> return Nothing
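-- Added note: these strings normally originate from a module's
-- OPTIONS_HADDOCK pragma (surfaced through 'haddockOptions' in 'DynFlags'),
-- so a file carrying {-# OPTIONS_HADDOCK hide, prune #-} would contribute
-- 'OptHide' and 'OptPrune' here.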
--------------------------------------------------------------------------------
-- Maps
--------------------------------------------------------------------------------
type Maps = (DocMap Name, ArgMap Name, SubMap, DeclMap, InstMap)
-- | Create 'Maps' by looping through the declarations. For each declaration,
-- find its names, its subordinates, and its doc strings. Process doc strings
-- into 'Doc's.
mkMaps :: DynFlags
-> GlobalRdrEnv
-> [Name]
-> [(LHsDecl Name, [HsDocString])]
-> Maps
mkMaps dflags gre instances decls =
let (a, b, c, d) = unzip4 $ map mappings decls
in (f' $ map (nubBy ((==) `on` fst)) a , f b, f c, f d, instanceMap)
where
f :: (Ord a, Monoid b) => [[(a, b)]] -> Map a b
f = M.fromListWith (<>) . concat
f' :: [[(Name, MDoc Name)]] -> Map Name (MDoc Name)
f' = M.fromListWith metaDocAppend . concat
mappings :: (LHsDecl Name, [HsDocString])
-> ( [(Name, MDoc Name)]
, [(Name, Map Int (MDoc Name))]
, [(Name, [Name])]
, [(Name, [LHsDecl Name])]
)
mappings (ldecl, docStrs) =
let L l decl = ldecl
declDoc :: [HsDocString] -> Map Int HsDocString
-> (Maybe (MDoc Name), Map Int (MDoc Name))
declDoc strs m =
let doc' = processDocStrings dflags gre strs
m' = M.map (processDocStringParas dflags gre) m
in (doc', m')
(doc, args) = declDoc docStrs (typeDocs decl)
subs :: [(Name, [HsDocString], Map Int HsDocString)]
subs = subordinates instanceMap decl
(subDocs, subArgs) = unzip $ map (\(_, strs, m) -> declDoc strs m) subs
ns = names l decl
subNs = [ n | (n, _, _) <- subs ]
dm = [ (n, d) | (n, Just d) <- zip ns (repeat doc) ++ zip subNs subDocs ]
am = [ (n, args) | n <- ns ] ++ zip subNs subArgs
sm = [ (n, subNs) | n <- ns ]
cm = [ (n, [ldecl]) | n <- ns ++ subNs ]
in seqList ns `seq`
seqList subNs `seq`
doc `seq`
seqList subDocs `seq`
seqList subArgs `seq`
(dm, am, sm, cm)
instanceMap :: Map SrcSpan Name
instanceMap = M.fromList [ (getSrcSpan n, n) | n <- instances ]
names :: SrcSpan -> HsDecl Name -> [Name]
names l (InstD d) = maybeToList (M.lookup loc instanceMap) -- See note [2].
where loc = case d of
TyFamInstD _ -> l -- The CoAx's loc is the whole line, but only for TFs
_ -> getInstLoc d
names _ decl = getMainDeclBinder decl
-- Note [2]:
------------
-- We relate ClsInsts to InstDecls using the SrcSpans buried inside them.
-- That should work for normal user-written instances (from looking at GHC
-- sources). We can assume that commented instances are user-written.
-- This lets us relate Names (from ClsInsts) to comments (associated
-- with InstDecls).
--------------------------------------------------------------------------------
-- Declarations
--------------------------------------------------------------------------------
-- | Get all subordinate declarations inside a declaration, and their docs.
subordinates :: InstMap -> HsDecl Name -> [(Name, [HsDocString], Map Int HsDocString)]
subordinates instMap decl = case decl of
InstD (ClsInstD d) -> do
DataFamInstDecl { dfid_tycon = L l _
, dfid_defn = def } <- unLoc <$> cid_datafam_insts d
[ (n, [], M.empty) | Just n <- [M.lookup l instMap] ] ++ dataSubs def
InstD (DataFamInstD d) -> dataSubs (dfid_defn d)
TyClD d | isClassDecl d -> classSubs d
| isDataDecl d -> dataSubs (tcdDataDefn d)
_ -> []
where
classSubs dd = [ (name, doc, typeDocs d) | (L _ d, doc) <- classDecls dd
, name <- getMainDeclBinder d, not (isValD d)
]
dataSubs dd = constrs ++ fields
where
cons = map unL $ (dd_cons dd)
constrs = [ (unL cname, maybeToList $ fmap unL $ con_doc c, M.empty)
| c <- cons, cname <- con_names c ]
fields = [ (unL n, maybeToList $ fmap unL doc, M.empty)
| RecCon flds <- map con_details cons
, L _ (ConDeclField ns _ doc) <- (unLoc flds)
, n <- ns ]
-- | Extract function argument docs from inside types.
typeDocs :: HsDecl Name -> Map Int HsDocString
typeDocs d =
let docs = go 0 in
case d of
SigD (TypeSig _ ty _) -> docs (unLoc ty)
SigD (PatSynSig _ _ req prov ty) ->
let allTys = ty : concat [ unLoc req, unLoc prov ]
in F.foldMap (docs . unLoc) allTys
ForD (ForeignImport _ ty _ _) -> docs (unLoc ty)
TyClD (SynDecl { tcdRhs = ty }) -> docs (unLoc ty)
_ -> M.empty
where
go n (HsForAllTy _ _ _ _ ty) = go n (unLoc ty)
go n (HsFunTy (L _ (HsDocTy _ (L _ x))) (L _ ty)) = M.insert n x $ go (n+1) ty
go n (HsFunTy _ ty) = go (n+1) (unLoc ty)
go n (HsDocTy _ (L _ doc)) = M.singleton n doc
go _ _ = M.empty
-- | All the sub declarations of a class (that we handle), ordered by
-- source location, with documentation attached if it exists.
classDecls :: TyClDecl Name -> [(LHsDecl Name, [HsDocString])]
classDecls class_ = filterDecls . collectDocs . sortByLoc $ decls
where
decls = docs ++ defs ++ sigs ++ ats
docs = mkDecls tcdDocs DocD class_
defs = mkDecls (bagToList . tcdMeths) ValD class_
sigs = mkDecls tcdSigs SigD class_
ats = mkDecls tcdATs (TyClD . FamDecl) class_
-- | The top-level declarations of a module that we care about,
-- ordered by source location, with documentation attached if it exists.
topDecls :: HsGroup Name -> [(LHsDecl Name, [HsDocString])]
topDecls = filterClasses . filterDecls . collectDocs . sortByLoc . ungroup
-- | Extract a map of fixity declarations only
mkFixMap :: HsGroup Name -> FixMap
mkFixMap group_ = M.fromList [ (n,f)
| L _ (FixitySig ns f) <- hs_fixds group_,
L _ n <- ns ]
-- | Take all declarations except pragmas, infix decls, rules from an 'HsGroup'.
ungroup :: HsGroup Name -> [LHsDecl Name]
ungroup group_ =
mkDecls (tyClGroupConcat . hs_tyclds) TyClD group_ ++
mkDecls hs_derivds DerivD group_ ++
mkDecls hs_defds DefD group_ ++
mkDecls hs_fords ForD group_ ++
mkDecls hs_docs DocD group_ ++
mkDecls hs_instds InstD group_ ++
mkDecls (typesigs . hs_valds) SigD group_ ++
mkDecls (valbinds . hs_valds) ValD group_
where
typesigs (ValBindsOut _ sigs) = filter isVanillaLSig sigs
typesigs _ = error "expected ValBindsOut"
valbinds (ValBindsOut binds _) = concatMap bagToList . snd . unzip $ binds
valbinds _ = error "expected ValBindsOut"
-- | Take a field of declarations from a data structure and create HsDecls
-- using the given constructor
mkDecls :: (a -> [Located b]) -> (b -> c) -> a -> [Located c]
mkDecls field con struct = [ L loc (con decl) | L loc decl <- field struct ]
-- | Sort by source location
sortByLoc :: [Located a] -> [Located a]
sortByLoc = sortBy (comparing getLoc)
--------------------------------------------------------------------------------
-- Filtering of declarations
--
-- We filter out declarations that we don't intend to handle later.
--------------------------------------------------------------------------------
-- | Filter out declarations that we don't handle in Haddock
filterDecls :: [(LHsDecl a, doc)] -> [(LHsDecl a, doc)]
filterDecls = filter (isHandled . unL . fst)
where
isHandled (ForD (ForeignImport {})) = True
isHandled (TyClD {}) = True
isHandled (InstD {}) = True
isHandled (SigD d) = isVanillaLSig (reL d)
isHandled (ValD _) = True
-- we keep doc declarations to be able to get at named docs
isHandled (DocD _) = True
isHandled _ = False
-- | Go through all class declarations and filter their sub-declarations
filterClasses :: [(LHsDecl a, doc)] -> [(LHsDecl a, doc)]
filterClasses decls = [ if isClassD d then (L loc (filterClass d), doc) else x
| x@(L loc d, doc) <- decls ]
where
filterClass (TyClD c) =
TyClD $ c { tcdSigs = filter (liftA2 (||) isVanillaLSig isMinimalLSig) $ tcdSigs c }
filterClass _ = error "expected TyClD"
--------------------------------------------------------------------------------
-- Collect docs
--
-- To be able to attach the right Haddock comment to the right declaration,
-- we sort the declarations by their SrcLoc and "collect" the docs for each
-- declaration.
--------------------------------------------------------------------------------
-- | Collect docs and attach them to the right declarations.
collectDocs :: [LHsDecl a] -> [(LHsDecl a, [HsDocString])]
collectDocs = go Nothing []
where
go Nothing _ [] = []
go (Just prev) docs [] = finished prev docs []
go prev docs (L _ (DocD (DocCommentNext str)) : ds)
| Nothing <- prev = go Nothing (str:docs) ds
| Just decl <- prev = finished decl docs (go Nothing [str] ds)
go prev docs (L _ (DocD (DocCommentPrev str)) : ds) = go prev (str:docs) ds
go Nothing docs (d:ds) = go (Just d) docs ds
go (Just prev) docs (d:ds) = finished prev docs (go (Just d) [] ds)
finished decl docs rest = (decl, reverse docs) : rest
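    -- Added illustration (schematic): a @-- |@ comment preceding a declaration
    -- and a @-- ^@ comment following it arrive here as 'DocCommentNext' and
    -- 'DocCommentPrev' respectively, so
    --
    -- > -- | squares a number
    -- > square :: Int -> Int
    -- > -- ^ total for all fixed-width ints
    --
    -- collects both strings onto the signature for @square@.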
-- | Build the list of items that will become the documentation, from the
-- export list. At this point, the list of ExportItems is in terms of
-- original names.
--
-- We create the export items even if the module is hidden, since they
-- might be useful when creating the export items for other modules.
mkExportItems
:: IfaceMap
-> Module -- this module
-> WarningMap
-> GlobalRdrEnv
-> [Name] -- exported names (orig)
-> [LHsDecl Name]
-> Maps
-> FixMap
-> [SrcSpan] -- splice locations
-> Maybe [IE Name]
-> InstIfaceMap
-> DynFlags
-> ErrMsgGhc [ExportItem Name]
mkExportItems
modMap thisMod warnings gre exportedNames decls
maps@(docMap, argMap, subMap, declMap, instMap) fixMap splices optExports instIfaceMap dflags =
case optExports of
Nothing -> fullModuleContents dflags warnings gre maps fixMap splices decls
Just exports -> liftM concat $ mapM lookupExport exports
where
lookupExport (IEVar (L _ x)) = declWith x
lookupExport (IEThingAbs (L _ t)) = declWith t
lookupExport (IEThingAll (L _ t)) = declWith t
lookupExport (IEThingWith (L _ t) _) = declWith t
lookupExport (IEModuleContents (L _ m)) =
moduleExports thisMod m dflags warnings gre exportedNames decls modMap instIfaceMap maps fixMap splices
lookupExport (IEGroup lev docStr) = return $
return . ExportGroup lev "" $ processDocString dflags gre docStr
lookupExport (IEDoc docStr) = return $
return . ExportDoc $ processDocStringParas dflags gre docStr
lookupExport (IEDocNamed str) = liftErrMsg $
findNamedDoc str [ unL d | d <- decls ] >>= return . \case
Nothing -> []
Just doc -> return . ExportDoc $ processDocStringParas dflags gre doc
declWith :: Name -> ErrMsgGhc [ ExportItem Name ]
declWith t =
case findDecl t of
([L l (ValD _)], (doc, _)) -> do
-- Top-level binding without type signature
export <- hiValExportItem dflags t doc (l `elem` splices) $ M.lookup t fixMap
return [export]
(ds, docs_) | decl : _ <- filter (not . isValD . unLoc) ds ->
let declNames = getMainDeclBinder (unL decl)
in case () of
_
-- temp hack: we filter out separately exported ATs, since we haven't decided how
                -- to handle them yet. We should really give a warning message also, and filter the
-- name out in mkVisibleNames...
| t `elem` declATs (unL decl) -> return []
-- We should not show a subordinate by itself if any of its
-- parents is also exported. See note [1].
| t `notElem` declNames,
Just p <- find isExported (parents t $ unL decl) ->
do liftErrMsg $ tell [
"Warning: " ++ moduleString thisMod ++ ": " ++
pretty dflags (nameOccName t) ++ " is exported separately but " ++
"will be documented under " ++ pretty dflags (nameOccName p) ++
". Consider exporting it together with its parent(s)" ++
" for code clarity." ]
return []
-- normal case
| otherwise -> case decl of
-- A single signature might refer to many names, but we
-- create an export item for a single name only. So we
-- modify the signature to contain only that single name.
L loc (SigD sig) ->
-- fromJust is safe since we already checked in guards
-- that 't' is a name declared in this declaration.
let newDecl = L loc . SigD . fromJust $ filterSigNames (== t) sig
in return [ mkExportDecl t newDecl docs_ ]
L loc (TyClD cl@ClassDecl{}) -> do
mdef <- liftGhcToErrMsgGhc $ minimalDef t
let sig = maybeToList $ fmap (noLoc . MinimalSig mempty . fmap noLoc) mdef
return [ mkExportDecl t
(L loc $ TyClD cl { tcdSigs = sig ++ tcdSigs cl }) docs_ ]
_ -> return [ mkExportDecl t decl docs_ ]
-- Declaration from another package
([], _) -> do
mayDecl <- hiDecl dflags t
case mayDecl of
Nothing -> return [ ExportNoDecl t [] ]
Just decl ->
-- We try to get the subs and docs
-- from the installed .haddock file for that package.
case M.lookup (nameModule t) instIfaceMap of
Nothing -> do
liftErrMsg $ tell
["Warning: Couldn't find .haddock for export " ++ pretty dflags t]
let subs_ = [ (n, noDocForDecl) | (n, _, _) <- subordinates instMap (unLoc decl) ]
return [ mkExportDecl t decl (noDocForDecl, subs_) ]
Just iface ->
return [ mkExportDecl t decl (lookupDocs t warnings (instDocMap iface) (instArgMap iface) (instSubMap iface)) ]
_ -> return []
mkExportDecl :: Name -> LHsDecl Name -> (DocForDecl Name, [(Name, DocForDecl Name)]) -> ExportItem Name
mkExportDecl name decl (doc, subs) = decl'
where
decl' = ExportDecl (restrictTo sub_names (extractDecl name mdl decl)) doc subs' [] fixities False
mdl = nameModule name
subs' = filter (isExported . fst) subs
sub_names = map fst subs'
fixities = [ (n, f) | n <- name:sub_names, Just f <- [M.lookup n fixMap] ]
isExported = (`elem` exportedNames)
findDecl :: Name -> ([LHsDecl Name], (DocForDecl Name, [(Name, DocForDecl Name)]))
findDecl n
| m == thisMod, Just ds <- M.lookup n declMap =
(ds, lookupDocs n warnings docMap argMap subMap)
| Just iface <- M.lookup m modMap, Just ds <- M.lookup n (ifaceDeclMap iface) =
(ds, lookupDocs n warnings (ifaceDocMap iface) (ifaceArgMap iface) (ifaceSubMap iface))
| otherwise = ([], (noDocForDecl, []))
where
m = nameModule n
hiDecl :: DynFlags -> Name -> ErrMsgGhc (Maybe (LHsDecl Name))
hiDecl dflags t = do
mayTyThing <- liftGhcToErrMsgGhc $ lookupName t
case mayTyThing of
Nothing -> do
liftErrMsg $ tell ["Warning: Not found in environment: " ++ pretty dflags t]
return Nothing
Just x -> case tyThingToLHsDecl x of
Left m -> liftErrMsg (tell [bugWarn m]) >> return Nothing
Right (m, t') -> liftErrMsg (tell $ map bugWarn m)
>> return (Just $ noLoc t')
where
warnLine x = O.text "haddock-bug:" O.<+> O.text x O.<>
O.comma O.<+> O.quotes (O.ppr t) O.<+>
O.text "-- Please report this on Haddock issue tracker!"
bugWarn = O.showSDoc dflags . warnLine
hiValExportItem :: DynFlags -> Name -> DocForDecl Name -> Bool -> Maybe Fixity -> ErrMsgGhc (ExportItem Name)
hiValExportItem dflags name doc splice fixity = do
mayDecl <- hiDecl dflags name
case mayDecl of
Nothing -> return (ExportNoDecl name [])
Just decl -> return (ExportDecl decl doc [] [] fixities splice)
where
fixities = case fixity of
Just f -> [(name, f)]
Nothing -> []
-- | Lookup docs for a declaration from maps.
lookupDocs :: Name -> WarningMap -> DocMap Name -> ArgMap Name -> SubMap
-> (DocForDecl Name, [(Name, DocForDecl Name)])
lookupDocs n warnings docMap argMap subMap =
let lookupArgDoc x = M.findWithDefault M.empty x argMap in
let doc = (lookupDoc n, lookupArgDoc n) in
let subs = M.findWithDefault [] n subMap in
let subDocs = [ (s, (lookupDoc s, lookupArgDoc s)) | s <- subs ] in
(doc, subDocs)
where
lookupDoc name = Documentation (M.lookup name docMap) (M.lookup name warnings)
-- | Return all export items produced by an exported module. That is, we're
-- interested in the exports produced by \"module B\" in such a scenario:
--
-- > module A (module B) where
-- > import B (...) hiding (...)
--
-- There are three different cases to consider:
--
-- 1) B is hidden, in which case we return all its exports that are in scope in A.
-- 2) B is visible, but not all its exports are in scope in A, in which case we
-- only return those that are.
-- 3) B is visible and all its exports are in scope, in which case we return
-- a single 'ExportModule' item.
moduleExports :: Module -- ^ Module A
-> ModuleName -- ^ The real name of B, the exported module
-> DynFlags -- ^ The flags used when typechecking A
-> WarningMap
-> GlobalRdrEnv -- ^ The renaming environment used for A
-> [Name] -- ^ All the exports of A
-> [LHsDecl Name] -- ^ All the declarations in A
-> IfaceMap -- ^ Already created interfaces
-> InstIfaceMap -- ^ Interfaces in other packages
-> Maps
-> FixMap
-> [SrcSpan] -- ^ Locations of all TH splices
-> ErrMsgGhc [ExportItem Name] -- ^ Resulting export items
moduleExports thisMod expMod dflags warnings gre _exports decls ifaceMap instIfaceMap maps fixMap splices
| m == thisMod = fullModuleContents dflags warnings gre maps fixMap splices decls
| otherwise =
case M.lookup m ifaceMap of
Just iface
| OptHide `elem` ifaceOptions iface -> return (ifaceExportItems iface)
| otherwise -> return [ ExportModule m ]
Nothing -> -- We have to try to find it in the installed interfaces
-- (external packages).
case M.lookup expMod (M.mapKeys moduleName instIfaceMap) of
Just iface -> return [ ExportModule (instMod iface) ]
Nothing -> do
liftErrMsg $
tell ["Warning: " ++ pretty dflags thisMod ++ ": Could not find " ++
"documentation for exported module: " ++ pretty dflags expMod]
return []
where
m = mkModule packageKey expMod
packageKey = modulePackageKey thisMod
-- Note [1]:
------------
-- It is unnecessary to document a subordinate by itself at the top level if
-- any of its parents is also documented. Furthermore, if the subordinate is a
-- record field or a class method, documenting it under its parent
-- indicates its special status.
--
-- A user might expect that it should show up separately, so we issue a
-- warning. It's a fine opportunity to also tell the user she might want to
-- export the subordinate through the parent export item for clarity.
--
-- The code removes top-level subordinates also when the parent is exported
-- through a 'module' export. I think that is fine.
--
-- (For more information, see Trac #69)
fullModuleContents :: DynFlags -> WarningMap -> GlobalRdrEnv -> Maps -> FixMap -> [SrcSpan]
-> [LHsDecl Name] -> ErrMsgGhc [ExportItem Name]
fullModuleContents dflags warnings gre (docMap, argMap, subMap, declMap, instMap) fixMap splices decls =
liftM catMaybes $ mapM mkExportItem (expandSig decls)
where
-- A type signature can have multiple names, like:
-- foo, bar :: Types..
--
-- We go through the list of declarations and expand type signatures, so
-- that every type signature has exactly one name!
expandSig :: [LHsDecl name] -> [LHsDecl name]
expandSig = foldr f []
where
f :: LHsDecl name -> [LHsDecl name] -> [LHsDecl name]
f (L l (SigD (TypeSig names t nwcs))) xs = foldr (\n acc -> L l (SigD (TypeSig [n] t nwcs)) : acc) xs names
f (L l (SigD (GenericSig names t))) xs = foldr (\n acc -> L l (SigD (GenericSig [n] t)) : acc) xs names
f x xs = x : xs
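    -- Added illustration: a signature naming several binders, e.g.
    --
    -- > foo, bar :: Int -> Int
    --
    -- is rewritten into one single-name 'TypeSig' per binder, so @foo@ and
    -- @bar@ each get their own export item.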
mkExportItem :: LHsDecl Name -> ErrMsgGhc (Maybe (ExportItem Name))
mkExportItem (L _ (DocD (DocGroup lev docStr))) = do
return . Just . ExportGroup lev "" $ processDocString dflags gre docStr
mkExportItem (L _ (DocD (DocCommentNamed _ docStr))) = do
return . Just . ExportDoc $ processDocStringParas dflags gre docStr
mkExportItem (L l (ValD d))
| name:_ <- collectHsBindBinders d, Just [L _ (ValD _)] <- M.lookup name declMap =
-- Top-level binding without type signature.
let (doc, _) = lookupDocs name warnings docMap argMap subMap in
fmap Just (hiValExportItem dflags name doc (l `elem` splices) $ M.lookup name fixMap)
| otherwise = return Nothing
mkExportItem decl@(L l (InstD d))
| Just name <- M.lookup (getInstLoc d) instMap =
let (doc, subs) = lookupDocs name warnings docMap argMap subMap in
return $ Just (ExportDecl decl doc subs [] (fixities name subs) (l `elem` splices))
mkExportItem (L l (TyClD cl@ClassDecl{ tcdLName = L _ name, tcdSigs = sigs })) = do
mdef <- liftGhcToErrMsgGhc $ minimalDef name
let sig = maybeToList $ fmap (noLoc . MinimalSig mempty . fmap noLoc) mdef
expDecl (L l (TyClD cl { tcdSigs = sig ++ sigs })) l name
mkExportItem decl@(L l d)
| name:_ <- getMainDeclBinder d = expDecl decl l name
| otherwise = return Nothing
fixities name subs = [ (n,f) | n <- name : map fst subs
, Just f <- [M.lookup n fixMap] ]
expDecl decl l name = return $ Just (ExportDecl decl doc subs [] (fixities name subs) (l `elem` splices))
where (doc, subs) = lookupDocs name warnings docMap argMap subMap
-- | Sometimes the declaration we want to export is not the "main" declaration:
-- it might be an individual record selector or a class method. In these
-- cases we have to extract the required declaration (and somehow cobble
-- together a type signature for it...).
extractDecl :: Name -> Module -> LHsDecl Name -> LHsDecl Name
extractDecl name mdl decl
| name `elem` getMainDeclBinder (unLoc decl) = decl
| otherwise =
case unLoc decl of
TyClD d@ClassDecl {} ->
let matches = [ sig | sig <- tcdSigs d, name `elem` sigName sig,
isVanillaLSig sig ] -- TODO: document fixity
in case matches of
[s0] -> let (n, tyvar_names) = (tcdName d, getTyVars d)
L pos sig = extractClassDecl n tyvar_names s0
in L pos (SigD sig)
_ -> error "internal: extractDecl (ClassDecl)"
TyClD d@DataDecl {} ->
let (n, tyvar_names) = (tcdName d, map toTypeNoLoc $ getTyVars d)
in SigD <$> extractRecSel name mdl n tyvar_names (dd_cons (tcdDataDefn d))
InstD (DataFamInstD DataFamInstDecl { dfid_tycon = L _ n
, dfid_pats = HsWB { hswb_cts = tys }
, dfid_defn = defn }) ->
SigD <$> extractRecSel name mdl n tys (dd_cons defn)
InstD (ClsInstD ClsInstDecl { cid_datafam_insts = insts }) ->
let matches = [ d | L _ d <- insts
, L _ ConDecl { con_details = RecCon rec } <- dd_cons (dfid_defn d)
, ConDeclField { cd_fld_names = ns } <- map unLoc (unLoc rec)
, L _ n <- ns
, n == name
]
in case matches of
[d0] -> extractDecl name mdl (noLoc . InstD $ DataFamInstD d0)
_ -> error "internal: extractDecl (ClsInstD)"
_ -> error "internal: extractDecl"
where
getTyVars = hsLTyVarLocNames . tyClDeclTyVars
toTypeNoLoc :: Located Name -> LHsType Name
toTypeNoLoc = noLoc . HsTyVar . unLoc
extractClassDecl :: Name -> [Located Name] -> LSig Name -> LSig Name
extractClassDecl c tvs0 (L pos (TypeSig lname ltype _)) = case ltype of
L _ (HsForAllTy expl _ tvs (L _ preds) ty) ->
L pos (TypeSig lname (noLoc (HsForAllTy expl Nothing tvs (lctxt preds) ty)) [])
_ -> L pos (TypeSig lname (noLoc (HsForAllTy Implicit Nothing emptyHsQTvs (lctxt []) ltype)) [])
where
lctxt = noLoc . ctxt
ctxt preds = nlHsTyConApp c (map toTypeNoLoc tvs0) : preds
extractClassDecl _ _ _ = error "extractClassDecl: unexpected decl"
extractRecSel :: Name -> Module -> Name -> [LHsType Name] -> [LConDecl Name]
-> LSig Name
extractRecSel _ _ _ _ [] = error "extractRecSel: selector not found"
extractRecSel nm mdl t tvs (L _ con : rest) =
case con_details con of
RecCon (L _ fields) | ((n,L _ (ConDeclField _nn ty _)) : _) <- matching_fields fields ->
L (getLoc n) (TypeSig [noLoc nm] (noLoc (HsFunTy data_ty (getBangType ty))) [])
_ -> extractRecSel nm mdl t tvs rest
where
matching_fields flds = [ (n,f) | f@(L _ (ConDeclField ns _ _)) <- flds, n <- ns, unLoc n == nm ]
data_ty
| ResTyGADT _ ty <- con_res con = ty
| otherwise = foldl' (\x y -> noLoc (HsAppTy x y)) (noLoc (HsTyVar t)) tvs
-- | Keep export items with docs.
pruneExportItems :: [ExportItem Name] -> [ExportItem Name]
pruneExportItems = filter hasDoc
where
hasDoc (ExportDecl{expItemMbDoc = (Documentation d _, _)}) = isJust d
hasDoc _ = True
mkVisibleNames :: Maps -> [ExportItem Name] -> [DocOption] -> [Name]
mkVisibleNames (_, _, _, _, instMap) exports opts
| OptHide `elem` opts = []
| otherwise = let ns = concatMap exportName exports
in seqList ns `seq` ns
where
exportName e@ExportDecl {} = name ++ subs
where subs = map fst (expItemSubDocs e)
name = case unLoc $ expItemDecl e of
InstD d -> maybeToList $ M.lookup (getInstLoc d) instMap
decl -> getMainDeclBinder decl
exportName ExportNoDecl {} = [] -- we don't count these as visible, since
-- we don't want links to go to them.
exportName _ = []
seqList :: [a] -> ()
seqList [] = ()
seqList (x : xs) = x `seq` seqList xs
-- | Find a stand-alone documentation comment by its name.
findNamedDoc :: String -> [HsDecl Name] -> ErrMsgM (Maybe HsDocString)
findNamedDoc name = search
where
search [] = do
tell ["Cannot find documentation for: $" ++ name]
return Nothing
search (DocD (DocCommentNamed name' doc) : rest)
| name == name' = return (Just doc)
| otherwise = search rest
search (_other_decl : rest) = search rest
|
mrBliss/haddock
|
haddock-api/src/Haddock/Interface/Create.hs
|
bsd-2-clause
| 36,249 | 0 | 31 | 9,953 | 10,698 | 5,453 | 5,245 | 584 | 17 |
import qualified Data.Sequence as Seq
import LogicGrowsOnTrees (exploreTree)
import LogicGrowsOnTrees.Examples.Queens (nqueensUsingBitsSolutions)
main = print . exploreTree . fmap Seq.singleton . nqueensUsingBitsSolutions $ 5
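-- Added note: wrapping each solution in a singleton 'Seq' makes the monoid
-- accumulated by 'exploreTree' collect the boards themselves rather than a
-- count; for the 5-queens instance explored here that should print a
-- sequence of 10 placements.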
|
gcross/LogicGrowsOnTrees
|
LogicGrowsOnTrees/tutorial/tutorial-2.hs
|
bsd-2-clause
| 226 | 0 | 9 | 23 | 54 | 31 | 23 | 4 | 1 |
{-# LANGUAGE MultiParamTypeClasses, TypeFamilies,
GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-| All RPC calls are run within this monad.
It encapsulates:
* IO operations,
* failures,
* working with the daemon state.
Code that is specific either to the configuration or the lock management, should
go into their corresponding dedicated modules.
-}
{-
Copyright (C) 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.WConfd.Monad
( DaemonHandle
, dhConfigPath
, mkDaemonHandle
, WConfdMonadInt
, runWConfdMonadInt
, WConfdMonad
, daemonHandle
, modifyConfigState
, modifyConfigStateWithImmediate
, forceConfigStateDistribution
, readConfigState
, modifyConfigDataErr_
, modifyLockWaiting
, modifyLockWaiting_
, readLockWaiting
, readLockAllocation
, modifyTempResState
, modifyTempResStateErr
, readTempResState
) where
import Control.Applicative
import Control.Arrow ((&&&), second)
import Control.Monad
import Control.Monad.Base
import Control.Monad.Error
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Trans.Control
import Data.Functor.Identity
import Data.IORef.Lifted
import Data.Monoid (Any(..))
import qualified Data.Set as S
import Data.Tuple (swap)
import System.Time (getClockTime)
import qualified Text.JSON as J
import Ganeti.BasicTypes
import Ganeti.Errors
import Ganeti.JQueue (notifyJob)
import Ganeti.Lens
import Ganeti.Locking.Allocation (LockAllocation)
import Ganeti.Locking.Locks
import Ganeti.Locking.Waiting (getAllocation)
import Ganeti.Logging
import Ganeti.Logging.WriterLog
import Ganeti.Objects (ConfigData)
import Ganeti.Utils.AsyncWorker
import Ganeti.Utils.IORef
import Ganeti.WConfd.ConfigState
import Ganeti.WConfd.TempRes
-- * Pure data types used in the monad
-- | The state of the daemon, capturing both the configuration state and the
-- locking state.
data DaemonState = DaemonState
{ dsConfigState :: ConfigState
, dsLockWaiting :: GanetiLockWaiting
, dsTempRes :: TempResState
}
$(makeCustomLenses ''DaemonState)
data DaemonHandle = DaemonHandle
{ dhDaemonState :: IORef DaemonState -- ^ The current state of the daemon
, dhConfigPath :: FilePath -- ^ The configuration file path
-- all static information that doesn't change during the life-time of the
-- daemon should go here;
-- all IDs of threads that do asynchronous work should probably also go here
, dhSaveConfigWorker :: AsyncWorker Any ()
, dhSaveLocksWorker :: AsyncWorker () ()
, dhSaveTempResWorker :: AsyncWorker () ()
}
mkDaemonHandle :: FilePath
-> ConfigState
-> GanetiLockWaiting
-> TempResState
-> (IO ConfigState -> [AsyncWorker () ()]
-> ResultG (AsyncWorker Any ()))
-- ^ A function that creates a worker that asynchronously
-- saves the configuration to the master file.
-> (IO ConfigState -> ResultG (AsyncWorker () ()))
-- ^ A function that creates a worker that asynchronously
-- distributes the configuration to master candidates
-> (IO ConfigState -> ResultG (AsyncWorker () ()))
-- ^ A function that creates a worker that asynchronously
-- distributes SSConf to nodes
-> (IO GanetiLockWaiting -> ResultG (AsyncWorker () ()))
-- ^ A function that creates a worker that asynchronously
-- saves the lock allocation state.
-> (IO TempResState -> ResultG (AsyncWorker () ()))
-- ^ A function that creates a worker that asynchronously
-- saves the temporary reservations state.
-> ResultG DaemonHandle
mkDaemonHandle cpath cstat lstat trstat
saveWorkerFn distMCsWorkerFn distSSConfWorkerFn
saveLockWorkerFn saveTempResWorkerFn = do
ds <- newIORef $ DaemonState cstat lstat trstat
let readConfigIO = dsConfigState `liftM` readIORef ds :: IO ConfigState
ssconfWorker <- distSSConfWorkerFn readConfigIO
distMCsWorker <- distMCsWorkerFn readConfigIO
saveWorker <- saveWorkerFn readConfigIO [ distMCsWorker
, ssconfWorker ]
saveLockWorker <- saveLockWorkerFn $ dsLockWaiting `liftM` readIORef ds
saveTempResWorker <- saveTempResWorkerFn $ dsTempRes `liftM` readIORef ds
return $ DaemonHandle ds cpath saveWorker saveLockWorker saveTempResWorker
-- * The monad and its instances
-- | A type alias for easier referring to the actual content of the monad
-- when implementing its instances.
type WConfdMonadIntType = ReaderT DaemonHandle IO
-- | The internal part of the monad without error handling.
newtype WConfdMonadInt a = WConfdMonadInt
{ getWConfdMonadInt :: WConfdMonadIntType a }
deriving (Functor, Applicative, Monad, MonadIO, MonadBase IO, MonadLog)
instance MonadBaseControl IO WConfdMonadInt where
newtype StM WConfdMonadInt b = StMWConfdMonadInt
{ runStMWConfdMonadInt :: StM WConfdMonadIntType b }
liftBaseWith f = WConfdMonadInt . liftBaseWith
$ \r -> f (liftM StMWConfdMonadInt . r . getWConfdMonadInt)
restoreM = WConfdMonadInt . restoreM . runStMWConfdMonadInt
-- | Runs the internal part of the WConfdMonad monad on a given daemon
-- handle.
runWConfdMonadInt :: WConfdMonadInt a -> DaemonHandle -> IO a
runWConfdMonadInt (WConfdMonadInt k) = runReaderT k
-- | The complete monad with error handling.
type WConfdMonad = ResultT GanetiException WConfdMonadInt
-- | A pure monad that logs and reports errors used for atomic modifications.
type AtomicModifyMonad a = ResultT GanetiException WriterLog a
-- * Basic functions in the monad
-- | Returns the daemon handle.
daemonHandle :: WConfdMonad DaemonHandle
daemonHandle = lift . WConfdMonadInt $ ask
-- | Returns the current configuration, given a handle
readConfigState :: WConfdMonad ConfigState
readConfigState = liftM dsConfigState . readIORef . dhDaemonState
=<< daemonHandle
-- | Atomically modifies the configuration state in the WConfdMonad
-- with a computation that can possibly fail; immediately afterwards,
-- while config write is still going on, do the followup action. Return
-- only after replication is finished.
modifyConfigStateErrWithImmediate
:: (TempResState -> ConfigState -> AtomicModifyMonad (a, ConfigState))
-> WConfdMonad ()
-> WConfdMonad a
modifyConfigStateErrWithImmediate f immediateFollowup = do
dh <- daemonHandle
now <- liftIO getClockTime
-- If the configuration is modified, we also bump its serial number.
-- In order to determine if we need to save, we report if it's modified
-- as well as if it needs to be distributed synchronously.
let unpackResult cs (r, cs')
| cs /= cs' = ( (r, True, needsFullDist cs cs')
, over csConfigDataL (bumpSerial now) cs' )
| otherwise = ((r, False, False), cs')
let modCS ds@(DaemonState { dsTempRes = tr }) =
mapMOf2 dsConfigStateL (\cs -> liftM (unpackResult cs) (f tr cs)) ds
(r, modified, distSync) <- atomicModifyIORefErrLog (dhDaemonState dh)
(liftM swap . modCS)
if modified
then if distSync
then do
logDebug "Triggering config write\
\ together with full synchronous distribution"
res <- liftBase . triggerWithResult (Any True) $ dhSaveConfigWorker dh
immediateFollowup
wait res
logDebug "Config write and distribution finished"
else do
-- trigger the config. saving worker and wait for it
logDebug "Triggering config write\
\ and asynchronous distribution"
res <- liftBase . triggerWithResult (Any False) $ dhSaveConfigWorker dh
immediateFollowup
wait res
logDebug "Config writer finished with local task"
else
immediateFollowup
return r
-- | Atomically modifies the configuration state in the WConfdMonad
-- with a computation that can possibly fail.
modifyConfigStateErr
:: (TempResState -> ConfigState -> AtomicModifyMonad (a, ConfigState))
-> WConfdMonad a
modifyConfigStateErr = flip modifyConfigStateErrWithImmediate (return ())
-- | Atomically modifies the configuration state in the WConfdMonad
-- with a computation that can possibly fail.
modifyConfigStateErr_
:: (TempResState -> ConfigState -> AtomicModifyMonad ConfigState)
-> WConfdMonad ()
modifyConfigStateErr_ f = modifyConfigStateErr ((liftM ((,) ()) .) . f)
-- | Atomically modifies the configuration state in the WConfdMonad.
modifyConfigState :: (ConfigState -> (a, ConfigState)) -> WConfdMonad a
modifyConfigState f = modifyConfigStateErr ((return .) . const f)
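-- Added illustration: a pure update that installs a new configuration and
-- hands the previous one back to the caller can be written as
--
-- > modifyConfigState (\cs -> (cs, newCs))
--
-- where @newCs@ is a placeholder for whatever updated 'ConfigState' the
-- caller has built.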
-- | Atomically modifies the configuration state in WConfdMonad; immediately
-- afterwards (while the config write-out is not necessarily finished) do
-- another action.
modifyConfigStateWithImmediate :: (ConfigState -> (a, ConfigState))
-> WConfdMonad ()
-> WConfdMonad a
modifyConfigStateWithImmediate f =
modifyConfigStateErrWithImmediate ((return .) . const f)
-- | Force the distribution of configuration without actually modifying it.
--
-- We need a separate call for this operation, because 'modifyConfigState' only
-- triggers the distribution when the configuration changes.
forceConfigStateDistribution :: WConfdMonad ()
forceConfigStateDistribution = do
logDebug "Forcing synchronous config write together with full distribution"
dh <- daemonHandle
liftBase . triggerAndWait (Any True) . dhSaveConfigWorker $ dh
logDebug "Forced config write and distribution finished"
-- | Atomically modifies the configuration data in the WConfdMonad
-- with a computation that can possibly fail.
modifyConfigDataErr_
:: (TempResState -> ConfigData -> AtomicModifyMonad ConfigData)
-> WConfdMonad ()
modifyConfigDataErr_ f =
modifyConfigStateErr_ (traverseOf csConfigDataL . f)
-- | Atomically modifies the state of temporary reservations in
-- WConfdMonad in the presence of possible errors.
modifyTempResStateErr
:: (ConfigData -> StateT TempResState ErrorResult a) -> WConfdMonad a
modifyTempResStateErr f = do
-- we use Compose to traverse the composition of applicative functors
-- @ErrorResult@ and @(,) a@
let f' ds = traverseOf2 dsTempResL
(runStateT (f (csConfigData . dsConfigState $ ds))) ds
dh <- daemonHandle
r <- toErrorBase $ atomicModifyIORefErr (dhDaemonState dh)
(liftM swap . f')
-- logDebug $ "Current temporary reservations: " ++ J.encode tr
logDebug "Triggering temporary reservations write"
liftBase . triggerAndWait_ . dhSaveTempResWorker $ dh
logDebug "Temporary reservations write finished"
return r
-- | Atomically modifies the state of temporary reservations in
-- WConfdMonad.
modifyTempResState :: (ConfigData -> State TempResState a) -> WConfdMonad a
modifyTempResState f =
modifyTempResStateErr (mapStateT (return . runIdentity) . f)
-- | Reads the state of of the configuration and temporary reservations
-- in WConfdMonad.
readTempResState :: WConfdMonad (ConfigData, TempResState)
readTempResState = liftM (csConfigData . dsConfigState &&& dsTempRes)
. readIORef . dhDaemonState
=<< daemonHandle
-- | Atomically modifies the lock waiting state in WConfdMonad.
modifyLockWaiting :: (GanetiLockWaiting -> ( GanetiLockWaiting
, (a, S.Set ClientId) ))
-> WConfdMonad a
modifyLockWaiting f = do
dh <- lift . WConfdMonadInt $ ask
let f' = (id &&& fst) . f
(lockAlloc, (r, nfy)) <- atomicModifyWithLens
(dhDaemonState dh) dsLockWaitingL f'
logDebug $ "Current lock status: " ++ J.encode lockAlloc
logDebug "Triggering lock state write"
liftBase . triggerAndWait_ . dhSaveLocksWorker $ dh
logDebug "Lock write finished"
unless (S.null nfy) $ do
logDebug . (++) "Locks became available for " . show $ S.toList nfy
liftIO . mapM_ (notifyJob . ciPid) $ S.toList nfy
logDebug "Finished notifying processes"
return r
-- | Atomically modifies the lock allocation state in WConfdMonad, not
-- producing any result
modifyLockWaiting_ :: (GanetiLockWaiting -> (GanetiLockWaiting, S.Set ClientId))
-> WConfdMonad ()
modifyLockWaiting_ = modifyLockWaiting . ((second $ (,) ()) .)
-- | Read the lock waiting state in WConfdMonad.
readLockWaiting :: WConfdMonad GanetiLockWaiting
readLockWaiting = liftM dsLockWaiting
. readIORef . dhDaemonState
=<< daemonHandle
-- | Read the underlying lock allocation.
readLockAllocation :: WConfdMonad (LockAllocation GanetiLocks ClientId)
readLockAllocation = liftM getAllocation readLockWaiting
|
ganeti-github-testing/ganeti-test-1
|
src/Ganeti/WConfd/Monad.hs
|
bsd-2-clause
| 14,252 | 0 | 19 | 3,094 | 2,352 | 1,247 | 1,105 | -1 | -1 |