code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
-- File created: 2009-07-21 13:19:42
module Main (main, runOne) where
import Prelude hiding (catch)
import Control.Exception (catch)
import Data.IORef (newIORef, readIORef)
import System.Environment (getArgs)
import Haschoo.Running (runRepl, runFile, run, RunError)
import Haschoo.Stdlib (toplevelContext)
import Haschoo.Utils (void, errPrint)
-- | Entry point: with no command-line arguments, start the REPL;
-- otherwise run each named file in its own fresh copy of the toplevel
-- context, printing (rather than propagating) any 'RunError'.
main :: IO ()
main = do
   ctx  <- toplevelContext
   args <- getArgs
   if null args
      then runRepl ctx
      else do
         -- Snapshot the pristine context once so every file starts from
         -- the same state instead of inheriting earlier files' mutations.
         initCtx <- mapM readIORef ctx
         mapM_ (\f -> do
                  ctx' <- mapM newIORef initCtx
                  runFile ctx' f
                     `catch` \e -> errPrint (e :: RunError))
               args
-- | Run a single Scheme expression string in a fresh toplevel context,
-- discarding the result.  Intended for use from GHCi.
runOne :: String -> IO ()
runOne s = do
   ctx <- toplevelContext
   void (run ctx "runOne" s)
| Deewiant/haschoo | Haschoo/Main.hs | bsd-3-clause | 821 | 0 | 18 | 238 | 256 | 139 | 117 | 23 | 2 |
{-
(c) The University of Glasgow 2006
(c) The University of Glasgow, 1997-2006
Buffers for scanning string input stored in external arrays.
-}
{-# LANGUAGE BangPatterns, CPP, MagicHash, UnboxedTuples #-}
{-# OPTIONS_GHC -O2 #-}
-- We always optimise this, otherwise performance of a non-optimised
-- compiler is severely affected
module StringBuffer
(
StringBuffer(..),
-- non-abstract for vs\/HaskellService
-- * Creation\/destruction
hGetStringBuffer,
hGetStringBufferBlock,
appendStringBuffers,
stringToStringBuffer,
-- * Inspection
nextChar,
currentChar,
prevChar,
atEnd,
-- * Moving and comparison
stepOn,
offsetBytes,
byteDiff,
atLine,
-- * Conversion
lexemeToString,
lexemeToFastString,
decodePrevNChars,
-- * Parsing integers
parseUnsignedInteger,
) where
#include "HsVersions.h"
import GhcPrelude
import Encoding
import FastString
import FastFunctions
import Outputable
import Util
import Data.Maybe
import Control.Exception
import System.IO
import System.IO.Unsafe ( unsafePerformIO )
import GHC.IO.Encoding.UTF8 ( mkUTF8 )
import GHC.IO.Encoding.Failure ( CodingFailureMode(IgnoreCodingFailure) )
import GHC.Exts
import Foreign
-- -----------------------------------------------------------------------------
-- The StringBuffer type
-- |A StringBuffer is an internal pointer to a sized chunk of bytes.
-- The bytes are intended to be *immutable*.  There are pure
-- operations to read the contents of a StringBuffer.
--
-- A StringBuffer may have a finalizer, depending on how it was
-- obtained.
--
data StringBuffer
 = StringBuffer {
     buf :: {-# UNPACK #-} !(ForeignPtr Word8), -- payload bytes
     len :: {-# UNPACK #-} !Int,                -- length in bytes
     cur :: {-# UNPACK #-} !Int                 -- current position (byte offset)
  }

-- The buffer is assumed to be UTF-8 encoded, and furthermore
-- we add three '\0' bytes to the end as sentinels so that the
-- decoder doesn't have to check for overflow at every single byte
-- of a multibyte sequence.
-- | Show only the length and current offset, e.g. @<stringbuffer(12,3)>@;
-- the raw bytes are deliberately not rendered.
instance Show StringBuffer where
  showsPrec _ sb =
      showString "<stringbuffer("
    . shows (len sb)
    . showString ","
    . shows (cur sb)
    . showString ")>"
-- -----------------------------------------------------------------------------
-- Creation / Destruction
-- | Read a file into a 'StringBuffer'. The resulting buffer is automatically
-- managed by the garbage collector.
--
-- NOTE(review): the handle is closed only after 'hGetBuf' returns; if
-- 'hGetBuf' throws, the handle leaks — consider 'bracket'.  Confirm
-- whether callers depend on the current behaviour before changing it.
hGetStringBuffer :: FilePath -> IO StringBuffer
hGetStringBuffer fname = do
   h <- openBinaryFile fname ReadMode
   size_i <- hFileSize h
   offset_i <- skipBOM h size_i 0  -- offset is 0 initially
   let size = fromIntegral $ size_i - offset_i
   -- size+3 leaves room for the three '\0' sentinel bytes added by
   -- 'newUTF8StringBuffer'.
   buf <- mallocForeignPtrArray (size+3)
   withForeignPtr buf $ \ptr -> do
     r <- if size == 0 then return 0 else hGetBuf h ptr size
     hClose h
     if (r /= size)
        then ioError (userError "short read of file")
        else newUTF8StringBuffer buf ptr size
-- | Read at most @wanted@ bytes from the handle's current position into
-- a 'StringBuffer', skipping a leading BOM when at the start of the file.
hGetStringBufferBlock :: Handle -> Int -> IO StringBuffer
hGetStringBufferBlock handle wanted
   = do size_i <- hFileSize handle
        offset_i <- hTell handle >>= skipBOM handle size_i
        -- Never read past the end of the file.
        let size = min wanted (fromIntegral $ size_i-offset_i)
        -- size+3: room for the '\0' sentinel bytes.
        buf <- mallocForeignPtrArray (size+3)
        withForeignPtr buf $ \ptr ->
            do r <- if size == 0 then return 0 else hGetBuf handle ptr size
               if r /= size
                  then ioError (userError $ "short read of file: "++show(r,size,size_i,handle))
                  else newUTF8StringBuffer buf ptr size
-- | Skip the byte-order mark if there is one (see #1744 and #6016),
-- and return the new position of the handle in bytes.
--
-- This is better than treating #FEFF as whitespace,
-- because that would mess up layout. We don't have a concept
-- of zero-width whitespace in Haskell: all whitespace codepoints
-- have a width of one column.
skipBOM :: Handle -> Integer -> Integer -> IO Integer
skipBOM h size offset =
  -- Only skip BOM at the beginning of a file.
  if size > 0 && offset == 0
    then do
      -- Validate assumption that handle is in binary mode.
      ASSERTM( hGetEncoding h >>= return . isNothing )
      -- Temporarily select utf8 encoding with error ignoring,
      -- to make `hLookAhead` and `hGetChar` return full Unicode characters.
      -- 'bracket_' restores binary mode even if the lookahead throws.
      bracket_ (hSetEncoding h safeEncoding) (hSetBinaryMode h True) $ do
        c <- hLookAhead h
        if c == '\xfeff'
          then hGetChar h >> hTell h  -- consume the BOM, report new offset
          else return offset
    else return offset
  where
    safeEncoding = mkUTF8 IgnoreCodingFailure
-- | Wrap an already-filled buffer of @size@ bytes as a 'StringBuffer',
-- writing the three '\0' sentinel bytes after the payload.  The buffer
-- must have been allocated with at least @size+3@ bytes of room.
newUTF8StringBuffer :: ForeignPtr Word8 -> Ptr Word8 -> Int -> IO StringBuffer
newUTF8StringBuffer buf ptr size = do
  pokeArray (ptr `plusPtr` size :: Ptr Word8) [0,0,0]
  -- sentinels for UTF-8 decoding
  return $ StringBuffer buf size 0
-- | Concatenate the *remaining* contents (from 'cur' to the end) of two
-- 'StringBuffer's into a freshly-allocated buffer, re-adding the '\0'
-- sentinel bytes at the end.
appendStringBuffers :: StringBuffer -> StringBuffer -> IO StringBuffer
appendStringBuffers sb1 sb2
    = do newBuf <- mallocForeignPtrArray (size+3)
         withForeignPtr newBuf $ \ptr ->
          withForeignPtr (buf sb1) $ \sb1Ptr ->
           withForeignPtr (buf sb2) $ \sb2Ptr ->
             do copyArray ptr (sb1Ptr `advancePtr` cur sb1) sb1_len
                copyArray (ptr `advancePtr` sb1_len) (sb2Ptr `advancePtr` cur sb2) sb2_len
                pokeArray (ptr `advancePtr` size) [0,0,0]
                return (StringBuffer newBuf size 0)
    where sb1_len = calcLen sb1
          sb2_len = calcLen sb2
          -- bytes remaining after the current position
          calcLen sb = len sb - cur sb
          size = sb1_len + sb2_len
-- | Encode a 'String' into a 'StringBuffer' as UTF-8.  The resulting buffer
-- is automatically managed by the garbage collector.
--
-- 'unsafePerformIO' is justified here: the allocation is fresh and never
-- escapes partially initialised, so the function is observably pure.
stringToStringBuffer :: String -> StringBuffer
stringToStringBuffer str =
  unsafePerformIO $ do
    let size = utf8EncodedLength str
    buf <- mallocForeignPtrArray (size+3)
    withForeignPtr buf $ \ptr -> do
      utf8EncodeString ptr str
      pokeArray (ptr `plusPtr` size :: Ptr Word8) [0,0,0]
      -- sentinels for UTF-8 decoding
      return (StringBuffer buf size 0)
-- -----------------------------------------------------------------------------
-- Grab a character
-- | Return the first UTF-8 character of a nonempty 'StringBuffer' and as well
-- the remaining portion (analogous to 'Data.List.uncons'). __Warning:__ The
-- behavior is undefined if the 'StringBuffer' is empty. The result shares
-- the same buffer as the original. Similar to 'utf8DecodeChar', if the
-- character cannot be decoded as UTF-8, '\0' is returned.
{-# INLINE nextChar #-}
nextChar :: StringBuffer -> (Char,StringBuffer)
nextChar (StringBuffer buf len (I# cur#)) =
  -- Getting our fingers dirty a little here, but this is performance-critical
  inlinePerformIO $ do
    withForeignPtr buf $ \(Ptr a#) -> do
      -- Decode one character at the current byte offset; the unboxed
      -- tuple gives both the character and its encoded width in bytes.
      case utf8DecodeChar# (a# `plusAddr#` cur#) of
        (# c#, nBytes# #) ->
          let cur' = I# (cur# +# nBytes#) in
          return (C# c#, StringBuffer buf len cur')
-- | Return the first UTF-8 character of a nonempty 'StringBuffer' (analogous
-- to 'Data.List.head'). __Warning:__ The behavior is undefined if the
-- 'StringBuffer' is empty. Similar to 'utf8DecodeChar', if the character
-- cannot be decoded as UTF-8, '\0' is returned.
currentChar :: StringBuffer -> Char
currentChar sb = fst (nextChar sb)
-- | Return the character immediately before the current position, or the
-- supplied default when the buffer is at its start.
prevChar :: StringBuffer -> Char -> Char
prevChar (StringBuffer _ _ 0) deflt = deflt
prevChar (StringBuffer buf _ cur) _ =
  inlinePerformIO $ do
    withForeignPtr buf $ \p -> do
      -- Step backwards over one (possibly multi-byte) UTF-8 sequence,
      -- then decode forwards from there.
      p' <- utf8PrevChar (p `plusPtr` cur)
      return (fst (utf8DecodeChar p'))
-- -----------------------------------------------------------------------------
-- Moving
-- | Return a 'StringBuffer' with the first UTF-8 character removed (analogous
-- to 'Data.List.tail'). __Warning:__ The behavior is undefined if the
-- 'StringBuffer' is empty. The result shares the same buffer as the
-- original.
stepOn :: StringBuffer -> StringBuffer
stepOn = snd . nextChar
-- | Return a 'StringBuffer' with the first @n@ bytes removed. __Warning:__
-- If there aren't enough characters, the returned 'StringBuffer' will be
-- invalid and any use of it may lead to undefined behavior. The result
-- shares the same buffer as the original.
offsetBytes :: Int -- ^ @n@, the number of bytes
            -> StringBuffer
            -> StringBuffer
offsetBytes n (StringBuffer fp l c) = StringBuffer fp l (c + n)
-- | Compute the difference in offset between two 'StringBuffer's that share
-- the same buffer. __Warning:__ The behavior is undefined if the
-- 'StringBuffer's use separate buffers.
byteDiff :: StringBuffer -> StringBuffer -> Int
byteDiff (StringBuffer _ _ c1) (StringBuffer _ _ c2) = c2 - c1
-- | Check whether a 'StringBuffer' is empty (analogous to 'Data.List.null'):
-- true exactly when the cursor has reached the buffer's length.
atEnd :: StringBuffer -> Bool
atEnd sb = cur sb == len sb
-- | Computes a 'StringBuffer' which points to the first character of the
-- wanted line. Lines begin at 1.  Returns 'Nothing' when the buffer has
-- fewer than @line@ lines.
atLine :: Int -> StringBuffer -> Maybe StringBuffer
atLine line sb@(StringBuffer buf len _) =
  inlinePerformIO $
    withForeignPtr buf $ \p -> do
      p' <- skipToLine line len p
      if p' == nullPtr
        then return Nothing
        else
          let
            -- byte offset of the wanted line from the buffer start
            delta = p' `minusPtr` p
          in return $ Just (sb { cur = delta
                               , len = len - delta
                               })
-- | Advance a raw pointer to the start of the given 1-based line within
-- @len@ bytes, returning 'nullPtr' when the line does not exist.
-- Recognises @\n@, @\r@ and @\r\n@ line endings.
skipToLine :: Int -> Int -> Ptr Word8 -> IO (Ptr Word8)
skipToLine !line !len !op0 = go 1 op0
  where
    !opend = op0 `plusPtr` len

    go !i_line !op
      | op >= opend    = pure nullPtr
      | i_line == line = pure op
      | otherwise = do
          w <- peek op :: IO Word8
          case w of
            10 -> go (i_line + 1) (plusPtr op 1)  -- '\n'
            13 -> do                              -- '\r', maybe '\r\n'
              -- this is safe because a 'StringBuffer' is
              -- guaranteed to have 3 bytes sentinel values.
              w' <- peek (plusPtr op 1) :: IO Word8
              case w' of
                10 -> go (i_line + 1) (plusPtr op 2)
                _  -> go (i_line + 1) (plusPtr op 1)
            _ -> go i_line (plusPtr op 1)
-- -----------------------------------------------------------------------------
-- Conversion
-- | Decode the first @n@ bytes of a 'StringBuffer' as UTF-8 into a 'String'.
-- Similar to 'utf8DecodeChar', if the character cannot be decoded as UTF-8,
-- they will be replaced with '\0'.
lexemeToString :: StringBuffer
               -> Int                   -- ^ @n@, the number of bytes
               -> String
lexemeToString _ 0 = ""
lexemeToString (StringBuffer buf _ cur) bytes =
  utf8DecodeStringLazy buf cur bytes
-- | Intern the first @n@ bytes at the current position as a 'FastString'.
-- An empty lexeme yields the shared 'nilFS'.
lexemeToFastString :: StringBuffer
                   -> Int               -- ^ @n@, the number of bytes
                   -> FastString
lexemeToFastString _ 0 = nilFS
lexemeToFastString (StringBuffer buf _ cur) len =
  inlinePerformIO $
    withForeignPtr buf $ \ptr ->
      return $! mkFastStringBytes (ptr `plusPtr` cur) len
-- | Return the previous @n@ characters (or fewer if we are less than @n@
-- characters into the buffer).
decodePrevNChars :: Int -> StringBuffer -> String
decodePrevNChars n (StringBuffer buf _ cur) =
    inlinePerformIO $ withForeignPtr buf $ \p0 ->
      go p0 n "" (p0 `plusPtr` (cur - 1))
  where
    -- Walk backwards one UTF-8 character at a time, consing onto the
    -- accumulator so the result comes out in document order.
    go :: Ptr Word8 -> Int -> String -> Ptr Word8 -> IO String
    go buf0 n acc p | n == 0 || buf0 >= p = return acc
    go buf0 n acc p = do
        p' <- utf8PrevChar p
        let (c,_) = utf8DecodeChar p'
        go buf0 (n - 1) (c:acc) p'
-- -----------------------------------------------------------------------------
-- Parsing integer strings in various bases
-- | Parse @len@ bytes starting at the buffer's current position as an
-- unsigned integer in the given @radix@, using @char_to_int@ to map each
-- decoded character to its digit value.  No validation is performed; the
-- caller must ensure every character is a valid digit.
parseUnsignedInteger :: StringBuffer -> Int -> Integer -> (Char->Int) -> Integer
parseUnsignedInteger (StringBuffer buf _ cur) len radix char_to_int
  = inlinePerformIO $ withForeignPtr buf $ \ptr -> return $! let
      -- Horner's rule over the digits, left to right.
      go i x | i == len  = x
             | otherwise = case fst (utf8DecodeChar (ptr `plusPtr` (cur + i))) of
                 char -> go (i + 1) (x * radix + toInteger (char_to_int char))
    in go 0 0
| ezyang/ghc | compiler/utils/StringBuffer.hs | bsd-3-clause | 12,130 | 0 | 21 | 3,116 | 2,596 | 1,348 | 1,248 | 193 | 4 |
module Network.EasyBitcoin.Internal.CurveConstants
where
-- SECP256k1 curve parameters

-- | Generator point G as affine (x, y) coordinates.
pairG :: (Integer, Integer)
pairG = ( 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798
        , 0X483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8
        )

-- | Prime modulus p of the underlying finite field.
curveP :: Integer
curveP = 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f

-- | Order n of the generator point (group order).
curveN :: Integer
curveN = 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141

-- | Coefficient b of the curve equation y^2 = x^3 + a*x + b.
integerB :: Integer
integerB = 7

-- | Coefficient a of the curve equation y^2 = x^3 + a*x + b.
integerA :: Integer
integerA = 0
| vwwv/easy-bitcoin | Network/EasyBitcoin/Internal/CurveConstants.hs | bsd-3-clause | 573 | 0 | 5 | 98 | 71 | 45 | 26 | 12 | 1 |
{-# LANGUAGE EmptyDataDecls #-}
module TypeLevel.Number.Nat.Types ( I
, O
, Z
) where
-- | One bit.  Uninhabited type constructor used to build type-level
-- binary numbers; @n@ is the rest of the bit stream.
data I n
-- | Zero bit.  As 'I', but representing a 0 digit.
data O n
-- | Bit stream terminator.
data Z
| Shimuuar/type-level-numbers | TypeLevel/Number/Nat/Types.hs | bsd-3-clause | 271 | 0 | 4 | 138 | 34 | 25 | 9 | -1 | -1 |
-- | All positive divisors of @n@, in ascending order.
factors :: Int -> [Int]
factors n = filter (\d -> n `mod` d == 0) [1 .. n]
-- | Primality test by trial division up to the square root of @n@.
-- Behaviourally identical to the naive @factors n == [1, n]@ check
-- (False for all n < 2), but avoids enumerating every value in
-- @[1..n]@ for each candidate, which made the original O(n) per call.
prime :: Int -> Bool
prime n =
  n >= 2 &&
  null [d | d <- takeWhile (\d -> d * d <= n) [2 ..], n `mod` d == 0]
-- | Print the sum of the first 1000 primes, drawn from 2 followed by
-- the odd numbers.
main :: IO ()
main = print (sum (take 1000 (filter prime (2 : [3, 5 ..]))))
| nikai3d/ce-challenges | easy/sum_prime.hs | bsd-3-clause | 201 | 0 | 11 | 57 | 136 | 71 | 65 | 6 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Hakyll.Web.Urls.Relativize.Tests
( tests
) where
import Test.Framework
import Test.HUnit hiding (Test)
import Hakyll.Web.Urls.Relativize
import TestSuite.Util
-- | Assertion-style cases for 'relativizeUrls': each entry pairs the
-- expected rewritten HTML with the actual rewrite of the input.
tests :: [Test]
tests = fromAssertions "relativizeUrls"
    [ "<a href=\"../foo\">bar</a>" @=?
        relativizeUrls ".." "<a href=\"/foo\">bar</a>"
    , "<img src=\"../../images/lolcat.png\"></img>" @=?
        relativizeUrls "../.." "<img src=\"/images/lolcat.png\" />"
    -- URLs with a scheme (or protocol-relative //) must be left alone.
    , "<a href=\"http://haskell.org\">Haskell</a>" @=?
        relativizeUrls "../.." "<a href=\"http://haskell.org\">Haskell</a>"
    , "<a href=\"http://haskell.org\">Haskell</a>" @=?
        relativizeUrls "../.." "<a href=\"http://haskell.org\">Haskell</a>"
    , "<script src=\"//ajax.googleapis.com/jquery.min.js\"></script>" @=?
        relativizeUrls "../.."
            "<script src=\"//ajax.googleapis.com/jquery.min.js\"></script>"
    ]
| sol/hakyll | tests/Hakyll/Web/Urls/Relativize/Tests.hs | bsd-3-clause | 935 | 0 | 8 | 156 | 121 | 69 | 52 | 20 | 1 |
{-# LANGUAGE GADTs, DataKinds, KindSignatures #-}
import Data.Map.Strict (Map)
-- | Top-level description of an API: its endpoints plus a version string.
data Protocol = Protocol
  { pEndpoints :: [Endpoint]
  , pVersion   :: String
  }

-- | A URL together with the transmissions it supports, keyed by method.
data Endpoint = Endpoint
  { eUrl           :: String
  , eTransmissions :: Map Method Transmission
  }

-- | The supported HTTP methods.
data Method = GET | POST | OPTIONS | HEAD | PUT | DELETE

-- | A Transmission is a bundle consisting of a request and a response.
data Transmission = Transmission
  { tRequest  :: Request
  , tResponse :: Response
  }

-- | The fields carried by a request.
data Request = Request
  { rqFields :: [Field]
  }

-- | The fields carried by a response.
data Response = Response
  { reFields :: [Field]
  }

-- | A named, typed field of a request or response, with its validators.
data Field = Field
  { fName        :: String
  , fLabel       :: String
  , fDescription :: String
  , fType        :: FieldType
  , fValidators  :: [Validator]
  }
-- | The primitive and compound types a 'Field' may take.
data FieldType = FTInt
               | FTString
               | FTBool
               | FTDouble
               | FTList FieldType
               | FTMap FieldType FieldType -- I'm not sure about this one. We'll see if it's needed.

-- | The validator type, I'm not at all sure about if this is a good idea.
data Validator = Required Bool    -- ^ Is this field required? True by default.
               | NullAllowed Bool -- ^ Do we get away with not supplying this field? False by default.
               | LowerBound Int   -- ^ Lower bound in length for strings and lists, and by value for numbers.
               | UpperBound Int   -- ^ Upper bound in length for strings and lists, and by value for numbers.
               | Chain Validator Validator -- ^ Combine two validators, both must pass for this validator to pass.
-- | Datakind for token validity, used only as a type-level index.
data Token where
  ValidToken   :: Token
  InvalidToken :: Token
  deriving Show

-- | Side effects that we can get from tokens, indexed by the validity
-- of the token they leave behind.
data TokenSideEffect :: Token -> * where
  Produce :: TokenSideEffect ValidToken
  Require :: TokenSideEffect ValidToken
  Consume :: TokenSideEffect InvalidToken
-- | Logging in produces a valid token.
login :: TokenSideEffect ValidToken
login = Produce

-- | Change the username; requires a valid token and leaves it valid.
-- NOTE(review): currently a stub ('undefined') — forcing the result
-- will crash; confirm intended implementation.
updateUsername :: String -> TokenSideEffect ValidToken -> TokenSideEffect ValidToken
updateUsername = undefined
-- | Logging out consumes the valid token, leaving it invalid.
logout :: TokenSideEffect ValidToken -> TokenSideEffect InvalidToken
logout _ = Consume
transaction = logout $ updateUsername "Bob" login
| MaximilianAlgehed/Haspec | src/Lang/Lang.hs | bsd-3-clause | 2,310 | 0 | 9 | 663 | 379 | 226 | 153 | 48 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
#ifdef TRUSTWORTHY
{-# LANGUAGE Trustworthy #-}
#endif
-------------------------------------------------------------------------------
-- |
-- Module : Data.Vector.Lens
-- Copyright : (C) 2012 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : provisional
-- Portability : non-portable
--
-- This module provides lenses and traversals for working with generic vectors.
-------------------------------------------------------------------------------
module Data.Vector.Lens
( toVectorOf
-- * Isomorphisms
, vector
, reversed
, forced
-- * Lenses
, _head
, _tail
, _last
, _init
, sliced
-- * Traversal of individual indices
, ordinals
) where
import Control.Applicative
import Control.Lens
import Data.Vector as Vector hiding (zip, filter, indexed)
import Prelude hiding ((++), length, null, head, tail, init, last, map, reverse)
import Data.List (nub)
import Data.Monoid
-- | A 'Traversal' reading and writing to the 'head' of a 'Vector'
--
-- >>> Vector.fromList [1,2,3] ^? _head
-- Just 1
--
-- >>> Vector.empty ^? _head
-- Nothing
--
-- >>> Vector.fromList "abc" & _head .~ 'Q'
-- fromList "Qbc"
_head :: Traversal' (Vector a) a
_head f v
  | null v    = pure v
  | otherwise = fmap (\x -> v // [(0, x)]) (f (unsafeHead v))
{-# INLINE _head #-}
-- | A 'Traversal' reading and writing to the 'last' element of a 'Vector'
--
-- >>> Vector.fromList "abcde" ^? _last
-- Just 'e'
--
-- >>> Vector.empty ^? _last
-- Nothing
--
-- >>> Vector.fromList "abcde" & _last .~ 'Q'
-- fromList "abcdQ"
_last :: Traversal' (Vector a) a
_last f v
  | null v    = pure v
  | otherwise = fmap (\x -> v // [(length v - 1, x)]) (f (unsafeLast v))
{-# INLINE _last #-}
-- | A 'Traversal' reading and writing to the 'tail' of a 'Vector'
--
-- >>> Vector.fromList "abcde" ^? _tail
-- Just (fromList "bcde")
--
-- >>> Vector.empty ^? _tail
-- Nothing
--
-- >>> _tail .~ Vector.fromList [3,4,5] $ Vector.fromList [1,2]
-- fromList [1,3,4,5]
_tail :: Traversal' (Vector a) (Vector a)
_tail f v
  | null v    = pure v
  | otherwise = fmap (cons (unsafeHead v)) (f (unsafeTail v))
{-# INLINE _tail #-}
-- | A 'Traversal' reading and replacing all but the a 'last' element of a 'Vector'
--
-- >>> Vector.fromList [1,2,3,4] ^? _init
-- Just (fromList [1,2,3])
--
-- >>> Vector.empty ^? _init
-- Nothing
--
-- >>> Vector.fromList "abcdef" & _init.mapped %~ succ
-- fromList "bcdeff"
_init :: Traversal' (Vector a) (Vector a)
_init f v
  | null v    = pure v
  | otherwise = fmap (\ini -> ini `snoc` unsafeLast v) (f (unsafeInit v))
{-# INLINE _init #-}
-- | @sliced i n@ provides a lens that edits the @n@ elements starting at index @i@ from a lens.
--
-- This is only a valid lens if you do not change the length of the resulting 'Vector'.
--
-- Attempting to return a longer or shorter vector will result in violations of the 'Lens' laws.
--
-- >>> Vector.fromList [1..10] ^. sliced 2 5
-- fromList [3,4,5,6,7]
--
-- >>> Vector.fromList [1..10] & sliced 2 5 . mapped .~ 0
-- fromList [1,2,0,0,0,0,0,8,9,10]
sliced :: Int -- ^ @i@ starting index
       -> Int -- ^ @n@ length
       -> Lens' (Vector a) (Vector a)
-- Write back by bulk-updating indices [i .. i+n-1] with the new slice.
sliced i n f v = f (slice i n v) <&> \ v0 -> v // zip [i..i+n-1] (toList v0)
{-# INLINE sliced #-}
-- | Similar to 'toListOf', but returning a 'Vector'.
--
-- >>> toVectorOf both (8,15)
-- fromList [8,15]
toVectorOf :: Getting (Endo [a]) s t a b -> s -> Vector a
toVectorOf l = fromList . toListOf l
{-# INLINE toVectorOf #-}
-- | Convert a list to a 'Vector' (or back)
--
-- >>> [1,2,3] ^. vector
-- fromList [1,2,3]
--
-- >>> [1,2,3] ^. vector . from vector
-- [1,2,3]
--
-- >>> Vector.fromList [0,8,15] ^. from vector . vector
-- fromList [0,8,15]
vector :: Iso [a] [b] (Vector a) (Vector b)
vector = iso fromList toList
{-# INLINE vector #-}
-- | Convert a 'Vector' to a version with all the elements in the reverse order
--
-- >>> Vector.fromList [1,2,3] ^. reversed
-- fromList [3,2,1]
reversed :: Iso (Vector a) (Vector b) (Vector a) (Vector b)
reversed = iso reverse reverse
{-# INLINE reversed #-}
-- | Convert a 'Vector' to a version that doesn't retain any extra memory
-- (e.g. from earlier slicing), in either direction.
forced :: Iso (Vector a) (Vector b) (Vector a) (Vector b)
forced = iso force force
{-# INLINE forced #-}
-- | This 'Traversal' will ignore any duplicates in the supplied list of indices,
-- and silently drops indices outside the vector's bounds.
--
-- >>> toListOf (ordinals [1,3,2,5,9,10]) $ Vector.fromList [2,4..40]
-- [4,8,6,12,20,22]
ordinals :: [Int] -> IndexedTraversal' Int (Vector a) a
-- NOTE(review): 'nub' is O(k^2) in the number of indices; fine for small
-- index lists, worth revisiting for large ones.
ordinals is f v = fmap (v //) $ traverse (\i -> (,) i <$> indexed f i (v ! i)) $ nub $ filter (\i -> 0 <= i && i < l) is where
  l = length v
{-# INLINE ordinals #-}
| np/lens | src/Data/Vector/Lens.hs | bsd-3-clause | 4,796 | 0 | 14 | 940 | 953 | 544 | 409 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Block
( blockSpecs
) where
import Test.Hspec
import Data.Text (Text)
import Data.Conduit
import qualified Data.Conduit.List as CL
import Text.Markdown (def, MarkdownSettings(..))
import Text.Markdown.Block
import Data.Functor.Identity (runIdentity)
-- | Run the markdown block parser over @md@ with the given settings and
-- assert that it yields exactly @blocks@.
checkWith :: MarkdownSettings -> Text -> [Block Text] -> Expectation
checkWith ms md blocks = runIdentity (yield md $$ toBlocks ms =$ CL.consume) `shouldBe` blocks
-- | 'checkWith' specialised to the default markdown settings.
check :: Text -> [Block Text] -> Expectation
check md blocks = checkWith def md blocks
-- | Hspec specs for the block-level markdown parser, grouped by
-- construct: fenced (tilde) code, lists, blockquotes, indented code
-- and raw HTML.
blockSpecs :: Spec
blockSpecs = do
    describe "tilde code" $ do
        it "simple" $ check
            "~~~haskell\nfoo\n\nbar\n~~~"
            [BlockCode (Just "haskell") "foo\n\nbar"]
        it "no lang" $ check
            "~~~\nfoo\n\nbar\n~~~"
            [BlockCode Nothing "foo\n\nbar"]
        -- An unterminated fence is not code at all.
        it "no close" $ check
            "~~~\nfoo\n\nbar\n"
            [BlockPara " ~~~\nfoo", BlockPara "bar"]
    describe "list" $ do
        it "simple unordered" $ check
            "* foo\n\n* bar\n\n*\t\tqux"
            [ BlockList Unordered (Right [BlockPara "foo"])
            , BlockList Unordered (Right [BlockPara "bar"])
            , BlockList Unordered (Right [BlockPara "qux"])
            ]
        it "simple ordered" $ check
            "1. foo\n\n3. bar\n\n17.\t\tqux"
            [ BlockList Ordered (Right [BlockPara "foo"])
            , BlockList Ordered (Right [BlockPara "bar"])
            , BlockList Ordered (Right [BlockPara "qux"])
            ]
        it "nested" $ check
            "* foo\n* \n 1. bar\n 2. baz"
            [ BlockList Unordered (Left "foo")
            , BlockList Unordered (Right
                [ BlockList Ordered $ Left "bar"
                , BlockList Ordered $ Left "baz"
                ])
            ]
        it "with blank" $ check
            "* foo\n\n bar\n\n* baz"
            [ BlockList Unordered $ Right
                [ BlockPara "foo"
                , BlockPara "bar"
                ]
            , BlockList Unordered $ Right
                [ BlockPara "baz"
                ]
            ]
        -- A bullet/number with no following space is plain text.
        it "without whitespace" $ check
            "*foo\n\n1.bar"
            [ BlockPara "*foo"
            , BlockPara "1.bar"
            ]
    describe "blockquote" $ do
        it "simple" $ check
            "> foo\n>\n> * bar"
            [ BlockQuote
                [ BlockPara "foo"
                , BlockList Unordered $ Left "bar"
                ]
            ]
        it "blank" $ check
            "> foo\n\n> * bar"
            [ BlockQuote [BlockPara "foo"]
            , BlockQuote [BlockList Unordered $ Left "bar"]
            ]
        it "require blank before blockquote" $ check
            "foo\n> bar"
            [ BlockPara "foo\n> bar" ]
        -- The msBlankBeforeBlockquote setting relaxes the rule above.
        it "no blank before blockquote" $ checkWith def { msBlankBeforeBlockquote = False }
            "foo\n> bar"
            [ BlockPara "foo", BlockQuote [BlockPara "bar"]]
    describe "indented code" $ do
        it "simple" $ check
            " foo\n bar\n"
            [ BlockCode Nothing "foo\nbar"
            ]
        it "blank" $ check
            " foo\n\n bar\n"
            [ BlockCode Nothing "foo\n\nbar"
            ]
        it "extra space" $ check
            " foo\n\n  bar\n"
            [ BlockCode Nothing "foo\n\n bar"
            ]
    describe "html" $ do
        it "simple" $ check
            "<p>Hello world!</p>"
            [ BlockHtml "<p>Hello world!</p>"
            ]
        it "multiline" $ check
            "<p>Hello world!\n</p>"
            [ BlockHtml "<p>Hello world!\n</p>"
            ]
| thefalconfeat/markdown | test/Block.hs | bsd-3-clause | 3,651 | 0 | 18 | 1,420 | 868 | 415 | 453 | 87 | 1 |
module Control.Monad.Trans.Except.Extended
( module Control.Monad.Trans.Except
, justToLeft
, MatcherT, runMatcherT
) where
import Control.Monad ((<=<))
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Except
import Control.Monad.Trans.Maybe (MaybeT(..))
import Data.Foldable (traverse_)
import Prelude
-- | A Matcher is used as an early-termination matching mechanism: the
-- exception channel carries the same type as the result, so 'throwE'
-- acts as an early return.
type MatcherT m a = ExceptT a m a
-- | Collapse a matcher into its underlying monad: either the
-- early-terminated value or the ordinary result.
runMatcherT :: Functor m => MatcherT m a -> m a
runMatcherT m = uneither <$> runExceptT m
-- | Promote a 'Just' result into an early-terminating 'throwE';
-- a 'Nothing' result falls through and continues normally.
justToLeft :: Monad m => MaybeT m a -> ExceptT a m ()
justToLeft = traverse_ throwE <=< lift . runMaybeT
-- | Collapse an 'Either' whose branches carry the same type.
uneither :: Either a a -> a
uneither (Left  x) = x
uneither (Right x) = x
| lamdu/lamdu | src/Control/Monad/Trans/Except/Extended.hs | gpl-3.0 | 689 | 0 | 8 | 120 | 210 | 121 | 89 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.CodeDeploy.ListOnPremisesInstances
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Gets a list of one or more on-premises instance names.
--
-- Unless otherwise specified, both registered and deregistered on-premises
-- instance names will be listed. To list only registered or deregistered
-- on-premises instance names, use the registration status parameter.
--
-- <http://docs.aws.amazon.com/codedeploy/latest/APIReference/API_ListOnPremisesInstances.html>
module Network.AWS.CodeDeploy.ListOnPremisesInstances
(
-- * Request
ListOnPremisesInstances
-- ** Request constructor
, listOnPremisesInstances
-- ** Request lenses
, lopiNextToken
, lopiRegistrationStatus
, lopiTagFilters
-- * Response
, ListOnPremisesInstancesResponse
-- ** Response constructor
, listOnPremisesInstancesResponse
-- ** Response lenses
, lopirInstanceNames
, lopirNextToken
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.CodeDeploy.Types
import qualified GHC.Exts
-- | Request payload for the @ListOnPremisesInstances@ API call.
data ListOnPremisesInstances = ListOnPremisesInstances
    { _lopiNextToken          :: Maybe Text
    , _lopiRegistrationStatus :: Maybe RegistrationStatus
    , _lopiTagFilters         :: List "tagFilters" TagFilter
    } deriving (Eq, Read, Show)
-- | 'ListOnPremisesInstances' constructor.  All fields start empty.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'lopiNextToken' @::@ 'Maybe' 'Text'
--
-- * 'lopiRegistrationStatus' @::@ 'Maybe' 'RegistrationStatus'
--
-- * 'lopiTagFilters' @::@ ['TagFilter']
--
listOnPremisesInstances :: ListOnPremisesInstances
listOnPremisesInstances = ListOnPremisesInstances
    { _lopiRegistrationStatus = Nothing
    , _lopiTagFilters         = mempty
    , _lopiNextToken          = Nothing
    }
-- | An identifier that was returned from the previous list on-premises instances
-- call, which can be used to return the next set of on-premises instances in
-- the list.
lopiNextToken :: Lens' ListOnPremisesInstances (Maybe Text)
lopiNextToken = lens _lopiNextToken (\s a -> s { _lopiNextToken = a })

-- | The on-premises instances registration status:
--
-- Deregistered: Include in the resulting list deregistered on-premises
-- instances. Registered: Include in the resulting list registered on-premises
-- instances.
lopiRegistrationStatus :: Lens' ListOnPremisesInstances (Maybe RegistrationStatus)
lopiRegistrationStatus =
    lens _lopiRegistrationStatus (\s a -> s { _lopiRegistrationStatus = a })

-- | The on-premises instance tags that will be used to restrict the corresponding
-- on-premises instance names that are returned.
lopiTagFilters :: Lens' ListOnPremisesInstances [TagFilter]
lopiTagFilters = lens _lopiTagFilters (\s a -> s { _lopiTagFilters = a }) . _List
-- | Response payload of the @ListOnPremisesInstances@ API call.
data ListOnPremisesInstancesResponse = ListOnPremisesInstancesResponse
    { _lopirInstanceNames :: List "instanceNames" Text
    , _lopirNextToken     :: Maybe Text
    } deriving (Eq, Ord, Read, Show)
-- | 'ListOnPremisesInstancesResponse' constructor.  All fields start empty.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'lopirInstanceNames' @::@ ['Text']
--
-- * 'lopirNextToken' @::@ 'Maybe' 'Text'
--
listOnPremisesInstancesResponse :: ListOnPremisesInstancesResponse
listOnPremisesInstancesResponse = ListOnPremisesInstancesResponse
    { _lopirInstanceNames = mempty
    , _lopirNextToken     = Nothing
    }
-- | The list of matching on-premises instance names.
lopirInstanceNames :: Lens' ListOnPremisesInstancesResponse [Text]
lopirInstanceNames =
    lens _lopirInstanceNames (\s a -> s { _lopirInstanceNames = a })
        . _List

-- | If the amount of information that is returned is significantly large, an
-- identifier will also be returned, which can be used in a subsequent list
-- on-premises instances call to return the next set of on-premises instances in
-- the list.
lopirNextToken :: Lens' ListOnPremisesInstancesResponse (Maybe Text)
lopirNextToken = lens _lopirNextToken (\s a -> s { _lopirNextToken = a })
-- The request is sent as a JSON POST to "/"; path, query and headers
-- carry no information for this operation.
instance ToPath ListOnPremisesInstances where
    toPath = const "/"

instance ToQuery ListOnPremisesInstances where
    toQuery = const mempty

instance ToHeaders ListOnPremisesInstances

instance ToJSON ListOnPremisesInstances where
    toJSON ListOnPremisesInstances{..} = object
        [ "registrationStatus" .= _lopiRegistrationStatus
        , "tagFilters"         .= _lopiTagFilters
        , "nextToken"          .= _lopiNextToken
        ]
instance AWSRequest ListOnPremisesInstances where
    type Sv ListOnPremisesInstances = CodeDeploy
    type Rs ListOnPremisesInstances = ListOnPremisesInstancesResponse

    request  = post "ListOnPremisesInstances"
    response = jsonResponse

-- Missing "instanceNames" defaults to the empty list; "nextToken" is
-- genuinely optional.
instance FromJSON ListOnPremisesInstancesResponse where
    parseJSON = withObject "ListOnPremisesInstancesResponse" $ \o -> ListOnPremisesInstancesResponse
        <$> o .:? "instanceNames" .!= mempty
        <*> o .:? "nextToken"
| romanb/amazonka | amazonka-codedeploy/gen/Network/AWS/CodeDeploy/ListOnPremisesInstances.hs | mpl-2.0 | 5,953 | 0 | 12 | 1,143 | 687 | 415 | 272 | 76 | 1 |
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main(main) where
import DB.Admin (resetDatabase)
import DB.Authentication (changePasswordForUser, createUser,
removeUser)
import Domain.Models (userId, userIdentityId)
import GetOpts
import Lib.ServantHelpers (liftIO)
import Network.Wai (Application)
import Network.Wai.Handler.Warp (run)
import Servant (serve)
import Servers.MainServer
-- | Execute a single parsed command-line command.
runCommand :: OptionCommand -> IO ()
-- Destructive: refuse to reset the database unless --force was given.
runCommand (ResetDB force) =
  if force
    then resetDatabase >>= putStrLn
    else putStrLn "Use the --force option if you want this to work"
runCommand (RunServer port) = do
  putStrLn $ "Running webserver on port " ++ show port
  run port $ serve mainAPI mainServer
runCommand (CreateUser usernames pass) =
  mapM_ (liftIO . handleCreateUser) usernames
  where
    -- Create one user with the shared password and report the outcome.
    handleCreateUser name =
        createUser name pass >>= putStrLn . status
      where
        -- Fix: the failure message previously read "...<name>failed"
        -- with no separating space.
        status Nothing    = "Creation of user " ++ name ++ " failed"
        status (Just usr) = "User " ++ name ++ " created, id = " ++
                            show (userId usr) ++ ", identityId = " ++
                            show (userIdentityId usr)
runCommand (RemoveUser userIds) =
  mapM_ (liftIO . handleRemoveUser) userIds
  where
    handleRemoveUser uId = do
      putStrLn $ "Removing user with id=" ++ show uId
      removeUser uId
runCommand (ChangePass userIds pass) =
  mapM_ (liftIO . handleChangePass) userIds
  where
    handleChangePass uId = do
      changePasswordForUser uId pass
      putStrLn $ "Changed password for user with id=" ++ show uId
-- | Parse the command line and dispatch to 'runCommand'.
main :: IO ()
main = do
  cmd <- parseArgsToCommands
  runCommand cmd
| virtualsaleslab/simplestore | src/Main.hs | unlicense | 1,916 | 0 | 14 | 625 | 440 | 226 | 214 | 41 | 3 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-missing-fields #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-----------------------------------------------------------------
-- Autogenerated by Thrift
-- --
-- DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-- @generated
-----------------------------------------------------------------
module TestService_Iface where
import Prelude ( Bool(..), Enum, Float, IO, Double, String, Maybe(..),
Eq, Show, Ord,
concat, error, fromIntegral, fromEnum, length, map,
maybe, not, null, otherwise, return, show, toEnum,
enumFromTo, Bounded, minBound, maxBound,
(.), (&&), (||), (==), (++), ($), (-), (>>=), (>>))
import Control.Applicative (ZipList(..), (<*>))
import Control.Exception
import Control.Monad ( liftM, ap, when )
import Data.ByteString.Lazy (ByteString)
import Data.Functor ( (<$>) )
import Data.Hashable
import Data.Int
import Data.Maybe (catMaybes)
import Data.Text.Lazy ( Text )
import Data.Text.Lazy.Encoding ( decodeUtf8, encodeUtf8 )
import qualified Data.Text.Lazy as T
import Data.Typeable ( Typeable )
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Vector as Vector
import Test.QuickCheck.Arbitrary ( Arbitrary(..) )
import Test.QuickCheck ( elements )
import Thrift hiding (ProtocolExnType(..))
import qualified Thrift (ProtocolExnType(..))
import Thrift.Types
import Thrift.Arbitraries
import Module_Types
-- | Handler interface for the Thrift @TestService@ service.
-- This declaration is @generated code; implementations supply 'init',
-- but the signature itself must not be edited by hand.
class TestService_Iface a where
  init :: a -> Int64 -> Int64 -> Int64 -> Int64 -> Int64 -> Int64 -> Int64 -> Int64 -> Int64 -> Int64 -> Int64 -> IO Int64
| chjp2046/fbthrift | thrift/compiler/test/fixtures/service-fuzzer/gen-hs/TestService_Iface.hs | apache-2.0 | 1,942 | 0 | 19 | 370 | 438 | 291 | 147 | 38 | 0 |
{-# Language RankNTypes #-}
{-# Language TypeOperators #-}
{-# Language BangPatterns #-}
module IO where
-- import Data.Array
import Prelude hiding (traverse)
import Data.Vector.Unboxed hiding (force)
import qualified Data.Vector.Unboxed as V
import Data.Array.Repa
import Types
import System.CPUTime
import Data.Array.Repa.IO.DevIL
import Data.Word
import qualified Data.Array.Repa as R
import Debug.Trace
import System.Directory
import Control.Exception
import Control.DeepSeq
import Data.Array.Repa.Repr.ForeignPtr
import System.IO.Error hiding (catch)
import Data.Array.Accelerate.IO
import qualified Data.Array.Accelerate as A
import Text.Printf
import qualified Codec.Picture as Codec
import Control.DeepSeq
import Data.Time.Clock
-- | Print the time elapsed between two clock readings.
-- 'show' of a 'NominalDiffTime' renders like @\"1.23s\"@; we strip the
-- trailing unit character before printing.
printDiff :: UTCTime -> UTCTime -> IO ()
printDiff start end = do
  let s = show (diffUTCTime end start)
      -- ROBUSTNESS FIX: the original called the partial 'Prelude.init',
      -- which would crash on an empty string; guard it.
      trimmed = if null s then s else Prelude.init s
  putStrLn trimmed
-- | time monadic computation.
-- | Run an IO action and print how long it took.
printTimeIO :: IO a -> IO a
printTimeIO action = do
  begin  <- getCurrentTime
  result <- action
  finish <- getCurrentTime
  printDiff begin finish
  return result
-- see about `seq` in the following benchmark
-- https://github.com/AccelerateHS/accelerate/issues/208
-- | time evaluation of pure computation, in picoseconds.
-- | Time the (shallow) evaluation of a pure value and print the duration.
printTime :: a -> IO a
printTime f = do
  start <- getCurrentTime
  -- 'seq' forces @f@ to WHNF between the two clock reads, so the printed
  -- time covers the evaluation of @f@.  NOTE(review): WHNF only -- thunks
  -- nested inside @f@ are not forced; use 'printTimeDeep' for that.
  end <- seq f getCurrentTime
  printDiff start end
  return f
-- | Like 'printTime', but forces the value all the way to normal form
-- with 'deepseq', so the printed time includes full evaluation.
printTimeDeep :: (NFData a) => a -> IO a
printTimeDeep f = do
  start <- getCurrentTime
  -- 'deepseq' forces @f@ to NF before reading the second timestamp.
  end <- deepseq f getCurrentTime
  printDiff start end
  return f
-- | Encode a greyscale 'VectorImage' as a PNG file on disk.
writeVectorImage :: String -> VectorImage -> IO ()
writeVectorImage fname vecImg = Codec.writePng fname rendered
    where
      w = width vecImg
      h = height vecImg
      -- Look a grey value up in the flat pixel vector (row-major layout).
      pixelAt x y = (fromIntegral (pixels vecImg V.! (w * y + x))) :: Word8
      rendered = Codec.generateImage pixelAt w h
-- | Load an image file and convert it to a greyscale 'VectorImage'.
-- Only RGB8 images are supported; any other pixel format, or a decoding
-- failure, raises an error with a descriptive message.
readImgAsVector :: String -> IO VectorImage
readImgAsVector fname = do
  eImg <- Codec.readImage fname
  case eImg of
    -- ROBUSTNESS FIX: the original bound @(Right !img) <- ...@, which died
    -- with an opaque pattern-match failure when decoding failed; report the
    -- decoder's own message instead.
    Left err -> error ("readImgAsVector: could not decode " ++ fname ++ ": " ++ err)
    Right dynImg -> case dynImg of
      Codec.ImageRGB8 rgbImg -> do
        let Codec.Image !imgWidth !imgHeight _ = rgbImg
            positions = Prelude.concatMap (\h -> Prelude.map (\w -> (w,h)) [0..imgWidth-1]) [0..imgHeight-1]
            !vec = fromList (Prelude.map (\(x,y) -> let (Codec.PixelRGB8 r g b) = Codec.pixelAt rgbImg x y
                                                    in rgbToGreyPixel r g b) positions)
            -- NOTE: renamed from the original's inner @img@, which shadowed
            -- the decoded image binding.
            greyImg = deepseq vec (VectorImage vec imgWidth imgHeight)
        return greyImg
      _ -> error "readImgAsVector: unsupported image type."
-- | Collapse an RGB triple into a single grey intensity using the
-- weights 0.21\/0.71\/0.07, rounded up to an 'Int'.
rgbToGreyPixel :: Word8 -> Word8 -> Word8 -> Int
rgbToGreyPixel r g b = ceiling (weighted 0.21 r + weighted 0.71 g + weighted 0.07 b)
  where
    weighted :: Double -> Word8 -> Double
    weighted k c = k * fromIntegral c
-- | Load an image as an embedded Accelerate array (via a manifest Repa
-- array read from disk).
readImgAsAccelerateArray :: String -> IO (A.Acc AccelerateImage)
readImgAsAccelerateArray fname = do
  arr <- readImgAsManifestRepaArray fname
  -- FIX: the original wrote @seq (A.use (fromRepa arr)) (A.use (fromRepa arr))@,
  -- building the embedded-array term twice only to discard one copy.
  -- Forcing a single term with ($!) keeps the eager evaluation without
  -- the duplication.
  return $! A.use (fromRepa arr)
-- | Load an image from disk as a delayed Repa array.
readImgAsRepaArray :: String -> IO RepaImage
readImgAsRepaArray fname = fmap delay (readImgAsManifestRepaArray fname)
-- | Load an image file into a fully evaluated (manifest) 2-D Repa array
-- of greyscale intensities, collapsing the colour axis via 'luminosity'.
readImgAsManifestRepaArray :: String -> IO (Array A (Z :. Int :. Int) Int)
readImgAsManifestRepaArray fname = do
  !img <- readImg
  return img
  where
    -- NOTE(review): the @(RGB a)@ pattern is partial -- a non-RGB image
    -- causes a pattern-match failure inside runIL; confirm callers only
    -- pass RGB inputs.
    readImg =
     runIL $ do
          (RGB a) <- readImage fname
          img <- computeP $ traverse a (\(Z :. x :. y :. _) -> (Z :. x :. y)) luminosity :: IL (Array A DIM2 Int)
          return img
writeAccelerateImg :: String -> AccelerateImage -> IO ()
writeAccelerateImg fname img = do
repaImage <- copyP (toRepa img)
writeRepaImg fname repaImage
-- writeRepaImg :: String -> RepaImageComputed -> IO ()
-- | Write a computed Repa image to disk as a greyscale file, replacing
-- any existing file of the same name.
writeRepaImg fname img = do
  removeIfExists fname
  greyPixels <- demote img
  runIL (writeImage fname (Grey greyPixels))
-- | Delete a file, treating "file does not exist" as success.
-- Any other IOError is re-thrown.
removeIfExists :: FilePath -> IO ()
removeIfExists path = catch (removeFile path) onIOError
  where
    onIOError err
      | isDoesNotExistError err = return ()
      | otherwise               = throwIO err
-- | Collapse the colour axis of a 3-D pixel lookup into one grey
-- intensity at the given 2-D position, using weights 0.21\/0.71\/0.07.
-- (Note: this uses 'round', while 'rgbToGreyPixel' uses 'ceiling'.)
luminosity :: (DIM3 -> Word8) -> DIM2 -> Int
luminosity f (Z :. i :. j) =
    round (0.21 * channel 0 + 0.71 * channel 1 + 0.07 * channel 2)
  where
    channel :: Int -> Double
    channel c = fromIntegral (f (Z :. i :. j :. c))
-- | Narrow a computed Int image to a foreign Word8 array, ready to hand
-- to the DevIL image writer.  NOTE(review): 'fromIntegral' truncates
-- values outside 0..255 -- presumably inputs are already in range.
demote :: Monad m => Array U DIM2 Int -> m (Array F DIM2 Word8)
demote arr
 = computeP $ R.map ffs arr
 where {-# INLINE ffs #-}
       ffs :: Int -> Word8
       ffs x = fromIntegral (x :: Int)
{-# NOINLINE demote #-}
| robstewart57/small-image-processing-dsl-implementations | haskell/small-image-processing-dsl/src/IO.hs | bsd-3-clause | 4,421 | 0 | 26 | 998 | 1,516 | 767 | 749 | 109 | 2 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
-- | Clean a project.
module Stack.Clean
(clean
,CleanOpts(..)
,StackCleanException(..)
) where
import Control.Exception (Exception)
import Control.Monad.Catch (throwM)
import Data.Foldable (forM_)
import Data.List ((\\),intercalate)
import qualified Data.Map.Strict as Map
import Data.Maybe (mapMaybe)
import Data.Typeable (Typeable)
import Path (Path, Abs, Dir)
import Path.IO (ignoringAbsence, removeDirRecur)
import Stack.Build.Source (getLocalPackageViews)
import Stack.Build.Target (LocalPackageView(..))
import Stack.Config (getLocalPackages)
import Stack.Constants (distDirFromDir, workDirFromDir)
import Stack.Types.PackageName
import Stack.Types.Config
import Stack.Types.StackT
-- | Deletes build artifacts in the current project.
--
-- Throws 'StackCleanException'.
clean
    :: (StackM env m, HasEnvConfig env)
    => CleanOpts
    -> m ()
clean cleanOpts = do
    targets <- dirsToDelete cleanOpts
    -- Directories that are already absent are silently skipped.
    mapM_ (ignoringAbsence . removeDirRecur) targets
-- | Compute the directories that 'clean' should delete for the given
-- options.  Throws 'StackCleanException' if a named target is not a
-- local package.
dirsToDelete
    :: (StackM env m, HasEnvConfig env)
    => CleanOpts
    -> m [Path Abs Dir]
dirsToDelete cleanOpts = do
    packages <- getLocalPackages
    case cleanOpts of
        CleanShallow [] ->
            -- Filter out packages listed as extra-deps.
            -- IDIOM FIX: @Map.filter (== False)@ replaced by @Map.filter not@.
            mapM distDirFromDir . Map.keys . Map.filter not $ packages
        CleanShallow targets -> do
            localPkgViews <- getLocalPackageViews
            let localPkgNames = Map.keys localPkgViews
                getPkgDir pkgName = fmap (lpvRoot . fst) (Map.lookup pkgName localPkgViews)
            case targets \\ localPkgNames of
                [] -> mapM distDirFromDir (mapMaybe getPkgDir targets)
                xs -> throwM (NonLocalPackages xs)
        CleanFull -> do
            pkgWorkDirs <- mapM workDirFromDir (Map.keys packages)
            projectWorkDir <- getProjectWorkDir
            return (projectWorkDir : pkgWorkDirs)
-- | Options for @stack clean@.
-- Consumed by 'clean' via 'dirsToDelete'.
data CleanOpts
    = CleanShallow [PackageName]
    -- ^ Delete the "dist directories" as defined in 'Stack.Constants.distRelativeDir'
    -- for the given local packages. If no packages are given, all project packages
    -- should be cleaned.
    | CleanFull
    -- ^ Delete all work directories in the project.
-- | Exceptions during cleanup.
-- | Thrown when @stack clean@ is asked to clean packages that are not
-- part of the current project.
newtype StackCleanException
    = NonLocalPackages [PackageName]
    deriving (Typeable)
-- Render the offending package names in a user-facing message.
instance Show StackCleanException where
    show (NonLocalPackages pkgs) =
        "The following packages are not part of this project: " ++
        intercalate ", " (map show pkgs)
instance Exception StackCleanException
| mrkkrp/stack | src/Stack/Clean.hs | bsd-3-clause | 2,851 | 0 | 18 | 736 | 600 | 330 | 270 | 61 | 4 |
{-
(c) Rahul Muttineni 2016-2017
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Loading interface files
-}
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module ETA.Iface.LoadIface (
-- Importing one thing
tcLookupImported_maybe, importDecl,
checkWiredInTyCon, ifCheckWiredInThing,
-- RnM/TcM functions
loadModuleInterface, loadModuleInterfaces,
loadSrcInterface, loadSrcInterface_maybe,
loadInterfaceForName, loadInterfaceForModule,
-- IfM functions
loadInterface, loadWiredInHomeIface,
loadSysInterface, loadUserInterface, loadPluginInterface,
findAndReadIface, readIface, -- Used when reading the module's old interface
loadDecls, -- Should move to TcIface and be renamed
initExternalPackageState,
ifaceStats, pprModIface, showIface
) where
import {-# SOURCE #-} ETA.Iface.TcIface( tcIfaceDecl, tcIfaceRules, tcIfaceInst,
tcIfaceFamInst, tcIfaceVectInfo, tcIfaceAnnotations )
import ETA.Main.DynFlags
import ETA.Iface.IfaceSyn
import ETA.Iface.IfaceEnv
import ETA.Main.HscTypes
import ETA.BasicTypes.BasicTypes hiding (SuccessFlag(..))
import ETA.TypeCheck.TcRnMonad
import ETA.Main.Constants
import ETA.Prelude.PrelNames
import ETA.Prelude.PrelInfo
import ETA.Prelude.PrimOp ( allThePrimOps, primOpFixity, primOpOcc )
import ETA.BasicTypes.MkId ( seqId )
import ETA.Specialise.Rules
import ETA.Types.TyCon
import ETA.Main.Annotations
import ETA.Types.InstEnv
import ETA.Types.FamInstEnv
import ETA.BasicTypes.Name
import ETA.BasicTypes.NameEnv
import ETA.BasicTypes.Avail
import ETA.BasicTypes.Module
import ETA.Utils.Maybes
import ETA.Main.ErrUtils
import ETA.Main.Finder
import ETA.Utils.UniqFM
import ETA.BasicTypes.SrcLoc
import ETA.Utils.Outputable
import qualified ETA.Utils.Outputable as Outputable
import ETA.Iface.BinIface
import ETA.Utils.Panic
import ETA.Utils.Util
import ETA.Utils.FastString
import ETA.Utils.Fingerprint
import ETA.Main.Hooks
import Control.Monad
import Data.IORef
import System.FilePath
#include "HsVersions.h"
{-
************************************************************************
* *
* tcImportDecl is the key function for "faulting in" *
* imported things
* *
************************************************************************
The main idea is this. We are chugging along type-checking source code, and
find a reference to GHC.Base.map. We call tcLookupGlobal, which doesn't find
it in the EPS type envt. So it
1 loads GHC.Base.hi
2 gets the decl for GHC.Base.map
3 typechecks it via tcIfaceDecl
4 and adds it to the type env in the EPS
Note that DURING STEP 4, we may find that map's type mentions a type
constructor that also
Notice that for imported things we read the current version from the EPS
mutable variable. This is important in situations like
...$(e1)...$(e2)...
where the code that e1 expands to might import some defns that
also turn out to be needed by the code that e2 expands to.
-}
-- | Look a 'Name' up in the combined HPT\/EPS type environment,
-- faulting its interface in from disk if it is not there yet.
-- Returns (Failed err) if we can't find the interface file for the thing.
tcLookupImported_maybe :: Name -> TcM (MaybeErr MsgDoc TyThing)
tcLookupImported_maybe name = do
    hsc_env  <- getTopEnv
    mb_thing <- liftIO (lookupTypeHscEnv hsc_env name)
    maybe (tcImportDecl_maybe name) (return . Succeeded) mb_thing
tcImportDecl_maybe :: Name -> TcM (MaybeErr MsgDoc TyThing)
-- Entry point for *source-code* uses of importDecl.
-- Wired-in things are returned directly (after optionally loading their
-- home interface); everything else goes through the interface loader.
tcImportDecl_maybe name
  | Just thing <- wiredInNameTyThing_maybe name
  = do { when (needWiredInHomeIface thing)
              (initIfaceTcRn (loadWiredInHomeIface name))
        -- See Note [Loading instances for wired-in things]
       ; return (Succeeded thing) }
  | otherwise
  = initIfaceTcRn (importDecl name)
importDecl :: Name -> IfM lcl (MaybeErr MsgDoc TyThing)
-- Get the TyThing for this Name from an interface file
-- It's not a wired-in thing -- the caller caught that
-- Loading the interface populates the EPS type env (eps_PTE), so the
-- second lookup below is expected to succeed.
importDecl name
  = ASSERT( not (isWiredInName name) )
    do { traceIf nd_doc
        -- Load the interface, which should populate the PTE
       ; mb_iface <- ASSERT2( isExternalName name, ppr name )
                     loadInterface nd_doc (nameModule name) ImportBySystem
       ; case mb_iface of {
           Failed err_msg -> return (Failed err_msg) ;
           Succeeded _ -> do
        -- Now look it up again; this time we should find it
       { eps <- getEps
       ; case lookupTypeEnv (eps_PTE eps) name of
           Just thing -> return (Succeeded thing)
           Nothing -> return (Failed not_found_msg)
       }}}
  where
    nd_doc = ptext (sLit "Need decl for") <+> ppr name
    not_found_msg = hang (ptext (sLit "Can't find interface-file declaration for") <+>
                          pprNameSpace (occNameSpace (nameOccName name)) <+> ppr name)
                       2 (vcat [ptext (sLit "Probable cause: bug in .hi-boot file, or inconsistent .hi file"),
                                ptext (sLit "Use -ddump-if-trace to get an idea of which file caused the error")])
{-
************************************************************************
* *
Checks for wired-in things
* *
************************************************************************
Note [Loading instances for wired-in things]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to make sure that we have at least *read* the interface files
for any module with an instance decl or RULE that we might want.
* If the instance decl is an orphan, we have a whole separate mechanism
(loadOrphanModules)
* If the instance decl is not an orphan, then the act of looking at the
TyCon or Class will force in the defining module for the
TyCon/Class, and hence the instance decl
* BUT, if the TyCon is a wired-in TyCon, we don't really need its interface;
but we must make sure we read its interface in case it has instances or
rules. That is what LoadIface.loadWiredInHomeInterface does. It's called
from TcIface.{tcImportDecl, checkWiredInTyCon, ifCheckWiredInThing}
* HOWEVER, only do this for TyCons. There are no wired-in Classes. There
are some wired-in Ids, but we don't want to load their interfaces. For
example, Control.Exception.Base.recSelError is wired in, but that module
is compiled late in the base library, and we don't want to force it to
load before it's been compiled!
All of this is done by the type checker. The renamer plays no role.
(It used to, but no longer.)
-}
checkWiredInTyCon :: TyCon -> TcM ()
-- Ensure that the home module of the TyCon (and hence its instances)
-- are loaded. See Note [Loading instances for wired-in things]
-- It might not be a wired-in tycon (see the calls in TcUnify),
-- in which case this is a no-op.
checkWiredInTyCon tc
  | not (isWiredInName tc_name)
  = return ()
  | otherwise
  = do { mod <- getModule
        -- Skip the load when we are compiling the tycon's own module.
       ; ASSERT( isExternalName tc_name )
         when (mod /= nameModule tc_name)
              (initIfaceTcRn (loadWiredInHomeIface tc_name))
              -- Don't look for (non-existent) Float.hi when
              -- compiling Float.lhs, which mentions Float of course
              -- A bit yukky to call initIfaceTcRn here
       }
  where
    tc_name = tyConName tc
ifCheckWiredInThing :: TyThing -> IfL ()
-- Even though we are in an interface file, we want to make
-- sure the instances of a wired-in thing are loaded (imagine f :: Double -> Double)
-- Ditto want to ensure that RULES are loaded too
-- See Note [Loading instances for wired-in things]
ifCheckWiredInThing thing
  = do { mod <- getIfModule
                -- Check whether we are typechecking the interface for this
                -- very module.  E.g when compiling the base library in --make mode
                -- we may typecheck GHC.Base.hi. At that point, GHC.Base is not in
                -- the HPT, so without the test we'll demand-load it into the PIT!
                -- C.f. the same test in checkWiredInTyCon above
        ; let name = getName thing
        ; ASSERT2( isExternalName name, ppr name )
          when (needWiredInHomeIface thing && mod /= nameModule name)
               (loadWiredInHomeIface name) }
-- | Does this wired-in thing require its home interface to be read?
-- Only for TyCons; see Note [Loading instances for wired-in things]
needWiredInHomeIface :: TyThing -> Bool
needWiredInHomeIface thing = case thing of
    ATyCon {} -> True
    _         -> False
{-
************************************************************************
* *
loadSrcInterface, loadOrphanModules, loadInterfaceForName
These three are called from TcM-land
* *
************************************************************************
-}
-- Note [Un-ambiguous multiple interfaces]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- When a user writes an import statement, this usually causes a *single*
-- interface file to be loaded. However, the game is different when
-- signatures are being imported. Suppose in packages p and q we have
-- signatures:
--
-- module A where
-- foo :: Int
--
-- module A where
-- bar :: Int
--
-- If both packages are exposed and I am importing A, I should see a
-- "unified" signature:
--
-- module A where
-- foo :: Int
-- bar :: Int
--
-- The way we achieve this is having the module lookup for A load and return
-- multiple interface files, which we will then process as if there were
-- "multiple" imports:
--
-- import "p" A
-- import "q" A
--
-- Doing so does not cause any ambiguity, because any overlapping identifiers
-- are guaranteed to have the same name if the backing implementations of the
-- two signatures are the same (a condition which is checked by 'Packages'.)
-- | Load the interface corresponding to an @import@ directive in
-- source code. On a failure, fail in the monad with an error message.
-- See Note [Un-ambiguous multiple interfaces] for why the return type
-- is @[ModIface]@
-- | Load the interface(s) corresponding to an @import@ directive in
-- source code.  On a failure, fail in the monad with an error message.
-- See Note [Un-ambiguous multiple interfaces] for why the return type
-- is a list.
loadSrcInterface :: SDoc
                 -> ModuleName
                 -> IsBootInterface -- {-# SOURCE #-} ?
                 -> Maybe FastString -- "package", if any
                 -> RnM [ModIface]
loadSrcInterface doc mod want_boot maybe_pkg = do
    result <- loadSrcInterface_maybe doc mod want_boot maybe_pkg
    case result of
        Succeeded ifaces -> return ifaces
        Failed err       -> failWithTc err
-- | Like 'loadSrcInterface', but returns a 'MaybeErr'. See also
-- Note [Un-ambiguous multiple interfaces]
loadSrcInterface_maybe :: SDoc
                       -> ModuleName
                       -> IsBootInterface -- {-# SOURCE #-} ?
                       -> Maybe FastString -- "package", if any
                       -> RnM (MaybeErr MsgDoc [ModIface])
-- Returns a Failed value (rather than erroring) when the module cannot
-- be found or its interface cannot be read.
loadSrcInterface_maybe doc mod want_boot maybe_pkg
  -- We must first find which Module this import refers to.  This involves
  -- calling the Finder, which as a side effect will search the filesystem
  -- and create a ModLocation.  If successful, loadIface will read the
  -- interface; it will call the Finder again, but the ModLocation will be
  -- cached from the first search.
  = do { hsc_env <- getTopEnv
       -- ToDo: findImportedModule should return a list of interfaces
       ; res <- liftIO $ findImportedModule hsc_env mod maybe_pkg
       ; case res of
           Found _ mod -> fmap (fmap (:[]))
                        . initIfaceTcRn
                        $ loadInterface doc mod (ImportByUser want_boot)
           err -> return (Failed (cannotFindInterface (hsc_dflags hsc_env) mod err)) }
-- | Load interface directly for a fully qualified 'Module'. (This is a fairly
-- rare operation, but in particular it is used to load orphan modules
-- in order to pull their instances into the global package table and to
-- handle some operations in GHCi).
loadModuleInterface :: SDoc -> Module -> TcM ModIface
loadModuleInterface doc = initIfaceTcRn . loadSysInterface doc
-- | Load interfaces for a collection of modules.
loadModuleInterfaces :: SDoc -> [Module] -> TcM ()
loadModuleInterfaces _   []   = return ()
loadModuleInterfaces doc mods = initIfaceTcRn (mapM_ loadOne mods)
  where
    -- Tag each load with the module being read, for error reporting.
    loadOne mod = loadSysInterface (doc <+> parens (ppr mod)) mod
-- | Loads the interface for a given Name.
-- Should only be called for an imported name;
-- otherwise loadSysInterface may not find the interface
loadInterfaceForName :: SDoc -> Name -> TcRn ModIface
loadInterfaceForName doc name
  = do { -- Pre-condition (debug builds only): the name must come from
         -- another module, otherwise loadSysInterface may not find it.
         when debugIsOn $ -- Check pre-condition
         do { this_mod <- getModule
            ; MASSERT2( not (nameIsLocalOrFrom this_mod name), ppr name <+> parens doc ) }
      ; ASSERT2( isExternalName name, ppr name )
        initIfaceTcRn $ loadSysInterface doc (nameModule name) }
-- | Loads the interface for a given Module.
loadInterfaceForModule :: SDoc -> Module -> TcRn ModIface
loadInterfaceForModule doc m
  = do
    -- Should not be called with this module (debug-build assertion only)
    when debugIsOn $ do
      this_mod <- getModule
      MASSERT2( this_mod /= m, ppr m <+> parens doc )
    initIfaceTcRn $ loadSysInterface doc m
{-
*********************************************************
* *
loadInterface
The main function to load an interface
for an imported module, and put it in
the External Package State
* *
*********************************************************
-}
-- | An 'IfM' function to load the home interface for a wired-in thing,
-- so that we're sure that we see its instance declarations and rules
-- See Note [Loading instances for wired-in things] in TcIface
loadWiredInHomeIface :: Name -> IfM lcl ()
-- Discards the returned ModIface: only the EPS side effects (instances,
-- rules) of loading the interface matter here.
loadWiredInHomeIface name
  = ASSERT( isWiredInName name )
    do _ <- loadSysInterface doc (nameModule name); return ()
  where
    doc = ptext (sLit "Need home interface for wired-in thing") <+> ppr name
------------------
-- | Loads a system interface and throws an exception if it fails
loadSysInterface :: SDoc -> Module -> IfM lcl ModIface
loadSysInterface doc mod = loadInterfaceWithException doc mod ImportBySystem
------------------
-- | Loads a user interface and throws an exception if it fails. The first parameter indicates
-- whether we should import the boot variant of the module
loadUserInterface :: Bool -> SDoc -> Module -> IfM lcl ModIface
loadUserInterface is_boot doc mod =
    loadInterfaceWithException doc mod (ImportByUser is_boot)
-- | Load an interface on behalf of a plugin, throwing on failure.
-- See Note [Care with plugin imports].
loadPluginInterface :: SDoc -> Module -> IfM lcl ModIface
loadPluginInterface doc mod =
    loadInterfaceWithException doc mod ImportByPlugin
------------------
-- | A wrapper for 'loadInterface' that throws an exception if it fails
-- | Wrapper around 'loadInterface' that turns a failure into a thrown
-- 'ProgramError'.
loadInterfaceWithException :: SDoc -> Module -> WhereFrom -> IfM lcl ModIface
loadInterfaceWithException doc mod where_from = do
    result <- loadInterface doc mod where_from
    dflags <- getDynFlags
    case result of
        Succeeded iface -> return iface
        Failed err ->
            liftIO (throwGhcExceptionIO (ProgramError (showSDoc dflags err)))
------------------
-- | Core interface loader: consult the HPT\/PIT cache, read and
-- typecheck the interface file on a miss, and merge its contents into
-- the External Package State.
loadInterface :: SDoc -> Module -> WhereFrom
              -> IfM lcl (MaybeErr MsgDoc ModIface)
-- loadInterface looks in both the HPT and PIT for the required interface
-- If not found, it loads it, and puts it in the PIT (always).
-- If it can't find a suitable interface file, we
--    a) modify the PackageIfaceTable to have an empty entry
--       (to avoid repeated complaints)
--    b) return (Left message)
--
-- It's not necessarily an error for there not to be an interface
-- file -- perhaps the module has changed, and that interface
-- is no longer used
loadInterface doc_str mod from
  = do {  -- Read the state
        (eps,hpt) <- getEpsAndHpt
       ; traceIf (text "Considering whether to load" <+> ppr mod <+> ppr from)
        -- Check whether we have the interface already
       ; dflags <- getDynFlags
       ; case lookupIfaceByModule dflags hpt (eps_PIT eps) mod of {
           Just iface
               -> return (Succeeded iface) ; -- Already loaded
               -- The (src_imp == mi_boot iface) test checks that the already-loaded
               -- interface isn't a boot iface.  This can conceivably happen,
               -- if an earlier import had a before we got to real imports.   I think.
           _ -> do {
        -- READ THE MODULE IN
       ; read_result <- case (wantHiBootFile dflags eps mod from) of
                          Failed err -> return (Failed err)
                          Succeeded hi_boot_file -> findAndReadIface doc_str mod hi_boot_file
       ; case read_result of {
           Failed err -> do
               { let fake_iface = emptyModIface mod
               ; updateEps_ $ \eps ->
                   eps { eps_PIT = extendModuleEnv (eps_PIT eps) (mi_module fake_iface) fake_iface }
                   -- Not found, so add an empty iface to
                   -- the EPS map so that we don't look again
               ; return (Failed err) } ;
        -- Found and parsed!
        -- We used to have a sanity check here that looked for:
        --  * System importing ..
        --  * a home package module ..
        --  * that we know nothing about (mb_dep == Nothing)!
        --
        -- But this is no longer valid because thNameToGhcName allows users to
        -- cause the system to load arbitrary interfaces (by supplying an appropriate
        -- Template Haskell original-name).
           Succeeded (iface, file_path) ->
       let
           loc_doc = text file_path
       in
       initIfaceLcl mod loc_doc $ do
        -- Load the new ModIface into the External Package State
        -- Even home-package interfaces loaded by loadInterface
        --      (which only happens in OneShot mode; in Batch/Interactive
        --      mode, home-package modules are loaded one by one into the HPT)
        -- are put in the EPS.
        --
        -- The main thing is to add the ModIface to the PIT, but
        -- we also take the
        --      IfaceDecls, IfaceClsInst, IfaceFamInst, IfaceRules, IfaceVectInfo
        -- out of the ModIface and put them into the big EPS pools
        -- NB: *first* we do loadDecl, so that the provenance of all the locally-defined
        ---     names is done correctly (notably, whether this is an .hi file or .hi-boot file).
        --     If we do loadExport first the wrong info gets into the cache (unless we
        --      explicitly tag each export which seems a bit of a bore)
       ; ignore_prags      <- goptM Opt_IgnoreInterfacePragmas
       ; new_eps_decls     <- loadDecls ignore_prags (mi_decls iface)
       ; new_eps_insts     <- mapM tcIfaceInst (mi_insts iface)
       ; new_eps_fam_insts <- mapM tcIfaceFamInst (mi_fam_insts iface)
       ; new_eps_rules     <- tcIfaceRules ignore_prags (mi_rules iface)
       ; new_eps_anns      <- tcIfaceAnnotations (mi_anns iface)
       ; new_eps_vect_info <- tcIfaceVectInfo mod (mkNameEnv new_eps_decls) (mi_vect_info iface)
        -- The heavy per-decl payloads now live in the EPS pools, so the
        -- copy stored in the PIT is stripped to panics.
       ; let { final_iface = iface {
                               mi_decls     = panic "No mi_decls in PIT",
                               mi_insts     = panic "No mi_insts in PIT",
                               mi_fam_insts = panic "No mi_fam_insts in PIT",
                               mi_rules     = panic "No mi_rules in PIT",
                               mi_anns      = panic "No mi_anns in PIT"
                             }
              }
       ; updateEps_  $ \ eps ->
           if elemModuleEnv mod (eps_PIT eps) then eps else
            case from of  -- See Note [Care with plugin imports]
              ImportByPlugin -> eps {
                eps_PIT          = extendModuleEnv (eps_PIT eps) mod final_iface,
                eps_PTE          = addDeclsToPTE   (eps_PTE eps) new_eps_decls}
              _              -> eps {
                eps_PIT          = extendModuleEnv (eps_PIT eps) mod final_iface,
                eps_PTE          = addDeclsToPTE   (eps_PTE eps) new_eps_decls,
                eps_rule_base    = extendRuleBaseList (eps_rule_base eps)
                                                      new_eps_rules,
                eps_inst_env     = extendInstEnvList (eps_inst_env eps)
                                                     new_eps_insts,
                eps_fam_inst_env = extendFamInstEnvList (eps_fam_inst_env eps)
                                                        new_eps_fam_insts,
                eps_vect_info    = plusVectInfo (eps_vect_info eps)
                                                new_eps_vect_info,
                eps_ann_env      = extendAnnEnvList (eps_ann_env eps)
                                                    new_eps_anns,
                eps_mod_fam_inst_env
                                 = let
                                     fam_inst_env =
                                       extendFamInstEnvList emptyFamInstEnv
                                                            new_eps_fam_insts
                                   in
                                   extendModuleEnv (eps_mod_fam_inst_env eps)
                                                   mod
                                                   fam_inst_env,
                eps_stats        = addEpsInStats (eps_stats eps)
                                                 (length new_eps_decls)
                                                 (length new_eps_insts)
                                                 (length new_eps_rules) }
       ; return (Succeeded final_iface)
    }}}}
wantHiBootFile :: DynFlags -> ExternalPackageState -> Module -> WhereFrom
               -> MaybeErr MsgDoc IsBootInterface
-- Figure out whether we want Foo.hi or Foo.hi-boot.  Fails only for a
-- user {-# SOURCE #-} import of a module from another package.
wantHiBootFile dflags eps mod from
  = case from of
       ImportByUser usr_boot
          | usr_boot && not this_package
          -> Failed (badSourceImport mod)
          | otherwise -> Succeeded usr_boot
       ImportByPlugin
          -> Succeeded False
       ImportBySystem
          | not this_package   -- If the module to be imported is not from this package
          -> Succeeded False   -- don't look it up in eps_is_boot, because that is keyed
                               -- on the ModuleName of *home-package* modules only.
                               -- We never import boot modules from other packages!
          | otherwise
          -> case lookupUFM (eps_is_boot eps) (moduleName mod) of
                Just (_, is_boot) -> Succeeded is_boot
                Nothing           -> Succeeded False
                     -- The boot-ness of the requested interface,
                     -- based on the dependencies in directly-imported modules
  where
    this_package = thisPackage dflags == moduleUnitId mod
-- | Error message for a {-# SOURCE #-} import of an external-package module.
badSourceImport :: Module -> SDoc
badSourceImport mod
  = hang (ptext (sLit "You cannot {-# SOURCE #-} import a module from another package"))
       2 (ptext (sLit "but") <+> quotes (ppr mod) <+> ptext (sLit "is from package")
          <+> quotes (ppr (moduleUnitId mod)))
{-
Note [Care with plugin imports]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When dynamically loading a plugin (via loadPluginInterface) we
populate the same External Package State (EPS), even though plugin
modules are to link with the compiler itself, and not with the
compiled program. That's fine: mostly the EPS is just a cache for
the interace files on disk.
But it's NOT ok for the RULES or instance environment. We do not want
to fire a RULE from the plugin on the code we are compiling, otherwise
the code we are compiling will have a reference to a RHS of the rule
that exists only in the compiler! This actually happened to Daniel,
via a RULE arising from a specialisation of (^) in the plugin.
Solution: when loading plugins, do not extend the rule and instance
environments. We are only interested in the type environment, so that
we can check that the plugin exports a function with the type that the
compiler expects.
-}
-----------------------------------------------------
-- Loading type/class/value decls
-- We pass the full Module name here, replete with
-- its package info, so that we can build a Name for
-- each binder with the right package info in it
-- All subsequent lookups, including crucially lookups during typechecking
-- the declaration itself, will find the fully-glorious Name
--
-- We handle ATs specially. They are not main declarations, but also not
-- implicit things (in particular, adding them to `implicitTyThings' would mess
-- things up in the renaming/type checking of source programs).
-----------------------------------------------------
-- | Extend the package type environment with a batch of freshly loaded
-- declarations.
addDeclsToPTE :: PackageTypeEnv -> [(Name,TyThing)] -> PackageTypeEnv
addDeclsToPTE = extendNameEnvList
-- | Load every declaration of an interface file, returning the flat
-- list of (Name, TyThing) pairs that they contribute.
loadDecls :: Bool
          -> [(Fingerprint, IfaceDecl)]
          -> IfL [(Name,TyThing)]
loadDecls ignore_prags ver_decls = do
    mod <- getIfModule
    fmap concat (mapM (loadDecl ignore_prags mod) ver_decls)
-- | Load one interface declaration, returning its main Name plus the
-- Names of its implicit things, each paired with a lazily-typechecked
-- TyThing.
loadDecl :: Bool                    -- Don't load pragmas into the decl pool
         -> Module
         -> (Fingerprint, IfaceDecl)
         -> IfL [(Name,TyThing)]    -- The list can be poked eagerly, but the
                                    -- TyThings are forkM'd thunks
loadDecl ignore_prags mod (_version, decl)
  = do { -- Populate the name cache with final versions of all
         -- the names associated with the decl
          main_name <- lookupOrig mod (ifName decl)
        -- Typecheck the thing, lazily
        -- NB. Firstly, the laziness is there in case we never need the
        -- declaration (in one-shot mode), and secondly it is there so that
        -- we don't look up the occurrence of a name before calling mk_new_bndr
        -- on the binder.  This is important because we must get the right name
        -- which includes its nameParent.
       ; thing <- forkM doc $ do { bumpDeclStats main_name
                                 ; tcIfaceDecl ignore_prags decl }
        -- Populate the type environment with the implicitTyThings too.
        --
        -- Note [Tricky iface loop]
        -- ~~~~~~~~~~~~~~~~~~~~~~~~
        -- Summary: The delicate point here is that 'mini-env' must be
        -- buildable from 'thing' without demanding any of the things
        -- 'forkM'd by tcIfaceDecl.
        --
        -- In more detail: Consider the example
        --      data T a = MkT { x :: T a }
        -- The implicitTyThings of T are:  [ <datacon MkT>, <selector x>]
        -- (plus their workers, wrappers, coercions etc etc)
        --
        -- We want to return an environment
        --      [ "MkT" -> <datacon MkT>, "x" -> <selector x>, ... ]
        -- (where the "MkT" is the *Name* associated with MkT, etc.)
        --
        -- We do this by mapping the implicit_names to the associated
        -- TyThings.  By the invariant on ifaceDeclImplicitBndrs and
        -- implicitTyThings, we can use getOccName on the implicit
        -- TyThings to make this association: each Name's OccName should
        -- be the OccName of exactly one implicitTyThing.  So the key is
        -- to define a "mini-env"
        --
        -- [ 'MkT' -> <datacon MkT>, 'x' -> <selector x>, ... ]
        -- where the 'MkT' here is the *OccName* associated with MkT.
        --
        -- However, there is a subtlety: due to how type checking needs
        -- to be staged, we can't poke on the forkM'd thunks inside the
        -- implicitTyThings while building this mini-env.
        -- If we poke these thunks too early, two problems could happen:
        --    (1) When processing mutually recursive modules across
        --        hs-boot boundaries, poking too early will do the
        --        type-checking before the recursive knot has been tied,
        --        so things will be type-checked in the wrong
        --        environment, and necessary variables won't be in
        --        scope.
        --
        --    (2) Looking up one OccName in the mini_env will cause
        --        others to be looked up, which might cause that
        --        original one to be looked up again, and hence loop.
        --
        -- The code below works because of the following invariant:
        -- getOccName on a TyThing does not force the suspended type
        -- checks in order to extract the name. For example, we don't
        -- poke on the "T a" type of <selector x> on the way to
        -- extracting <selector x>'s OccName. Of course, there is no
        -- reason in principle why getting the OccName should force the
        -- thunks, but this means we need to be careful in
        -- implicitTyThings and its helper functions.
        --
        -- All a bit too finely-balanced for my liking.
        -- This mini-env and lookup function mediates between the
        --'Name's n and the map from 'OccName's to the implicit TyThings
       ; let mini_env = mkOccEnv [(getOccName t, t) | t <- implicitTyThings thing]
             lookup n = case lookupOccEnv mini_env (getOccName n) of
                          Just thing -> thing
                          Nothing    ->
                            pprPanic "loadDecl" (ppr main_name <+> ppr n $$ ppr (decl))
       ; implicit_names <- mapM (lookupOrig mod) (ifaceDeclImplicitBndrs decl)
-- ; traceIf (text "Loading decl for " <> ppr main_name $$ ppr implicit_names)
       ; return $ (main_name, thing) :
                     -- uses the invariant that implicit_names and
                     -- implicitTyThings are bijective
                     [(n, lookup n) | n <- implicit_names]
       }
  where
    doc = ptext (sLit "Declaration for") <+> ppr (ifName decl)
-- | Record that one more declaration has actually been used.
bumpDeclStats :: Name -> IfL ()
bumpDeclStats name = do
    traceIf (text "Loading decl for" <+> ppr name)
    updateEps_ bump
  where
    bump eps =
        let stats = eps_stats eps
        in eps { eps_stats = stats { n_decls_out = n_decls_out stats + 1 } }
{-
*********************************************************
* *
\subsection{Reading an interface file}
* *
*********************************************************
Note [Home module load error]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the sought-for interface is in the current package (as determined
by -package-name flag) then it jolly well should already be in the HPT
because we process home-package modules in dependency order. (Except
in one-shot mode; see notes with hsc_HPT decl in HscTypes).
It is possible (though hard) to get this error through user behaviour.
* Suppose package P (modules P1, P2) depends on package Q (modules Q1,
Q2, with Q2 importing Q1)
* We compile both packages.
* Now we edit package Q so that it somehow depends on P
* Now recompile Q with --make (without recompiling P).
* Then Q1 imports, say, P1, which in turn depends on Q2. So Q2
is a home-package module which is not yet in the HPT! Disaster.
This actually happened with P=base, Q=ghc-prim, via the AMP warnings.
See Trac #8320.
-}
-- | Locate an interface file for @mod@ (GHC.Prim is answered from a
-- built-in interface) and parse it, also validating the dynamic (.dyn_hi)
-- variant when -dynamic-too is active.
findAndReadIface :: SDoc -> Module
                 -> IsBootInterface -- True <=> Look for a .hi-boot file
                                    -- False <=> Look for .hi file
                 -> TcRnIf gbl lcl (MaybeErr MsgDoc (ModIface, FilePath))
        -- Nothing <=> file not found, or unreadable, or illegible
        -- Just x <=> successfully found and parsed
        -- It *doesn't* add an error to the monad, because
        -- sometimes it's ok to fail... see notes with loadInterface
findAndReadIface doc_str mod hi_boot_file
  = do traceIf (sep [hsep [ptext (sLit "Reading"),
                           if hi_boot_file
                             then ptext (sLit "[boot]")
                             else Outputable.empty,
                           ptext (sLit "interface for"),
                           ppr mod <> semi],
                     nest 4 (ptext (sLit "reason:") <+> doc_str)])
       -- Check for GHC.Prim, and return its static interface
       if mod == gHC_PRIM
           then do
               iface <- getHooked ghcPrimIfaceHook ghcPrimIface
               return (Succeeded (iface,
                                  "<built in interface for GHC.Prim>"))
           else do
               dflags <- getDynFlags
               -- Look for the file
               hsc_env <- getTopEnv
               mb_found <- liftIO (findExactModule hsc_env mod)
               case mb_found of
                   Found loc mod -> do
                       -- Found file, so read it
                       let file_path = addBootSuffix_maybe hi_boot_file
                                                           (ml_hi_file loc)
                       -- See Note [Home module load error]
                       if thisPackage dflags == moduleUnitId mod &&
                          not (isOneShot (ghcMode dflags))
                           then return (Failed (homeModError mod loc))
                           else do r <- read_file file_path
                                   checkBuildDynamicToo r
                                   return r
                   err -> do
                       traceIf (ptext (sLit "...not found"))
                       dflags <- getDynFlags
                       return (Failed (cannotFindInterface dflags
                                           (moduleName mod) err))
    where read_file file_path = do
              traceIf (ptext (sLit "readIFace") <+> text file_path)
              read_result <- readIface mod file_path
              case read_result of
                Failed err -> return (Failed (badIfaceFile file_path err))
                Succeeded iface
                    | mi_module iface /= mod ->
                      return (Failed (wrongIfaceModErr iface mod file_path))
                    | otherwise ->
                      return (Succeeded (iface, file_path))
                        -- Don't forget to fill in the package name...
          -- When building -dynamic-too, make sure the dynamic interface
          -- exists and matches; otherwise disable dynamic-too generation.
          checkBuildDynamicToo (Succeeded (iface, filePath)) = do
              dflags <- getDynFlags
              whenGeneratingDynamicToo dflags $ withDoDynamicToo $ do
                  let ref = canGenerateDynamicToo dflags
                      dynFilePath = addBootSuffix_maybe hi_boot_file
                                  $ replaceExtension filePath (dynHiSuf dflags)
                  r <- read_file dynFilePath
                  case r of
                      Succeeded (dynIface, _)
                       | mi_mod_hash iface == mi_mod_hash dynIface ->
                          return ()
                       | otherwise ->
                          do traceIf (text "Dynamic hash doesn't match")
                             liftIO $ writeIORef ref False
                      Failed err ->
                          do traceIf (text "Failed to load dynamic interface file:" $$ err)
                             liftIO $ writeIORef ref False
          checkBuildDynamicToo _ = return ()
-- @readIface@ tries just the one file.
readIface :: Module -> FilePath
          -> TcRnIf gbl lcl (MaybeErr MsgDoc ModIface)
-- Failed err    <=> file not found, or unreadable, or illegible
-- Succeeded iface <=> successfully found and parsed
readIface wanted_mod file_path = do
    res <- tryMostM $ readBinIface CheckHiWay QuietBinIFaceReading file_path
    case res of
        Left exn -> return (Failed (text (showException exn)))
        Right iface
            -- Guard against a renamed or moved file: the module recorded
            -- inside the interface must match the one we asked for.
            | mi_module iface == wanted_mod -> return (Succeeded iface)
            | otherwise ->
                return (Failed (hiModuleNameMismatchWarn wanted_mod
                                                         (mi_module iface)))
{-
*********************************************************
* *
                Initialising the external package state
* *
*********************************************************
-}
-- | The initial (empty) external package state installed at session
-- start.  Only the rule base is non-trivial: it is seeded with GHC's
-- built-in rewrite rules, which is also reflected in the statistics.
initExternalPackageState :: ExternalPackageState
initExternalPackageState
    = EPS {
        eps_is_boot = emptyUFM,
        eps_PIT = emptyPackageIfaceTable,
        eps_PTE = emptyTypeEnv,
        eps_inst_env = emptyInstEnv,
        eps_fam_inst_env = emptyFamInstEnv,
        eps_rule_base = mkRuleBase builtinRules,
        -- Initialise the EPS rule pool with the built-in rules
        eps_mod_fam_inst_env
            = emptyModuleEnv,
        eps_vect_info = noVectInfo,
        eps_ann_env = emptyAnnEnv,
        eps_stats = EpsStats { n_ifaces_in = 0, n_decls_in = 0, n_decls_out = 0
                             , n_insts_in = 0, n_insts_out = 0
                             , n_rules_in = length builtinRules, n_rules_out = 0 }
    }
{-
*********************************************************
* *
Wired-in interface for GHC.Prim
* *
*********************************************************
-}
-- | The wired-in interface for GHC.Prim.  No interface file exists on
-- disk, so we fabricate one containing just the exports and the
-- fixities of the primops (plus 'seq', which is infixr 0).
ghcPrimIface :: ModIface
ghcPrimIface
    = (emptyModIface gHC_PRIM) {
        mi_exports = ghcPrimExports,
        mi_decls = [],
        mi_fixities = fixities,
        mi_fix_fn = mkIfaceFixCache fixities
    }
    where
        fixities = (getOccName seqId, Fixity 0 InfixR) -- seq is infixr 0
                 : mapMaybe mkFixity allThePrimOps
        -- primops that declare no fixity are simply omitted
        mkFixity op = (,) (primOpOcc op) <$> primOpFixity op
{-
*********************************************************
* *
\subsection{Statistics}
* *
*********************************************************
-}
-- | Render a summary of what the renamer pulled in from interface
-- files: total interfaces read plus per-category imported/read counts.
ifaceStats :: ExternalPackageState -> SDoc
ifaceStats eps = hcat [text "Renamer stats: ", details]
  where
    st = eps_stats eps
    -- One "N <label> M read" line per category.
    category n_out label n_in =
        hsep [ int n_out, text label, int n_in, text "read" ]
    details = vcat
        [ int (n_ifaces_in st) <+> text "interfaces read"
        , category (n_decls_out st)
                   "type/class/variable imported, out of"
                   (n_decls_in st)
        , category (n_insts_out st)
                   "instance decls imported, out of"
                   (n_insts_in st)
        , category (n_rules_out st)
                   "rule decls imported, out of"
                   (n_rules_in st)
        ]
{-
************************************************************************
* *
Printing interfaces
* *
************************************************************************
-}
-- | Read binary interface, and print it out
-- (implements the @--show-iface@ mode of the compiler driver).
showIface :: HscEnv -> FilePath -> IO ()
showIface hsc_env filename = do
    -- skip the hi way check; we don't want to worry about profiled vs.
    -- non-profiled interfaces, for example.
    iface <- initTcRnIf 's' hsc_env () () $
        readBinIface IgnoreHiWay TraceBinIFaceReading filename
    let dflags = hsc_dflags hsc_env
    -- dump through log_action so the output honours the dump flags
    log_action dflags dflags SevDump noSrcSpan defaultDumpStyle (pprModIface iface)
pprModIface :: ModIface -> SDoc
-- Show a ModIface
-- The header line lists module properties ([boot], [orphan module],
-- etc.), then the various hashes, then the body sections in roughly
-- the order they appear in the binary interface file.
pprModIface iface
    = vcat [ ptext (sLit "interface")
                <+> ppr (mi_module iface) <+> pp_boot
                <+> (if mi_orphan iface then ptext (sLit "[orphan module]") else Outputable.empty)
                <+> (if mi_finsts iface then ptext (sLit "[family instance module]") else Outputable.empty)
                <+> (if mi_hpc iface then ptext (sLit "[hpc]") else Outputable.empty)
                <+> integer hiVersion
           , nest 2 (text "interface hash:" <+> ppr (mi_iface_hash iface))
           , nest 2 (text "ABI hash:" <+> ppr (mi_mod_hash iface))
           , nest 2 (text "export-list hash:" <+> ppr (mi_exp_hash iface))
           , nest 2 (text "orphan hash:" <+> ppr (mi_orphan_hash iface))
           , nest 2 (text "flag hash:" <+> ppr (mi_flag_hash iface))
           , nest 2 (text "sig of:" <+> ppr (mi_sig_of iface))
           , nest 2 (text "used TH splices:" <+> ppr (mi_used_th iface))
           , nest 2 (ptext (sLit "where"))
           , ptext (sLit "exports:")
           , nest 2 (vcat (map pprExport (mi_exports iface)))
           , pprDeps (mi_deps iface)
           , vcat (map pprUsage (mi_usages iface))
           , vcat (map pprIfaceAnnotation (mi_anns iface))
           , pprFixities (mi_fixities iface)
           , vcat [ppr ver $$ nest 2 (ppr decl) | (ver,decl) <- mi_decls iface]
           , vcat (map ppr (mi_insts iface))
           , vcat (map ppr (mi_fam_insts iface))
           , vcat (map ppr (mi_rules iface))
           , pprVectInfo (mi_vect_info iface)
           , ppr (mi_warns iface)
           , pprTrustInfo (mi_trust iface)
           , pprTrustPkg (mi_trust_pkg iface)
           ]
    where
        pp_boot | mi_boot iface = ptext (sLit "[boot]")
                | otherwise = Outputable.empty
{-
When printing export lists, we print like this:
Avail f f
AvailTC C [C, x, y] C(x,y)
AvailTC C [x, y] C!(x,y) -- Exporting x, y but not C
-}
-- | Pretty-print one export item; see the comment above for the
-- concrete syntax produced for each shape of 'IfaceExport'.
pprExport :: IfaceExport -> SDoc
pprExport (Avail n)      = ppr n
pprExport (AvailTC _ []) = Outputable.empty
pprExport (AvailTC n (n':ns))
    | n == n'   = ppr n <> children ns               -- parent itself exported
    | otherwise = ppr n <> char '|' <> children (n':ns) -- parent not exported
    where
        children []    = Outputable.empty
        children names = braces (hsep (map ppr names))
-- | Pretty-print one recorded usage (a dependency plus its hashes).
pprUsage :: Usage -> SDoc
pprUsage u@UsagePackageModule{}
    = pprUsageImport u usg_mod
pprUsage u@UsageHomeModule{}
    = pprUsageImport u usg_mod_name $$
      nest 2 (
          maybe Outputable.empty (\es -> text "exports: " <> ppr es) (usg_exports u) $$
          vcat [ ppr n <+> ppr h | (n,h) <- usg_entities u ]
      )
pprUsage u@UsageFile{}
    = hsep [ptext (sLit "addDependentFile"),
            doubleQuotes (text (usg_file_path u))]
-- | Shared layout for module-usage lines: @import [safe] M <hash>@.
-- The accessor argument selects how the module is named for this
-- flavour of usage.
pprUsageImport :: Outputable a => Usage -> (Usage -> a) -> SDoc
pprUsageImport usage get_mod
    = hsep [ ptext (sLit "import"), safety, ppr (get_mod usage)
           , ppr (usg_mod_hash usage) ]
    where
        safety | usg_safe usage = ptext (sLit "safe")
               | otherwise      = ptext (sLit " -/ ")
-- | Pretty-print the dependency summary of an interface.
pprDeps :: Dependencies -> SDoc
pprDeps (Deps { dep_mods = mods, dep_pkgs = pkgs, dep_orphs = orphs,
                dep_finsts = finsts })
    = vcat [ ptext (sLit "module dependencies:") <+> fsep (map pp_mod mods)
           , ptext (sLit "package dependencies:") <+> fsep (map pp_pkg pkgs)
           , ptext (sLit "orphans:") <+> fsep (map ppr orphs)
           , ptext (sLit "family instance modules:") <+> fsep (map ppr finsts)
           ]
    where
        pp_mod (mod_name, is_boot) = ppr mod_name <+> pp_boot is_boot
        -- trailing * marks a package that must be trusted (Safe Haskell)
        pp_pkg (pkg, trust_req) =
            ppr pkg <> (if trust_req then text "*" else Outputable.empty)
        pp_boot True  = text "[boot]"
        pp_boot False = Outputable.empty
-- | Render the fixity declarations recorded in an interface, or
-- nothing at all when there are none.
pprFixities :: [(OccName, Fixity)] -> SDoc
pprFixities []    = Outputable.empty
pprFixities fixes = ptext (sLit "fixities") <+> pprWithCommas pp_fix fixes
    where pp_fix (occ, fixity) = ppr fixity <+> ppr occ
-- | Pretty-print the vectorisation information of an interface,
-- one line per category.
pprVectInfo :: IfaceVectInfo -> SDoc
pprVectInfo (IfaceVectInfo { ifaceVectInfoVar = vars
                           , ifaceVectInfoTyCon = tycons
                           , ifaceVectInfoTyConReuse = tyconsReuse
                           , ifaceVectInfoParallelVars = parallelVars
                           , ifaceVectInfoParallelTyCons = parallelTyCons
                           }) =
    vcat
    [ ptext (sLit "vectorised variables:") <+> hsep (map ppr vars)
    , ptext (sLit "vectorised tycons:") <+> hsep (map ppr tycons)
    , ptext (sLit "vectorised reused tycons:") <+> hsep (map ppr tyconsReuse)
    , ptext (sLit "parallel variables:") <+> hsep (map ppr parallelVars)
    , ptext (sLit "parallel tycons:") <+> hsep (map ppr parallelTyCons)
    ]
-- | Render the Safe Haskell trust property of the module.
pprTrustInfo :: IfaceTrustInfo -> SDoc
pprTrustInfo trust = ptext (sLit "trusted:") <+> ppr trust
-- | Render whether the module requires its own package to be trusted.
pprTrustPkg :: Bool -> SDoc
pprTrustPkg tpkg = ptext (sLit "require own pkg trusted:") <+> ppr tpkg
instance Outputable Warnings where
    ppr = pprWarns
-- | Render the warning/deprecation annotations of a module: either a
-- whole-module warning or a per-name list.
pprWarns :: Warnings -> SDoc
pprWarns NoWarnings = Outputable.empty
pprWarns (WarnAll txt) = ptext (sLit "Warn all") <+> ppr txt
pprWarns (WarnSome prs) = ptext (sLit "Warnings")
                          <+> vcat (map pprWarning prs)
    where pprWarning (name, txt) = ppr name <+> ppr txt
-- | Render one ANN pragma payload together with its target.
pprIfaceAnnotation :: IfaceAnnotation -> SDoc
pprIfaceAnnotation (IfaceAnnotation { ifAnnotatedTarget = target, ifAnnotatedValue = serialized })
    = ppr target <+> ptext (sLit "annotated by") <+> ppr serialized
{-
*********************************************************
* *
\subsection{Errors}
* *
*********************************************************
-}
-- | Error document for an interface file that could not be parsed.
badIfaceFile :: String -> SDoc -> SDoc
badIfaceFile file err
    = vcat [ptext (sLit "Bad interface file:") <+> text file,
            nest 4 err]
-- | Warning for an interface whose recorded module name differs from
-- the module that 'readIface' was asked for.
hiModuleNameMismatchWarn :: Module -> Module -> MsgDoc
hiModuleNameMismatchWarn requested_mod read_mod =
    -- ToDo: This will fail to have enough qualification when the package IDs
    -- are the same
    withPprStyle (mkUserStyle alwaysQualify AllTheWay) $
    -- we want the Modules below to be qualified with package names,
    -- so reset the PrintUnqualified setting.
    hsep [ ptext (sLit "Something is amiss; requested module ")
         , ppr requested_mod
         , ptext (sLit "differs from name found in the interface file")
         , ppr read_mod
         ]
-- | Error for an interface file that parses fine but describes a
-- different module than the one expected at that path.
wrongIfaceModErr :: ModIface -> Module -> String -> SDoc
wrongIfaceModErr iface mod_name file_path
    = sep [ptext (sLit "Interface file") <+> iface_file,
           ptext (sLit "contains module") <+> quotes (ppr (mi_module iface)) <> comma,
           ptext (sLit "but we were expecting module") <+> quotes (ppr mod_name),
           sep [ptext (sLit "Probable cause: the source code which generated"),
                nest 2 iface_file,
                ptext (sLit "has an incompatible module name")
               ]
          ]
    where iface_file = doubleQuotes (text file_path)
-- | Error for a home-package module that should already have been in
-- the home package table but was requested via an interface load.
homeModError :: Module -> ModLocation -> SDoc
-- See Note [Home module load error]
homeModError mod location
    = ptext (sLit "attempting to use module ") <> quotes (ppr mod)
      <> (case ml_hs_file location of
              Just file -> space <> parens (text file)
              Nothing -> Outputable.empty)
      <+> ptext (sLit "which is not loaded")
| pparkkin/eta | compiler/ETA/Iface/LoadIface.hs | bsd-3-clause | 47,743 | 377 | 20 | 15,133 | 7,307 | 3,915 | 3,392 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
import Text.XML.Expat.Tree
import Control.Monad
import Data.Text (Text)
import qualified Data.Text as T
import Data.ByteString (ByteString)
import qualified Data.ByteString.Lazy as L
import Data.Maybe
main :: IO ()
main = do
    raw <- L.readFile "ROADS.xml"
    -- Parse eagerly; 'parseThrowing' raises an exception on bad XML,
    -- and the lazy pattern match fails if the root is not an element.
    let Element _ _ children = parseThrowing defaultParseOptions raw :: UNode Text
    forM_ children $ \node ->
        case node of
            shape@(Element "shape" _ _) ->
                putStrLn . T.unpack . fromMaybe "" $ getAttribute shape "FULL_NAME"
            _ -> return ()
| sol/hexpat | test/test2.hs | bsd-3-clause | 570 | 1 | 19 | 140 | 191 | 98 | 93 | 17 | 2 |
module Faktor.Prim where
-- $Id$
-- | The primes below 100 (primes 10 sieves up to 10*10).
smallish :: [ Integer ]
smallish = primes 10
-- | The primes below 10000; this is the divisor pool used by 'factor'.
mediumish :: [ Integer ]
mediumish = primes 100
-- | List of the prime numbers below @q*q@, computed by incremental
-- trial-division sieving.  Once the head candidate exceeds @q@ the
-- remaining candidates are all prime, because any composite below
-- @q*q@ has a factor of at most @q@.
primes :: Integral b
    => b -> [ b ]
primes q = 2 : sieve [ 3, 5 .. q * q - 1 ] where
    -- An empty candidate list (e.g. for q <= 1) yields no further
    -- primes.  This clause was missing, making 'primes' partial for
    -- small arguments (forcing the tail of @primes 1@ crashed).
    sieve [] = []
    sieve (x : ys) =
        x : if x > q
            then ys
            else sieve ( filter ( \ y -> 0 /= mod y x ) ys )
-- | Factorise by trial division against the 'mediumish' prime pool.
-- 'Nothing' means the pool was exhausted before a complete
-- factorisation was found.
factor :: Integer -> Maybe [ (Integer, Int) ]
factor = trial mediumish
trial :: [ Integer ]
      -> Integer
      -> Maybe [ (Integer, Int) ]
-- Trial division of @x@ by the given ascending candidate primes.
-- Succeeds when @x@ is fully factored or the remaining cofactor is
-- provably prime; fails with 'Nothing' when the candidates run out.
--
-- BUG FIX: the @x == 1@ case must be checked before the empty-list
-- case.  Previously @trial [] 1@ returned 'Nothing', so a number that
-- was fully factored exactly when the list ran out (e.g.
-- @trial [2] 4@) was wrongly reported as unfactorable.
trial _ 1 = return []         -- fully factored
trial [] _ = Nothing          -- candidates exhausted, cofactor unknown
trial (p : ps) x
    | p * p > x = return [(x, 1)] -- is prime
    | otherwise = do
        let (k, q) = divvy x p
        rest <- trial ps q
        return $ [ (p, k) | k > 0 ] ++ rest
-- | @divvy x p@ divides @p@ out of @x@ as often as possible,
-- returning the multiplicity and the remaining cofactor.
divvy :: Integral b
    => b -> b -> (Int, b)
divvy x p =
    let (q, r) = divMod x p
        (k', q') = divvy q p   -- lazily; only forced when r == 0
    in if 0 < r then (0, x)
       else (succ k', q')
| florianpilz/autotool | src/Faktor/Prim.hs | gpl-2.0 | 991 | 10 | 13 | 330 | 458 | 246 | 212 | 34 | 3 |
{-# LANGUAGE NoRebindableSyntax #-}
-- | This file contains the template haskell code for deriving SubHask class instances from Base instances.
-- All of the standard instances are created in "SubHask.Compatibility.Base".
-- This module is exported so that you can easily make instances for your own types without any extra work.
-- To do this, just put the line
--
-- > deriveAll
--
-- at the bottom of your file.
-- Any types in scope that do not already have SubHask instances will have them created automatically.
--
-- FIXME:
-- Most classes aren't implemented yet.
-- I don't want to go through the work until their definitions stabilize somewhat.
module SubHask.TemplateHaskell.Base
where
import qualified Prelude as Base
import qualified Control.Applicative as Base
import qualified Control.Monad as Base
import Language.Haskell.TH
import System.IO
import SubHask.Category
import SubHask.Algebra
import SubHask.Monad
import SubHask.Internal.Prelude
import Debug.Trace
--------------------------------------------------------------------------------
-- We need these instances to get anything done
-- Template Haskell's 'Name', 'Dec' and 'Type' only ship with Prelude
-- 'Base.Eq' instances, so we lift those into SubHask's 'Eq_' here.
type instance Logic Name = Bool
instance Eq_ Name where (==) = (Base.==)
type instance Logic Dec = Bool
instance Eq_ Dec where (==) = (Base.==)
type instance Logic Type = Bool
instance Eq_ Type where (==) = (Base.==)
--------------------------------------------------------------------------------
-- generic helper functions
-- | Derives instances for all data types in scope.
-- This is the only function you should need to use.
-- The other functions are exported only for debugging purposes if this function should fail.
deriveAll :: Q [Dec]
deriveAll = Base.liftM concat $ Base.mapM go
    -- Each pair is (Prelude class to look up, builder that constructs
    -- the corresponding SubHask instance declarations).
    [ (''Base.Eq, mkPreludeEq)
    , (''Base.Functor, mkPreludeFunctor)
    , (''Base.Applicative,mkPreludeApplicative)
    , (''Base.Monad,mkPreludeMonad)
    ]
    where
        go (n,f) = forAllInScope n f
-- | Constructs an instance using the given function for everything in scope.
forAllInScope :: Name -> (Cxt -> Q Type -> Q [Dec]) -> Q [Dec]
forAllInScope preludename f = do
    info <- reify preludename
    case info of
        -- NOTE(review): assumes 'preludename' names a class; 'reify'
        -- on anything else falls through this partial match.
        ClassI _ xs -> Base.liftM concat $ Base.sequence $ map mgo $ Base.filter fgo xs
            where
                mgo (InstanceD ctx (AppT _ t) _) = f ctx (Base.return t)
                -- Skip instances whose head mentions a function arrow.
                -- BUG FIX: this read "not elem '>' $ show t", which
                -- applies 'not' to 'elem' and does not type check; the
                -- intended expression is the negated membership test.
                fgo (InstanceD _ (AppT _ t) _ ) = not (elem '>' (show t))
-- | This is an internal helper function.
-- It prevents us from defining two instances for the same class/type pair.
runIfNotInstance :: Name -> Type -> Q [Dec] -> Q [Dec]
runIfNotInstance n t q = do
    inst <- alreadyInstance n t
    if inst
        then trace ("skipping instance: "++show n++" / "++show t) $ Base.return []
        else trace ("deriving instance: "++show n++" / "++show t) $ q
    where
        -- NOTE(review): partial match -- assumes 'n' reifies to a class.
        alreadyInstance :: Name -> Type -> Q Bool
        alreadyInstance n t = do
            info <- reify n
            Base.return $ case info of
                ClassI _ xs -> or $ map (genericTypeEq t.rmInstanceD) xs
        -- FIXME:
        -- This function was introduced to fix a name capture problem where `Eq a` and `Eq b` are not recognized as the same type.
        -- The current solution is not correct, but works for some cases.
        -- Any two type variables (and any two signatures) compare equal,
        -- which over-approximates "already an instance".
        genericTypeEq (AppT s1 t1) (AppT s2 t2) = genericTypeEq s1 s2 && genericTypeEq t1 t2
        genericTypeEq (ConT n1) (ConT n2) = n1==n2
        genericTypeEq (VarT _) (VarT _) = true
        genericTypeEq (SigT _ _) (SigT _ _) = true
        genericTypeEq (TupleT n1) (TupleT n2) = n1==n2
        genericTypeEq ArrowT ArrowT = true
        genericTypeEq ListT ListT = true
        genericTypeEq _ _ = false
        -- extract the instance-head type from an InstanceD declaration
        rmInstanceD (InstanceD _ (AppT _ t) _) = t
--------------------------------------------------------------------------------
-- comparison hierarchy
-- | Create an "Eq" instance from a "Prelude.Eq" instance.
-- Emits both the @type instance Logic t = Bool@ family equation and an
-- @Eq_@ instance whose '==' delegates to 'Base.=='.
mkPreludeEq :: Cxt -> Q Type -> Q [Dec]
mkPreludeEq ctx qt = do
    t <- qt
    runIfNotInstance ''Eq_ t $ Base.return
        [ TySynInstD
            ( mkName "Logic" )
            ( TySynEqn
                [ t ]
                ( ConT $ mkName "Bool" )
            )
        , InstanceD
            ctx
            ( AppT ( ConT $ mkName "Eq_" ) t )
            [ FunD ( mkName "==" ) [ Clause [] (NormalB $ VarE $ mkName "Base.==") [] ]
            ]
        ]
--------------------------------------------------------------------------------
-- monad hierarchy
-- | Create a "Functor" instance from a "Prelude.Functor" instance.
-- The generated instance lives in the 'Hask' category and simply
-- delegates 'fmap' to 'Base.fmap'.
mkPreludeFunctor :: Cxt -> Q Type -> Q [Dec]
mkPreludeFunctor ctx qt = do
    t <- qt
    runIfNotInstance ''Functor t $ Base.return
        [ InstanceD
            ctx
            ( AppT
                ( AppT
                    ( ConT $ mkName "Functor" )
                    ( ConT $ mkName "Hask" )
                )
                t
            )
            [ FunD ( mkName "fmap" ) [ Clause [] (NormalB $ VarE $ mkName "Base.fmap") [] ]
            ]
        ]
-- | Create an "Applicative" instance from a "Prelude.Applicative" instance.
-- The generated instance delegates 'pure' and '<*>' to their Prelude
-- counterparts.
mkPreludeApplicative :: Cxt -> Q Type -> Q [Dec]
mkPreludeApplicative cxt qt = do
    t <- qt
    runIfNotInstance ''Applicative t $ Base.return
        [ InstanceD
            cxt
            ( AppT
                ( AppT
                    ( ConT $ mkName "Applicative" )
                    ( ConT $ mkName "Hask" )
                )
                t
            )
            [ FunD ( mkName "pure" ) [ Clause [] (NormalB $ VarE $ mkName "Base.pure") [] ]
            , FunD ( mkName "<*>" ) [ Clause [] (NormalB $ VarE $ mkName "Base.<*>") [] ]
            ]
        ]
-- | Create a "Monad" instance from a "Prelude.Monad" instance.
--
-- FIXME:
-- Monad transformers still require their parameter monad to be an instance of "Prelude.Monad".
mkPreludeMonad :: Cxt -> Q Type -> Q [Dec]
mkPreludeMonad cxt qt = do
    t <- qt
    -- can't call
    -- > runIfNotInstance ''Monad t $
    -- due to lack of TH support for type families
    trace ("deriving instance: Monad / "++show t) $ if cannotDeriveMonad t
        then Base.return []
        else Base.return
            [ InstanceD
                cxt
                ( AppT
                    ( ConT $ mkName "Then" )
                    t
                )
                [ FunD ( mkName ">>" ) [ Clause [] (NormalB $ VarE $ mkName "Base.>>") [] ]
                ]
            , InstanceD
                -- ( ClassP ''Functor [ ConT ''Hask , t ] : cxt )
                ( AppT (AppT (ConT ''Functor) (ConT ''Hask)) t : cxt )
                ( AppT
                    ( AppT
                        ( ConT $ mkName "Monad" )
                        ( ConT $ mkName "Hask" )
                    )
                    t
                )
                [ FunD ( mkName "return_" ) [ Clause [] (NormalB $ VarE $ mkName "Base.return") [] ]
                , FunD ( mkName "join" ) [ Clause [] (NormalB $ VarE $ mkName "Base.join" ) [] ]
                , FunD ( mkName ">>=" ) [ Clause [] (NormalB $ VarE $ mkName "Base.>>=" ) [] ]
                , FunD ( mkName ">=>" ) [ Clause [] (NormalB $ VarE $ mkName "Base.>=>" ) [] ]
                , FunD ( mkName "=<<" ) [ Clause [] (NormalB $ VarE $ mkName "Base.=<<" ) [] ]
                , FunD ( mkName "<=<" ) [ Clause [] (NormalB $ VarE $ mkName "Base.<=<" ) [] ]
                ]
            ]
    where
        -- | This helper function "filters out" monads for which we can't automatically derive an implementation.
        -- This failure can be due to missing Functor instances or weird type errors.
        cannotDeriveMonad t = elem (show $ getName t) badmonad
            where
                -- Peel up to six levels of type application to reach
                -- the head type constructor of the instance head.
                getName :: Type -> Name
                getName t = case t of
                    (ConT t) -> t
                    ListT -> mkName "[]"
                    (SigT t _) -> getName t
                    (AppT (ConT t) _) -> t
                    (AppT (AppT (ConT t) _) _) -> t
                    (AppT (AppT (AppT (ConT t) _) _) _) -> t
                    (AppT (AppT (AppT (AppT (ConT t) _) _) _) _) -> t
                    (AppT (AppT (AppT (AppT (AppT (ConT t) _) _) _) _) _) -> t
                    (AppT (AppT (AppT (AppT (AppT (AppT (ConT t) _) _) _) _) _) _) -> t
                    t -> error ("cannotDeriveMonad error="++show t)
                -- fully-qualified names of types we refuse to derive for
                badmonad =
                    [ "Text.ParserCombinators.ReadBase.P"
                    , "Control.Monad.ST.Lazy.Imp.ST"
                    , "Data.Proxy.Proxy"
                    ]
| abailly/subhask | src/SubHask/TemplateHaskell/Base.hs | bsd-3-clause | 8,576 | 0 | 26 | 2,910 | 2,233 | 1,149 | 1,084 | -1 | -1 |
{-# LANGUAGE BangPatterns, CPP, RecordWildCards, GADTs #-}
module CmmLayoutStack (
cmmLayoutStack, setInfoTableStackMap
) where
import GhcPrelude hiding ((<*>))
import StgCmmUtils ( callerSaveVolatileRegs ) -- XXX layering violation
import StgCmmForeign ( saveThreadState, loadThreadState ) -- XXX layering violation
import BasicTypes
import Cmm
import CmmInfo
import BlockId
import CLabel
import CmmUtils
import MkGraph
import ForeignCall
import CmmLive
import CmmProcPoint
import SMRep
import Hoopl.Block
import Hoopl.Collections
import Hoopl.Dataflow
import Hoopl.Graph
import Hoopl.Label
import UniqSupply
import StgCmmUtils ( newTemp )
import Maybes
import UniqFM
import Util
import DynFlags
import FastString
import Outputable hiding ( isEmpty )
import qualified Data.Set as Set
import Control.Monad.Fix
import Data.Array as Array
import Data.Bits
import Data.List (nub)
#include "HsVersions.h"
{- Note [Stack Layout]
The job of this pass is to
- replace references to abstract stack Areas with fixed offsets from Sp.
- replace the CmmHighStackMark constant used in the stack check with
the maximum stack usage of the proc.
- save any variables that are live across a call, and reload them as
necessary.
Before stack allocation, local variables remain live across native
calls (CmmCall{ cmm_cont = Just _ }), and after stack allocation local
variables are clobbered by native calls.
We want to do stack allocation so that as far as possible
- stack use is minimized, and
- unnecessary stack saves and loads are avoided.
The algorithm we use is a variant of linear-scan register allocation,
where the stack is our register file.
We proceed in two passes, see Note [Two pass approach] for why they are not easy
to merge into one.
Pass 1:
- First, we do a liveness analysis, which annotates every block with
the variables live on entry to the block.
- We traverse blocks in reverse postorder DFS; that is, we visit at
least one predecessor of a block before the block itself. The
stack layout flowing from the predecessor of the block will
determine the stack layout on entry to the block.
- We maintain a data structure
Map Label StackMap
which describes the contents of the stack and the stack pointer on
entry to each block that is a successor of a block that we have
visited.
- For each block we visit:
- Look up the StackMap for this block.
- If this block is a proc point (or a call continuation, if we aren't
splitting proc points), we need to reload all the live variables from the
stack - but this is done in Pass 2, which calculates more precise liveness
information (see description of Pass 2).
- Walk forwards through the instructions:
- At an assignment x = Sp[loc]
- Record the fact that Sp[loc] contains x, so that we won't
need to save x if it ever needs to be spilled.
- At an assignment x = E
- If x was previously on the stack, it isn't any more
- At the last node, if it is a call or a jump to a proc point
- Lay out the stack frame for the call (see setupStackFrame)
- emit instructions to save all the live variables
- Remember the StackMaps for all the successors
- emit an instruction to adjust Sp
- If the last node is a branch, then the current StackMap is the
StackMap for the successors.
- Manifest Sp: replace references to stack areas in this block
with real Sp offsets. We cannot do this until we have laid out
the stack area for the successors above.
In this phase we also eliminate redundant stores to the stack;
see elimStackStores.
- There is one important gotcha: sometimes we'll encounter a control
transfer to a block that we've already processed (a join point),
and in that case we might need to rearrange the stack to match
what the block is expecting. (exactly the same as in linear-scan
register allocation, except here we have the luxury of an infinite
supply of temporary variables).
- Finally, we update the magic CmmHighStackMark constant with the
stack usage of the function, and eliminate the whole stack check
if there was no stack use. (in fact this is done as part of the
main traversal, by feeding the high-water-mark output back in as
an input. I hate cyclic programming, but it's just too convenient
sometimes.)
There are plenty of tricky details: update frames, proc points, return
addresses, foreign calls, and some ad-hoc optimisations that are
convenient to do here and effective in common cases. Comments in the
code below explain these.
Pass 2:
- Calculate live registers, but taking into account that nothing is live at the
entry to a proc point.
- At each proc point and call continuation insert reloads of live registers from
the stack (they were saved by Pass 1).
Note [Two pass approach]
The main reason for Pass 2 is being able to insert only the reloads that are
needed and the fact that the two passes need different liveness information.
Let's consider an example:
.....
\ /
D <- proc point
/ \
E F
\ /
G <- proc point
|
X
Pass 1 needs liveness assuming that local variables are preserved across calls.
This is important because it needs to save any local registers to the stack
(e.g., if register a is used in block X, it must be saved before any native
call).
However, for Pass 2, where we want to reload registers from stack (in a proc
point), this is overly conservative and would lead us to generate reloads in D
for things used in X, even though we're going to generate reloads in G anyway
(since it's also a proc point).
So Pass 2 calculates liveness knowing that nothing is live at the entry to a
proc point. This means that in D we only need to reload things used in E or F.
This can be quite important, for an extreme example see testcase for #3294.
Merging the two passes is not trivial - Pass 2 is a backward rewrite and Pass 1
is a forward one. Furthermore, Pass 1 is creating code that uses local registers
(saving them before a call), which the liveness analysis for Pass 2 must see to
be correct.
-}
-- All stack locations are expressed as positive byte offsets from the
-- "base", which is defined to be the address above the return address
-- on the stack on entry to this CmmProc.
--
-- Lower addresses have higher StackLocs.
--
type StackLoc = ByteOff -- ^ positive byte offset from the stack "base" (see comment above)
{-
A StackMap describes the stack at any given point. At a continuation
it has a particular layout, like this:
| | <- base
|-------------|
| ret0 | <- base + 8
|-------------|
. upd frame . <- base + sm_ret_off
|-------------|
| |
. vars .
. (live/dead) .
| | <- base + sm_sp - sm_args
|-------------|
| ret1 |
. ret vals . <- base + sm_sp (<--- Sp points here)
|-------------|
Why do we include the final return address (ret0) in our stack map? I
have absolutely no idea, but it seems to be done that way consistently
in the rest of the code generator, so I played along here. --SDM
Note that we will be constructing an info table for the continuation
(ret1), which needs to describe the stack down to, but not including,
the update frame (or ret0, if there is no update frame).
-}
data StackMap = StackMap
    { sm_sp :: StackLoc
        -- ^ the offset of Sp relative to the base on entry
        -- to this block.
    , sm_args :: ByteOff
        -- ^ the number of bytes of arguments in the area for this block
        -- Defn: the offset of young(L) relative to the base is given by
        -- (sm_sp - sm_args) of the StackMap for block L.
    , sm_ret_off :: ByteOff
        -- ^ Number of words of stack that we do not describe with an info
        -- table, because it contains an update frame.
    , sm_regs :: UniqFM (LocalReg,StackLoc)
        -- ^ regs on the stack; each entry carries the register itself
        -- together with the slot that holds its saved value
    }
-- Debug rendering of a StackMap: one labelled line per field.
instance Outputable StackMap where
    ppr sm =
            text "Sp = "         <> int (sm_sp sm)
        $$  text "sm_args = "    <> int (sm_args sm)
        $$  text "sm_ret_off = " <> int (sm_ret_off sm)
        $$  text "sm_regs = "    <> pprUFM (sm_regs sm) ppr
-- | Entry point of the stack-layout pass; see Note [Stack Layout].
-- Returns the rewritten graph plus a StackMap for every block, which
-- is later used to build info tables ('setInfoTableStackMap').
cmmLayoutStack :: DynFlags -> ProcPointSet -> ByteOff -> CmmGraph
               -> UniqSM (CmmGraph, LabelMap StackMap)
cmmLayoutStack dflags procpoints entry_args
               graph@(CmmGraph { g_entry = entry })
    = do
    -- We need liveness info. Dead assignments are removed later
    -- by the sinking pass.
    let liveness = cmmLocalLiveness dflags graph
        blocks = postorderDfs graph
    -- Pass 1 (see Note [Two pass approach]).  The knot-tying via mfix
    -- feeds the final stack maps and Sp high-water mark back into
    -- 'layout' as inputs.
    (final_stackmaps, _final_high_sp, new_blocks) <-
        mfix $ \ ~(rec_stackmaps, rec_high_sp, _new_blocks) ->
            layout dflags procpoints liveness entry entry_args
                   rec_stackmaps rec_high_sp blocks
    -- Pass 2: insert reloads at proc points and call continuations.
    blocks_with_reloads <-
        insertReloadsAsNeeded dflags procpoints final_stackmaps entry new_blocks
    new_blocks' <- mapM (lowerSafeForeignCall dflags) blocks_with_reloads
    return (ofBlockList entry new_blocks', final_stackmaps)
-- -----------------------------------------------------------------------------
-- Pass 1
-- -----------------------------------------------------------------------------
-- | Pass 1 of the stack-layout algorithm (see Note [Stack Layout]):
-- walk the blocks in reverse postorder, laying out stack frames,
-- saving live variables across calls, and manifesting Sp.  The last
-- two arguments are the *final* results fed back in by 'mfix'.
layout :: DynFlags
       -> LabelSet -- proc points
       -> LabelMap CmmLocalLive -- liveness
       -> BlockId -- entry
       -> ByteOff -- stack args on entry
       -> LabelMap StackMap -- [final] stack maps
       -> ByteOff -- [final] Sp high water mark
       -> [CmmBlock] -- [in] blocks
       -> UniqSM
          ( LabelMap StackMap -- [out] stack maps
          , ByteOff -- [out] Sp high water mark
          , [CmmBlock] -- [out] new blocks
          )
layout dflags procpoints liveness entry entry_args final_stackmaps final_sp_high blocks
    = go blocks init_stackmap entry_args []
    where
        (updfr, cont_info) = collectContInfo blocks
        init_stackmap = mapSingleton entry StackMap{ sm_sp = entry_args
                                                   , sm_args = entry_args
                                                   , sm_ret_off = updfr
                                                   , sm_regs = emptyUFM
                                                   }
        go [] acc_stackmaps acc_hwm acc_blocks
            = return (acc_stackmaps, acc_hwm, acc_blocks)
        go (b0 : bs) acc_stackmaps acc_hwm acc_blocks
            = do
            let (entry0@(CmmEntry entry_lbl tscope), middle0, last0) = blockSplit b0
            -- every block is reached after at least one predecessor,
            -- so its StackMap must already be present
            let stack0@StackMap { sm_sp = sp0 }
                    = mapFindWithDefault
                          (pprPanic "no stack map for" (ppr entry_lbl))
                          entry_lbl acc_stackmaps
            -- (a) Update the stack map to include the effects of
            -- assignments in this block
            let stack1 = foldBlockNodesF (procMiddle acc_stackmaps) middle0 stack0
            -- (b) Look at the last node and if we are making a call or
            -- jumping to a proc point, we must save the live
            -- variables, adjust Sp, and construct the StackMaps for
            -- each of the successor blocks. See handleLastNode for
            -- details.
            (middle1, sp_off, last1, fixup_blocks, out)
                <- handleLastNode dflags procpoints liveness cont_info
                                  acc_stackmaps stack1 tscope middle0 last0
            -- (c) Manifest Sp: run over the nodes in the block and replace
            -- CmmStackSlot with CmmLoad from Sp with a concrete offset.
            --
            -- our block:
            -- middle0 -- the original middle nodes
            -- middle1 -- live variable saves from handleLastNode
            -- Sp = Sp + sp_off -- Sp adjustment goes here
            -- last1 -- the last node
            --
            let middle_pre = blockToList $ foldl blockSnoc middle0 middle1
            let final_blocks =
                    manifestSp dflags final_stackmaps stack0 sp0 final_sp_high
                               entry0 middle_pre sp_off last1 fixup_blocks
            let acc_stackmaps' = mapUnion acc_stackmaps out
                -- If this block jumps to the GC, then we do not take its
                -- stack usage into account for the high-water mark.
                -- Otherwise, if the only stack usage is in the stack-check
                -- failure block itself, we will do a redundant stack
                -- check. The stack has a buffer designed to accommodate
                -- the largest amount of stack needed for calling the GC.
                --
                this_sp_hwm | isGcJump last0 = 0
                            | otherwise = sp0 - sp_off
                hwm' = maximum (acc_hwm : this_sp_hwm : map sm_sp (mapElems out))
            go bs acc_stackmaps' hwm' (final_blocks ++ acc_blocks)
-- -----------------------------------------------------------------------------
-- Not foolproof, but GCFun is the culprit we most want to catch
-- | Does this exit node tail-call straight into the garbage collector?
-- (Used to exclude GC-failure paths from the Sp high-water mark.)
isGcJump :: CmmNode O C -> Bool
isGcJump (CmmCall { cml_target = CmmReg (CmmGlobal greg) }) =
    greg `elem` [GCFun, GCEnter1]
isGcJump _ = False
-- -----------------------------------------------------------------------------
-- This doesn't seem right somehow. We need to find out whether this
-- proc will push some update frame material at some point, so that we
-- can avoid using that area of the stack for spilling. The
-- updfr_space field of the CmmProc *should* tell us, but it doesn't
-- (I think maybe it gets filled in later when we do proc-point
-- splitting).
--
-- So we'll just take the max of all the cml_ret_offs. This could be
-- unnecessarily pessimistic, but probably not in the code we
-- generate.
-- | Scan every block's exit node, collecting (i) the maximum update-frame
-- offset ('cml_ret_off' / 'ret_off') seen anywhere, and (ii) a map from
-- continuation label to the number of bytes of return values it expects.
collectContInfo :: [CmmBlock] -> (ByteOff, LabelMap ByteOff)
collectContInfo blocks =
    (maximum ret_offs, mapFromList (catMaybes mb_argss))
  where
    (mb_argss, ret_offs) = mapAndUnzip contInfo blocks

    contInfo :: Block CmmNode x C -> (Maybe (Label, ByteOff), ByteOff)
    contInfo blk =
      case lastNode blk of
        CmmCall { cml_cont = Just l, cml_ret_args = rargs, cml_ret_off = roff }
          -> (Just (l, rargs), roff)
        CmmForeignCall { succ = s, ret_args = rargs, ret_off = roff }
          -> (Just (s, rargs), roff)
        _ -> (Nothing, 0)
-- -----------------------------------------------------------------------------
-- Updating the StackMap from middle nodes
-- Look for loads from stack slots, and update the StackMap. This is
-- purely for optimisation reasons, so that we can avoid saving a
-- variable back to a different stack slot if it is already on the
-- stack.
--
-- This happens a lot: for example when function arguments are passed
-- on the stack and need to be immediately saved across a call, we
-- want to just leave them where they are on the stack.
--
-- | Update a StackMap with the effect of one middle node.  Loading a
-- stack slot into a local register records that the register is already
-- on the stack (so it need not be saved again); any other assignment to
-- a local register invalidates its recorded slot.  Other nodes leave
-- the map untouched.
procMiddle :: LabelMap StackMap -> CmmNode e x -> StackMap -> StackMap
procMiddle stackmaps (CmmAssign (CmmLocal r) rhs) sm =
  case rhs of
    CmmLoad (CmmStackSlot area off) _ ->
        sm { sm_regs = addToUFM (sm_regs sm) r (r, loc) }
      where loc = getStackLoc area off stackmaps
    _ ->
        sm { sm_regs = delFromUFM (sm_regs sm) r }
procMiddle _ _ sm = sm
-- | Translate an (Area, offset) pair into an offset from the Old area
-- base.  A Young area is located at the top of its owner's frame, so we
-- consult that block's StackMap; a missing entry is a compiler bug.
getStackLoc :: Area -> ByteOff -> LabelMap StackMap -> StackLoc
getStackLoc Old       n _ = n
getStackLoc (Young l) n stackmaps =
    maybe (pprPanic "getStackLoc" (ppr l))
          (\sm -> sm_sp sm - sm_args sm + n)
          (mapLookup l stackmaps)
-- -----------------------------------------------------------------------------
-- Handling stack allocation for a last node
-- We take a single last node and turn it into:
--
-- C1 (some statements)
-- Sp = Sp + N
-- C2 (some more statements)
-- call f() -- the actual last node
--
-- plus possibly some more blocks (we may have to add some fixup code
-- between the last node and the continuation).
--
-- C1: is the code for saving the variables across this last node onto
-- the stack, if the continuation is a call or jumps to a proc point.
--
-- C2: if the last node is a safe foreign call, we have to inject some
-- extra code that goes *after* the Sp adjustment.
-- | Work out the stack-layout consequences of a block's exit node:
-- which live variables must be saved, how Sp must move, and what
-- StackMap each successor sees.  May also generate fixup blocks that
-- shuffle the stack on edges to already-laid-out join points.
handleLastNode
   :: DynFlags -> ProcPointSet -> LabelMap CmmLocalLive -> LabelMap ByteOff
   -> LabelMap StackMap -> StackMap -> CmmTickScope
   -> Block CmmNode O O
   -> CmmNode O C
   -> UniqSM
      ( [CmmNode O O]      -- nodes to go *before* the Sp adjustment
      , ByteOff            -- amount to adjust Sp
      , CmmNode O C        -- new last node
      , [CmmBlock]         -- new blocks
      , LabelMap StackMap  -- stackmaps for the continuations
      )

handleLastNode dflags procpoints liveness cont_info stackmaps
               stack0@StackMap { sm_sp = sp0 } tscp middle last
 = case last of
    --  At each return / tail call,
    --  adjust Sp to point to the last argument pushed, which
    --  is cml_args, after popping any other junk from the stack.
    CmmCall{ cml_cont = Nothing, .. } -> do
      let sp_off = sp0 - cml_args
      return ([], sp_off, last, [], mapEmpty)

    --  At each CmmCall with a continuation:
    CmmCall{ cml_cont = Just cont_lbl, .. } ->
       return $ lastCall cont_lbl cml_args cml_ret_args cml_ret_off

    CmmForeignCall{ succ = cont_lbl, .. } -> do
       return $ lastCall cont_lbl (wORD_SIZE dflags) ret_args ret_off
            -- one word of args: the return address

    CmmBranch {}     ->  handleBranches
    CmmCondBranch {} ->  handleBranches
    CmmSwitch {}     ->  handleBranches

  where
     -- Calls and ForeignCalls are handled the same way:
     lastCall :: BlockId -> ByteOff -> ByteOff -> ByteOff
              -> ( [CmmNode O O]
                 , ByteOff
                 , CmmNode O C
                 , [CmmBlock]
                 , LabelMap StackMap
                 )
     lastCall lbl cml_args cml_ret_args cml_ret_off
      =  ( assignments
         , spOffsetForCall sp0 cont_stack cml_args
         , last
         , [] -- no new blocks
         , mapSingleton lbl cont_stack )
      where
         (assignments, cont_stack) = prepareStack lbl cml_ret_args cml_ret_off

     -- Decide what the continuation's stack looks like, and which saves
     -- are needed to get there from the current stack.
     prepareStack lbl cml_ret_args cml_ret_off
       | Just cont_stack <- mapLookup lbl stackmaps
             -- If we have already seen this continuation before, then
             -- we just have to make the stack look the same:
       = (fixupStack stack0 cont_stack, cont_stack)
             -- Otherwise, we have to allocate the stack frame
       | otherwise
       = (save_assignments, new_cont_stack)
       where
        (new_cont_stack, save_assignments)
           = setupStackFrame dflags lbl liveness cml_ret_off cml_ret_args stack0

     -- For other last nodes (branches), if any of the targets is a
     -- proc point, we have to set up the stack to match what the proc
     -- point is expecting.
     --
     handleBranches :: UniqSM ( [CmmNode O O]
                              , ByteOff
                              , CmmNode O C
                              , [CmmBlock]
                              , LabelMap StackMap )

     handleBranches
         -- Note [diamond proc point]
       | Just l <- futureContinuation middle
       , (nub $ filter (`setMember` procpoints) $ successors last) == [l]
       = do
         let cont_args = mapFindWithDefault 0 l cont_info
             (assigs, cont_stack) = prepareStack l cont_args (sm_ret_off stack0)
             out = mapFromList [ (l', cont_stack)
                               | l' <- successors last ]
         return ( assigs
                , spOffsetForCall sp0 cont_stack (wORD_SIZE dflags)
                , last
                , []
                , out)

       | otherwise = do
         pps <- mapM handleBranch (successors last)
         let lbl_map :: LabelMap Label
             lbl_map = mapFromList [ (l,tmp) | (l,tmp,_,_) <- pps ]
             fix_lbl l = mapFindWithDefault l l lbl_map
         return ( []
                , 0
                , mapSuccessors fix_lbl last
                , concat [ blk | (_,_,_,blk) <- pps ]
                , mapFromList [ (l, sm) | (l,_,sm,_) <- pps ] )

     -- For each successor of this block
     handleBranch :: BlockId -> UniqSM (BlockId, BlockId, StackMap, [CmmBlock])
     handleBranch l
        --   (a) if the successor already has a stackmap, we need to
        --       shuffle the current stack to make it look the same.
        --       We have to insert a new block to make this happen.
        | Just stack2 <- mapLookup l stackmaps
        = do
             let assigs = fixupStack stack0 stack2
             (tmp_lbl, block) <- makeFixupBlock dflags sp0 l stack2 tscp assigs
             return (l, tmp_lbl, stack2, block)

        --   (b) if the successor is a proc point, save everything
        --       on the stack.
        | l `setMember` procpoints
        = do
             let cont_args = mapFindWithDefault 0 l cont_info
                 (stack2, assigs) =
                      setupStackFrame dflags l liveness (sm_ret_off stack0)
                                                        cont_args stack0
             (tmp_lbl, block) <- makeFixupBlock dflags sp0 l stack2 tscp assigs
             return (l, tmp_lbl, stack2, block)

        --   (c) otherwise, the current StackMap is the StackMap for
        --       the continuation.  But we must remember to remove any
        --       variables from the StackMap that are *not* live at
        --       the destination, because this StackMap might be used
        --       by fixupStack if this is a join point.
        | otherwise = return (l, l, stack1, [])
        where live   = mapFindWithDefault (panic "handleBranch") l liveness
              stack1 = stack0 { sm_regs = filterUFM is_live (sm_regs stack0) }
              is_live (r,_) = r `elemRegSet` live
-- | Build a fresh block that runs the given fixup assignments, adjusts
-- Sp to the target StackMap's level, and branches to @l@.  If no
-- assignments and no Sp adjustment are needed, no block is created and
-- the original label is returned unchanged.
makeFixupBlock :: DynFlags -> ByteOff -> Label -> StackMap
               -> CmmTickScope -> [CmmNode O O]
               -> UniqSM (Label, [CmmBlock])
makeFixupBlock dflags sp0 l stack tscope assigs
  | null assigs && sp0 == sm_sp stack = return (l, [])
  | otherwise = do
    tmp_lbl <- newBlockId
    let sp_off = sp0 - sm_sp stack
        -- In debug builds, record the post-adjustment Sp for unwinding.
        maybeAddUnwind block
          | debugLevel dflags > 0
          = block `blockSnoc` CmmUnwind [(Sp, Just unwind_val)]
          | otherwise
          = block
          where unwind_val = cmmOffset dflags (CmmReg spReg) (sm_sp stack)
        block = blockJoin (CmmEntry tmp_lbl tscope)
                          ( maybeAddSpAdj dflags sp_off
                           $ maybeAddUnwind
                           $ blockFromList assigs )
                          (CmmBranch l)
    return (tmp_lbl, [block])
-- Sp is currently pointing to current_sp,
-- we want it to point to
-- (sm_sp cont_stack - sm_args cont_stack + args)
-- so the difference is
-- sp0 - (sm_sp cont_stack - sm_args cont_stack + args)
-- | How far Sp must move before a call: Sp currently sits at
-- @current_sp@ and must end up at the continuation's frame top minus
-- its incoming args, plus the args being passed now.
spOffsetForCall :: ByteOff -> StackMap -> ByteOff -> ByteOff
spOffsetForCall current_sp cont_stack args = current_sp - target_sp
  where
    target_sp = sm_sp cont_stack - sm_args cont_stack + args
-- | Emit the stores needed to turn the old stack layout into the new
-- one: every register the new StackMap expects on the stack is stored
-- there, except those already sitting in the right slot.
fixupStack :: StackMap -> StackMap -> [CmmNode O O]
fixupStack old_stack new_stack =
    [ CmmStore (CmmStackSlot Old loc) (CmmReg (CmmLocal reg))
    | (reg, loc) <- stackSlotRegs new_stack
    , not (alreadyInPlace reg loc) ]
  where
    old_map = sm_regs old_stack
    alreadyInPlace reg loc =
      case lookupUFM old_map reg of
        Just (_, loc') -> loc' == loc
        Nothing        -> False
-- | Allocate a stack frame for continuation @lbl@: place all of its
-- live variables on the stack (via 'allocate') and compute the StackMap
-- the continuation will see.
setupStackFrame
             :: DynFlags
             -> BlockId                 -- label of continuation
             -> LabelMap CmmLocalLive   -- liveness
             -> ByteOff                 -- updfr
             -> ByteOff                 -- bytes of return values on stack
             -> StackMap                -- current StackMap
             -> (StackMap, [CmmNode O O])

setupStackFrame dflags lbl liveness updfr_off ret_args stack0
  = (cont_stack, assignments)
  where
      -- get the set of LocalRegs live in the continuation
      live = mapFindWithDefault Set.empty lbl liveness

      -- the stack from the base to updfr_off is off-limits.
      -- our new stack frame contains:
      --   * saved live variables
      --   * the return address [young(C) + 8]
      --   * the args for the call,
      --     which are replaced by the return values at the return
      --     point.

      -- everything up to updfr_off is off-limits
      -- stack1 contains updfr_off, plus everything we need to save
      (stack1, assignments) = allocate dflags updfr_off live stack0

      -- And the Sp at the continuation is:
      --   sm_sp stack1 + ret_args
      cont_stack = stack1{ sm_sp = sm_sp stack1 + ret_args
                         , sm_args = ret_args
                         , sm_ret_off = updfr_off
                         }
-- -----------------------------------------------------------------------------
-- Note [diamond proc point]
--
-- This special case looks for the pattern we get from a typical
-- tagged case expression:
--
-- Sp[young(L1)] = L1
-- if (R1 & 7) != 0 goto L1 else goto L2
-- L2:
-- call [R1] returns to L1
-- L1: live: {y}
-- x = R1
--
-- If we let the generic case handle this, we get
--
-- Sp[-16] = L1
-- if (R1 & 7) != 0 goto L1a else goto L2
-- L2:
-- Sp[-8] = y
-- Sp = Sp - 16
-- call [R1] returns to L1
-- L1a:
-- Sp[-8] = y
-- Sp = Sp - 16
-- goto L1
-- L1:
-- x = R1
--
-- The code for saving the live vars is duplicated in each branch, and
-- furthermore there is an extra jump in the fast path (assuming L1 is
-- a proc point, which it probably is if there is a heap check).
--
-- So to fix this we want to set up the stack frame before the
-- conditional jump. How do we know when to do this, and when it is
-- safe? The basic idea is, when we see the assignment
--
-- Sp[young(L)] = L
--
-- we know that
-- * we are definitely heading for L
-- * there can be no more reads from another stack area, because young(L)
-- overlaps with it.
--
-- We don't necessarily know that everything live at L is live now
-- (some might be assigned between here and the jump to L). So we
-- simplify and only do the optimisation when we see
--
-- (1) a block containing an assignment of a return address L
-- (2) ending in a branch where one (and only) continuation goes to L,
-- and no other continuations go to proc points.
--
-- then we allocate the stack frame for L at the end of the block,
-- before the branch.
--
-- We could generalise (2), but that would make it a bit more
-- complicated to handle, and this currently catches the common case.
-- | Look through the middle nodes for a store of a return address into
-- a Young area -- the signature of a frame being set up for an upcoming
-- call.  Returns the label of that future continuation, if any.
-- See Note [diamond proc point].
futureContinuation :: Block CmmNode O O -> Maybe BlockId
futureContinuation middle = foldBlockNodesB spot middle Nothing
  where
    spot :: CmmNode a b -> Maybe BlockId -> Maybe BlockId
    spot (CmmStore (CmmStackSlot (Young l) _) (CmmLit (CmmBlock _))) _ = Just l
    spot _ acc = acc
-- -----------------------------------------------------------------------------
-- Saving live registers
-- | Given a set of live registers and a StackMap, save all the registers
-- on the stack and return the new StackMap and the assignments to do
-- the saving.
--
allocate :: DynFlags -> ByteOff -> LocalRegSet -> StackMap
         -> (StackMap, [CmmNode O O])
allocate dflags ret_off live stackmap@StackMap{ sm_sp = sp0
                                              , sm_regs = regs0 }
 =
   -- we only have to save regs that are not already in a slot
   let to_save = filter (not . (`elemUFM` regs0)) (Set.elems live)
       regs1   = filterUFM (\(r,_) -> elemRegSet r live) regs0
   in

   -- make a map of the stack: a list of slots from the *top* of the
   -- frame downwards (hence the 'reverse'), each marked Occupied or
   -- Empty.
   let stack = reverse $ Array.elems $
               accumArray (\_ x -> x) Empty (1, toWords dflags (max sp0 ret_off)) $
                 ret_words ++ live_words
            where ret_words =
                   [ (x, Occupied)
                   | x <- [ 1 .. toWords dflags ret_off] ]
                  live_words =
                   [ (toWords dflags x, Occupied)
                   | (r,off) <- nonDetEltsUFM regs1,
                     -- See Note [Unique Determinism and code generation]
                     let w = localRegBytes dflags r,
                     x <- [ off, off - wORD_SIZE dflags .. off - w + 1] ]
   in

   -- Pass over the stack: find slots to save all the new live variables,
   -- choosing the oldest slots first (hence a foldr).
   let
       save slot ([], stack, n, assigs, regs) -- no more regs to save
          = ([], slot:stack, plusW dflags n 1, assigs, regs)

       save slot (to_save, stack, n, assigs, regs)
          = case slot of
               Occupied ->  (to_save, Occupied:stack, plusW dflags n 1, assigs, regs)
               Empty
                 | Just (stack', r, to_save') <-
                       select_save to_save (slot:stack)
                 -> let assig = CmmStore (CmmStackSlot Old n')
                                         (CmmReg (CmmLocal r))
                        n' = plusW dflags n 1
                   in
                        (to_save', stack', n', assig : assigs, (r,(r,n')):regs)

                 | otherwise
                 -> (to_save, slot:stack, plusW dflags n 1, assigs, regs)

       -- we should do better here: right now we'll fit the smallest first,
       -- but it would make more sense to fit the biggest first.
       select_save :: [LocalReg] -> [StackSlot]
                   -> Maybe ([StackSlot], LocalReg, [LocalReg])
       select_save regs stack = go regs []
         where go []     _no_fit = Nothing
               go (r:rs) no_fit
                 | Just rest <- dropEmpty words stack
                 = Just (replicate words Occupied ++ rest, r, rs++no_fit)
                 | otherwise
                 = go rs (r:no_fit)
                 where words = localRegWords dflags r

       -- fill in empty slots as much as possible
       (still_to_save, save_stack, n, save_assigs, save_regs)
          = foldr save (to_save, [], 0, [], []) stack

       -- push any remaining live vars on the stack
       (push_sp, push_assigs, push_regs)
          = foldr push (n, [], []) still_to_save
          where
              push r (n, assigs, regs)
                = (n', assig : assigs, (r,(r,n')) : regs)
                where
                  n' = n + localRegBytes dflags r
                  assig = CmmStore (CmmStackSlot Old n')
                                   (CmmReg (CmmLocal r))

       -- If nothing was pushed, trailing Empty slots at the frame top
       -- can be trimmed off.
       trim_sp
          | not (null push_regs) = push_sp
          | otherwise
          = plusW dflags n (- length (takeWhile isEmpty save_stack))

       final_regs = regs1 `addListToUFM` push_regs
                          `addListToUFM` save_regs

   in
  -- XXX should be an assert
   if ( n /= max sp0 ret_off ) then pprPanic "allocate" (ppr n <+> ppr sp0 <+> ppr ret_off) else

   if (trim_sp .&. (wORD_SIZE dflags - 1)) /= 0 then pprPanic "allocate2" (ppr trim_sp <+> ppr final_regs <+> ppr push_sp) else

   ( stackmap { sm_regs = final_regs , sm_sp = trim_sp }
   , push_assigs ++ save_assigs )
-- -----------------------------------------------------------------------------
-- Manifesting Sp
-- | Manifest Sp: turn all the CmmStackSlots into CmmLoads from Sp. The
-- block looks like this:
--
-- middle_pre -- the middle nodes
-- Sp = Sp + sp_off -- Sp adjustment goes here
-- last -- the last node
--
-- And we have some extra blocks too (that don't contain Sp adjustments)
--
-- The adjustment for middle_pre will be different from that for
-- middle_post, because the Sp adjustment intervenes.
--
manifestSp
   :: DynFlags
   -> LabelMap StackMap  -- StackMaps for other blocks
   -> StackMap           -- StackMap for this block
   -> ByteOff            -- Sp on entry to the block
   -> ByteOff            -- SpHigh
   -> CmmNode C O        -- first node
   -> [CmmNode O O]      -- middle
   -> ByteOff            -- sp_off
   -> CmmNode O C        -- last node
   -> [CmmBlock]         -- new blocks
   -> [CmmBlock]         -- final blocks with Sp manifest

manifestSp dflags stackmaps stack0 sp0 sp_high
           first middle_pre sp_off last fixup_blocks
  = final_block : fixup_blocks'
  where
    area_off = getAreaOff stackmaps

    -- Two rewrites: one for nodes before the Sp adjustment (Sp = sp0)
    -- and one for nodes after it (Sp = sp0 - sp_off).
    adj_pre_sp, adj_post_sp :: CmmNode e x -> CmmNode e x
    adj_pre_sp  = mapExpDeep (areaToSp dflags sp0            sp_high area_off)
    adj_post_sp = mapExpDeep (areaToSp dflags (sp0 - sp_off) sp_high area_off)

    -- Add unwind pseudo-instruction at the beginning of each block to
    -- document Sp level for debugging
    add_initial_unwind block
      | debugLevel dflags > 0
      = CmmUnwind [(Sp, Just sp_unwind)] `blockCons` block
      | otherwise
      = block
      where sp_unwind = CmmRegOff spReg (sp0 - wORD_SIZE dflags)

    -- Add unwind pseudo-instruction right before the Sp adjustment
    -- if there is one.
    add_adj_unwind block
      | debugLevel dflags > 0
      , sp_off /= 0
      = block `blockSnoc` CmmUnwind [(Sp, Just sp_unwind)]
      | otherwise
      = block
      where sp_unwind = CmmRegOff spReg (sp0 - wORD_SIZE dflags - sp_off)

    final_middle = maybeAddSpAdj dflags sp_off
                 . add_adj_unwind
                 . add_initial_unwind
                 . blockFromList
                 . map adj_pre_sp
                 . elimStackStores stack0 stackmaps area_off
                 $ middle_pre

    final_last    = optStackCheck (adj_post_sp last)

    final_block   = blockJoin first final_middle final_last

    -- Fixup blocks run after the Sp adjustment, so use adj_post_sp.
    fixup_blocks' = map (mapBlock3' (id, adj_post_sp, id)) fixup_blocks
-- | The base offset of an Area relative to the Old area: Old starts at
-- zero, while a Young area starts at its owner's frame top minus its
-- incoming args.  Missing StackMap entries are a compiler bug.
getAreaOff :: LabelMap StackMap -> (Area -> StackLoc)
getAreaOff _ Old = 0
getAreaOff stackmaps (Young l) =
    maybe (pprPanic "getAreaOff" (ppr l))
          (\sm -> sm_sp sm - sm_args sm)
          (mapLookup l stackmaps)
-- | Append @Sp = Sp + sp_off@ to the block, unless the offset is zero
-- (no adjustment needed).
maybeAddSpAdj :: DynFlags -> ByteOff -> Block CmmNode O O -> Block CmmNode O O
maybeAddSpAdj dflags sp_off block
  | sp_off == 0 = block
  | otherwise   = block `blockSnoc` bumpSp
  where
    bumpSp = CmmAssign spReg (cmmOffset dflags (CmmReg spReg) sp_off)
{- Note [SP old/young offsets]
Sp(L) is the Sp offset on entry to block L relative to the base of the
OLD area.
SpArgs(L) is the size of the young area for L, i.e. the number of
arguments.
- in block L, each reference to [old + N] turns into
[Sp + Sp(L) - N]
- in block L, each reference to [young(L') + N] turns into
[Sp + Sp(L) - Sp(L') + SpArgs(L') - N]
- be careful with the last node of each block: Sp has already been adjusted
to be Sp + Sp(L) - Sp(L')
-}
-- Rewrite one expression during Sp manifestation.  NB. clause order
-- matters: the stack-check clauses only fire when the earlier patterns
-- do not match.
areaToSp :: DynFlags -> ByteOff -> ByteOff -> (Area -> StackLoc) -> CmmExpr -> CmmExpr

areaToSp dflags sp_old _sp_hwm area_off (CmmStackSlot area n)
  = cmmOffset dflags (CmmReg spReg) (sp_old - area_off area - n)
    -- Replace (CmmStackSlot area n) with an offset from Sp

areaToSp dflags _ sp_hwm _ (CmmLit CmmHighStackMark)
  = mkIntExpr dflags sp_hwm
    -- Replace CmmHighStackMark with the number of bytes of stack used,
    -- the sp_hwm.   See Note [Stack usage] in StgCmmHeap

areaToSp dflags _ _ _ (CmmMachOp (MO_U_Lt _) args)
  | falseStackCheck args
  = zeroExpr dflags
areaToSp dflags _ _ _ (CmmMachOp (MO_U_Ge _) args)
  | falseStackCheck args
  = mkIntExpr dflags 1
    -- Replace a stack-overflow test that cannot fail with a no-op
    -- See Note [Always false stack check]

areaToSp _ _ _ _ other = other
-- | Determine whether a stack check cannot fail.
falseStackCheck :: [CmmExpr] -> Bool
-- Matches the comparison operands @(Sp + x_off) - y < SpLim@ (or >=);
-- since SpLim <= Sp, the check cannot fail when x_off >= y.
-- See Note [Always false stack check].
falseStackCheck [ CmmMachOp (MO_Sub _)
                    [ CmmRegOff (CmmGlobal Sp) x_off
                    , CmmLit (CmmInt y_lit _)]
                , CmmReg (CmmGlobal SpLim)]
  = fromIntegral x_off >= y_lit
falseStackCheck _ = False
-- Note [Always false stack check]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- We can optimise stack checks of the form
--
-- if ((Sp + x) - y < SpLim) then .. else ..
--
-- where are non-negative integer byte offsets. Since we know that
-- SpLim <= Sp (remember the stack grows downwards), this test must
-- yield False if (x >= y), so we can rewrite the comparison to False.
-- A subsequent sinking pass will later drop the dead code.
-- Optimising this away depends on knowing that SpLim <= Sp, so it is
-- really the job of the stack layout algorithm, hence we do it now.
--
-- The control flow optimiser may negate a conditional to increase
-- the likelihood of a fallthrough if the branch is not taken. But
-- not every conditional is inverted as the control flow optimiser
-- places some requirements on the predecessors of both branch targets.
-- So we better look for the inverted comparison too.
-- | Collapse a conditional branch whose predicate is a constant
-- integer literal (as produced by 'areaToSp' for always-false stack
-- checks) into an unconditional branch.
-- See Note [Always false stack check].
optStackCheck :: CmmNode O C -> CmmNode O C
optStackCheck (CmmCondBranch (CmmLit (CmmInt 0 _)) _true false _) = CmmBranch false
optStackCheck (CmmCondBranch (CmmLit (CmmInt _ _)) true _false _) = CmmBranch true
optStackCheck other = other
-- -----------------------------------------------------------------------------
-- | Eliminate stores of the form
--
-- Sp[area+n] = r
--
-- when we know that r is already in the same slot as Sp[area+n]. We
-- could do this in a later optimisation pass, but that would involve
-- a separate analysis and we already have the information to hand
-- here. It helps clean up some extra stack stores in common cases.
--
-- Note that we may have to modify the StackMap as we walk through the
-- code using procMiddle, since an assignment to a variable in the
-- StackMap will invalidate its mapping there.
--
elimStackStores :: StackMap
                -> LabelMap StackMap
                -> (Area -> ByteOff)
                -> [CmmNode O O]
                -> [CmmNode O O]
-- Walk the nodes, dropping stores that would put a register into the
-- very slot the StackMap says it already occupies.  The StackMap is
-- re-threaded with 'procMiddle' on every kept node, since assignments
-- can invalidate recorded slots.
elimStackStores stackmap stackmaps area_off nodes
  = go stackmap nodes
  where
    go _stackmap [] = []
    go stackmap (n:ns)
     = case n of
         CmmStore (CmmStackSlot area m) (CmmReg (CmmLocal r))
            | Just (_,off) <- lookupUFM (sm_regs stackmap) r
            , area_off area + m == off
            -> go stackmap ns           -- already there: drop the store
         _otherwise
            -> n : go (procMiddle stackmaps n stackmap) ns
-- -----------------------------------------------------------------------------
-- Update info tables to include stack liveness
-- | Fill in the stack liveness of every StackRep info table in a proc,
-- derived from the final StackMaps.  Non-proc decls pass through.
setInfoTableStackMap :: DynFlags -> LabelMap StackMap -> CmmDecl -> CmmDecl
setInfoTableStackMap dflags stackmaps (CmmProc top_info@TopInfo{..} l v g)
  = CmmProc top_info{ info_tbls = mapMapWithKey fix_info info_tbls } l v g
  where
    fix_info lbl info_tbl@CmmInfoTable{ cit_rep = StackRep _ } =
       info_tbl { cit_rep = StackRep (get_liveness lbl) }
    fix_info _ other = other

    get_liveness :: BlockId -> Liveness
    get_liveness lbl
      = case mapLookup lbl stackmaps of
          Nothing -> pprPanic "setInfoTableStackMap" (ppr lbl <+> ppr info_tbls)
          Just sm -> stackMapToLiveness dflags sm

setInfoTableStackMap _ _ d = d
-- | Convert a StackMap into per-word liveness for an info table: every
-- word defaults to dead (True), and words holding a GC-pointer register
-- are marked live (False).
stackMapToLiveness :: DynFlags -> StackMap -> Liveness
stackMapToLiveness dflags StackMap{..} =
   reverse $ Array.elems $
        accumArray (\_ x -> x) True (toWords dflags sm_ret_off + 1,
                                     toWords dflags (sm_sp - sm_args)) live_words
   where
     live_words =  [ (toWords dflags off, False)
                   | (r,off) <- nonDetEltsUFM sm_regs
                   , isGcPtrType (localRegType r) ]
                   -- See Note [Unique Determinism and code generation]
-- -----------------------------------------------------------------------------
-- Pass 2
-- -----------------------------------------------------------------------------
-- | Pass 2: run a backward liveness rewrite over the graph, inserting
-- reload instructions at the start of every proc-point block for the
-- registers that are live there.
insertReloadsAsNeeded
    :: DynFlags
    -> ProcPointSet
    -> LabelMap StackMap
    -> BlockId
    -> [CmmBlock]
    -> UniqSM [CmmBlock]
insertReloadsAsNeeded dflags procpoints final_stackmaps entry blocks = do
    toBlockList . fst <$>
        rewriteCmmBwd liveLattice rewriteCC (ofBlockList entry blocks) mapEmpty
  where
    rewriteCC :: RewriteFun CmmLocalLive
    rewriteCC (BlockCC e_node middle0 x_node) fact_base0 = do
        let entry_label = entryLabel e_node
            stackmap = case mapLookup entry_label final_stackmaps of
                Just sm -> sm
                Nothing -> panic "insertReloadsAsNeeded: rewriteCC: stackmap"

            -- Merge the liveness from successor blocks and analyse the last
            -- node.
            joined = gen_kill dflags x_node $!
                         joinOutFacts liveLattice x_node fact_base0
            -- What is live at the start of middle0.
            live_at_middle0 = foldNodesBwdOO (gen_kill dflags) middle0 joined

            -- If this is a procpoint we need to add the reloads, but only if
            -- they're actually live. Furthermore, nothing is live at the entry
            -- to a proc point.
            (middle1, live_with_reloads)
                | entry_label `setMember` procpoints
                = let reloads = insertReloads dflags stackmap live_at_middle0
                  in (foldr blockCons middle0 reloads, emptyRegSet)
                | otherwise
                = (middle0, live_at_middle0)

            -- Final liveness for this block.
            !fact_base2 = mapSingleton entry_label live_with_reloads

        return (BlockCC e_node middle1 x_node, fact_base2)
-- | One reload assignment per live register that has a slot in the
-- StackMap: load it back from its manifest Sp offset.
insertReloads :: DynFlags -> StackMap -> CmmLocalLive -> [CmmNode O O]
insertReloads dflags stackmap live =
     [ CmmAssign (CmmLocal reg)
                 -- This cmmOffset basically corresponds to manifesting
                 -- @CmmStackSlot Old sp_off@, see Note [SP old/young offsets]
                 (CmmLoad (cmmOffset dflags (CmmReg spReg) (sp_off - reg_off))
                          (localRegType reg))
     | (reg, reg_off) <- stackSlotRegs stackmap
     , reg `elemRegSet` live
     ]
   where
     sp_off = sm_sp stackmap
-- -----------------------------------------------------------------------------
-- Lowering safe foreign calls
{-
Note [Lower safe foreign calls]
We start with
Sp[young(L1)] = L1
,-----------------------
| r1 = foo(x,y,z) returns to L1
'-----------------------
L1:
R1 = r1 -- copyIn, inserted by mkSafeCall
...
the stack layout algorithm will arrange to save and reload everything
live across the call. Our job now is to expand the call so we get
Sp[young(L1)] = L1
,-----------------------
| SAVE_THREAD_STATE()
| token = suspendThread(BaseReg, interruptible)
| r = foo(x,y,z)
| BaseReg = resumeThread(token)
| LOAD_THREAD_STATE()
| R1 = r -- copyOut
| jump Sp[0]
'-----------------------
L1:
r = R1 -- copyIn, inserted by mkSafeCall
...
Note the copyOut, which saves the results in the places that L1 is
expecting them (see Note [safe foreign call convention]). Note also
that safe foreign call is replace by an unsafe one in the Cmm graph.
-}
-- | Expand a block ending in a safe foreign call into the
-- suspendThread / unsafe call / resumeThread sequence, followed by a
-- jump through the top stack frame.  See Note [Lower safe foreign
-- calls].  Blocks with any other exit node are returned unchanged.
lowerSafeForeignCall :: DynFlags -> CmmBlock -> UniqSM CmmBlock
lowerSafeForeignCall dflags block
  | (entry@(CmmEntry _ tscp), middle, CmmForeignCall { .. }) <- blockSplit block
  = do
    -- Both 'id' and 'new_base' are KindNonPtr because they're
    -- RTS-only objects and are not subject to garbage collection
    id <- newTemp (bWord dflags)
    new_base <- newTemp (cmmRegType dflags (CmmGlobal BaseReg))
    let (caller_save, caller_load) = callerSaveVolatileRegs dflags
    save_state_code <- saveThreadState dflags
    load_state_code <- loadThreadState dflags
    let suspend = save_state_code  <*>
                  caller_save <*>
                  mkMiddle (callSuspendThread dflags id intrbl)
        midCall = mkUnsafeCall tgt res args
        resume  = mkMiddle (callResumeThread new_base id) <*>
                  -- Assign the result to BaseReg: we
                  -- might now have a different Capability!
                  mkAssign (CmmGlobal BaseReg) (CmmReg (CmmLocal new_base)) <*>
                  caller_load <*>
                  load_state_code

        (_, regs, copyout) =
             copyOutOflow dflags NativeReturn Jump (Young succ)
                            (map (CmmReg . CmmLocal) res)
                            ret_off []

        -- NB. after resumeThread returns, the top-of-stack probably contains
        -- the stack frame for succ, but it might not: if the current thread
        -- received an exception during the call, then the stack might be
        -- different.  Hence we continue by jumping to the top stack frame,
        -- not by jumping to succ.
        jump = CmmCall { cml_target    = entryCode dflags $
                                         CmmLoad (CmmReg spReg) (bWord dflags)
                       , cml_cont      = Just succ
                       , cml_args_regs = regs
                       , cml_args      = widthInBytes (wordWidth dflags)
                       , cml_ret_args  = ret_args
                       , cml_ret_off   = ret_off }

    graph' <- lgraphOfAGraph ( suspend <*>
                               midCall <*>
                               resume  <*>
                               copyout <*>
                               mkLast jump, tscp)

    case toBlockList graph' of
      [one] -> let (_, middle', last) = blockSplit one
               in return (blockJoin entry (middle `blockAppend` middle') last)
      _ -> panic "lowerSafeForeignCall0"

  -- Block doesn't end in a safe foreign call:
  | otherwise = return block
-- | A CmmExpr for an external (RTS) function label, by name.
foreignLbl :: FastString -> CmmExpr
foreignLbl name = CmmLit (CmmLabel lbl)
  where
    lbl = mkForeignLabel name Nothing ForeignLabelInExternalPackage IsFunction
-- Emit the unsafe foreign call @id = suspendThread(BaseReg, intrbl)@,
-- binding the RTS token to 'id'; 'intrbl' says whether the suspended
-- call is interruptible.
callSuspendThread :: DynFlags -> LocalReg -> Bool -> CmmNode O O
callSuspendThread dflags id intrbl =
  CmmUnsafeForeignCall
        (ForeignTarget (foreignLbl (fsLit "suspendThread"))
             (ForeignConvention CCallConv [AddrHint, NoHint] [AddrHint] CmmMayReturn))
        [id] [CmmReg (CmmGlobal BaseReg), mkIntExpr dflags (fromEnum intrbl)]
-- Emit the unsafe foreign call @new_base = resumeThread(id)@, where
-- 'id' is the token returned by the matching suspendThread call.
callResumeThread :: LocalReg -> LocalReg -> CmmNode O O
callResumeThread new_base id =
  CmmUnsafeForeignCall
        (ForeignTarget (foreignLbl (fsLit "resumeThread"))
             (ForeignConvention CCallConv [AddrHint] [AddrHint] CmmMayReturn))
        [new_base] [CmmReg (CmmLocal id)]
-- -----------------------------------------------------------------------------
-- | Add a word count to a byte offset, scaling by the platform word size.
plusW :: DynFlags -> ByteOff -> WordOff -> ByteOff
plusW dflags bytes nwords = bytes + nwords * wORD_SIZE dflags
-- One word of the stack frame during allocation.
data StackSlot = Occupied | Empty
     -- Occupied: a return address or part of an update frame
     -- Empty: a free word ('allocate' may fill it with a saved register)

instance Outputable StackSlot where
  ppr Occupied = text "XXX"
  ppr Empty    = text "---"
-- | Drop exactly @n@ leading Empty slots; Nothing if any of the first
-- @n@ slots is Occupied (or the list is too short).
dropEmpty :: WordOff -> [StackSlot] -> Maybe [StackSlot]
dropEmpty n slots
  | n == 0                = Just slots
  | Empty : rest <- slots = dropEmpty (n - 1) rest
  | otherwise             = Nothing
-- | Is this slot free?
isEmpty :: StackSlot -> Bool
isEmpty slot = case slot of
  Empty -> True
  _     -> False
-- | Size of a local register on the stack, rounded up to whole words.
localRegBytes :: DynFlags -> LocalReg -> ByteOff
localRegBytes dflags =
    roundUpToWords dflags . widthInBytes . typeWidth . localRegType
-- | Size of a local register on the stack, in words.
localRegWords :: DynFlags -> LocalReg -> WordOff
localRegWords dflags reg = toWords dflags (localRegBytes dflags reg)
-- | Convert a byte offset to a word offset (truncating division by the
-- platform word size).
toWords :: DynFlags -> ByteOff -> WordOff
toWords dflags nbytes = nbytes `quot` wORD_SIZE dflags
-- | All (register, slot) pairs recorded in a StackMap.
-- See Note [Unique Determinism and code generation]
stackSlotRegs :: StackMap -> [(LocalReg, StackLoc)]
stackSlotRegs = nonDetEltsUFM . sm_regs
| ezyang/ghc | compiler/cmm/CmmLayoutStack.hs | bsd-3-clause | 47,277 | 1 | 25 | 13,763 | 7,858 | 4,190 | 3,668 | 562 | 6 |
-- |
-- Module : Network.TLS.X509
-- License : BSD-style
-- Maintainer : Vincent Hanquez <[email protected]>
-- Stability : experimental
-- Portability : unknown
--
-- X509 helpers
--
module Network.TLS.X509
( CertificateChain(..)
, Certificate(..)
, SignedCertificate
, getCertificate
, isNullCertificateChain
, getCertificateChainLeaf
, CertificateRejectReason(..)
, CertificateUsage(..)
, CertificateStore
, ValidationCache
, exceptionValidationCache
, validateDefault
, FailedReason
, ServiceID
, wrapCertificateChecks
) where
import Data.X509
import Data.X509.Validation
import Data.X509.CertificateStore
-- | True when the chain carries no certificates at all.
isNullCertificateChain :: CertificateChain -> Bool
isNullCertificateChain (CertificateChain certs) = null certs
-- | The leaf (end-entity) certificate: the first element of the chain.
--
-- Partial: calling this on an empty chain raises an error.  Guard with
-- 'isNullCertificateChain' first.
getCertificateChainLeaf :: CertificateChain -> SignedExact Certificate
getCertificateChainLeaf (CertificateChain []) =
    -- qualify the error so the failure is traceable to this function
    error "Network.TLS.X509.getCertificateChainLeaf: empty certificate chain"
getCertificateChainLeaf (CertificateChain (x:_)) = x
-- | Certificate and Chain rejection reason
data CertificateRejectReason =
          CertificateRejectExpired       -- ^ outside its validity period
                                         --   (also used for not-yet-valid,
                                         --   see 'wrapCertificateChecks')
        | CertificateRejectRevoked       -- ^ certificate has been revoked
        | CertificateRejectUnknownCA     -- ^ issuing CA is not trusted
        | CertificateRejectOther String  -- ^ any other reason, described
        deriving (Show,Eq)
-- | Certificate Usage callback possible returns values.
data CertificateUsage =
          CertificateUsageAccept                         -- ^ usage of certificate accepted
        | CertificateUsageReject CertificateRejectReason -- ^ usage of certificate rejected,
                                                         --   with the reason why
        deriving (Show,Eq)
-- | Translate the validation failures from "Data.X509.Validation" into
-- a 'CertificateUsage'.  Validity-period failures (expired or not yet
-- valid) both map to 'CertificateRejectExpired'; anything unrecognised
-- becomes 'CertificateRejectOther' carrying the shown failure list.
wrapCertificateChecks :: [FailedReason] -> CertificateUsage
wrapCertificateChecks [] = CertificateUsageAccept
wrapCertificateChecks failures
    | Expired   `elem` failures = CertificateUsageReject CertificateRejectExpired
    | InFuture  `elem` failures = CertificateUsageReject CertificateRejectExpired
    | UnknownCA `elem` failures = CertificateUsageReject CertificateRejectUnknownCA
    | otherwise                 =
        CertificateUsageReject (CertificateRejectOther (show failures))
| beni55/hs-tls | core/Network/TLS/X509.hs | bsd-3-clause | 1,995 | 0 | 9 | 396 | 336 | 195 | 141 | 41 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Ord
-- Copyright : (c) The University of Glasgow 2005
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : stable
-- Portability : portable
--
-- Orderings
--
-----------------------------------------------------------------------------
module Data.Ord (
Ord(..),
Ordering(..),
Down(..),
comparing,
) where
import GHC.Base
import GHC.Show
import GHC.Read
-- |
-- > comparing p x y = compare (p x) (p y)
--
-- Useful combinator for use in conjunction with the @xxxBy@ family
-- of functions from "Data.List", for example:
--
-- > ... sortBy (comparing fst) ...
comparing :: (Ord a) => (b -> a) -> b -> b -> Ordering
comparing key lhs rhs = compare (key lhs) (key rhs)
-- | The 'Down' type allows you to reverse sort order conveniently. A value of type
-- @'Down' a@ contains a value of type @a@ (represented as @'Down' a@).
-- If @a@ has an @'Ord'@ instance associated with it then comparing two
-- values thus wrapped will give you the opposite of their normal sort order.
-- This is particularly useful when sorting in generalised list comprehensions,
-- as in: @then sortWith by 'Down' x@
--
-- Provides 'Show' and 'Read' instances (/since: 4.7.0.0/).
--
-- /Since: 4.6.0.0/
newtype Down a = Down a deriving (Eq, Show, Read)  -- NB: Ord is deliberately not derived; the instance below flips 'compare'
-- | Comparing two 'Down' values swaps the operands, reversing the
-- underlying type's ordering.
instance Ord a => Ord (Down a) where
    compare (Down lhs) (Down rhs) = compare rhs lhs
| frantisekfarka/ghc-dsi | libraries/base/Data/Ord.hs | bsd-3-clause | 1,568 | 0 | 8 | 293 | 215 | 135 | 80 | 15 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sl-SI">
<title>Passive Scan Rules | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | ccgreen13/zap-extensions | src/org/zaproxy/zap/extension/pscanrules/resources/help_sl_SI/helpset_sl_SI.hs | apache-2.0 | 980 | 80 | 66 | 161 | 417 | 211 | 206 | -1 | -1 |
{-# LANGUAGE RankNTypes, TypeInType #-}
module T11640 where
import Data.Kind
-- Empty GADT whose kind quantifies each argument's kind separately;
-- this compile-only test (#11640) exercises that kind signature.
data HEq :: forall k1. k1 -> forall k2. k2 -> Type where
| ezyang/ghc | testsuite/tests/polykinds/T11640.hs | bsd-3-clause | 137 | 0 | 8 | 26 | 35 | 22 | 13 | -1 | -1 |
{-
(c) The AQUA Project, Glasgow University, 1993-1998
\section[SimplMonad]{The simplifier Monad}
-}
{-# LANGUAGE CPP #-}
module SimplEnv (
InId, InBind, InExpr, InAlt, InArg, InType, InBndr, InVar,
OutId, OutTyVar, OutBind, OutExpr, OutAlt, OutArg, OutType, OutBndr, OutVar,
InCoercion, OutCoercion,
-- The simplifier mode
setMode, getMode, updMode,
-- Environments
SimplEnv(..), StaticEnv, pprSimplEnv, -- Temp not abstract
mkSimplEnv, extendIdSubst, SimplEnv.extendTvSubst, SimplEnv.extendCvSubst,
zapSubstEnv, setSubstEnv,
getInScope, setInScope, setInScopeSet, modifyInScope, addNewInScopeIds,
getSimplRules,
SimplSR(..), mkContEx, substId, lookupRecBndr, refineFromInScope,
simplNonRecBndr, simplRecBndrs,
simplBinder, simplBinders,
substTy, substTyVar, getTvSubst,
getCvSubst, substCo, substCoVar,
-- Floats
Floats, emptyFloats, isEmptyFloats, addNonRec, addFloats, extendFloats,
wrapFloats, setFloats, zapFloats, addRecFloats, mapFloats,
doFloatFromRhs, getFloatBinds
) where
#include "HsVersions.h"
import SimplMonad
import CoreMonad ( SimplifierMode(..) )
import CoreSyn
import CoreUtils
import Var
import VarEnv
import VarSet
import OrdList
import Id
import MkCore ( mkWildValBinder )
import TysWiredIn
import qualified Type
import Type hiding ( substTy, substTyVarBndr, substTyVar )
import qualified Coercion
import Coercion hiding ( substCo, substTy, substCoVar, substCoVarBndr, substTyVarBndr )
import BasicTypes
import MonadUtils
import Outputable
import FastString
import Util
import Data.List
{-
************************************************************************
* *
\subsection[Simplify-types]{Type declarations}
* *
************************************************************************
-}
-- "In" things come from the simplifier's input (not yet cloned or
-- substituted); "Out" things belong to its output.

type InBndr     = CoreBndr
type InVar      = Var                   -- Not yet cloned
type InId       = Id                    -- Not yet cloned
type InType     = Type                  -- Ditto
type InBind     = CoreBind
type InExpr     = CoreExpr
type InAlt      = CoreAlt
type InArg      = CoreArg
type InCoercion = Coercion

type OutBndr     = CoreBndr
type OutVar      = Var                  -- Cloned
type OutId       = Id                   -- Cloned
type OutTyVar    = TyVar                -- Cloned
type OutType     = Type                 -- Cloned
type OutCoercion = Coercion
type OutBind     = CoreBind
type OutExpr     = CoreExpr
type OutAlt      = CoreAlt
type OutArg      = CoreArg
{-
************************************************************************
* *
\subsubsection{The @SimplEnv@ type}
* *
************************************************************************
-}
-- | The simplifier's environment: a static (lexically scoped) part,
-- holding the mode and the three substitutions, and a dynamic part,
-- holding the in-scope set and the bindings floated so far.
data SimplEnv
  = SimplEnv {
     ----------- Static part of the environment -----------
     -- Static in the sense of lexically scoped,
     -- wrt the original expression

        seMode      :: SimplifierMode,

        -- The current substitution
        seTvSubst   :: TvSubstEnv,      -- InTyVar |--> OutType
        seCvSubst   :: CvSubstEnv,      -- InCoVar |--> OutCoercion
        seIdSubst   :: SimplIdSubst,    -- InId    |--> OutExpr

     ----------- Dynamic part of the environment -----------
     -- Dynamic in the sense of describing the setup where
     -- the expression finally ends up

        -- The current set of in-scope variables
        -- They are all OutVars, and all bound in this module
        seInScope   :: InScopeSet,      -- OutVars only
                -- Includes all variables bound by seFloats
        seFloats    :: Floats
                -- See Note [Simplifier floats]
    }
type StaticEnv = SimplEnv       -- Just the static part is relevant
                                -- (seMode and the three substitutions)
pprSimplEnv :: SimplEnv -> SDoc
-- Used for debugging; selective: shows the type and Id substitutions
-- and the in-scope set (Ids with their unfoldings); the coercion
-- substitution and floats are not printed.
pprSimplEnv env
  = vcat [ptext (sLit "TvSubst:") <+> ppr (seTvSubst env),
          ptext (sLit "IdSubst:") <+> ppr (seIdSubst env),
          ptext (sLit "InScope:") <+> vcat (map ppr_one in_scope_vars)
    ]
  where
    in_scope_vars = varEnvElts (getInScopeVars (seInScope env))
    ppr_one v | isId v    = ppr v <+> ppr (idUnfolding v)
              | otherwise = ppr v
type SimplIdSubst = IdEnv SimplSR       -- IdId |--> OutExpr
        -- See Note [Extending the Subst] in CoreSubst

-- | What an in-scope InId is mapped to by the substitution.
data SimplSR
  = DoneEx OutExpr              -- Completed term
  | DoneId OutId                -- Completed term variable
  | ContEx TvSubstEnv           -- A suspended substitution:
           CvSubstEnv           --   the environments that were in force
           SimplIdSubst         --   when the InExpr was captured
           InExpr
-- Debug printing only; a ContEx's suspended substitutions are elided.
instance Outputable SimplSR where
  ppr (DoneEx e) = ptext (sLit "DoneEx") <+> ppr e
  ppr (DoneId v) = ptext (sLit "DoneId") <+> ppr v
  ppr (ContEx _tv _cv _id e) = vcat [ptext (sLit "ContEx") <+> ppr e {-,
                                     ppr (filter_env tv), ppr (filter_env id) -}]
        -- where
        -- fvs = exprFreeVars e
        -- filter_env env = filterVarEnv_Directly keep env
        -- keep uniq _ = uniq `elemUFM_Directly` fvs
{-
Note [SimplEnv invariants]
~~~~~~~~~~~~~~~~~~~~~~~~~~
seInScope:
The in-scope part of Subst includes *all* in-scope TyVars and Ids
The elements of the set may have better IdInfo than the
occurrences of in-scope Ids, and (more important) they will
have a correctly-substituted type. So we use a lookup in this
set to replace occurrences
The Ids in the InScopeSet are replete with their Rules,
and as we gather info about the unfolding of an Id, we replace
it in the in-scope set.
The in-scope set is actually a mapping OutVar -> OutVar, and
in case expressions we sometimes bind
seIdSubst:
The substitution is *apply-once* only, because InIds and OutIds
can overlap.
For example, we generally omit mappings
a77 -> a77
from the substitution, when we decide not to clone a77, but it's quite
legitimate to put the mapping in the substitution anyway.
Furthermore, consider
let x = case k of I# x77 -> ... in
let y = case k of I# x77 -> ... in ...
and suppose the body is strict in both x and y. Then the simplifier
will pull the first (case k) to the top; so the second (case k) will
cancel out, mapping x77 to, well, x77! But one is an in-Id and the
other is an out-Id.
  Of course, the substitution *must* be applied!  Things in its domain
simply aren't necessarily bound in the result.
* substId adds a binding (DoneId new_id) to the substitution if
the Id's unique has changed
Note, though that the substitution isn't necessarily extended
if the type of the Id changes. Why not? Because of the next point:
* We *always, always* finish by looking up in the in-scope set
any variable that doesn't get a DoneEx or DoneVar hit in the substitution.
Reason: so that we never finish up with a "old" Id in the result.
An old Id might point to an old unfolding and so on... which gives a space
leak.
[The DoneEx and DoneVar hits map to "new" stuff.]
* It follows that substExpr must not do a no-op if the substitution is empty.
substType is free to do so, however.
* When we come to a let-binding (say) we generate new IdInfo, including an
unfolding, attach it to the binder, and add this newly adorned binder to
the in-scope set. So all subsequent occurrences of the binder will get
mapped to the full-adorned binder, which is also the one put in the
binding site.
* The in-scope "set" usually maps x->x; we use it simply for its domain.
But sometimes we have two in-scope Ids that are synomyms, and should
map to the same target: x->x, y->x. Notably:
case y of x { ... }
That's why the "set" is actually a VarEnv Var
-}
-- | A fresh environment for the given mode: empty substitutions, no
-- floats, and an in-scope set seeded with a wild-card binder (see
-- Note [WildCard binders]).
mkSimplEnv :: SimplifierMode -> SimplEnv
mkSimplEnv mode
  = SimplEnv { seMode = mode
             , seInScope = init_in_scope
             , seFloats = emptyFloats
             , seTvSubst = emptyVarEnv
             , seCvSubst = emptyVarEnv
             , seIdSubst = emptyVarEnv }
        -- The top level "enclosing CC" is "SUBSUMED".

init_in_scope :: InScopeSet
init_in_scope = mkInScopeSet (unitVarSet (mkWildValBinder unitTy))
        -- See Note [WildCard binders]
-- See Note [WildCard binders]
{-
Note [WildCard binders]
~~~~~~~~~~~~~~~~~~~~~~~
The program to be simplified may have wild binders
case e of wild { p -> ... }
We want to *rename* them away, so that there are no
occurrences of 'wild-id' (with wildCardKey). The easy
way to do that is to start of with a representative
Id in the in-scope set
There can be be *occurrences* of wild-id. For example,
MkCore.mkCoreApp transforms
e (a /# b) --> case (a /# b) of wild { DEFAULT -> e wild }
This is ok provided 'wild' isn't free in 'e', and that's the delicate
thing. Generally, you want to run the simplifier to get rid of the
wild-ids before doing much else.
It's a very dark corner of GHC. Maybe it should be cleaned up.
-}
-- | Read the simplifier mode out of the environment.
getMode :: SimplEnv -> SimplifierMode
getMode = seMode

-- | Install the given simplifier mode wholesale.
setMode :: SimplifierMode -> SimplEnv -> SimplEnv
setMode new_mode env = env { seMode = new_mode }

-- | Apply a function to the current simplifier mode.
updMode :: (SimplifierMode -> SimplifierMode) -> SimplEnv -> SimplEnv
updMode f env = env { seMode = f (seMode env) }
---------------------
-- Extend the Id substitution.  The assertion (debug builds only)
-- checks that the variable is a term Id, not a coercion variable;
-- coercion variables go through extendCvSubst instead.
extendIdSubst :: SimplEnv -> Id -> SimplSR -> SimplEnv
extendIdSubst env@(SimplEnv {seIdSubst = subst}) var res
  = ASSERT2( isId var && not (isCoVar var), ppr var )
    env {seIdSubst = extendVarEnv subst var res}
-- | Record that a type variable maps to the given type.
extendTvSubst :: SimplEnv -> TyVar -> Type -> SimplEnv
extendTvSubst env tv ty
  = env { seTvSubst = extendVarEnv (seTvSubst env) tv ty }

-- | Record that a coercion variable maps to the given coercion.
extendCvSubst :: SimplEnv -> CoVar -> Coercion -> SimplEnv
extendCvSubst env cv co
  = env { seCvSubst = extendVarEnv (seCvSubst env) cv co }
---------------------
-- | The current in-scope set.
getInScope :: SimplEnv -> InScopeSet
getInScope = seInScope

-- | Install the given in-scope set.
setInScopeSet :: SimplEnv -> InScopeSet -> SimplEnv
setInScopeSet env scope = env { seInScope = scope }

-- | Take the in-scope set from the second environment, and *zap* the
-- floats of the result.
setInScope :: SimplEnv -> SimplEnv -> SimplEnv
setInScope env donor
  = env { seInScope = seInScope donor
        , seFloats  = emptyFloats }

-- | Take the in-scope set *and* the floats from the second environment.
setFloats :: SimplEnv -> SimplEnv -> SimplEnv
setFloats env donor
  = env { seInScope = seInScope donor
        , seFloats  = seFloats donor }
-- Bring freshly-allocated binders into scope, and remove any stale
-- substitution entries for them.
addNewInScopeIds :: SimplEnv -> [CoreBndr] -> SimplEnv
        -- The new Ids are guaranteed to be freshly allocated
addNewInScopeIds env@(SimplEnv { seInScope = in_scope, seIdSubst = id_subst }) vs
  = env { seInScope = in_scope `extendInScopeSetList` vs,
          seIdSubst = id_subst `delVarEnvList` vs }
        -- Why delete?  Consider
        --      let x = a*b in (x, \x -> x+3)
        -- We add [x |-> a*b] to the substitution, but we must
        -- _delete_ it from the substitution when going inside
        -- the (\x -> ...)!
-- | Replace an already-in-scope binder with this new version of it,
-- which carries more information.
modifyInScope :: SimplEnv -> CoreBndr -> SimplEnv
modifyInScope env v
  = env { seInScope = extendInScopeSet (seInScope env) v }
---------------------
-- | Forget all three substitutions (the in-scope set and floats remain).
zapSubstEnv :: SimplEnv -> SimplEnv
zapSubstEnv env
  = env { seTvSubst = emptyVarEnv
        , seCvSubst = emptyVarEnv
        , seIdSubst = emptyVarEnv }

-- | Install the given three substitutions.
setSubstEnv :: SimplEnv -> TvSubstEnv -> CvSubstEnv -> SimplIdSubst -> SimplEnv
setSubstEnv env tvs cvs ids
  = env { seTvSubst = tvs, seCvSubst = cvs, seIdSubst = ids }

-- | Suspend an InExpr together with the substitutions currently in force.
mkContEx :: SimplEnv -> InExpr -> SimplSR
mkContEx env e = ContEx (seTvSubst env) (seCvSubst env) (seIdSubst env) e
{-
************************************************************************
* *
\subsection{Floats}
* *
************************************************************************
Note [Simplifier floats]
~~~~~~~~~~~~~~~~~~~~~~~~~
The Floats is a bunch of bindings, classified by a FloatFlag.
* All of them satisfy the let/app invariant
Examples
NonRec x (y:ys) FltLifted
Rec [(x,rhs)] FltLifted
  NonRec x* (p:q)      FltOkSpec  -- RHS is WHNF.  Question: why not FltLifted?
NonRec x# (y +# 3) FltOkSpec -- Unboxed, but ok-for-spec'n
NonRec x* (f y) FltCareful -- Strict binding; might fail or diverge
Can't happen:
NonRec x# (a /# b) -- Might fail; does not satisfy let/app
NonRec x# (f y) -- Might diverge; does not satisfy let/app
-}
-- | Bindings floated out so far, together with a flag summarising how
-- far they may safely be floated.
data Floats = Floats (OrdList OutBind) FloatFlag
        -- See Note [Simplifier floats]
-- | How floatable a group of bindings is; combine flags with 'andFF'.
data FloatFlag
  = FltLifted   -- All bindings are lifted and lazy
                --  Hence ok to float to top level, or recursive

  | FltOkSpec   -- All bindings are FltLifted *or*
                --      strict (perhaps because unlifted,
                --      perhaps because of a strict binder),
                --        *and* ok-for-speculation
                --  Hence ok to float out of the RHS
                --  of a lazy non-recursive let binding
                --  (but not to top level, or into a rec group)

  | FltCareful  -- At least one binding is strict (or unlifted)
                --      and not guaranteed cheap
                --      Do not float these bindings out of a lazy let
-- Debug printing only.
instance Outputable Floats where
  ppr (Floats binds ff) = ppr ff $$ ppr (fromOL binds)

instance Outputable FloatFlag where
  ppr FltLifted = ptext (sLit "FltLifted")
  ppr FltOkSpec = ptext (sLit "FltOkSpec")
  ppr FltCareful = ptext (sLit "FltCareful")
-- | Combine the flags of two float groups: the result is the more
-- restrictive of the two ('FltLifted' is the identity).
andFF :: FloatFlag -> FloatFlag -> FloatFlag
andFF f1 f2 = case (f1, f2) of
                (FltCareful, _         ) -> FltCareful
                (FltOkSpec,  FltCareful) -> FltCareful
                (FltOkSpec,  _         ) -> FltOkSpec
                (FltLifted,  other     ) -> other
doFloatFromRhs :: TopLevelFlag -> RecFlag -> Bool -> OutExpr -> SimplEnv -> Bool
-- If you change this function look also at FloatIn.noFloatFromRhs
-- Decide whether the environment's floats may escape the RHS they
-- were collected in; the Bool argument says whether the binder of
-- that RHS is strict.
doFloatFromRhs lvl rec str rhs (SimplEnv {seFloats = Floats fs ff})
  =  not (isNilOL fs) && want_to_float && can_float
  where
     want_to_float = isTopLevel lvl || exprIsCheap rhs || exprIsExpandable rhs
                     -- See Note [Float when cheap or expandable]
     can_float = case ff of
                   FltLifted  -> True
                   FltOkSpec  -> isNotTopLevel lvl && isNonRec rec
                   FltCareful -> isNotTopLevel lvl && isNonRec rec && str
{-
Note [Float when cheap or expandable]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We want to float a let from a let if the residual RHS is
a) cheap, such as (\x. blah)
b) expandable, such as (f b) if f is CONLIKE
But there are
- cheap things that are not expandable (eg \x. expensive)
- expandable things that are not cheap (eg (f b) where b is CONLIKE)
so we must take the 'or' of the two.
-}
-- | No floats at all; 'FltLifted' is the identity of 'andFF'.
emptyFloats :: Floats
emptyFloats = Floats nilOL FltLifted
unitFloat :: OutBind -> Floats
-- This key function constructs a singleton float with the right form,
-- classifying the binding per Note [Simplifier floats].
unitFloat bind = Floats (unitOL bind) (flag bind)
  where
    flag (Rec {})                = FltLifted
    flag (NonRec bndr rhs)
      | not (isStrictId bndr)    = FltLifted   -- Lazy, lifted binding
      | exprOkForSpeculation rhs = FltOkSpec   -- Unlifted, and lifted but ok-for-spec (eg HNF)
      | otherwise                = ASSERT2( not (isUnLiftedType (idType bndr)), ppr bndr )
                                   FltCareful
      -- Unlifted binders can only be let-bound if exprOkForSpeculation holds
addNonRec :: SimplEnv -> OutId -> OutExpr -> SimplEnv
-- Add a non-recursive binding and extend the in-scope set
-- The latter is important; the binder may already be in the
-- in-scope set (although it might also have been created with newId)
-- but it may now have more IdInfo
addNonRec env id rhs
  = id `seq`   -- This seq forces the Id, and hence its IdInfo,
               -- and hence any inner substitutions
    env { seFloats = seFloats env `addFlts` unitFloat (NonRec id rhs),
          seInScope = extendInScopeSet (seInScope env) id }
-- | Add a whole binding to the floats, and bring its binders into scope.
extendFloats :: SimplEnv -> OutBind -> SimplEnv
extendFloats env bind
  = env { seFloats  = seFloats env `addFlts` unitFloat bind
        , seInScope = extendInScopeSetList (seInScope env) (bindersOf bind) }
-- | Append env2's floats to env1's, and take env2's in-scope set
-- (which is the bigger of the two).
addFloats :: SimplEnv -> SimplEnv -> SimplEnv
addFloats env1 env2
  = env1 { seFloats  = seFloats env1 `addFlts` seFloats env2
         , seInScope = seInScope env2 }

-- | Concatenate two float groups, combining their flags with 'andFF'.
addFlts :: Floats -> Floats -> Floats
addFlts (Floats binds1 flag1) (Floats binds2 flag2)
  = Floats (binds1 `appOL` binds2) (flag1 `andFF` flag2)

-- | Throw the floats away (keeping everything else).
zapFloats :: SimplEnv -> SimplEnv
zapFloats env = env { seFloats = emptyFloats }
addRecFloats :: SimplEnv -> SimplEnv -> SimplEnv
-- Flattens the floats from env2 into a single Rec group,
-- prepends the floats from env1, and puts the result back in env2
-- This is all very specific to the way recursive bindings are
-- handled; see Simplify.simplRecBind
-- The assertion checks that only lazy (FltLifted) bindings end up
-- in the Rec group.
addRecFloats env1 env2@(SimplEnv {seFloats = Floats bs ff})
  = ASSERT2( case ff of { FltLifted -> True; _ -> False }, ppr (fromOL bs) )
    env2 {seFloats = seFloats env1 `addFlts` unitFloat (Rec (flattenBinds (fromOL bs)))}
-- | Wrap the floated bindings around an expression; they all satisfy
-- the let/app invariant, so a plain Let per binding does the job.
wrapFloats :: SimplEnv -> OutExpr -> OutExpr
wrapFloats (SimplEnv { seFloats = Floats binds _ }) body
  = foldrOL Let body binds

-- | The floated bindings, outermost first.
getFloatBinds :: SimplEnv -> [CoreBind]
getFloatBinds (SimplEnv { seFloats = Floats binds _ })
  = fromOL binds

-- | Are there no floated bindings at all?
isEmptyFloats :: SimplEnv -> Bool
isEmptyFloats (SimplEnv { seFloats = Floats binds _ })
  = isNilOL binds
-- | Apply a function to every (binder, rhs) pair in the floats,
-- preserving the float flag.
mapFloats :: SimplEnv -> ((Id,CoreExpr) -> (Id,CoreExpr)) -> SimplEnv
mapFloats env f = env { seFloats = Floats (mapOL tweak binds) flag }
  where
    Floats binds flag = seFloats env
    tweak (NonRec b rhs) = case f (b, rhs) of (b', rhs') -> NonRec b' rhs'
    tweak (Rec prs)      = Rec (map f prs)
{-
************************************************************************
* *
Substitution of Vars
* *
************************************************************************
Note [Global Ids in the substitution]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We look up even a global (eg imported) Id in the substitution. Consider
case X.g_34 of b { (a,b) -> ... case X.g_34 of { (p,q) -> ...} ... }
The binder-swap in the occurrence analyser will add a binding
for a LocalId version of g (with the same unique though):
case X.g_34 of b { (a,b) -> let g_34 = b in
... case X.g_34 of { (p,q) -> ...} ... }
So we want to look up the inner X.g_34 in the substitution, where we'll
find that it has been substituted by b. (Or conceivably cloned.)
-}
-- Look up an occurrence of an Id in the substitution.
substId :: SimplEnv -> InId -> SimplSR
-- Returns DoneEx only on a non-Var expression
substId (SimplEnv { seInScope = in_scope, seIdSubst = ids }) v
  = case lookupVarEnv ids v of  -- Note [Global Ids in the substitution]
        Nothing               -> DoneId (refineFromInScope in_scope v)
        Just (DoneId v)       -> DoneId (refineFromInScope in_scope v)
        Just (DoneEx (Var v)) -> DoneId (refineFromInScope in_scope v)
        Just res              -> res    -- DoneEx non-var, or ContEx
  -- Get the most up-to-date thing from the in-scope set
  -- Even though it isn't in the substitution, it may be in
  -- the in-scope set with better IdInfo
-- Fetch the up-to-date version of a local variable from the in-scope
-- set; global variables are returned unchanged.  A local variable
-- missing from the in-scope set indicates a simplifier invariant
-- violation, hence the warning.
refineFromInScope :: InScopeSet -> Var -> Var
refineFromInScope in_scope v
  | isLocalId v = case lookupInScope in_scope v of
                  Just v' -> v'
                  Nothing -> WARN( True, ppr v ) v  -- This is an error!
  | otherwise = v
lookupRecBndr :: SimplEnv -> InId -> OutId
-- Look up an Id which has been put into the envt by simplRecBndrs,
-- but where we have not yet done its RHS
-- (so the substitution entry, if any, must be a DoneId).
lookupRecBndr (SimplEnv { seInScope = in_scope, seIdSubst = ids }) v
  = case lookupVarEnv ids v of
        Just (DoneId v) -> v
        Just _ -> pprPanic "lookupRecBndr" (ppr v)
        Nothing -> refineFromInScope in_scope v
{-
************************************************************************
* *
\section{Substituting an Id binder}
* *
************************************************************************
These functions are in the monad only so that they can be made strict via seq.
-}
-- | Simplify a sequence of binders left to right, threading the
-- environment through with 'simplBinder'.
simplBinders :: SimplEnv -> [InBndr] -> SimplM (SimplEnv, [OutBndr])
simplBinders = mapAccumLM simplBinder
-------------
-- Simplify a single binder, dispatching on type vs term variable.
simplBinder :: SimplEnv -> InBndr -> SimplM (SimplEnv, OutBndr)
-- Used for lambda and case-bound variables
-- Clone Id if necessary, substitute type
-- Return with IdInfo already substituted, but (fragile) occurrence info zapped
-- The substitution is extended only if the variable is cloned, because
-- we *don't* need to use it to track occurrence info.
simplBinder env bndr
  | isTyVar bndr  = do  { let (env', tv) = substTyVarBndr env bndr
                        ; seqTyVar tv `seq` return (env', tv) }
  | otherwise     = do  { let (env', id) = substIdBndr env bndr
                        ; seqId id `seq` return (env', id) }
---------------
-- | Simplify the binder of a non-recursive let, forcing the result Id
-- via 'seqId'.
simplNonRecBndr :: SimplEnv -> InBndr -> SimplM (SimplEnv, OutBndr)
simplNonRecBndr env bndr
  = seqId bndr' `seq` return (env', bndr')
  where
    (env', bndr') = substIdBndr env bndr
---------------
simplRecBndrs :: SimplEnv -> [InBndr] -> SimplM SimplEnv
-- Recursive let binders: bring them all into scope (and into the
-- substitution) before any of their RHSs are dealt with; cf lookupRecBndr.
simplRecBndrs env@(SimplEnv {}) ids
  = do  { let (env1, ids1) = mapAccumL substIdBndr env ids
        ; seqIds ids1 `seq` return env1 }
---------------
-- | Substitute an Id binder, dispatching on whether it is a coercion
-- variable or an ordinary Id.
substIdBndr :: SimplEnv -> InBndr -> (SimplEnv, OutBndr)
substIdBndr env bndr =
    if isCoVar bndr
       then substCoVarBndr      env bndr
       else substNonCoVarIdBndr env bndr
---------------
-- Substitute a non-coercion Id binder (the assertion enforces that).
substNonCoVarIdBndr
   :: SimplEnv
   -> InBndr    -- Env and binder to transform
   -> (SimplEnv, OutBndr)
-- Clone Id if necessary, substitute its type
-- Return an Id with its
--      * Type substituted
--      * UnfoldingInfo, Rules, WorkerInfo zapped
--      * Fragile OccInfo (only) zapped: Note [Robust OccInfo]
--      * Robust info, retained especially arity and demand info,
--         so that they are available to occurrences that occur in an
--         earlier binding of a letrec
--
-- For the robust info, see Note [Arity robustness]
--
-- Augment the substitution if the unique changed
-- Extend the in-scope set with the new Id
--
-- Similar to CoreSubst.substIdBndr, except that
--      the type of id_subst differs
--      all fragile info is zapped
substNonCoVarIdBndr env@(SimplEnv { seInScope = in_scope, seIdSubst = id_subst })
                    old_id
  = ASSERT2( not (isCoVar old_id), ppr old_id )
    (env { seInScope = in_scope `extendInScopeSet` new_id,
           seIdSubst = new_subst }, new_id)
  where
    id1    = uniqAway in_scope old_id   -- Fresh unique if old_id clashes
    id2    = substIdType env id1
    new_id = zapFragileIdInfo id2       -- Zaps rules, worker-info, unfolding
                                        -- and fragile OccInfo

        -- Extend the substitution if the unique has changed,
        -- or there's some useful occurrence information
        -- See the notes with substTyVarBndr for the delSubstEnv
    new_subst | new_id /= old_id
              = extendVarEnv id_subst old_id (DoneId new_id)
              | otherwise
              = delVarEnv id_subst old_id
------------------------------------
-- | Force a type variable to WHNF.
seqTyVar :: TyVar -> ()
seqTyVar tv = tv `seq` ()

-- | Force an Id: its type (via 'seqType') and its IdInfo (to WHNF).
seqId :: Id -> ()
seqId i = seqType (idType i) `seq`
          idInfo i `seq`
          ()

-- | Force every Id in the list with 'seqId'.
seqIds :: [Id] -> ()
seqIds = foldr (\i rest -> seqId i `seq` rest) ()
{-
Note [Arity robustness]
~~~~~~~~~~~~~~~~~~~~~~~
We *do* transfer the arity from from the in_id of a let binding to the
out_id. This is important, so that the arity of an Id is visible in
its own RHS. For example:
f = \x. ....g (\y. f y)....
We can eta-reduce the arg to g, because f is a value. But that
needs to be visible.
This interacts with the 'state hack' too:
f :: Bool -> IO Int
f = \x. case x of
True -> f y
False -> \s -> ...
Can we eta-expand f? Only if we see that f has arity 1, and then we
take advantage of the 'state hack' on the result of
(f y) :: State# -> (State#, Int) to expand the arity one more.
There is a disadvantage though. Making the arity visible in the RHS
allows us to eta-reduce
f = \x -> f x
to
f = f
which technically is not sound. This is very much a corner case, so
I'm not worried about it. Another idea is to ensure that f's arity
never decreases; its arity started as 1, and we should never eta-reduce
below that.
Note [Robust OccInfo]
~~~~~~~~~~~~~~~~~~~~~
It's important that we *do* retain the loop-breaker OccInfo, because
that's what stops the Id getting inlined infinitely, in the body of
the letrec.
-}
{-
************************************************************************
* *
                Impedance matching to type substitution
* *
************************************************************************
-}
-- | Package the in-scope set and type substitution as a 'TvSubst'.
getTvSubst :: SimplEnv -> TvSubst
getTvSubst env = mkTvSubst (seInScope env) (seTvSubst env)

-- | Package the in-scope set and both substitutions as a 'CvSubst'.
getCvSubst :: SimplEnv -> CvSubst
getCvSubst env = CvSubst (seInScope env) (seTvSubst env) (seCvSubst env)

-- | Apply the environment's type substitution to a type.
substTy :: SimplEnv -> Type -> Type
substTy env = Type.substTy (getTvSubst env)

-- | Apply the environment's type substitution to a type variable.
substTyVar :: SimplEnv -> TyVar -> Type
substTyVar env = Type.substTyVar (getTvSubst env)
-- Substitute a type-variable binder (cloning if necessary) and thread
-- the resulting in-scope set and type substitution back into the env.
substTyVarBndr :: SimplEnv -> TyVar -> (SimplEnv, TyVar)
substTyVarBndr env tv
  = case Type.substTyVarBndr (getTvSubst env) tv of
        (TvSubst in_scope' tv_env', tv')
           -> (env { seInScope = in_scope', seTvSubst = tv_env' }, tv')
-- | Substitute an occurrence of a coercion variable.
substCoVar :: SimplEnv -> CoVar -> Coercion
substCoVar env cov = Coercion.substCoVar (getCvSubst env) cov

-- | Substitute a coercion-variable binder, threading the resulting
-- in-scope set and substitutions back into the environment.
substCoVarBndr :: SimplEnv -> CoVar -> (SimplEnv, CoVar)
substCoVarBndr env cov
  = case Coercion.substCoVarBndr (getCvSubst env) cov of
        (CvSubst scope' tvs' cvs', cov')
           -> (env { seInScope = scope', seTvSubst = tvs', seCvSubst = cvs' }, cov')

-- | Apply the environment's substitution to a coercion.
substCo :: SimplEnv -> Coercion -> Coercion
substCo env co = Coercion.substCo (getCvSubst env) co
------------------
substIdType :: SimplEnv -> Id -> Id
substIdType (SimplEnv { seInScope = in_scope, seTvSubst = tv_env }) id
| isEmptyVarEnv tv_env || isEmptyVarSet (tyVarsOfType old_ty) = id
| otherwise = Id.setIdType id (Type.substTy (TvSubst in_scope tv_env) old_ty)
-- The tyVarsOfType is cheaper than it looks
-- because we cache the free tyvars of the type
-- in a Note in the id's type itself
where
old_ty = idType id
| urbanslug/ghc | compiler/simplCore/SimplEnv.hs | bsd-3-clause | 27,858 | 0 | 15 | 7,517 | 4,421 | 2,447 | 1,974 | 303 | 4 |
{-# LANGUAGE DeriveDataTypeable #-}
module UnitTests.Options ( OptionShowSolverLog(..)
, OptionMtimeChangeDelay(..)
, extraOptions )
where
import Data.Proxy
import Data.Typeable
import Test.Tasty.Options
{-------------------------------------------------------------------------------
Test options
-------------------------------------------------------------------------------}
-- | The extra command-line options this test suite registers with
-- tasty, in addition to tasty's built-in ones.
extraOptions :: [OptionDescription]
extraOptions =
  [ Option (Proxy :: Proxy OptionShowSolverLog)
  , Option (Proxy :: Proxy OptionMtimeChangeDelay)
  ]
-- | Whether to show the full solver log (@--show-solver-log@);
-- defaults to off.
newtype OptionShowSolverLog = OptionShowSolverLog Bool
  deriving Typeable

instance IsOption OptionShowSolverLog where
  defaultValue   = OptionShowSolverLog False
  parseValue     = fmap OptionShowSolverLog . safeRead
  optionName     = return "show-solver-log"
  optionHelp     = return "Show full log from the solver"
  optionCLParser = flagCLParser Nothing (OptionShowSolverLog True)
-- | How long to wait, in microseconds, before attempting to detect a
-- file modification (@--mtime-change-delay@); defaults to no delay.
newtype OptionMtimeChangeDelay = OptionMtimeChangeDelay Int
  deriving Typeable

instance IsOption OptionMtimeChangeDelay where
  defaultValue = OptionMtimeChangeDelay 0
  parseValue   = fmap OptionMtimeChangeDelay . safeRead
  optionName   = return "mtime-change-delay"
  -- Fixed: the two concatenated literals previously joined as
  -- "...detectfile modification..." (no space between them).
  optionHelp   = return $ "How long to wait before attempting to detect "
                          ++ "file modification, in microseconds"
| mydaum/cabal | cabal-install/tests/UnitTests/Options.hs | bsd-3-clause | 1,399 | 0 | 8 | 267 | 217 | 122 | 95 | 27 | 1 |
{-# LANGUAGE TypeFamilies, LiberalTypeSynonyms #-}
-- ^ crucial for exercising the code paths to be
-- tested here
module ShouldCompile where
-- | Open type family mapping a container type to its element type.
type family Element c :: *

-- 'f' is deliberately a stub ('undefined' body, unused argument);
-- this module appears to be a compile-only test ("ShouldCompile")
-- exercising 'Element x' in a return position.
f :: x -> Element x
f x = undefined
| forked-upstream-packages-for-ghcjs/ghc | testsuite/tests/indexed-types/should_compile/Simple19.hs | bsd-3-clause | 264 | 0 | 6 | 96 | 36 | 22 | 14 | 5 | 1 |
module Annfail12 where
-- Testing errors hidden in annotations
{-# ANN f (error "You were meant to see this error!" :: Int) #-}
-- Identity; present only so the annotation above has a binder to
-- attach to (the error in the ANN payload is the point of this test).
f :: a -> a
f = id
{-# LANGUAGE FlexibleInstances #-}
module Text.Pin ( Pin(..), tag, empty, simple, isSelf, fromName ) where
import Control.Applicative hiding ( many, (<|>), empty, optional )
import Control.Name
import Data.Either
import Data.List hiding ( find )
import Data.Set ( Set, fromList )
import Data.String.Utils ( strip )
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.TagWiki
import Text.Printf
import Text.Utils
import qualified Control.Modifier as Mods
import qualified Data.Set as Set
import qualified Text.Tag as Tag
-- A reference to another file and/or event
data Pin = Pin
    { categories :: Set String -- ^ category tags attached to the reference
    , qualifiers :: Set Pin    -- ^ qualifier pins that disambiguate it
    , text       :: String     -- The original text, for display
    }
-- | The normalized (slugified) text, used for equality checks.
tag :: Pin -> String
tag pin = slugify (text pin)

-- | The pin with no text (and no categories or qualifiers).
empty :: Pin
empty = simple ""

-- | A pin carrying only display text.
simple :: String -> Pin
simple txt = Pin Set.empty Set.empty txt

-- | A pin with empty text refers to the current file itself.
isSelf :: Pin -> Bool
isSelf pin = text pin == ""

-- | A plain pin built from a name's text part.
fromName :: Name -> Pin
fromName name = simple (namePart name)
-- | Pins are equal when their normalized text, categories, and
-- qualifiers all agree.
instance Eq Pin where
    a == b = and [ tag a == tag b
                 , categories a == categories b
                 , qualifiers a == qualifiers b ]
-- | Lexicographic order on (tag, categories, qualifiers), consistent
-- with the 'Eq' instance.
--
-- NB: the previous definition gave only (<=), as the conjunction of
-- three fieldwise (<=) tests.  That is not a total order (two pins can
-- each fail (<=) against the other), which breaks Ord-based containers
-- such as @Set Pin@ (used for 'qualifiers').  Defining 'compare'
-- lexicographically restores the Ord laws while agreeing with (==).
instance Ord Pin where
    compare a b =
        case compare (tag a) (tag b) of
            EQ -> case compare (categories a) (categories b) of
                      EQ    -> compare (qualifiers a) (qualifiers b)
                      other -> other
            other -> other
-- Parse a pin as interleaved name words (Left) and category/qualifier
-- modifiers (Right); the name is the space-joined word sequence.
instance Parseable Pin where
    parser = do
        (names, mods) <- partitionEithers <$> many pinPart
        -- We need at least one name.
        -- If we failed to pick up a name in pinPart, pick one up now.
        -- This will probably cause an error, but at least it will be
        -- the 'right' "no name" error.
        name <- if null names then Tag.tag else pure $ unwords names
        pure Pin{ text = name
                , categories = fromList $ Mods.categories mods
                , qualifiers = fromList $ Mods.qualifiers mods }
        where pinPart = try (Left <$> Tag.tag)
                    <|> (Right <$> Mods.catOrQual)
                    <?> "text, category, or qualifier"
-- Render as: stripped text, then " #cat" per category, then
-- " (qualifier)" per qualifier.
instance Show Pin where
    show (Pin cs qs t) = printf "%s%s%s" (strip t) cstr qstr where
        cstr = if null cs' then "" else " #" ++ intercalate " #" cs'
        qstr = if null qs' then "" else " (" ++ intercalate ") (" qs' ++ ")"
        cs' = Set.toList cs
        qs' = map show $ Set.toList qs
| Soares/tagwiki | src/Text/Pin.hs | mit | 2,306 | 0 | 13 | 650 | 656 | 358 | 298 | 53 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE BangPatterns #-}
module Data.Frame.Types (
Val(..),
Type(..),
subsumes,
subsume,
like,
typeVal,
lub
) where
import Data.Data
-- import Data.DateTime
import Data.Text (Text, pack)
import Data.Frame.Internal (Default(..))
import Control.DeepSeq (NFData(..))
-------------------------------------------------------------------------------
-- Types
-------------------------------------------------------------------------------
-- Columns types have a subsumption rule which dictates when we upcast the type of the values in column. If we
-- have a column of Int values with a single String element in the middle of the data then then we upcast to
-- String. If the user specifes (Maybe a) type for the column then the column treats mismatched values as
-- missing values.
--
-- a <: a
-- a <: b |- Maybe a <: Maybe b
-- Double <: String
-- Bool <: String
-- Datetime <: String
-- Int <: Double
-- | The subsumption relation described above: @a `subsumes` b@ holds when a
-- column of type @a@ can absorb values of type @b@.  Clause order matters:
-- Maybe-wrappers recurse first, 'Any' is absorbed by everything, and
-- anything else subsumes only itself.
subsumes :: Type -> Type -> Bool
subsumes super sub = case (super, sub) of
  (MT inner, t) -> subsumes inner t
  (ST, DT)      -> True
  (ST, BT)      -> True
  (ST, IT)      -> True
  (ST, TT)      -> True
  (DT, IT)      -> True
  (_, Any)      -> True
  (a, b)        -> a == b
-- | Coerce a value up to the given column type (per the subsumption rules
-- above); Maybe-typed targets map unconvertible values to 'NA'.
--
-- NOTE(review): intentionally partial for non-Maybe targets — e.g.
-- @subsume IT (D _)@ or @subsume BT (I _)@ has no matching case and raises
-- a pattern-match failure.  Callers appear expected to check 'subsumes'
-- first; confirm before relying on this elsewhere.
subsume :: Type -> Val -> Val
subsume ST v = case v of
  D x -> S (pack $ show x)
  I x -> S (pack $ show x)
  S x -> S x
  B True -> S (pack "true")
  B False -> S (pack "false")
  -- T x -> S (pack $ show x)
  {-M x -> error "maybe case"-}
subsume DT v = case v of
  D x -> D x
  I x -> D (fromIntegral x)
  {-M x -> error "maybe case"-}
subsume IT v = case v of
  I x -> I x
  M x -> error "maybe case"
subsume BT v = case v of
  B x -> B x
subsume (MT IT) v = case v of
  I x -> I x
  _ -> NA
subsume (MT DT) v = case v of
  D x -> D x
  I x -> D (fromIntegral x)
  _ -> NA
subsume (MT ST) v = case v of
  S x -> S x
  _ -> NA
subsume (MT BT) v = case v of
  B x -> B x
  _ -> NA
subsume (MT Any) v = NA
-- | Shallow type compatibility between two values: same outer constructor,
-- with 'M' wrappers compared recursively and 'Nothing' compatible with any
-- Maybe value.
like :: Val -> Val -> Bool
like a b = case (a, b) of
  (D _, D _)               -> True
  (I _, I _)               -> True
  (S _, S _)               -> True
  (B _, B _)               -> True
  -- (T _, T _)            -> True
  (M (Just x), M (Just y)) -> like x y
  (M (Just _), M Nothing)  -> True
  (M Nothing, M (Just _))  -> True
  (M Nothing, M Nothing)   -> True
  _                        -> False
-- | Column types: Double, Int, String ('Text'), Bool; @MT t@ is a column of
-- @t@ with missing values; 'TT' is reserved for the (currently disabled)
-- datetime support; 'Any' is the type of a lone 'NA'.
data Type = DT | IT | ST | BT | MT Type | TT | Any
  deriving (Eq, Show, Ord)
-- Heterogeneous value
data Val
  = D {-# UNPACK #-} !Double
  | I {-# UNPACK #-} !Int
  | S {-# UNPACK #-} !Text
  | B !Bool
  | M !(Maybe Val)
  -- | T !DateTime
  | NA
  deriving (Eq, Show, Ord, Data, Typeable)
-- | Full evaluation for 'Val'.  'D', 'I' and 'S' carry strict (unpacked)
-- fields, so matching the constructor already forces the payload; only the
-- wrapped cases recurse.
instance NFData Val where
  rnf (D _) = ()
  rnf (I _) = ()
  rnf (S _) = ()
  rnf (B a) = rnf a
  rnf (M a) = rnf a
  -- rnf (T a) = rnf a
  rnf NA = ()
-- | The column type of a single value; a bare 'NA' (or @M Nothing@) has
-- type 'Any' so it unifies with anything via 'subsumes'.
typeVal :: Val -> Type
typeVal v = case v of
  D _            -> DT
  I _            -> IT
  S _            -> ST
  B _            -> BT
  -- T _         -> TT
  M (Just inner) -> MT (typeVal inner)
  M Nothing      -> Any
  NA             -> Any
-- lub [I 3, D 2.3] -> DT
-- lub [I 3, D 2.3, S "a"] -> ST
-- | Least upper bound of the value types in a column, widening via
-- 'subsumes'; switches to the @goNa@ loop (Maybe-wrapped bound) once an NA
-- is seen.
--
-- NOTE(review): @go Nothing []@ and @goNa Nothing []@ are not covered, so
-- @lub []@ and @lub [NA]@ crash with a pattern-match failure — confirm
-- callers never pass empty/all-NA columns.  Also note the local @lub@
-- bindings shadow the top-level function.
lub :: [Val] -> Either String Type
lub vals = go Nothing vals
  where
    go (Just lub) [] = Right lub
    go Nothing (NA:xs) = goNa Nothing xs -- first value is a NA
    go Nothing (x:xs) = go (Just (typeVal x)) xs
    go (Just lub) (x:xs)
      | typeVal x == Any = goNa (Just (maybeT lub)) xs -- we hit a NA midstream
      | lub == typeVal x = go (Just lub) xs
      | lub `subsumes` typeVal x = go (Just lub) xs
      | typeVal x `subsumes` lub = go (Just (typeVal x)) xs
      | otherwise = Left $ "No subsumption: " ++ (show lub) ++ " ~ " ++ (show $ typeVal x)
    goNa Nothing (x:xs) = goNa (Just (typeVal x)) xs
    goNa (Just lub) [] = Right lub
    goNa (Just lub) (x:xs)
      | lub == typeVal x = goNa (Just lub) xs
      | lub `subsumes` typeVal x = goNa (Just lub) xs
      | maybeT (typeVal x) `subsumes` lub = goNa (Just (maybeT (typeVal x))) xs
      | otherwise = goNa (Just lub) xs -- missing case
-- | Wrap a type in 'MT' unless it is already Maybe-wrapped (idempotent).
maybeT :: Type -> Type
maybeT (MT a) = MT a
maybeT a = (MT a)
| houshuang/frame | src/Data/Frame/Types.hs | mit | 3,994 | 0 | 14 | 1,102 | 1,729 | 876 | 853 | 112 | 12 |
module TypeClient where
-- Can't refer to modules named Type
-- Should rename the module
import Type
-- | Uses module-name qualification (@Type.id@) to pick the 'id' exported by
-- module "Type" rather than Prelude's.
-- NOTE(review): presumably Type.id is Int-compatible — confirm in module Type.
g :: Int
g = Type.id 3
| antalsz/hs-to-coq | examples/base-tests/TypeClient.hs | mit | 126 | 0 | 6 | 26 | 24 | 15 | 9 | 4 | 1 |
{-# LANGUAGE DeriveGeneric #-}
module SimpleDecree (
IntegerOperation(..)
)
where
-- local imports
import Control.Consensus.Paxos
-- external imports
import qualified Data.Serialize as C
import GHC.Generics
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
-- | Operations over a replicated integer value; used as the decree type for
-- the Paxos consensus tests.
data IntegerOperation =
  SetValue Integer |
  GetValue Integer |
  AddDelta Integer |
  SubtractDelta Integer |
  MultiplyFactor Integer |
  DivideByFactor Integer
  deriving (Generic, Eq, Show)
-- Wire format comes for free from the 'Generic' deriving.
instance C.Serialize IntegerOperation
instance Decreeable IntegerOperation
| hargettp/paxos | tests/SimpleDecree.hs | mit | 654 | 0 | 6 | 85 | 102 | 62 | 40 | 16 | 0 |
module Type(
Type(..)
)where
import Definition
-- | C-style types: @Type name n@ is @name@ with @n@ pointer stars;
-- @TypeArray t n@ is an @n@-element array of @t@; @TypeAlias new old@ is a
-- typedef.  NOTE(review): alias/array semantics inferred from constructor
-- shapes — confirm against the interpreter.
data Type = Polymorphism | TypeAlias Identifier Identifier | TypeArray Type Int | Type Identifier Int deriving (Eq)
-- | Render a type roughly as C source would spell it.
instance Show Type where
    show Polymorphism = "?Type?"
    -- The next two constructors previously had no clause, making 'show' a
    -- partial function that crashed on aliases and arrays.
    show (TypeAlias new old) = new ++ " (alias of " ++ old ++ ")"
    show (TypeArray t n) = show t ++ "[" ++ show n ++ "]"
    show (Type n i) = n ++ replicate i '*'
| sqd/haskell-C89-interpreter | Type.hs | mit | 267 | 0 | 8 | 53 | 92 | 51 | 41 | 7 | 0 |
-----------------------------------------------------------
-- |
-- module: MXNet.Core.Base.Internal.TH.Symbol
-- copyright: (c) 2016 Tao He
-- license: MIT
-- maintainer: [email protected]
--
-- Functions about Symbol that generated by template haskell.
--
{-# OPTIONS_GHC -Wno-missing-signatures #-}
{-# OPTIONS_GHC -Wno-redundant-constraints #-}
{-# OPTIONS_GHC -Wno-unused-local-binds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
module MXNet.Core.Base.Internal.TH.Symbol where
import Data.Proxy
import MXNet.Core.Base.HMap
import MXNet.Core.Base.Internal
import MXNet.Core.Base.Internal.TH (registerSymbolOps)
import MXNet.Core.NNVM.Internal (nnGetOpHandle, nnSymbolCompose)
import Prelude hiding (sin, sinh, cos, cosh, tan, tanh, min, max, round, floor,
abs, sum, sqrt, log, exp, flip, concat, repeat, reverse)
-- | Register symbol operators.
-- NOTE(review): this Template Haskell splice generates all top-level symbol
-- operator bindings for this module at compile time (see
-- 'MXNet.Core.Base.Internal.TH.registerSymbolOps'); no hand-written
-- definitions live here.
$(registerSymbolOps)
| sighingnow/mxnet-haskell | mxnet/src/MXNet/Core/Base/Internal/TH/Symbol.hs | mit | 1,228 | 0 | 6 | 224 | 149 | 107 | 42 | 21 | 0 |
module Y2017.M02.D20.Solution where
import Control.Arrow ((&&&))
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Monoid
import Data.Ratio
-- below imports available via 1HaskellADay git repository
import Control.Logic.Frege (adjoin)
import qualified Data.Bag as Bag
import Data.Percentage
import Rosalind.Types
import Rosalind.GCContent
import Rosalind.Scan.FASTA
{--
Is there a better way?
In above Rosalind.GCContent import we have this function:
gcContent :: DNAStrand -> Percentage
gcContent strand =
let gcs = getSum (sumGC (Map.toList (Bag.fromList strand)))
in P (uncurry (%) $ adjoin fromIntegral (gcs, length strand))
The thing is, okay, it works, but how does it work?
1. It iterates through the DNA string to get the GC-content
2. It iterates through the DNA string, again, to get the length.
Whenever I see the length-function I have a little annoying voice saying:
You could do better.
Some cases I do have to call the length function, but in some cases, I do not.
This is one of those cases.
rewrite gcContent so that the DNA string is iterated only once. That is to say,
somewhere (else) the length of this list (String) is already recorded. Find that
record and use it to compute the GC-content signature of a string.
--}
-- | GC-content of a strand, walking the nucleotide list only once: the
-- bag of per-nucleotide tallies already encodes the strand length, so no
-- separate 'length' pass is needed.
gcContent' :: DNAStrand -> Percentage
gcContent' dna =
   let counts = Map.toList (Bag.fromList dna)  -- at most one entry per nucleotide
       size   = sum (map snd counts)           -- tallies sum to the strand length
   in  P (uncurry (%) (adjoin (fromIntegral . getSum) (sumGC counts, size)))
-- Now, using Rosalind/rosy_strands.txt verify that gcContent' == gcContent
{--
>>> fmap (map (ident &&& gcContent . strand)) $ readFASTA "Rosalind/rosy_strands.txt"
[("Rosalind_6404",53.75%),("Rosalind_5959",53.57%),("Rosalind_0808",60.91%)]
>>> fmap (map (ident &&& gcContent' . strand)) $ readFASTA "Rosalind/rosy_strands.txt"
[("Rosalind_6404",53.75%),("Rosalind_5959",53.57%),("Rosalind_0808",60.91%)]
--}
-- moving gcContent' definition to Rosalind.GCContent.gcContent
| geophf/1HaskellADay | exercises/HAD/Y2017/M02/D20/Solution.hs | mit | 2,231 | 0 | 12 | 367 | 190 | 114 | 76 | 17 | 1 |
{-# LANGUAGE FlexibleContexts #-}
module Rx.Notification where
import Rx.Observable.Types
-- | Extract the payload of an 'OnNext' notification, if any.
getValue :: Notification v -> Maybe v
getValue notification = case notification of
  OnNext payload -> Just payload
  _              -> Nothing
-- | True exactly when the notification is an 'OnError'.
hasThrowable :: Notification v -> Bool
hasThrowable notification = case notification of
  OnError _ -> True
  _         -> False
-- | Deliver a notification to an observer by unwrapping the observer's
-- callback and applying it.
accept :: Notification v -> Observer v -> IO ()
accept notification (Observer observerFn) =
  observerFn notification
| roman/Haskell-Reactive-Extensions | rx-core/src/Rx/Notification.hs | mit | 394 | 0 | 8 | 64 | 130 | 65 | 65 | 12 | 1 |
{-# LANGUAGE RecordWildCards #-}
import Data.Foldable (for_)
import Test.Hspec (Spec, describe, it, shouldBe)
import Test.Hspec.Runner (configFastFail, defaultConfig, hspecWith)
import Base (Error(..), rebase)
-- Run the suite, aborting on the first failure for faster feedback.
main :: IO ()
main = hspecWith defaultConfig {configFastFail = True} specs
-- One @it@ per table entry; RecordWildCards brings each Case's fields into
-- scope inside 'test'.
specs :: Spec
specs = describe "rebase" $ for_ cases test
  where
    test Case{..} = it description assertion
      where
        assertion = expression `shouldBe` outputDigits
        expression = rebase inputBase outputBase inputDigits
-- A single table-driven test case for 'rebase'.
data Case = Case { description :: String
                 , inputBase :: Integer
                 , inputDigits :: [Integer]
                 , outputBase :: Integer
                 , outputDigits :: Either (Error Integer) [Integer]
                 }
-- Test table following the canonical all-your-base exercise data.
cases :: [Case]
cases = [ Case { description = "single bit one to decimal"
               , inputBase = 2
               , inputDigits = [1]
               , outputBase = 10
               , outputDigits = Right [1]
               }
        , Case { description = "binary to single decimal"
               , inputBase = 2
               , inputDigits = [1, 0, 1]
               , outputBase = 10
               , outputDigits = Right [5]
               }
        , Case { description = "single decimal to binary"
               , inputBase = 10
               , inputDigits = [5]
               , outputBase = 2
               , outputDigits = Right [1, 0, 1]
               }
        , Case { description = "binary to multiple decimal"
               , inputBase = 2
               , inputDigits = [1, 0, 1, 0, 1, 0]
               , outputBase = 10
               , outputDigits = Right [4, 2]
               }
        , Case { description = "decimal to binary"
               , inputBase = 10
               , inputDigits = [4, 2]
               , outputBase = 2
               , outputDigits = Right [1, 0, 1, 0, 1, 0]
               }
        , Case { description = "trinary to hexadecimal"
               , inputBase = 3
               , inputDigits = [1, 1, 2, 0]
               , outputBase = 16
               , outputDigits = Right [2, 10]
               }
        , Case { description = "hexadecimal to trinary"
               , inputBase = 16
               , inputDigits = [2, 10]
               , outputBase = 3
               , outputDigits = Right [1, 1, 2, 0]
               }
        , Case { description = "15-bit integer"
               , inputBase = 97
               , inputDigits = [3, 46, 60]
               , outputBase = 73
               , outputDigits = Right [6, 10, 45]
               }
        -- The following three cases are [0] in all-your-base.json.
        -- Here we use [] to represent the lack of digits, i.e., zero.
        , Case { description = "empty list"
               , inputBase = 2
               , inputDigits = []
               , outputBase = 10
               , outputDigits = Right []
               }
        , Case { description = "single zero"
               , inputBase = 10
               , inputDigits = [0]
               , outputBase = 2
               , outputDigits = Right []
               }
        , Case { description = "multiple zeros"
               , inputBase = 10
               , inputDigits = [0, 0, 0]
               , outputBase = 2
               , outputDigits = Right []
               }
        , Case { description = "leading zeros"
               , inputBase = 7
               , inputDigits = [0, 6, 0]
               , outputBase = 10
               , outputDigits = Right [4, 2]
               }
        , Case { description = "input base is one"
               , inputBase = 1
               , inputDigits = [0]
               , outputBase = 10
               , outputDigits = Left InvalidInputBase
               }
        , Case { description = "input base is zero"
               , inputBase = 0
               , inputDigits = []
               , outputBase = 10
               , outputDigits = Left InvalidInputBase
               }
        , Case { description = "input base is negative"
               , inputBase = -2
               , inputDigits = [1]
               , outputBase = 10
               , outputDigits = Left InvalidInputBase
               }
        , Case { description = "negative digit"
               , inputBase = 2
               , inputDigits = [1, -1, 1, 0, 1, 0]
               , outputBase = 10
               , outputDigits = Left (InvalidDigit (-1))
               }
        , Case { description = "invalid positive digit"
               , inputBase = 2
               , inputDigits = [1, 2, 1, 0, 1, 0]
               , outputBase = 10
               , outputDigits = Left (InvalidDigit 2)
               }
        , Case { description = "output base is one"
               , inputBase = 2
               , inputDigits = [1, 0, 1, 0, 1, 0]
               , outputBase = 1
               , outputDigits = Left InvalidOutputBase
               }
        , Case { description = "output base is zero"
               , inputBase = 10
               , inputDigits = [7]
               , outputBase = 0
               , outputDigits = Left InvalidOutputBase
               }
        , Case { description = "output base is negative"
               , inputBase = 2
               , inputDigits = [1]
               , outputBase = -7
               , outputDigits = Left InvalidOutputBase
               }
        , Case { description = "both bases are negative"
               , inputBase = -2
               , inputDigits = [1]
               , outputBase = -7
               -- debatable: This could be Left InvalidOutputBase as well.
               , outputDigits = Left InvalidInputBase
               }
        ]
-- a13cfc6a039b8e5effac4ecc3ceea56d3f8fa807
| exercism/xhaskell | exercises/practice/all-your-base/test/Tests.hs | mit | 5,933 | 0 | 12 | 2,800 | 1,295 | 811 | 484 | 123 | 1 |
import Test.HUnit
import Q33
-- HUnit cases for 'tryCoPrime': a Right carries the "Zero is not supported"
-- message; a Left carries the coprimality verdict.
test1 = TestCase (assertEqual "tryCoPrime 0 15 should be Right Zero is not supported." (Right "Zero is not supported") (tryCoPrime 0 15))
test2 = TestCase (assertEqual "tryCoPrime 15 0 should be Right Zero is not supported." (Right "Zero is not supported") (tryCoPrime 15 0 ))
test3 = TestCase (assertEqual "tryCoPrime 12 32 should be Left False ." (Left False ) (tryCoPrime 12 32))
test4 = TestCase (assertEqual "tryCoPrime 31 12 should be Left True ." (Left True ) (tryCoPrime 31 12))
-- Run all four cases and print the summary counts.
main = runTestTT $ TestList [test1,test2,test3,test4]
module Ch15.MadLibs where
import Data.Monoid
-- Aliases documenting the role of each argument; all are plain 'String's.
type Verb = String
type Adjective = String
type Adverb = String
type Noun = String
type Exclamation = String

-- | Fill the mad-lib template with the four supplied words.
madlib :: Exclamation -> Adverb -> Noun -> Adjective -> String
madlib excl adverb noun adjective =
  concat
    [ excl, "! he said "
    , adverb, " as he jumped into his car "
    , noun, " and drove off with his "
    , adjective, " wife."
    ]

-- | The same template as 'madlib', rendered with a '<>' chain instead of
-- one list concatenation.
madlibBetter :: Exclamation -> Adverb -> Noun -> Adjective -> String
madlibBetter excl adverb noun adjective =
  excl <> "! he said "
       <> adverb <> " as he jumped into his car "
       <> noun <> " and drove off with his "
       <> adjective <> " wife."
| andrewMacmurray/haskell-book-solutions | src/ch15/MadLibs.hs | mit | 606 | 0 | 11 | 157 | 166 | 94 | 72 | 20 | 1 |
module Slack where
import ClassyPrelude
import Control.Monad (mfilter)
import Control.Monad.Except (ExceptT(ExceptT))
import Control.Lens (Getter, Prism', prism', view, to)
import Control.Lens.TH (makeLenses, makePrisms)
import Data.Aeson ((.=), Value(Number, String), Object, FromJSON(parseJSON), ToJSON(toJSON), object)
import qualified Data.Aeson.BetterErrors as ABE
import qualified Data.Aeson.BetterErrors.Internal as ABEI
import qualified Data.HashMap.Strict as HM
import Data.Proxy (Proxy(Proxy))
import Data.Scientific (toBoundedInteger)
import Data.Text (splitOn)
import Data.Time.Clock.POSIX (posixSecondsToUTCTime)
import TextShow (TextShow(showb))
import TextShow.TH (deriveTextShow)
import TextShowOrphans ()
-- | A Slack message timestamp: whole seconds plus a uniquifying counter,
-- sent on the wire as the string @"seconds.unique"@.
data TS = TS { _tsTime :: Word32, _tsUnique :: Word32 } deriving (Eq, Ord, Read, Show)
-- | aeson-better-errors parser for 'TS'.
asTS :: ABE.Parse Text TS
asTS = ABE.asText >>= either ABE.throwCustomError pure . ts
-- | Parse the @"time.unique"@ wire form; 'Left' carries a diagnostic.
ts :: Text -> Either Text TS
ts t = case splitOn "." t of
  [readMay -> Just time, readMay -> Just unique] -> Right $ TS time unique
  other -> Left $ "couldn't parse as a time.unique pair, got parts: " <> tshow other
-- | Render back to the wire form; right inverse of 'ts'.
unTS :: TS -> Text
unTS (TS t u) = tshow t <> "." <> tshow u
-- | Drop the uniquifier and interpret the seconds as a UTC instant.
tsToUTCTime :: TS -> UTCTime
tsToUTCTime (TS t _) = posixSecondsToUTCTime (fromIntegral t)
instance FromJSON TS where parseJSON = ABE.toAesonParser id asTS
instance ToJSON TS where toJSON = toJSON . unTS
makeLenses ''TS
deriveTextShow ''TS
-- | A plain unix time in seconds.  Slack sends these either as a numeric
-- string or as a JSON number, so the parser accepts both encodings.
newtype Time = Time { unTime :: Word32 } deriving (Eq, Ord, Read, Show)
asTime :: ABE.Parse Text Time
asTime =
  -- Drops down to aeson-better-errors internals so both the String and
  -- Number representations can be inspected at the raw 'Value' level.
  ABEI.ParseT . ReaderT $ \ (ABEI.ParseReader path value) ->
    ExceptT . Identity $ case value of
      String s | Just t <- readMay s ->
        Right (Time t)
      String s ->
        Left (ABEI.BadSchema (toList path) . ABEI.CustomError $ "tried to parse " <> tshow s <> " as time but couldn't")
      Number s | Just w32 <- toBoundedInteger s ->
        Right (Time w32)
      Number s ->
        Left . ABEI.BadSchema (toList path) . ABEI.CustomError $ "out of bound unix time " <> tshow s
      other ->
        Left . ABEI.BadSchema (toList path) . ABEI.CustomError $ "expected a time as string or number not " <> tshow other
instance ToJSON Time where toJSON = toJSON . unTime
makePrisms ''Time
deriveTextShow ''Time
-- | Phantom-typed Slack identifier; the tag @a@ records which kind of
-- object (User, Channel, IM, ...) the ID names, at no runtime cost.
newtype ID a = ID { unID :: Text } deriving (Eq, Ord, Read, Show)
asID :: ABE.Parse Text (ID a)
asID = ID <$> ABE.asText
instance FromJSON (ID a) where parseJSON = ABE.toAesonParser id asID
instance ToJSON (ID a) where toJSON = String . unID
makePrisms ''ID
deriveTextShow ''ID
-- | Render a display label of the form @name <ID>@ from a structure that
-- exposes a name getter and an ID getter.
idedName :: Getter s Text -> Getter s (ID k) -> (s -> Text)
idedName nameOf idOf = \subject ->
  let label = view nameOf subject
      rawId = view (idOf . to unID) subject
  in label ++ " <" ++ rawId ++ ">"
-- | Generic success/failure envelope for web API replies.
data Response a = ResponseNotOk !Text | ResponseOk a
-- | Request body for @rtm.start@: just the API token.
data RtmStartRequest = RtmStartRequest { rtmStartToken :: Text }
-- | Reply of @rtm.start@: the websocket URL plus an initial team state dump.
data RtmStartRp = RtmStartRp
  { _rtmStartUrl :: Text
  , _rtmStartSelf :: Self
  , _rtmStartTeam :: Team
  , _rtmStartUsers :: [User]
  , _rtmStartChannels :: [Channel]
  , _rtmStartGroups :: [Group]
  , _rtmStartIMs :: [IM]
  , _rtmStartBots :: [Bot] }
-- | Minimal fixture for tests: empty team state with placeholder IDs.
testRtmStartRp :: RtmStartRp
testRtmStartRp = RtmStartRp
  { _rtmStartUrl = "url"
  , _rtmStartSelf = Self (ID "UEMBOT") "Embot" mempty (Time 0) PresenceActive
  , _rtmStartTeam = Team (ID "TTEAM") "Team" Nothing "domain" Nothing False mempty
  , _rtmStartUsers = []
  , _rtmStartChannels = []
  , _rtmStartGroups = []
  , _rtmStartIMs = []
  , _rtmStartBots = [] }
-- | The connecting bot's own identity within the team.
data Self = Self
  { _selfID :: ID User
  , _selfName :: Text
  , _selfPrefs :: Object
  , _selfCreated :: Time
  , _selfManualPresence :: Presence }
-- | Two-state presence as reported by Slack.
data Presence = PresenceActive | PresenceAway
-- | Team-level metadata and preferences.
data Team = Team
  { _teamID :: ID Team
  , _teamName :: Text
  , _teamEmailDomain :: Maybe Text
  , _teamDomain :: Text
  , _teamMsgEditWindowMins :: Maybe Int
  , _teamOverStorageLimit :: Bool
  , _teamPrefs :: Object }
-- | A team member and their flags.
data User = User
  { _userID :: ID User
  , _userName :: Text
  , _userRealName :: Maybe Text
  , _userDeleted :: Bool
  , _userColor :: Maybe Text
  , _userTz :: Maybe Tz
  , _userProfile :: Profile
  , _userIsAdmin :: Bool
  , _userIsOwner :: Bool
  , _userIsPrimaryOwner :: Bool
  , _userIsRestricted :: Bool
  , _userIsUltraRestricted :: Bool
  , _userHas2fa :: Bool
  , _userTwoFactorType :: Maybe Text
  , _userHasFiles :: Bool
  , _userPresence :: Maybe Presence }
-- | A user's timezone triple (identifier, label, UTC offset).
data Tz = Tz
  { _tz :: Text
  , _tzLabel :: Text
  , _tzOffset :: Int }
-- | Profile fields; '_profileImages' maps avatar pixel size to URL.
data Profile = Profile
  { _profileFirstName :: Maybe Text
  , _profileLastName :: Maybe Text
  , _profileRealName :: Maybe Text
  , _profileRealNameNormalized :: Maybe Text
  , _profileEmail :: Maybe Text
  , _profileSkype :: Maybe Text
  , _profilePhone :: Maybe Text
  , _profileImages :: IntMap Text }
-- | A public channel and its read-state as seen by this client.
data Channel = Channel
  { _channelID :: ID Channel
  , _channelName :: Text
  , _channelCreated :: Time
  , _channelCreator :: ID User
  , _channelIsArchived :: Bool
  , _channelIsGeneral :: Bool
  , _channelMembers :: [ID User]
  , _channelTopic :: Maybe (SlackTracked Text)
  , _channelPurpose :: Maybe (SlackTracked Text)
  , _channelIsMember :: Bool
  , _channelLastRead :: Maybe TS
  , _channelLatest :: Maybe Message
  , _channelUnreadCount :: Maybe Int }
-- | A private group; mirrors 'Channel' minus the membership flags.
data Group = Group
  { _groupID :: ID Group
  , _groupName :: Text
  , _groupCreated :: Time
  , _groupCreator :: ID User
  , _groupIsArchived :: Bool
  , _groupMembers :: [ID User]
  , _groupTopic :: Maybe (SlackTracked Text)
  , _groupPurpose :: Maybe (SlackTracked Text)
  , _groupIsOpen :: Bool
  , _groupLastRead :: Maybe TS
  , _groupLatest :: Maybe Message
  , _groupUnreadCount :: Maybe Int }
-- | A direct-message conversation with one user.
data IM = IM
  { _imID :: ID IM
  , _imUser :: ID User
  , _imCreated :: Time
  , _imIsUserDeleted :: Bool
  , _imIsOpen :: Bool
  , _imLastRead :: Maybe TS
  , _imLatest :: Maybe Message
  , _imUnreadCount :: Maybe Int }
-- | A bot integration; '_botIcons' maps icon size keys to URLs.
data Bot = Bot
  { _botID :: ID Bot
  , _botName :: Text
  , _botIcons :: HM.HashMap Text Text }
-- | Uninhabited tag type: an @'ID' Chat@ may name a channel, group, or IM
-- (see 'SlackTyped' below for narrowing).
data Chat
-- | A message event; most fields are optional because the RTM protocol
-- populates different subsets per subtype.
data Message = Message
  { _messageChat :: Maybe (ID Chat)
  , _messageUser :: Maybe (ID User)
  , _messageSubtype :: Maybe MessageSubtype
  , _messageText :: Maybe Text
  , _messageTS :: TS
  , _messageEdited :: Maybe MessageEdited
  , _messageDeletedTS :: Maybe TS
  , _messageEventTS :: Maybe TS
  , _messageHidden :: Bool
  , _messageAttachments :: [Attachment]
  , _messageInviter :: Maybe (ID User)
  , _messageIsStarred :: Bool
  , _messagePinnedTo :: [ID Channel]
  , _messageReactions :: [MessageReaction] }
-- | Fixture helper: a bare message with the given chat, author, and text.
testMessage :: ID Chat -> ID User -> Text -> Message
testMessage chat from text = Message
  { _messageChat = Just chat
  , _messageUser = Just from
  , _messageSubtype = Nothing
  , _messageText = Just text
  , _messageTS = TS 0 0
  , _messageEdited = Nothing
  , _messageDeletedTS = Nothing
  , _messageEventTS = Nothing
  , _messageHidden = False
  , _messageAttachments = []
  , _messageInviter = Nothing
  , _messageIsStarred = False
  , _messagePinnedTo = []
  , _messageReactions = [] }
-- | The RTM message subtypes, one constructor per wire subtype string.
data MessageSubtype
  = BotMS | MeMS | ChangedMS | DeletedMS
  | ChannelJoinMS | ChannelLeaveMS | ChannelTopicMS | ChannelPurposeMS | ChannelNameMS | ChannelArchiveMS | ChannelUnarchiveMS
  | GroupJoinMS | GroupLeaveMS | GroupTopicMS | GroupPurposeMS | GroupNameMS | GroupArchiveMS | GroupUnarchiveMS
  | FileShareMS | FileCommentMS | FileMentionMS
  | PinnedItemMS | ReminderAddMS | ReminderDeleteMS | BotAddMS
-- | Who edited a message, and when.
data MessageEdited = MessageEdited
  { _messageEditedUser :: ID User
  , _messageEditedTS :: TS }
-- | An emoji reaction tally on a message.
data MessageReaction = MessageReaction
  { _messageReactionName :: Text
  , _messageReactionCount :: Int
  , _messageReactionUsers :: [ID User] }
-- | A rich-formatting attachment on a message.
data Attachment = Attachment
  { _attachmentFallback :: Maybe Text
  , _attachmentColor :: Maybe Text
  , _attachmentPretext :: Maybe Text
  , _attachmentAuthorName :: Maybe Text
  , _attachmentAuthorLink :: Maybe Text
  , _attachmentAuthorIcon :: Maybe Text
  , _attachmentTitle :: Maybe Text
  , _attachmentTitleLink :: Maybe Text
  , _attachmentText :: Maybe Text
  , _attachmentFields :: [AttachmentField]
  , _attachmentFromUrl :: Maybe Text
  , _attachmentThumbUrl :: Maybe Text
  , _attachmentThumbWidth :: Maybe Int
  , _attachmentThumbHeight :: Maybe Int
  , _attachmentId :: Int }
-- | A titled key/value pair inside an 'Attachment'.
data AttachmentField = AttachmentField
  { _fieldTitle :: Text
  , _fieldValue :: Text
  , _fieldShort :: Bool }
-- | A value (e.g. a topic or purpose) along with who set it and when.
data SlackTracked a = SlackTracked
  { _trackedValue :: a
  , _trackedCreator :: ID User
  , _trackedLastSet :: Time }
-- | An uploaded or linked file, with its sharing and preview metadata;
-- '_fileThumb' maps thumbnail pixel size to URL.
data File = File
  { _fileID :: ID File
  , _fileCreated :: Time
  , _fileTimestamp :: Time
  , _fileName :: Text
  , _fileTitle :: Text
  , _fileMimeType :: Text
  , _fileFileType :: Text
  , _filePrettyType :: Text
  , _fileUser :: ID User
  , _fileMode :: FileMode
  , _fileEditable :: Bool
  , _fileIsExternal :: Bool
  , _fileExternalType :: Text
  , _fileSize :: Word64
  , _fileURL :: Text
  , _fileURLDownload :: Text
  , _fileURLPrivate :: Text
  , _fileURLPrivateDownload :: Text
  , _fileThumb :: IntMap Text
  , _filePermalink :: Text
  , _fileEditLink :: Text
  , _filePreview :: Text
  , _filePreviewHighlight :: Text
  , _fileLines :: Int
  , _fileLinesMore :: Int
  , _fileIsPublic :: Bool
  , _filePublicURLShared :: Bool
  , _fileChannels :: [ID Channel]
  , _fileGroups :: [ID Group]
  , _fileIMs :: [ID IM]
  , _fileInitialComment :: Maybe Message
  , _fileNumStars :: Int
  , _fileIsStarred :: Bool }
-- | How the file content is stored/represented.
data FileMode
  = FileHosted
  | FileExternal
  | FileSnippet
  | FilePost
-- | A comment attached to a 'File'.
data FileComment = FileComment
  { _fileCommentID :: ID FileComment
  , _fileCommentTimestamp :: Time
  , _fileCommentUser :: ID User
  , _fileCommentComment :: Text }
-- | Every event the RTM websocket can deliver, one constructor per wire
-- event type; 'RtmReplyOk'/'RtmReplyNotOk' acknowledge our own sends by
-- sequence number.
data RtmEvent
  = RtmHello
  | RtmReplyOk Word64 (Maybe TS) (Maybe Text)
  | RtmReplyNotOk Word64 Int32 Text
  | RtmMessage Message
  | RtmChannelMarked (ChatMarked Channel)
  | RtmChannelCreated Channel
  | RtmChannelJoined Channel
  | RtmChannelLeft (ID Channel)
  | RtmChannelDeleted (ID Channel)
  | RtmChannelRenamed (ChatRenamed Channel)
  | RtmChannelArchive (ChatUser Channel)
  | RtmChannelUnarchive (ChatUser Channel)
  | RtmChannelHistoryChanged (ChatHistoryChanged Channel)
  | RtmIMCreated IMCreated
  | RtmIMOpen (ChatUser IM)
  | RtmIMClose (ChatUser IM)
  | RtmIMMarked (ChatMarked IM)
  | RtmIMHistoryChanged (ChatHistoryChanged IM)
  | RtmGroupJoined Group
  | RtmGroupLeft (ID Group)
  | RtmGroupOpen (ChatUser Group)
  | RtmGroupClose (ChatUser Group)
  | RtmGroupArchive (ID Group)
  | RtmGroupUnarchive (ID Group)
  | RtmGroupRename (ChatRenamed Group)
  | RtmGroupMarked (ChatMarked Group)
  | RtmGroupHistoryChanged (ChatHistoryChanged Group)
  | RtmFileCreated File
  | RtmFileShared File
  | RtmFileUnshared File
  | RtmFilePublic File
  | RtmFilePrivate (ID File)
  | RtmFileChange File
  | RtmFileDeleted FileDeleted
  | RtmFileCommentAdded FileCommentUpdated
  | RtmFileCommentEdited FileCommentUpdated
  | RtmFileCommentDeleted FileCommentDeleted
  | RtmPresenceChange PresenceChange
  | RtmManualPresenceChange Presence
  | RtmPrefChange PrefChange
  | RtmUserChange User
  | RtmUserTyping (ChatUser Chat)
  | RtmTeamJoin User
  | RtmStarAdded Star
  | RtmStarRemoved Star
  | RtmEmojiChanged TS
  | RtmCommandsChanged TS
  | RtmTeamPrefChange PrefChange
  | RtmTeamRename Text
  | RtmTeamDomainChange TeamDomainChange
  | RtmEmailDomainChanged EmailDomainChanged
  | RtmBotAdded Bot
  | RtmBotChanged Bot
  | RtmAccountsChanged
-- Payload records for the RTM events above.  The phantom parameter on the
-- @Chat*@ records ties each payload to the chat flavor it concerns.
data ChatMarked a = ChatMarked
  { _chatMarkedChannel :: ID a
  , _chatMarkedTS :: TS }
data ChatUser a = ChatUser
  { _chatUserUser :: ID User
  , _chatUserChannelID :: ID a }
data ChatRenamed a = ChatRenamed
  { _chatRenamedChannelID :: ID a
  , _chatRenamedName :: Text }
data ChatHistoryChanged a = ChatHistoryChanged
  { _chatHistoryChangedLatest :: Text
  , _chatHistoryChangedTS :: TS
  , _chatHistoryChangedEventTS :: TS }
data IMCreated = IMCreated
  { _imCreatedUser :: ID User
  , _imCreatedChannel :: IM }
data FileDeleted = FileDeleted
  { _fileDeletedFileID :: ID File
  , _fileDeletedEventTS :: TS }
data FileCommentUpdated = FileCommentUpdated
  { _fileCommentUpdatedFile :: File
  , _fileCommentUpdatedComment :: FileComment }
data FileCommentDeleted = FileCommentDeleted
  { _fileCommentDeletedFile :: File
  , _fileCommentDeletedComment :: ID FileComment }
data PresenceChange = PresenceChange
  { _presenceChangeUser :: ID User
  , _presenceChangePresence :: Presence }
data PrefChange = PrefChange
  { _prefChangeName :: Text
  , _prefChangeValue :: Value }
-- | A starring event and the item it concerns.
data Star = Star
  { _starUser :: Text
  , _starItem :: StarItem
  , _starEventTS :: TS }
-- | The various things that can be starred.
data StarItem
  = StarItemMessage Message
  | StarItemFile File
  | StarItemFileComment File FileComment
  | StarItemChannel (ID Channel)
  | StarItemIM (ID IM)
  | StarItemGroup (ID Group)
data TeamDomainChange = TeamDomainChange
  { _teamDomainChangeUrl :: Text
  , _teamDomainChangeDomain :: Text }
data EmailDomainChanged = EmailDomainChanged
  { _emailDomainChangedEmailDomain :: Text
  , _emailDomainChangedEventTS :: TS }
-- | An outbound message over the websocket, tagged with a sequence number
-- so the server's reply can be correlated (see 'RtmReplyOk').
data RtmSendMessage = RtmSendMessage
  { _sendMessageSeqnum :: Word64
  , _sendMessageChat :: ID Chat
  , _sendMessageText :: Text }
-- | Types whose IDs are recognisable from the prefix Slack uses when minting
-- them: C=channel, D=im, G=group, U=user, F=file, Fc=file comment.
class SlackTyped a where
    -- | Does the given ID (whatever its nominal type) denote an @a@?
    isTypedID :: Proxy a -> ID b -> Bool
instance SlackTyped Channel where
    isTypedID _ = isPrefixOf "C" . unID
instance SlackTyped File where
    -- "F" prefixed, but excluding "Fc" which denotes a file comment.
    isTypedID _ (ID t) = "F" `isPrefixOf` t && not ("Fc" `isPrefixOf` t)
instance SlackTyped FileComment where
    isTypedID _ (ID t) = "Fc" `isPrefixOf` t
instance SlackTyped Group where
    isTypedID _ = isPrefixOf "G" . unID
instance SlackTyped Chat where
    -- A Chat is the union of channels, IMs and groups.
    isTypedID _ i
        =  isTypedID (Proxy :: Proxy Channel) i
        || isTypedID (Proxy :: Proxy IM) i
        || isTypedID (Proxy :: Proxy Group) i
instance SlackTyped IM where
    isTypedID _ = isPrefixOf "D" . unID
instance SlackTyped User where
    isTypedID _ = isPrefixOf "U" . unID
-- | Prism from a chat ID onto an ID of the more specific type @a@. Review
-- gives a typed ID back by simply rewrapping the underlying text; preview
-- succeeds only when the prefix matches (via 'asTypedID').
typedID :: forall a. SlackTyped a => Prism' (ID Chat) (ID a)
typedID = prism' (\ (ID t) -> ID t) asTypedID
-- | Downcast an ID to type @b@ when its prefix says it really denotes a @b@,
-- 'Nothing' otherwise.
asTypedID :: forall a b. SlackTyped b => ID a -> Maybe (ID b)
asTypedID i
    | isTypedID (Proxy :: Proxy b) i = Just (ID (unID i))
    | otherwise                      = Nothing
-- | Narrow a chat ID to a channel ID, if it is one.
asChannelID :: ID Chat -> Maybe (ID Channel)
asChannelID = asTypedID
-- | Narrow a chat ID to a group ID, if it is one.
asGroupID :: ID Chat -> Maybe (ID Group)
asGroupID = asTypedID
-- | Narrow a chat ID to an IM ID, if it is one.
asIMID :: ID Chat -> Maybe (ID IM)
asIMID = asTypedID
-- Standalone-derived instances for all of the wire types above.
deriving instance Eq RtmStartRequest
deriving instance Eq RtmStartRp
deriving instance Eq Self
deriving instance Eq Team
deriving instance Eq User
deriving instance Eq Tz
deriving instance Eq Profile
deriving instance Eq Chat
deriving instance Eq Channel
deriving instance Eq Group
deriving instance Eq IM
deriving instance Eq Bot
deriving instance Eq MessageSubtype
deriving instance Enum MessageSubtype
deriving instance Ord MessageSubtype
deriving instance Bounded MessageSubtype
deriving instance Eq MessageReaction
deriving instance Eq Message
deriving instance Eq MessageEdited
deriving instance Eq Attachment
deriving instance Eq AttachmentField
deriving instance Eq a => Eq (SlackTracked a)
deriving instance Eq FileMode
deriving instance Eq File
deriving instance Eq FileComment
deriving instance Eq RtmEvent
deriving instance Eq a => Eq (ChatMarked a)
deriving instance Eq a => Eq (ChatUser a)
deriving instance Eq a => Eq (ChatRenamed a)
deriving instance Eq a => Eq (ChatHistoryChanged a)
deriving instance Eq IMCreated
deriving instance Eq FileDeleted
deriving instance Eq FileCommentUpdated
deriving instance Eq FileCommentDeleted
deriving instance Eq Presence
deriving instance Eq PresenceChange
deriving instance Eq PrefChange
deriving instance Eq Star
deriving instance Eq StarItem
deriving instance Eq TeamDomainChange
deriving instance Eq EmailDomainChanged
deriving instance Eq RtmSendMessage
-- Template Haskell: lenses for the record types, prisms for the sum types
-- ('RtmEvent' and 'StarItem').
makeLenses ''RtmStartRequest
makeLenses ''RtmStartRp
makeLenses ''Self
makeLenses ''Team
makeLenses ''User
makeLenses ''Tz
makeLenses ''Profile
makeLenses ''Channel
makeLenses ''Group
makeLenses ''IM
makeLenses ''Bot
makeLenses ''MessageReaction
makeLenses ''Message
makeLenses ''MessageEdited
makeLenses ''Attachment
makeLenses ''AttachmentField
makeLenses ''SlackTracked
makeLenses ''File
makeLenses ''FileComment
makePrisms ''RtmEvent
makeLenses ''ChatMarked
makeLenses ''ChatUser
makeLenses ''ChatRenamed
makeLenses ''ChatHistoryChanged
makeLenses ''IMCreated
makeLenses ''FileDeleted
makeLenses ''FileCommentUpdated
makeLenses ''FileCommentDeleted
makeLenses ''PresenceChange
makeLenses ''PrefChange
makeLenses ''Star
makePrisms ''StarItem
makeLenses ''TeamDomainChange
makeLenses ''EmailDomainChanged
makeLenses ''RtmSendMessage
-- | 'Chat' values are rendered as the constant string "Chat".
instance TextShow Chat where
    showb _ = "Chat"
-- Template Haskell: TextShow instances for all of the wire types.
deriveTextShow ''RtmStartRequest
deriveTextShow ''RtmStartRp
deriveTextShow ''Self
deriveTextShow ''Presence
deriveTextShow ''Team
deriveTextShow ''User
deriveTextShow ''Tz
deriveTextShow ''Profile
deriveTextShow ''Channel
deriveTextShow ''Group
deriveTextShow ''IM
deriveTextShow ''Bot
deriveTextShow ''Message
deriveTextShow ''MessageSubtype
deriveTextShow ''MessageEdited
deriveTextShow ''MessageReaction
deriveTextShow ''Attachment
deriveTextShow ''AttachmentField
deriveTextShow ''SlackTracked
deriveTextShow ''File
deriveTextShow ''FileMode
deriveTextShow ''FileComment
deriveTextShow ''RtmEvent
deriveTextShow ''ChatMarked
deriveTextShow ''ChatUser
deriveTextShow ''ChatRenamed
deriveTextShow ''ChatHistoryChanged
deriveTextShow ''IMCreated
deriveTextShow ''FileDeleted
deriveTextShow ''FileCommentUpdated
deriveTextShow ''FileCommentDeleted
deriveTextShow ''PresenceChange
deriveTextShow ''PrefChange
deriveTextShow ''Star
deriveTextShow ''StarItem
deriveTextShow ''TeamDomainChange
deriveTextShow ''EmailDomainChanged
deriveTextShow ''RtmSendMessage
-- | Only the API token goes in the rtm.start request body.
instance ToJSON RtmStartRequest where
    toJSON (RtmStartRequest { .. }) = object
        [ ("token", toJSON rtmStartToken) ]
-- | Wrap a payload parser in Slack's standard response envelope: when the
-- "ok" flag is true the payload is parsed, otherwise the "error" field is
-- surfaced (with a fallback when Slack omits it).
asResponse :: ABE.Parse Text a -> ABE.Parse Text (Response a)
asResponse parseInner = do
    ok <- ABE.key "ok" ABE.asBool
    if ok
        then ResponseOk <$> parseInner
        else ResponseNotOk <$> ABE.keyOrDefault "error" "unknown error" ABE.asText
-- | Parse the rtm.start response payload.
asRtmStartRp :: ABE.Parse Text RtmStartRp
asRtmStartRp =
    RtmStartRp
        <$> ABE.key "url" ABE.asText
        <*> ABE.key "self" asSelf
        <*> ABE.key "team" asTeam
        <*> ABE.key "users" (ABE.eachInArray asUser)
        <*> ABE.key "channels" (ABE.eachInArray asChannel)
        <*> ABE.key "groups" (ABE.eachInArray asGroup)
        <*> ABE.key "ims" (ABE.eachInArray asIM)
        <*> ABE.key "bots" (ABE.eachInArray asBot)
-- | Parse the authenticated caller's own record from rtm.start.
asSelf :: ABE.Parse Text Self
asSelf =
    Self
        <$> ABE.key "id" asID
        <*> ABE.key "name" ABE.asText
        <*> ABE.key "prefs" ABE.asObject
        <*> ABE.key "created" asTime
        <*> ABE.key "manual_presence" asPresence
-- | Parse a presence string; only "active" and "away" are recognised,
-- anything else is a parse error.
asPresence :: ABE.Parse Text Presence
asPresence =
    ABE.asText >>= \ t ->
        case t of
            "active" -> pure PresenceActive
            "away" -> pure PresenceAway
            other -> ABE.throwCustomError $ "unknown presence value " <> other
-- | Parse a team record. An empty "email_domain" and a -1
-- "msg_edit_window_mins" are Slack's "unset" sentinels, so both are
-- normalised to 'Nothing' via 'mfilter'.
asTeam :: ABE.Parse Text Team
asTeam =
    Team
        <$> ABE.key "id" asID
        <*> ABE.key "name" ABE.asText
        <*> (mfilter (not . null) <$> ABE.keyMay "email_domain" ABE.asText)
        <*> ABE.key "domain" ABE.asText
        <*> (mfilter (not . (==) (-1)) <$> ABE.keyMay "msg_edit_window_mins" ABE.asIntegral)
        <*> ABE.key "over_storage_limit" ABE.asBool
        <*> ABE.key "prefs" ABE.asObject
-- | Parse a user record. The three timezone keys are combined into a 'Tz'
-- only when all of them are present; most boolean flags default to False
-- when absent.
asUser :: ABE.Parse Text User
asUser =
    User
        <$> ABE.key "id" asID
        <*> ABE.key "name" ABE.asText
        <*> ABE.keyMay "real_name" ABE.asText
        <*> ABE.key "deleted" ABE.asBool
        <*> ABE.keyMay "color" ABE.asText
        <*> ( ( (,,) <$> (join <$> ABE.keyMay "tz" (ABE.perhaps ABE.asText))
                     <*> ABE.keyMay "tz_label" ABE.asText
                     <*> ABE.keyMay "tz_offset" ABE.asIntegral )
              >>= \ (tzMay, labelMay, offsMay) -> pure $ Tz <$> tzMay <*> labelMay <*> offsMay )
        <*> ABE.key "profile" asProfile
        <*> ABE.keyOrDefault "is_admin" False ABE.asBool
        <*> ABE.keyOrDefault "is_owner" False ABE.asBool
        <*> ABE.keyOrDefault "is_primary_owner" False ABE.asBool
        <*> ABE.keyOrDefault "is_restricted" False ABE.asBool
        <*> ABE.keyOrDefault "is_ultra_restricted" False ABE.asBool
        <*> ABE.keyOrDefault "has_2fa" False ABE.asBool
        <*> ABE.keyMay "two_factor_type" ABE.asText
        <*> ABE.keyOrDefault "has_files" False ABE.asBool
        <*> ABE.keyMay "presence" asPresence
-- | Parse a user profile; every field is optional.
asProfile :: ABE.Parse Text Profile
asProfile =
    Profile
        <$> ABE.keyMay "first_name" ABE.asText
        <*> ABE.keyMay "last_name" ABE.asText
        <*> ABE.keyMay "real_name" ABE.asText
        <*> ABE.keyMay "real_name_normalized" ABE.asText
        <*> ABE.keyMay "email" ABE.asText
        <*> ABE.keyMay "skype" ABE.asText
        <*> ABE.keyMay "phone" ABE.asText
        <*> asThumbs
-- | Collect the "image_NN" thumbnail URLs that are present into a map keyed
-- by pixel size (the fixed set of sizes Slack emits).
asThumbs :: ABE.Parse Text (IntMap Text)
asThumbs =
    mapFromList . catMaybes
        <$> mapM (\ n -> map (n, ) <$> ABE.keyMay ("image_" <> tshow n) ABE.asText)
            [24 :: Int, 32, 48, 72, 192, 512]
-- | Parse a public channel record.
asChannel :: ABE.Parse Text Channel
asChannel =
    Channel
        <$> ABE.key "id" asID
        <*> ABE.key "name" ABE.asText
        <*> ABE.key "created" asTime
        <*> ABE.key "creator" asID
        <*> ABE.key "is_archived" ABE.asBool
        <*> ABE.keyOrDefault "is_general" False ABE.asBool
        <*> ABE.keyOrDefault "members" [] (ABE.eachInArray asID)
        <*> ABE.keyMay "topic" (asSlackTracked ABE.asText)
        <*> ABE.keyMay "purpose" (asSlackTracked ABE.asText)
        <*> ABE.keyOrDefault "is_member" False ABE.asBool
        <*> ABE.keyMay "last_read" asTS
        <*> ABE.keyMay "latest" asMessage
        <*> ABE.keyMay "unread_count" ABE.asIntegral
-- | Parse a private group record.
asGroup :: ABE.Parse Text Group
asGroup =
    Group
        <$> ABE.key "id" asID
        <*> ABE.key "name" ABE.asText
        <*> ABE.key "created" asTime
        <*> ABE.key "creator" asID
        <*> ABE.key "is_archived" ABE.asBool
        <*> ABE.keyOrDefault "members" [] (ABE.eachInArray asID)
        <*> ABE.keyMay "topic" (asSlackTracked ABE.asText)
        <*> ABE.keyMay "purpose" (asSlackTracked ABE.asText)
        <*> ABE.keyOrDefault "is_open" False ABE.asBool
        <*> ABE.keyMay "last_read" asTS
        <*> ABE.keyMay "latest" asMessage
        <*> ABE.keyMay "unread_count" ABE.asIntegral
-- | Parse an IM (direct message channel) record.
asIM :: ABE.Parse Text IM
asIM =
    IM
        <$> ABE.key "id" asID
        <*> ABE.key "user" asID
        <*> ABE.key "created" asTime
        <*> ABE.keyOrDefault "is_user_deleted" False ABE.asBool
        <*> ABE.keyOrDefault "is_open" False ABE.asBool
        <*> ABE.keyMay "last_read" asTS
        <*> ABE.keyMay "latest" asMessage
        <*> ABE.keyMay "unread_count" ABE.asIntegral
-- | Parse a bot record; icons are a name-to-URL map.
asBot :: ABE.Parse Text Bot
asBot =
    Bot
        <$> ABE.key "id" asID
        <*> ABE.key "name" ABE.asText
        <*> ABE.keyOrDefault "icons" mempty (mapFromList <$> ABE.eachInObject ABE.asText)
-- | Parse a value along with who set it and when (Slack's tracked-value
-- envelope, used for channel topics and purposes).
asSlackTracked :: ABE.Parse Text a -> ABE.Parse Text (SlackTracked a)
asSlackTracked parseValue =
    SlackTracked
        <$> ABE.key "value" parseValue
        <*> ABE.key "creator" asID
        <*> ABE.key "last_set" asTime
-- | Parse a Slack @message@ object (also embedded in chat "latest" fields
-- and star items).
--
-- Bug fix: the event timestamp is delivered under the @"event_ts"@ key in
-- the Slack API; this parser previously read the misspelled key
-- @"event_is"@, so that field always came back 'Nothing'.
asMessage :: ABE.Parse Text Message
asMessage =
    Message
        <$> ABE.keyMay "channel" asID
        <*> ABE.keyMay "user" asID
        <*> ABE.keyMay "subtype" asMessageSubtype
        <*> ABE.keyMay "text" ABE.asText
        <*> ABE.key "ts" asTS
        <*> ABE.keyMay "edited" asMessageEdited
        <*> ABE.keyMay "deleted_ts" asTS
        <*> ABE.keyMay "event_ts" asTS
        <*> ABE.keyOrDefault "hidden" False ABE.asBool
        <*> ABE.keyOrDefault "attachments" [] (ABE.eachInArray asAttachment)
        <*> ABE.keyMay "inviter" asID
        <*> ABE.keyOrDefault "is_starred" False ABE.asBool
        <*> ABE.keyOrDefault "pinned_to" [] (ABE.eachInArray asID)
        <*> ABE.keyOrDefault "reactions" [] (ABE.eachInArray asMessageReaction)
-- | Parse a message subtype string, failing the parse on unknown subtypes.
asMessageSubtype :: ABE.Parse Text MessageSubtype
asMessageSubtype = ABE.asText >>= either ABE.throwCustomError pure . messageSubtypeFromText
-- | Decode a Slack message subtype string; intended as the inverse of
-- 'messageSubtypeToText'. Unknown subtypes come back as a 'Left' error text.
messageSubtypeFromText :: Text -> Either Text MessageSubtype
messageSubtypeFromText = \ case
    "bot_message" -> Right BotMS
    "me_message" -> Right MeMS
    "message_changed" -> Right ChangedMS
    "message_deleted" -> Right DeletedMS
    "channel_join" -> Right ChannelJoinMS
    "channel_leave" -> Right ChannelLeaveMS
    "channel_topic" -> Right ChannelTopicMS
    "channel_purpose" -> Right ChannelPurposeMS
    "channel_name" -> Right ChannelNameMS
    "channel_archive" -> Right ChannelArchiveMS
    "channel_unarchive" -> Right ChannelUnarchiveMS
    "group_join" -> Right GroupJoinMS
    "group_leave" -> Right GroupLeaveMS
    "group_topic" -> Right GroupTopicMS
    "group_purpose" -> Right GroupPurposeMS
    "group_name" -> Right GroupNameMS
    "group_archive" -> Right GroupArchiveMS
    "group_unarchive" -> Right GroupUnarchiveMS
    "file_share" -> Right FileShareMS
    "file_comment" -> Right FileCommentMS
    "file_mention" -> Right FileMentionMS
    "pinned_item" -> Right PinnedItemMS
    "reminder_add" -> Right ReminderAddMS
    "reminder_delete" -> Right ReminderDeleteMS
    "bot_add" -> Right BotAddMS
    other -> Left $ "unknown message subtype " <> other
-- | Encode a message subtype back to Slack's wire string; intended as the
-- inverse of 'messageSubtypeFromText'.
messageSubtypeToText :: MessageSubtype -> Text
messageSubtypeToText = \ case
    BotMS -> "bot_message"
    MeMS -> "me_message"
    ChangedMS -> "message_changed"
    DeletedMS -> "message_deleted"
    ChannelJoinMS -> "channel_join"
    ChannelLeaveMS -> "channel_leave"
    ChannelTopicMS -> "channel_topic"
    ChannelPurposeMS -> "channel_purpose"
    ChannelNameMS -> "channel_name"
    ChannelArchiveMS -> "channel_archive"
    ChannelUnarchiveMS -> "channel_unarchive"
    GroupJoinMS -> "group_join"
    GroupLeaveMS -> "group_leave"
    GroupTopicMS -> "group_topic"
    GroupPurposeMS -> "group_purpose"
    GroupNameMS -> "group_name"
    GroupArchiveMS -> "group_archive"
    GroupUnarchiveMS -> "group_unarchive"
    FileShareMS -> "file_share"
    FileCommentMS -> "file_comment"
    FileMentionMS -> "file_mention"
    PinnedItemMS -> "pinned_item"
    ReminderAddMS -> "reminder_add"
    ReminderDeleteMS -> "reminder_delete"
    BotAddMS -> "bot_add"
-- Aeson (de)serialisation for message subtypes, reusing the Text codecs.
instance FromJSON MessageSubtype where
    parseJSON = ABE.toAesonParser id asMessageSubtype
instance ToJSON MessageSubtype where
    toJSON = toJSON . messageSubtypeToText
-- | Parse the "edited" envelope on a message: who edited it and when.
asMessageEdited :: ABE.Parse Text MessageEdited
asMessageEdited =
    MessageEdited
        <$> ABE.key "user" asID
        <*> ABE.key "ts" asTS
-- | Parse one emoji reaction on a message.
asMessageReaction :: ABE.Parse Text MessageReaction
asMessageReaction =
    MessageReaction
        <$> ABE.key "name" ABE.asText
        <*> ABE.key "count" ABE.asIntegral
        <*> ABE.key "users" (ABE.eachInArray asID)
-- | Parse a message attachment; nearly every field is optional.
asAttachment :: ABE.Parse Text Attachment
asAttachment =
    Attachment
        <$> ABE.keyMay "fallback" ABE.asText
        <*> ABE.keyMay "color" ABE.asText
        <*> ABE.keyMay "pretext" ABE.asText
        <*> ABE.keyMay "author_name" ABE.asText
        <*> ABE.keyMay "author_link" ABE.asText
        <*> ABE.keyMay "author_icon" ABE.asText
        <*> ABE.keyMay "title" ABE.asText
        <*> ABE.keyMay "title_link" ABE.asText
        <*> ABE.keyMay "text" ABE.asText
        <*> ABE.keyOrDefault "fields" [] (ABE.eachInArray asAttachmentField)
        <*> ABE.keyMay "from_url" ABE.asText
        <*> ABE.keyMay "thumb_url" ABE.asText
        <*> ABE.keyMay "thumb_width" ABE.asIntegral
        <*> ABE.keyMay "thumb_height" ABE.asIntegral
        <*> ABE.keyOrDefault "id" 1 ABE.asIntegral -- FIXME? this defaulting is a lie!
-- | Parse one field of an attachment's field table.
asAttachmentField :: ABE.Parse Text AttachmentField
asAttachmentField =
    AttachmentField
        <$> ABE.key "title" ABE.asText
        <*> ABE.key "value" ABE.asText
        <*> ABE.key "short" ABE.asBool
-- | Parse a Slack @file@ object.
--
-- Bug fix: the star count arrives under the @"num_stars"@ key in the Slack
-- API; this parser previously read the misspelled key @"num_starts"@, so
-- the count always fell back to its default of 0.
asFile :: ABE.Parse Text File
asFile =
    File
        <$> ABE.key "id" asID
        <*> ABE.key "created" asTime
        <*> ABE.key "timestamp" asTime
        <*> ABE.key "name" ABE.asText
        <*> ABE.key "title" ABE.asText
        <*> ABE.key "mimetype" ABE.asText
        <*> ABE.key "filetype" ABE.asText
        <*> ABE.key "pretty_type" ABE.asText
        <*> ABE.key "user" asID
        <*> ABE.key "mode" asFileMode
        <*> ABE.key "editable" ABE.asBool
        <*> ABE.key "is_external" ABE.asBool
        <*> ABE.key "external_type" ABE.asText
        <*> ABE.key "size" ABE.asIntegral
        <*> ABE.key "url" ABE.asText
        <*> ABE.key "url_download" ABE.asText
        <*> ABE.key "url_private" ABE.asText
        <*> ABE.key "url_private_download" ABE.asText
        <*> asThumbs
        <*> ABE.key "permalink" ABE.asText
        <*> ABE.key "edit_link" ABE.asText
        <*> ABE.key "preview" ABE.asText
        <*> ABE.key "preview_highlight" ABE.asText
        <*> ABE.key "lines" ABE.asIntegral
        <*> ABE.key "lines_more" ABE.asIntegral
        <*> ABE.key "is_public" ABE.asBool
        <*> ABE.key "public_url_shared" ABE.asBool
        <*> ABE.keyOrDefault "channels" [] (ABE.eachInArray asID)
        <*> ABE.keyOrDefault "groups" [] (ABE.eachInArray asID)
        <*> ABE.keyOrDefault "ims" [] (ABE.eachInArray asID)
        <*> ABE.keyMay "initial_comment" asMessage
        <*> ABE.keyOrDefault "num_stars" 0 ABE.asIntegral
        <*> ABE.keyOrDefault "is_starred" False ABE.asBool
-- | Parse the storage mode of a file; anything outside the four documented
-- modes is a parse error.
asFileMode :: ABE.Parse Text FileMode
asFileMode =
    ABE.asText >>= \ case
        "hosted" -> pure FileHosted
        "external" -> pure FileExternal
        "snippet" -> pure FileSnippet
        "post" -> pure FilePost
        other -> ABE.throwCustomError $ "unknown file mode " <> other
-- | Parse a comment attached to a file.
asFileComment :: ABE.Parse Text FileComment
asFileComment =
    FileComment
        <$> ABE.key "id" asID
        <*> ABE.key "timestamp" asTime
        <*> ABE.key "user" asID
        <*> ABE.key "comment" ABE.asText
-- | Parse one frame received on the RTM websocket. A frame with a
-- "reply_to" key is the acknowledgement of a message we sent ('RtmReplyOk'
-- or 'RtmReplyNotOk' depending on its "ok" flag); otherwise the frame is a
-- server event, dispatched on its "type" string.
asRtmEvent :: ABE.Parse Text RtmEvent
asRtmEvent =
    ABE.keyMay "reply_to" ABE.asIntegral >>= \ case
        Just seqnum ->
            ABE.key "ok" ABE.asBool >>= \ case
                True -> RtmReplyOk seqnum <$> ABE.keyMay "ts" asTS
                                          <*> ABE.keyMay "text" ABE.asText
                False -> ABE.key "error" ( RtmReplyNotOk seqnum <$> ABE.key "code" ABE.asIntegral
                                                                <*> ABE.key "msg" ABE.asText )
        Nothing ->
            ABE.key "type" ABE.asText >>= \ case
                "hello" -> pure RtmHello
                "message" -> RtmMessage <$> asMessage
                "channel_marked" -> RtmChannelMarked <$> asChatMarked
                "channel_created" -> RtmChannelCreated <$> ABE.key "channel" asChannel
                "channel_joined" -> RtmChannelJoined <$> ABE.key "channel" asChannel
                "channel_left" -> RtmChannelLeft <$> ABE.key "channel" asID
                "channel_deleted" -> RtmChannelDeleted <$> ABE.key "channel" asID
                "channel_rename" -> RtmChannelRenamed <$> ABE.key "channel" asChatRenamed
                "channel_archive" -> RtmChannelArchive <$> asChatUser
                "channel_unarchive" -> RtmChannelUnarchive <$> asChatUser
                "channel_history_changed" -> RtmChannelHistoryChanged <$> asChatHistoryChanged
                "im_created" -> RtmIMCreated <$> asIMCreated
                "im_open" -> RtmIMOpen <$> asChatUser
                "im_close" -> RtmIMClose <$> asChatUser
                "im_marked" -> RtmIMMarked <$> asChatMarked
                "im_history_changed" -> RtmIMHistoryChanged <$> asChatHistoryChanged
                "group_joined" -> RtmGroupJoined <$> ABE.key "channel" asGroup
                "group_left" -> RtmGroupLeft <$> ABE.key "channel" asID
                "group_open" -> RtmGroupOpen <$> asChatUser
                "group_close" -> RtmGroupClose <$> asChatUser
                "group_archive" -> RtmGroupArchive <$> ABE.key "channel" asID
                "group_unarchive" -> RtmGroupUnarchive <$> ABE.key "channel" asID
                "group_rename" -> RtmGroupRename <$> ABE.key "channel" asChatRenamed
                "group_marked" -> RtmGroupMarked <$> asChatMarked
                "group_history_changed" -> RtmGroupHistoryChanged <$> asChatHistoryChanged
                "file_created" -> RtmFileCreated <$> ABE.key "file" asFile
                "file_shared" -> RtmFileShared <$> ABE.key "file" asFile
                "file_unshared" -> RtmFileUnshared <$> ABE.key "file" asFile
                "file_public" -> RtmFilePublic <$> ABE.key "file" asFile
                "file_private" -> RtmFilePrivate <$> ABE.key "file" asID
                "file_change" -> RtmFileChange <$> ABE.key "file" asFile
                "file_deleted" -> RtmFileDeleted <$> asFileDeleted
                "file_comment_added" -> RtmFileCommentAdded <$> asFileCommentUpdated
                "file_comment_edited" -> RtmFileCommentEdited <$> asFileCommentUpdated
                "file_comment_deleted" -> RtmFileCommentDeleted <$> asFileCommentDeleted
                "presence_change" -> RtmPresenceChange <$> asPresenceChange
                "manual_presence_change" -> RtmManualPresenceChange <$> ABE.key "presence" asPresence
                "user_typing" -> RtmUserTyping <$> asChatUser
                "pref_change" -> RtmPrefChange <$> asPrefChange
                "user_change" -> RtmUserChange <$> ABE.key "user" asUser
                "team_join" -> RtmTeamJoin <$> ABE.key "user" asUser
                "star_added" -> RtmStarAdded <$> asStar
                "star_removed" -> RtmStarRemoved <$> asStar
                "emoji_changed" -> RtmEmojiChanged <$> ABE.key "event_ts" asTS
                "commands_changed" -> RtmCommandsChanged <$> ABE.key "event_ts" asTS
                "team_pref_change" -> RtmTeamPrefChange <$> asPrefChange
                "team_rename" -> RtmTeamRename <$> ABE.key "name" ABE.asText
                "team_domain_change" -> RtmTeamDomainChange <$> asTeamDomainChange
                "email_domain_changed" -> RtmEmailDomainChanged <$> asEmailDomainChanged
                "bot_added" -> RtmBotAdded <$> ABE.key "bot" asBot
                "bot_changed" -> RtmBotChanged <$> ABE.key "bot" asBot
                "accounts_changed" -> pure RtmAccountsChanged
                other -> ABE.throwCustomError $ "unknown RTM event type " <> other
-- Parsers for the small event payloads dispatched to by 'asRtmEvent'.
-- | Parse a read-cursor update.
asChatMarked :: ABE.Parse Text (ChatMarked a)
asChatMarked =
    ChatMarked
        <$> ABE.key "channel" asID
        <*> ABE.key "ts" asTS
-- | Parse a user/chat pair.
asChatUser :: ABE.Parse Text (ChatUser a)
asChatUser =
    ChatUser
        <$> ABE.key "channel" asID
        <*> ABE.key "user" asID
-- | Parse a chat rename payload (the nested "channel" object).
asChatRenamed :: ABE.Parse Text (ChatRenamed a)
asChatRenamed =
    ChatRenamed
        <$> ABE.key "id" asID
        <*> ABE.key "name" ABE.asText
-- | Parse a bulk-history-change notification.
asChatHistoryChanged :: ABE.Parse Text (ChatHistoryChanged a)
asChatHistoryChanged =
    ChatHistoryChanged
        <$> ABE.key "latest" ABE.asText
        <*> ABE.key "ts" asTS
        <*> ABE.key "event_ts" asTS
-- | Parse an IM-created payload; "channel" is a full IM object here.
asIMCreated :: ABE.Parse Text IMCreated
asIMCreated =
    IMCreated
        <$> ABE.key "user" asID
        <*> ABE.key "channel" asIM
-- | Parse a file-deleted payload; only the file's ID is delivered.
asFileDeleted :: ABE.Parse Text FileDeleted
asFileDeleted =
    FileDeleted
        <$> ABE.key "file_id" asID
        <*> ABE.key "event_ts" asTS
-- | Parse a file-comment added/edited payload.
asFileCommentUpdated :: ABE.Parse Text FileCommentUpdated
asFileCommentUpdated =
    FileCommentUpdated
        <$> ABE.key "file" asFile
        <*> ABE.key "comment" asFileComment
-- | Parse a file-comment deleted payload; the comment arrives as an ID only.
asFileCommentDeleted :: ABE.Parse Text FileCommentDeleted
asFileCommentDeleted =
    FileCommentDeleted
        <$> ABE.key "file" asFile
        <*> ABE.key "comment" asID
-- | Parse a presence-change payload.
asPresenceChange :: ABE.Parse Text PresenceChange
asPresenceChange =
    PresenceChange
        <$> ABE.key "user" asID
        <*> ABE.key "presence" asPresence
-- | Parse a preference change; the value is kept as raw JSON.
asPrefChange :: ABE.Parse Text PrefChange
asPrefChange =
    PrefChange
        <$> ABE.key "name" ABE.asText
        <*> ABE.key "value" (ABEI.withValue Right)
-- | Parse a star added/removed payload.
asStar :: ABE.Parse Text Star
asStar =
    Star
        <$> ABE.key "user" ABE.asText
        <*> ABE.key "item" asStarItem
        <*> ABE.key "event_ts" asTS
-- | Parse the starred item itself, dispatching on its "type".
asStarItem :: ABE.Parse Text StarItem
asStarItem =
    ABE.key "type" ABE.asText >>= \ case
        "message" -> StarItemMessage <$> ABE.key "message" asMessage
        "file" -> StarItemFile <$> ABE.key "file" asFile
        "file_comment" -> StarItemFileComment <$> ABE.key "file" asFile <*> ABE.key "comment" asFileComment
        "channel" -> StarItemChannel <$> ABE.key "channel" asID
        "im" -> StarItemIM <$> ABE.key "im" asID
        "group" -> StarItemGroup <$> ABE.key "group" asID
        other -> ABE.throwCustomError $ "unknown starrable item type " <> other
-- | Parse a team URL/domain change payload.
asTeamDomainChange :: ABE.Parse Text TeamDomainChange
asTeamDomainChange =
    TeamDomainChange
        <$> ABE.key "url" ABE.asText
        <*> ABE.key "domain" ABE.asText
-- | Parse an email-domain change payload.
asEmailDomainChanged :: ABE.Parse Text EmailDomainChanged
asEmailDomainChanged =
    EmailDomainChanged
        <$> ABE.key "email_domain" ABE.asText
        <*> ABE.key "event_ts" asTS
-- | Serialise an outbound RTM message frame; "id" carries the sequence
-- number Slack echoes back in its acknowledgement.
instance ToJSON RtmSendMessage where
    toJSON (RtmSendMessage seqnum chat message) = object
        [ "type" .= ("message" :: Text)
        , "id" .= seqnum
        , "channel" .= chat
        , "text" .= message
        ]
| Dridus/alexandria | server/Slack.hs | mit | 37,892 | 0 | 39 | 8,899 | 10,221 | 5,211 | 5,010 | -1 | -1 |
{-# LANGUAGE TypeApplications, ScopedTypeVariables, LambdaCase,
ViewPatterns, RecordWildCards,
AllowAmbiguousTypes, GADTs, TypeFamilies, KindSignatures, DataKinds,
TemplateHaskell #-}
module PrintModGuts (
-- *Pieces of 'ModGuts' for printing
ModGutsInfo(..), fullInfo, summaryInfo,
-- *Printing 'ModGuts'
formatModGuts, printModGuts,
-- *Type hackery
ModGutsInfoType, KnownInfo(..), SomeKnownModGutsInfo(..), known, unknown
) where
import qualified Language.Haskell.TH as TH
import Data.Generics (Proxy(..))
import Data.Bifunctor
import Data.Foldable
import Data.Traversable
import Data.Function
import Data.Maybe
import Data.List (sortBy)
import GHC
import Avail (AvailInfo())
import PatSyn (PatSyn())
import InstEnv (InstEnv(), instEnvElts, is_orphan)
import FamInstEnv (FamInstEnv(), famInstEnvElts)
import GhcPlugins
import PprCore
import Text.PrettyPrint.Util
-- These aren't in the order they show up in 'ModGuts', necessarily; they're in
-- the most useful order for printing.
-- | One printable facet of a 'ModGuts'. The constructor order here is the
-- order facets are most usefully printed in, not the order of the
-- corresponding 'ModGuts' fields.
data ModGutsInfo
  = MGI_Module
  | MGI_Location
  | MGI_Exports
  | MGI_InPackageImports
  | MGI_PackageDeps
  | MGI_OrphanInstanceDeps
  | MGI_TypeFamilyInstanceDeps
  | MGI_UsedFiles
  | MGI_UsedTemplateHaskell
  | MGI_Environment
  | MGI_Fixities
  | MGI_TypeConstructors
  | MGI_TypeClassInstances
  | MGI_TypeFamilyInstances
  | MGI_PatternSynonyms
  | MGI_Rules
  | MGI_ForeignStubs
  | MGI_Warnings
  | MGI_Annotations
  | MGI_HpcInfo
  | MGI_Breakpoints
  | MGI_VectorizationPragmas
  | MGI_VectorizedDeclarations
  | MGI_TypeClassInstanceEnvironment
  | MGI_TypeFamilyInstanceEnvironment
  | MGI_SafeHaskell
  | MGI_NeedToTrustSelfPkg
  | MGI_Contents
  deriving (Eq, Ord, Enum, Bounded, Show, Read)
-- | Maps each (promoted) 'ModGutsInfo' tag to the type of the data that
-- facet extracts from a 'ModGuts'.
type family ModGutsInfoType (info :: ModGutsInfo) :: * where
  ModGutsInfoType 'MGI_Module = (Module, HscSource)
  ModGutsInfoType 'MGI_Location = SrcSpan
  ModGutsInfoType 'MGI_Exports = [AvailInfo]
  ModGutsInfoType 'MGI_InPackageImports = [(ModuleName, IsBootInterface)]
  ModGutsInfoType 'MGI_PackageDeps = [(UnitId, Bool)]
  ModGutsInfoType 'MGI_OrphanInstanceDeps = [Module]
  ModGutsInfoType 'MGI_TypeFamilyInstanceDeps = [Module]
  ModGutsInfoType 'MGI_UsedFiles = [Usage]
  ModGutsInfoType 'MGI_UsedTemplateHaskell = Bool
  ModGutsInfoType 'MGI_Environment = GlobalRdrEnv
  ModGutsInfoType 'MGI_Fixities = FixityEnv
  ModGutsInfoType 'MGI_TypeConstructors = [TyCon]
  ModGutsInfoType 'MGI_TypeClassInstances = [ClsInst]
  ModGutsInfoType 'MGI_TypeFamilyInstances = [FamInst]
  ModGutsInfoType 'MGI_PatternSynonyms = [PatSyn]
  ModGutsInfoType 'MGI_Rules = [CoreRule]
  ModGutsInfoType 'MGI_ForeignStubs = ForeignStubs
  ModGutsInfoType 'MGI_Warnings = Warnings
  ModGutsInfoType 'MGI_Annotations = [Annotation]
  ModGutsInfoType 'MGI_HpcInfo = HpcInfo
  ModGutsInfoType 'MGI_Breakpoints = Maybe ModBreaks
  ModGutsInfoType 'MGI_VectorizationPragmas = [CoreVect]
  ModGutsInfoType 'MGI_VectorizedDeclarations = VectInfo
  ModGutsInfoType 'MGI_TypeClassInstanceEnvironment = InstEnv
  ModGutsInfoType 'MGI_TypeFamilyInstanceEnvironment = FamInstEnv
  ModGutsInfoType 'MGI_SafeHaskell = SafeHaskellMode
  ModGutsInfoType 'MGI_NeedToTrustSelfPkg = Bool
  ModGutsInfoType 'MGI_Contents = CoreProgram
-- | Everything needed to print one facet: its value-level tag, a label,
-- how to extract it from a 'ModGuts', and how to render it.
class KnownInfo (info :: ModGutsInfo) where
  knownInfo :: ModGutsInfo
  infoDescription :: String
  infoData :: ModGuts -> ModGutsInfoType info
  infoFormat :: ModGutsInfoType info -> SDoc
-- | A 'ModGutsInfo' tag packaged with its 'KnownInfo' dictionary.
data SomeKnownModGutsInfo where
  Known :: KnownInfo info => Proxy info -> SomeKnownModGutsInfo
-- | Recover the value-level tag from a packaged facet.
unknown :: SomeKnownModGutsInfo -> ModGutsInfo
unknown (Known (Proxy :: Proxy info)) = knownInfo @info
-- Equality/ordering/printing all go through the value-level tag.
instance Eq SomeKnownModGutsInfo where (==) = (==) `on` unknown
instance Ord SomeKnownModGutsInfo where compare = compare `on` unknown
instance Show SomeKnownModGutsInfo where
  showsPrec p (unknown -> info) =
    showParen (p >= 11) $ showString "Known @'" . shows info . showString " Proxy"
-- Template Haskell: generate the total function
--   known :: ModGutsInfo -> SomeKnownModGutsInfo
-- with one clause per 'ModGutsInfo' constructor, packaging the value-level
-- tag with the 'KnownInfo' dictionary of its promoted counterpart.
do sig <- TH.sigD (TH.mkName "known") [t|ModGutsInfo -> SomeKnownModGutsInfo|]
   TH.TyConI (TH.DataD _ _ _ _ cons _) <- TH.reify ''ModGutsInfo
   clauses <- fmap (TH.FunD $ TH.mkName "known") . for cons $ \case
     TH.NormalC info _ ->
       TH.clause [TH.conP info []]
                 (TH.normalB [e|Known (Proxy :: Proxy $(TH.promotedT info))|])
                 []
     _ ->
       fail "internal error: could not define `known'"
   pure [sig, clauses]
-- KnownInfo instances: one per facet, pairing the ModGuts accessor with a
-- pretty-printer for its result.
instance KnownInfo 'MGI_Module where
  knownInfo = MGI_Module
  infoDescription = "Module"
  infoData = (,) <$> mg_module <*> mg_hsc_src
  infoFormat = \(mod, hscSrc) ->
    ppr mod <> case hscSrc of
      HsSrcFile -> empty
      HsBootFile -> space <> text "[boot interface]"
      HsigFile -> space <> text "[signature]"
instance KnownInfo 'MGI_Location where
  knownInfo = MGI_Location
  infoDescription = "Source locations"
  infoData = mg_loc
  infoFormat = ppr
instance KnownInfo 'MGI_Exports where
  knownInfo = MGI_Exports
  infoDescription = "Exports"
  infoData = mg_exports
  infoFormat = ppr
instance KnownInfo 'MGI_InPackageImports where
  knownInfo = MGI_InPackageImports
  infoDescription = "In-package imports"
  infoData = dep_mods . mg_deps
  -- Boot-interface imports get a "[boot]" suffix.
  infoFormat = pprListWith . pprAnnotated $ text "[boot]"
instance KnownInfo 'MGI_PackageDeps where
  knownInfo = MGI_PackageDeps
  infoDescription = "Required packages"
  infoData = dep_pkgs . mg_deps
  infoFormat = pprListWith . pprAnnotated $ text "[must be trusted]"
instance KnownInfo 'MGI_OrphanInstanceDeps where
  knownInfo = MGI_OrphanInstanceDeps
  infoDescription = "Orphan instances in"
  infoData = dep_orphs . mg_deps
  infoFormat = ppr
instance KnownInfo 'MGI_TypeFamilyInstanceDeps where
  knownInfo = MGI_TypeFamilyInstanceDeps
  infoDescription = "Type family instantiations in"
  infoData = dep_finsts . mg_deps
  infoFormat = ppr
instance KnownInfo 'MGI_UsedFiles where
  knownInfo = MGI_UsedFiles
  infoDescription = "Used files"
  infoData = mg_usages
  -- Sort usages by kind first (files, then home modules, then package
  -- modules), then by name within each kind.
  infoFormat = pprListWith (text . getUsageName)
             . sortBy compareUsage
             . map withUsageName
    where
      (UsageFile{}, name1) `compareUsage` (UsageFile{}, name2) =
        name1 `compare` name2
      (UsageHomeModule{}, name1) `compareUsage` (UsageHomeModule{}, name2) =
        name1 `compare` name2
      (UsagePackageModule{}, name1) `compareUsage` (UsagePackageModule{}, name2) =
        name1 `compare` name2
      (UsageFile{}, _) `compareUsage` _ = LT
      _ `compareUsage` (UsageFile{}, _) = GT
      (UsagePackageModule{}, _) `compareUsage` _ = GT
      _ `compareUsage` (UsagePackageModule{}, _) = LT
      usageName UsagePackageModule{..} = moduleNameString $ moduleName usg_mod
        -- TODO: include package?
      usageName UsageHomeModule{..} = moduleNameString usg_mod_name
      usageName UsageFile{..} = usg_file_path
      withUsageName u = (u, usageName u)
      getUsageName = snd
instance KnownInfo 'MGI_UsedTemplateHaskell where
  knownInfo = MGI_UsedTemplateHaskell
  infoDescription = "Template Haskell"
  infoData = mg_used_th
  infoFormat = yesNo
instance KnownInfo 'MGI_Environment where
  knownInfo = MGI_Environment
  infoDescription = "Environment"
  infoData = mg_rdr_env
  -- Render every GlobalRdrElt, sorted by name, annotating each with its
  -- parent, locality and import provenance where informative.
  infoFormat = pprListWith element
             . sortBy (stableNameCmp `on` gre_name)
             . concat . occEnvElts
    where
      element GRE{..} = ppr gre_name <> label [ parent gre_par
                                              , nonlocal gre_lcl
                                              , imports gre_imp ]
      label mlabels = case catMaybes mlabels of
        [] -> empty
        labels -> space <> pprListWith id labels
      parent NoParent =
        Nothing
      parent (ParentIs parent) =
        Just $ text "parent:" <+> ppr parent
      parent (FldParent parent mlabel) =
        Just $ text "parent:" <+> ppr parent
          <> case mlabel of
               Just label -> text "." <> text (unpackFS label)
               Nothing -> empty
      parent PatternSynonym =
        Just $ text "pattern synonym"
      nonlocal True = Nothing
      nonlocal False = Just $ text "nonlocal"
      imports _ = Nothing -- TODO
instance KnownInfo 'MGI_Fixities where
  knownInfo = MGI_Fixities
  infoDescription = "Fixities"
  infoData = mg_fix_env
  infoFormat = ppr . nameEnvElts
instance KnownInfo 'MGI_TypeConstructors where
  knownInfo = MGI_TypeConstructors
  infoDescription = "Type constructors"
  infoData = mg_tcs
  infoFormat = ppr
instance KnownInfo 'MGI_TypeClassInstances where
  knownInfo = MGI_TypeClassInstances
  infoDescription = "Instances"
  infoData = mg_insts
  -- Orphan instances are flagged explicitly.
  infoFormat = pprListWith $ \inst ->
    pprInstanceHdr inst <> case is_orphan inst of
      IsOrphan -> space <> text "[orphan]"
      NotOrphan _ -> empty
instance KnownInfo 'MGI_TypeFamilyInstances where
  knownInfo = MGI_TypeFamilyInstances
  infoDescription = "Open type family instantiations"
  infoData = mg_fam_insts
  infoFormat = ppr
instance KnownInfo 'MGI_PatternSynonyms where
  knownInfo = MGI_PatternSynonyms
  infoDescription = "Pattern synonyms"
  infoData = mg_patsyns
  infoFormat = ppr
instance KnownInfo 'MGI_Rules where
  knownInfo = MGI_Rules
  infoDescription = "Rewrite rules"
  infoData = mg_rules
  infoFormat = pprListWith $ \case
    Rule{..} ->
      doubleQuotes (ftext ru_name) <+> ppr ru_act
    BuiltinRule{..} ->
      doubleQuotes (ftext ru_name) <+>
      text "[builtin for" <+> ppr ru_fn <> text "]"
instance KnownInfo 'MGI_ForeignStubs where
  knownInfo = MGI_ForeignStubs
  infoDescription = "Foreign stubs"
  infoData = mg_foreign
  infoFormat = \case
      NoStubs ->
        none
      ForeignStubs prototypes cStubs ->
        maybeEmpty none id $
          labeled "Prototypes" prototypes $+$
          labeled "C stubs" cStubs
    where
      none = text "None"
      labeled label =
        maybeEmpty empty $ hang (text label <> colon) (length label + 1)
instance KnownInfo 'MGI_Warnings where
  knownInfo = MGI_Warnings
  infoDescription = "Warning annotations"
  infoData = mg_warns
  infoFormat = pprListWith warning . \case
      NoWarnings -> []
      WarnAll txt -> [(text "Whole module", txt)]
      WarnSome txts -> map (first ppr) txts
    where
      warning (what,txt) = warningFor what txt <+> warningText txt
      warningFor what (WarningTxt _ _) = what <> colon
      warningFor what (DeprecatedTxt _ _) = what <> text ": [DEPRECATED]"
      warningText = fsep . map (ftext . sl_fs . unLoc) . \case
        WarningTxt _ lits -> lits
        DeprecatedTxt _ lits -> lits
instance KnownInfo 'MGI_Annotations where
  knownInfo = MGI_Annotations
  infoDescription = "Annotations"
  infoData = mg_anns
  -- Rendered as (target, payload-type); the serialized bytes themselves are
  -- not printed, only their type.
  infoFormat = pprListWith $ \Annotation{..} ->
    let target = case ann_target of
          NamedTarget name -> ppr name
          ModuleTarget mod -> text "module" <+> ppr mod
        payload = case ann_value of
          Serialized ty _bytes -> text (show ty)
    in parens $ target <> comma <+> payload
instance KnownInfo 'MGI_HpcInfo where
  knownInfo = MGI_HpcInfo
  infoDescription = "HPC"
  infoData = mg_hpc_info
  infoFormat = \case
    HpcInfo ticks _ -> text "Used;" <+>
      ppr ticks <+> text "tick" <> if ticks == 1 then empty else char 's'
    NoHpcInfo True -> text "Unused, but depended on"
    NoHpcInfo False -> text "Unused"
instance KnownInfo 'MGI_Breakpoints where
  knownInfo = MGI_Breakpoints
  infoDescription = "Breakpoints"
  infoData = mg_modBreaks
  infoFormat = ppr . maybe [] (toList . modBreaks_locs)
  -- TODO: We could inclode the other information, but the location is by far
  -- the simplest and is probably one of the most useful things
instance KnownInfo 'MGI_VectorizationPragmas where
  knownInfo = MGI_VectorizationPragmas
  infoDescription = "Vectorization pragmas"
  infoData = mg_vect_decls
  infoFormat = ppr
instance KnownInfo 'MGI_VectorizedDeclarations where
  knownInfo = MGI_VectorizedDeclarations
  infoDescription = "Vectorized declarations"
  infoData = mg_vect_info
  infoFormat = ppr
instance KnownInfo 'MGI_TypeClassInstanceEnvironment where
  knownInfo = MGI_TypeClassInstanceEnvironment
  infoDescription = "Type class instance environment"
  infoData = mg_inst_env
  infoFormat = ppr . instEnvElts
instance KnownInfo 'MGI_TypeFamilyInstanceEnvironment where
  knownInfo = MGI_TypeFamilyInstanceEnvironment
  infoDescription = "Type family instance environment"
  infoData = mg_fam_inst_env
  infoFormat = ppr . famInstEnvElts
instance KnownInfo 'MGI_SafeHaskell where
  knownInfo = MGI_SafeHaskell
  infoDescription = "Safe Haskell"
  infoData = mg_safe_haskell
  infoFormat = ppr
instance KnownInfo 'MGI_NeedToTrustSelfPkg where
  knownInfo = MGI_NeedToTrustSelfPkg
  infoDescription = "Needs to trust its own package"
  infoData = mg_trust_pkg
  infoFormat = yesNo
instance KnownInfo 'MGI_Contents where
  knownInfo = MGI_Contents
  infoDescription = "Contents"
  infoData = mg_binds
  infoFormat = pprCoreBindings -- TODO: Newline first?
-- | Render the requested pieces of information about a 'ModGuts': one
-- @Heading: value@ entry per requested 'ModGutsInfo', stacked vertically.
formatModGuts :: [ModGutsInfo] -> ModGuts -> SDoc
formatModGuts infos guts =
  -- The view pattern recovers a type-level tag from the value-level one, so
  -- the per-tag 'KnownInfo' methods can be selected with @info below.
  let format (known -> Known (Proxy :: Proxy info)) =
        text (infoDescription @info) <> colon
        <+> infoFormat @info (infoData @info guts)
  in foldr ($+$) empty $ map format infos
-- | Print the requested 'ModGuts' information via 'putMsg'.
printModGuts :: [ModGutsInfo] -> ModGuts -> CoreM ()
printModGuts infos guts = putMsg (formatModGuts infos guts)

-- | Every kind of information we know how to print.
fullInfo :: [ModGutsInfo]
fullInfo = enumFromTo minBound maxBound

-- | A short selection: the module, its exports, and its Core bindings.
summaryInfo :: [ModGutsInfo]
summaryInfo = [MGI_Module, MGI_Exports, MGI_Contents]
| antalsz/hs-to-coq | structural-isomorphism-plugin/src/PrintModGuts.hs | mit | 15,062 | 0 | 17 | 4,305 | 3,336 | 1,786 | 1,550 | 338 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Batch.Parser
(
parse
, command
, Script
, Command (..)
, Expression (..)
) where
import Batch.Definitions
import Batch.Lexer
import Control.Applicative
import Control.Exception (assert)
import Text.Parsec (ParseError, Parsec, ParsecT, Stream,
(<?>))
import qualified Text.Parsec as Parsec
import Text.Parsec.Pos (SourcePos)
-- | A parsed batch script is simply a sequence of commands.
type Script = [Command]

-- | Lex and then parse an entire batch source text.
parse :: String -> Either ParseError Command
parse source = do
  toks <- lexx source
  parseTokens toks

-- | Parse an already-lexed token stream into a program.
parseTokens :: Tokens -> Either ParseError Command
parseTokens = Parsec.parse script "(tokens)"
-- | A whole program: commands up to end of input.
script :: Parsec Tokens st Command
script = fmap Program (Parsec.manyTill command Parsec.eof)

-- | Either a parenthesized group of commands or a single command.
block :: Parsec Tokens st [Command]
block = grouped <|> single
  where
    grouped = tok OpenParen *> Parsec.manyTill command (tok CloseParen)
    single  = (: []) <$> command
-- | One command, possibly extended by a pipe or an output redirection.
command :: Parsec Tokens st Command
command = (actual <|> comment) <?> "command"
  where
    actual = do
      c <- nextCommand
      -- A parsed command may continue as a pipeline or redirection;
      -- if neither follows, the command stands on its own.
      pipe c <|> redirect c <|> return c
    nextCommand = Parsec.choice [
        at
      , echo
      , label
      , goto
      , ifCommand
      , ver
      , find
      , typeCommand
      ]
-- | A @::@ comment line.
comment :: Parsec Tokens st Command
comment = fmap Comment (tok DoubleColon *> stringTok)

-- | An @\@@-quieted command.
at :: Parsec Tokens st Command
at = fmap Quieted (tok At *> command)

-- | A @:label@ definition.
label :: Parsec Tokens st Command
label = fmap Label (tok Colon *> stringTok)

-- | A @GOTO label@ jump.
goto :: Parsec Tokens st Command
goto = fmap Goto (tok KeywordGoto *> stringTok)

-- | The ECHO command: a bare dot, a message, or ON/OFF.
echo :: Parsec Tokens st Command
echo = tok KeywordEcho *> body
  where
    body = dot <|> msg <|> on <|> off
    dot  = tok Dot *> return (EchoMessage "")
    msg  = EchoMessage <$> stringTok
    on   = tok KeywordOn *> return (EchoEnabled True)
    off  = tok KeywordOff *> return (EchoEnabled False)

-- | The VER command.
ver :: Parsec Tokens st Command
ver = tok KeywordVer *> return Ver

-- | FIND with its search string (no file arguments parsed yet).
find :: Parsec Tokens st Command
find = flip Find [] <$> (tok KeywordFind *> stringTok)

-- | TYPE with a single file path.
typeCommand :: Parsec Tokens st Command
typeCommand = (Type . (: [])) <$> (tok KeywordType *> stringTok)
-- | An IF command in its various forms: string comparison, NOT-negation,
-- EXIST, ERRORLEVEL, DEFINED and CMDEXTVERSION tests.
ifCommand :: Parsec Tokens st Command
ifCommand = tok KeywordIf *> (caseInsensitive <|> nots <|> normal)
  where
    caseInsensitive = notted string <|> string
    string = do
      item1 <- stringTok
      cmp <- stringComparison
      item2 <- stringTok
      consequent <- block
      -- A missing ELSE branch is represented by a single Noop.
      alternative <- (tok KeywordElse *> block) <|> return [Noop]
      return $ If (parseComparison item1 cmp item2) consequent alternative
    stringComparison = Parsec.choice $ map tok [
        DoubleEqual
      , CompareOpEqu
      , CompareOpNeq
      , CompareOpLss
      , CompareOpLeq
      , CompareOpGtr
      , CompareOpGeq
      ]
    -- NOTE(review): only DoubleEqual is handled here, yet stringComparison
    -- accepts seven operators -- any of the other six crashes with a
    -- pattern-match failure at runtime.  TODO: cover the remaining operators.
    parseComparison l DoubleEqual r = EqualsExpr (StringExpr l) (StringExpr r)
    nots = tok KeywordNot *> return Noop -- TODO
    normal = fileExist <|> string <|> defined <|> errorLevel <|> cmdExtVersion
    notted p = tok KeywordNot *> p -- TODO
    fileExist = do
      tok KeywordExist
      filepath <- stringTok
      consequent <- block
      alternative <- (tok KeywordElse *> block) <|> return [Noop]
      return $ If (Exist filepath) consequent alternative
    errorLevel = do
      tok KeywordErrorLevel
      n <- integerTok
      c <- command
      return (If (ErrorLevelExpr n) [c] [Noop])
    defined = do
      tok KeywordDefined
      var <- stringTok
      c <- command
      return (If (DefinedExpr var) [c] [Noop])
    cmdExtVersion = do
      tok KeywordCmdExtVersion
      n <- integerTok
      c <- command
      return (If (CmdExtVersionExpr n) [c] [Noop])
-- | Continue a command with @| cmd@.
pipe :: Command -> Parsec Tokens st Command
pipe c = fmap (PipeCommand c) (tok Pipe *> command)

-- | Continue a command with @> file@.
redirect :: Command -> Parsec Tokens st Command
redirect c = fmap (Redirection c) (tok GreaterThan *> filepathTok)

-- | A redirection target: a string, or the special NUL device.
filepathTok :: (Stream s m Token) => ParsecT s u m String
filepathTok = stringTok <|> (tok KeywordNul *> return "Nul")
-- | Accept a string token and yield its contents.  The partial 'extract'
-- is safe because 'satisfy' only lets 'StringTok's through.
stringTok :: (Stream s m Token) => ParsecT s u m String
stringTok = (extract <$> satisfy f) <?> "string" where
  f (StringTok _) = True
  f _ = False
  extract (StringTok s) = s
  extract _ = assert False undefined

-- | Accept an integer token and yield its value (same pattern as above).
integerTok :: (Stream s m Token) => ParsecT s u m Integer
integerTok = (extract <$> satisfy f) <?> "integer" where
  f (IntegerTok _) = True
  f _ = False
  extract (IntegerTok i) = i
  extract _ = assert False undefined
-- | Accept exactly the given token.
tok :: (Stream s m Token) => Token -> ParsecT s u m Token
tok t = satisfy (==t) <?> show t

-- | Accept any token matching the predicate.
satisfy :: (Stream s m Token) => (Token -> Bool) -> ParsecT s u m Token
satisfy f = Parsec.tokenPrim show
  (\pos c _cs -> updatePosToken pos c)
  (\c -> if f c then Just c else Nothing)

-- | Tokens carry no source positions, so advance one column per token;
-- positions reported in parse errors are therefore only approximate.
updatePosToken :: SourcePos -> Token -> SourcePos
updatePosToken pos _ = Parsec.incSourceColumn pos 1
| danstiner/transpiler | src/Batch/Parser.hs | mit | 4,947 | 0 | 13 | 1,299 | 1,693 | 868 | 825 | 130 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-appstream-imagebuilder-domainjoininfo.html
module Stratosphere.ResourceProperties.AppStreamImageBuilderDomainJoinInfo where
import Stratosphere.ResourceImports
-- | Full data type definition for AppStreamImageBuilderDomainJoinInfo. See
-- 'appStreamImageBuilderDomainJoinInfo' for a more convenient constructor.
data AppStreamImageBuilderDomainJoinInfo =
  AppStreamImageBuilderDomainJoinInfo
  { _appStreamImageBuilderDomainJoinInfoDirectoryName :: Maybe (Val Text)
  , _appStreamImageBuilderDomainJoinInfoOrganizationalUnitDistinguishedName :: Maybe (Val Text)
  } deriving (Show, Eq)

-- Both fields are optional; 'Nothing' fields are omitted from the
-- serialized JSON object entirely (via 'catMaybes').
instance ToJSON AppStreamImageBuilderDomainJoinInfo where
  toJSON AppStreamImageBuilderDomainJoinInfo{..} =
    object $
    catMaybes
    [ fmap (("DirectoryName",) . toJSON) _appStreamImageBuilderDomainJoinInfoDirectoryName
    , fmap (("OrganizationalUnitDistinguishedName",) . toJSON) _appStreamImageBuilderDomainJoinInfoOrganizationalUnitDistinguishedName
    ]

-- | Constructor for 'AppStreamImageBuilderDomainJoinInfo' containing required
-- fields as arguments.  (This type has no required fields; both start out
-- as 'Nothing'.)
appStreamImageBuilderDomainJoinInfo
  :: AppStreamImageBuilderDomainJoinInfo
appStreamImageBuilderDomainJoinInfo =
  AppStreamImageBuilderDomainJoinInfo
  { _appStreamImageBuilderDomainJoinInfoDirectoryName = Nothing
  , _appStreamImageBuilderDomainJoinInfoOrganizationalUnitDistinguishedName = Nothing
  }

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-appstream-imagebuilder-domainjoininfo.html#cfn-appstream-imagebuilder-domainjoininfo-directoryname
asibdjiDirectoryName :: Lens' AppStreamImageBuilderDomainJoinInfo (Maybe (Val Text))
asibdjiDirectoryName = lens _appStreamImageBuilderDomainJoinInfoDirectoryName (\s a -> s { _appStreamImageBuilderDomainJoinInfoDirectoryName = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-appstream-imagebuilder-domainjoininfo.html#cfn-appstream-imagebuilder-domainjoininfo-organizationalunitdistinguishedname
asibdjiOrganizationalUnitDistinguishedName :: Lens' AppStreamImageBuilderDomainJoinInfo (Maybe (Val Text))
asibdjiOrganizationalUnitDistinguishedName = lens _appStreamImageBuilderDomainJoinInfoOrganizationalUnitDistinguishedName (\s a -> s { _appStreamImageBuilderDomainJoinInfoOrganizationalUnitDistinguishedName = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/AppStreamImageBuilderDomainJoinInfo.hs | mit | 2,522 | 0 | 12 | 205 | 264 | 151 | 113 | 27 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalyticsv2-applicationreferencedatasource-jsonmappingparameters.html
module Stratosphere.ResourceProperties.KinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters where
import Stratosphere.ResourceImports
-- | Full data type definition for
-- KinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters.
-- See
-- 'kinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters'
-- for a more convenient constructor.
data KinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters =
  KinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters
  { _kinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParametersRecordRowPath :: Val Text
  } deriving (Show, Eq)

-- The single field is required, hence the unconditional 'Just' below.
instance ToJSON KinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters where
  toJSON KinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters{..} =
    object $
    catMaybes
    [ (Just . ("RecordRowPath",) . toJSON) _kinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParametersRecordRowPath
    ]

-- | Constructor for
-- 'KinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters'
-- containing required fields as arguments.
kinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters
  :: Val Text -- ^ 'kavardsjsonmpRecordRowPath'
  -> KinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters
kinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters recordRowPatharg =
  KinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters
  { _kinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParametersRecordRowPath = recordRowPatharg
  }

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalyticsv2-applicationreferencedatasource-jsonmappingparameters.html#cfn-kinesisanalyticsv2-applicationreferencedatasource-jsonmappingparameters-recordrowpath
kavardsjsonmpRecordRowPath :: Lens' KinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters (Val Text)
kavardsjsonmpRecordRowPath = lens _kinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParametersRecordRowPath (\s a -> s { _kinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParametersRecordRowPath = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/KinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters.hs | mit | 2,477 | 0 | 13 | 168 | 178 | 104 | 74 | 23 | 1 |
{-# LANGUAGE TemplateHaskell, DeriveDataTypeable #-}
-- | Interface für externe Korrektoren
module Inter.Action where
import Control.Types
import Autolib.ToDoc
import Autolib.Reader
import Data.Typeable
import Network.XmlRpc.THDeriveXmlRpcType
import Network.XmlRpc.Internals
-- | Credentials of the person logging in (a tutor).
-- (Original German comment: "die Nummer dessen, der sich einloggt (Tutor)".)
data Actor
    = Actor { schule :: String    -- ^ school identifier
            , matrikel :: String  -- ^ matriculation number
            , passwort :: String  -- ^ password
            }
    deriving ( Eq, Ord, Typeable )

-- Reader/ToDoc instances and the XML-RPC struct mapping are generated
-- by Template Haskell.
$(derives [makeReader, makeToDoc] [''Actor])
$(asXmlRpcStruct ''Actor)

-- | Identifies an exercise within a lecture.
data Problem
    = Problem { vorlesung :: String  -- ^ lecture
              , aufgabe :: String    -- ^ exercise
              }
    deriving ( Eq, Ord, Typeable )

$(derives [makeReader, makeToDoc] [''Problem])
$(asXmlRpcStruct ''Problem)

-- | A grading result: the score plus a textual comment.
data Answer =
    Answer { wert :: Wert          -- ^ score
           , kommentar :: String   -- ^ comment shown to the student
           }
    deriving ( Eq, Ord, Typeable )

$(asXmlRpcStruct ''Answer)
$(derives [makeReader, makeToDoc] [''Answer])
-- | XML-RPC (de)serialization for 'Wert'.  A value is encoded as a struct
-- with a @"tag"@ discriminator plus optional payload fields.
instance XmlRpcType Wert where
    toValue w = case w of
        Reset -> toValue [("tag", toValue "Reset")]
        Pending -> toValue [("tag", toValue "Pending")]
        No -> toValue [("tag", toValue "No")]
        -- BUG FIX: this tag used to be emitted as "OK", which 'fromValue'
        -- below does not accept; emit "Ok" so values round-trip.
        Ok s -> toValue [("tag", toValue "Ok"), ("size", toValue s) ]
        Okay {} -> toValue [("tag", toValue "Okay")
                   , ("punkte", toValue $ punkte w ), ("size", toValue $ size w) ]
    fromValue v = do
        it <- fromValue v
        tag <- getField "tag" it
        -- NOTE(review): an unknown tag falls off this case expression and
        -- raises a pattern-match error rather than failing in the monad.
        case tag of
            "Reset" -> return Reset
            "Pending" -> return Pending
            "No" -> return No
            "Ok" -> do s <- getField "size" it ; return $ ok s
            "Okay" -> do
                p <- getField "punkte" it; s <- getField "size" it
                return $ okay p s
    getType _ = TStruct
-- FIXME
-- Marshal 'Integer' via the machine-sized 'Int' instance.
-- NOTE(review): both directions truncate with 'fromIntegral', so values
-- outside the 'Int' range silently wrap -- confirm callers stay in range.
instance XmlRpcType Integer where
    toValue = toValue . ( fromIntegral :: Integer -> Int )
    fromValue = fmap ( fromIntegral :: Int -> Integer ) . fromValue
    getType _ = TInt -- FIXME
-- Local Variables:
-- mode: haskell
-- End:
| Erdwolf/autotool-bonn | src/Inter/Action.hs | gpl-2.0 | 1,965 | 24 | 12 | 515 | 557 | 321 | 236 | 51 | 0 |
import System.Environment (getArgs)
import Control.Exception (SomeException, catch)
-- | Report a failure while processing a file.
-- NOTE(review): the message says "Cannot locate file" but any exception
-- (e.g. permission denied) is routed here as well.
handler :: SomeException -> IO ()
handler e = do
  putStrLn $ "Cannot locate file " ++ show e

-- | Print the contents of a single file to stdout.
handleFile :: String -> IO ()
handleFile path = do
  text <- readFile path
  putStrLn text

-- | Like 'handleFile', but turns exceptions into an error message.
-- (Type signature added; it was previously left to inference.)
handledFile :: String -> IO ()
handledFile path = catch (handleFile path) handler

-- | Concatenate the files named on the command line, or echo stdin when
-- no arguments are given.
main :: IO ()
main = do
  args <- getArgs
  case args of
    [] -> interact id
    _ -> mapM_ handledFile args
| nkartashov/haskell | hw07/cat.hs | gpl-2.0 | 427 | 0 | 10 | 85 | 168 | 81 | 87 | 16 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Parse where
import Text.Parsec
import Text.Parsec.Text
import qualified Data.Text as T
import Types
-- | Split a document into chunks; a parse error yields no chunks at all.
encode :: T.Text -> [Chunk]
encode = either (const []) id . parse entire ""

-- | The whole document: chunks up to end of input.
entire :: Parser Program
entire = manyTill chunk eof
-- | One chunk: a macro definition if one starts here, otherwise prose.
chunk :: Parser Chunk
chunk = (try def) <|> prose

-- | A single line of ordinary prose.
-- (Was @grabLine >>= \line -> return $ Prose line@ -- simplified to '<$>'.)
prose :: Parser Chunk
prose = Prose <$> grabLine
-- | A macro definition: a title line followed by body parts, ending where
-- the indentation stops or another title begins.
def :: Parser Chunk
def = do
  (indent, header, lineNum) <- title
  parts <- manyTill (part indent) $ endDef indent
  return $ Def lineNum header parts

-- | Succeeds (without consuming input, thanks to 'try') where a definition
-- ends: the indentation is gone, or a new title follows.
endDef :: String -> Parser ()
endDef indent = try $ do { skipMany newline; notFollowedBy (string indent) <|> (lookAhead title >> parserReturn ()) }

-- Returns (indent, macro-name, line-no)
title :: Parser (String, T.Text, Int)
title = do
  pos <- getPosition
  indent <- many ws
  name <- packM =<< between (string "<<") (string ">>=") (many notDelim)
  newline
  return $ (indent, T.strip name, sourceLine pos)

-- | Any character allowed inside a @<<...>>@ macro name.
-- (Top-level type signature added; it was previously inferred.)
notDelim :: Parser Char
notDelim = noneOf ">="
-- | One line of a definition body: an indented reference, an indented code
-- line, or (fallback) any line kept verbatim as code.
part :: String -> Parser Part
part indent =
  try (string indent >> varLine) <|>
  try (string indent >> defLine) <|>
  (Code <$> grabLine)

-- | An indented @<<name>>@ reference line.
varLine :: Parser Part
varLine = do
  indent <- packM =<< many ws
  name <- packM =<< between (string "<<") (string ">>") (many notDelim)
  newline
  return $ Ref name indent

-- | A plain code line.
-- (Both this and the fallback above used do/return where '<$>' suffices.)
defLine :: Parser Part
defLine = Code <$> grabLine
-- | Consume one full line, newline character included.
grabLine :: Parser T.Text
grabLine = do
  line <- many (noneOf "\n\r")
  nl <- newline  -- renamed from 'last', which shadowed Prelude.last
  return $ T.pack $ line ++ [nl]

-- | Horizontal whitespace.
ws :: Parser Char
ws = char ' ' <|> char '\t'

-- | Pack a 'String' in any monad; handy with '=<<' inside parsers.
-- (Type signature added; it was previously inferred.)
packM :: Monad m => String -> m T.Text
packM str = return $ T.pack str

-- | Run a text parser, yielding 'T.empty' on failure (test helper).
textP :: Parsec T.Text () T.Text -> T.Text -> T.Text
textP p txt =
  case (parse p "" txt) of
    Left _ -> T.empty
    Right result -> result

-- | Run a chunk parser, yielding 'Nothing' on failure (test helper).
chunkP :: Parsec T.Text () Chunk -> T.Text -> Maybe Chunk
chunkP p txt =
  case (parse p "" txt) of
    Left _ -> Nothing
    Right result -> Just result
| tcrs/lit | src/Parse.hs | gpl-2.0 | 2,019 | 0 | 12 | 485 | 828 | 409 | 419 | 65 | 2 |
module Match.DrawPitch(drawPitch)
where
import Graphics.Rendering.OpenGL as OpenGL
import Drawing
import FVector
-- | Fill an axis-aligned rectangle @((x, y), (w, h))@ at depth @d'@ as a
-- single OpenGL quad.
drawRect :: Rectangle -> Float -> IO ()
drawRect r d' = preservingMatrix $ do
  let ((a, b), (c, d)) = rectToNum r
      e = realToFrac d'
  loadIdentity
  translate $ Vector3 a b e
  renderPrimitive Quads $ do
    vertex $ Vertex3 0 0 (0 :: GLfloat)
    vertex $ Vertex3 c 0 (0 :: GLfloat)
    vertex $ Vertex3 c d 0
    vertex $ Vertex3 0 d 0

-- Width used for all pitch markings below.
lw :: Float -- linewidth
lw = 0.15
-- | Draw the outline of a rectangle as four thin filled strips of width
-- @w@ (bottom, left, top, right) at depth @d@.
drawRectBox :: Rectangle -> Float -> Float -> IO ()
drawRectBox ((p, q), (r, s)) w d = do
  drawRect ((p, q), (r, w)) d
  drawRect ((p, q), (w, s)) d
  drawRect ((p, q + s - w), (r, w)) d
  drawRect ((p + r - w, q), (w, s)) d
-- | Draw a full ring (annulus) centred at @p@ with inner radius @r@ and
-- thickness @w@ at depth @d@.
draw2DArc :: (Float, Float) -> Float -> Float -> Float -> IO ()
draw2DArc p r w d = draw2DArcAngled' p r w d Nothing

-- | Like 'draw2DArc', but only the slice given by (start angle, sweep).
draw2DArcAngled :: (Float, Float) -> Float -> Float -> Float -> (Float, Float) -> IO ()
draw2DArcAngled p r w d as = draw2DArcAngled' p r w d (Just as)

-- | Worker for the two wrappers above: renders a (partial) GLU disk;
-- 'Nothing' means the full ring.
draw2DArcAngled' :: (Float, Float) -> Float -> Float -> Float -> (Maybe (Float, Float)) -> IO ()
draw2DArcAngled' (xp', yp') r' w' d' an = preservingMatrix $ do
  let (xp, yp) = (realToFrac xp', realToFrac yp')
      r = realToFrac r'
      w = realToFrac w'
      d = realToFrac d'
  translate $ Vector3 xp yp (d :: GLfloat)
  case an of
    Nothing -> renderQuadric (QuadricStyle Nothing NoTextureCoordinates Inside FillStyle) (Disk r (r + w) 64 1)
    Just (a1, a2) -> renderQuadric (QuadricStyle Nothing NoTextureCoordinates Inside FillStyle) (PartialDisk r (r + w) 64 1 (realToFrac a1) (realToFrac a2))

-- | A filled dot: a ring with zero inner radius.
drawSpot :: (Float, Float) -> Float -> Float -> IO ()
drawSpot p = draw2DArc p 0
-- | Draw the whole pitch of size @(px, py)@: the tiled grass first, then
-- every white marking as thin quads/arcs of width 'lw'.
drawPitch :: TextureObject -> FRange -> FRange -> IO ()
drawPitch grtexobj grtiling (px, py) = do
  loadIdentity
  color $ Color3 1.0 1.0 (1.0 :: GLfloat)
  -- grass extends one pitch-size beyond the playing area on every side
  drawTiling grtexobj (return ()) ((-px, -py), (px * 3, py * 3)) (-1) grtiling
  color $ Color3 0.8 0.8 (0.8 :: GLfloat)
  drawRectBox ((0, 0), (px, py)) lw 0 -- pitch boundaries
  drawRect ((0, (py - lw) / 2), (px, lw)) 0 -- middle line
  draw2DArc ((px / 2), (py / 2)) 9.15 lw 0 -- centre ring
  drawSpot ((px / 2), (py / 2)) (lw * 2) 0 -- centre spot
  drawRectBox ((px / 2 - 20.16, 0), (40.32, 16.5)) lw 0 -- penalty area 1
  drawRectBox ((px / 2 - 9.15, 0), (18.3, 5.5)) lw 0 -- goal area 1
  drawSpot (px / 2, 11) (lw * 2) 0 -- penalty spot 1
  draw2DArcAngled (px / 2, 11.1) 9.15 lw 0 (-54.63298, 109.26596) -- penalty arc 1
  drawRectBox ((px / 2 - 20.16, py - 16.5), (40.32, 16.5)) lw 0 -- penalty area 2
  drawRectBox ((px / 2 - 9.15, py - 5.5), (18.3, 5.5)) lw 0 -- goal area 2
  drawSpot (px / 2, py - 11) (lw * 2) 0 -- penalty spot 2
  draw2DArcAngled (px / 2, py - 11.2) 9.15 lw 0 (125.36702, 109.26596) -- penalty arc 2
  draw2DArcAngled (0, 0) 1 lw 0 (0, 90) -- corner line 1
  draw2DArcAngled (px, 0) 1 lw 0 (0, -90) -- corner line 2
  draw2DArcAngled (0, py) 1 lw 0 (90, 90) -- corner line 3
  draw2DArcAngled (px, py) 1 lw 0 (-90, -90) -- corner line 4
| anttisalonen/freekick2 | src/Match/DrawPitch.hs | gpl-3.0 | 3,076 | 0 | 15 | 753 | 1,485 | 795 | 690 | 61 | 2 |
{-# OPTIONS -fallow-overlapping-instances -fglasgow-exts -fallow-undecidable-instances -fallow-incoherent-instances -fno-monomorphism-restriction #-}
module Set
where
-- $Id: Set.hs,v 1.25 2006-10-22 22:04:22 joe Exp $
import qualified Data.Set
-- Thin compatibility layer mapping a historical Set API onto "Data.Set".
-- Explicit type signatures added throughout: the module used to rely on
-- -fno-monomorphism-restriction to type these pattern bindings.

type Set = Data.Set.Set

isEmptySet :: Set a -> Bool
isEmptySet = Data.Set.null

emptySet :: Set a
emptySet = Data.Set.empty

unitSet :: a -> Set a
unitSet = Data.Set.singleton

-- NOTE: unlike Data.Set, these two take the set first and the element second.
delFromSet :: Ord a => Set a -> a -> Set a
delFromSet = flip Data.Set.delete

addToSet :: Ord a => Set a -> a -> Set a
addToSet = flip Data.Set.insert

elementOf :: Ord a => a -> Set a -> Bool
elementOf = Data.Set.member

cardinality :: Set a -> Int
cardinality = Data.Set.size

unionSet :: Ord a => Set a -> Set a -> Set a
unionSet = Data.Set.union

unionManySets :: Ord a => [Set a] -> Set a
unionManySets = Data.Set.unions

intersectSet :: Ord a => Set a -> Set a -> Set a
intersectSet = Data.Set.intersection

-- NOTE(review): partial -- crashes on an empty list of sets.
intersectManySets :: Ord a => [Set a] -> Set a
intersectManySets = foldr1 Data.Set.intersection

minusSet :: Ord a => Set a -> Set a -> Set a
minusSet = Data.Set.difference

mkSet :: Ord a => [a] -> Set a
mkSet = Data.Set.fromList

-- | Elements in ascending order.
setToList :: Set a -> [a]
setToList = Data.Set.toAscList

-- | @subseteq xs ys@ tests whether @xs@ is a subset of @ys@.
-- (Was @null (xs `difference` ys)@; 'Data.Set.isSubsetOf' says it directly.)
subseteq :: Ord a => Set a -> Set a -> Bool
subseteq = Data.Set.isSubsetOf

mapSet :: (Ord a, Ord b) => (a -> b) -> (Set a -> Set b)
mapSet = Data.Set.map

filterSet :: Ord a => (a -> Bool) -> (Set a -> Set a)
filterSet = Data.Set.filter

-- | True iff the set has at least one element.
nonempty :: Ord a => Set a -> Bool
nonempty = not . Data.Set.null

-- | Cartesian product of two sets.
cross :: (Ord a, Ord b) => Set a -> Set b -> Set (a, b)
cross xs ys = Data.Set.fromList $ do
  x <- Data.Set.toList xs; y <- Data.Set.toList ys; return (x, y)
| jwaldmann/rx | src/Set.hs | gpl-3.0 | 1,284 | 0 | 10 | 225 | 458 | 252 | 206 | 31 | 1 |
-- This file is part of KSQuant2.
-- Copyright (c) 2010 - 2011, Kilian Sprotte. All rights reserved.
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-- You should have received a copy of the GNU General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>.
module DurCalc (notableDur
, notableDurL
, divToRatio
, dotFactor
, exp2
, isExp2)
where
import Data.Ratio ( numerator, denominator, (%) )
-- | Compute @2^n@ for any integer @n@.
-- | Compute @2^n@ for any integer @n@, including negative exponents.
exp2 :: Integer -> Rational
exp2 n
  | n < 0 = recip (2 ^ abs n)
  | otherwise = 2 ^ n
-- | Is the rational an exact (possibly negative) power of two?
isExp2 :: Rational -> Bool
isExp2 r = case compare r 1 of
  EQ -> True
  GT -> denominator r == 1 && isPowerOfTwo (numerator r)
  LT -> numerator r == 1 && isPowerOfTwo (denominator r)

-- | Is a positive integer a power of two?  Errors on arguments below 1.
isPowerOfTwo :: Integer -> Bool
isPowerOfTwo n
  | n == 1 = True
  | n > 1 && odd n = False
  | n > 1 = isPowerOfTwo (n `div` 2)
  | otherwise = error "isPowerOfTwo"
-- | Largest power of two that is <= the (positive) argument.
-- Was an O(x) downward scan re-testing 'isPowerOfTwo' at every step;
-- now doubles upwards in O(log x).  Behavior (incl. error on x < 1) is kept.
lowerPowerOfTwo :: Integer -> Integer
lowerPowerOfTwo x
  | x >= 1 = go 1
  | otherwise = error "lowerPowerOfTwo"
  where
    go p | 2 * p > x = p
         | otherwise = go (2 * p)

-- e.g. when we divide by 9 the tuplet ratio will be 8 % 9
divToRatio :: Integer -> Rational
divToRatio d = lowerPowerOfTwo d % d
-- notableDur' :: Rational -> Bool
-- notableDur' x = h (numerator x) (denominator x)
-- where h 1 d = isPowerOfTwo d
-- h 3 d = isPowerOfTwo d && d >= 2
-- h 7 d = isPowerOfTwo d && d >= 4
-- -- 15 = 1 + 2 + 4 + 8
-- h 15 d = isPowerOfTwo d && d >= 8
-- h _ _ = False
-- | Can a positive duration be written as one note carrying at most
-- @maxDots@ augmentation dots?  Tries each dot count down to zero.
notableDur' :: Integer -> Rational -> Bool
notableDur' maxDots r | maxDots == 0 = isExp2 r
                      | maxDots > 0 = notableDur' (maxDots - 1) r ||
                                      notableDur' 0 (r / dotFactor maxDots)
                      | otherwise = error "notableDur' maxDots < 0"

-- | Like 'notableDur'', but ignores the sign of the duration.
notableDur :: Integer -> Rational -> Bool
notableDur maxDots x = notableDur' maxDots (abs x)

-- | Notable with up to three dots and no longer (in magnitude) than @l@.
notableDurL :: Rational -> Rational -> Bool
notableDurL l x | abs x <= abs l = notableDur 3 x
                | otherwise = False
-- | Compute a factor for a given number of augmentation dots.
--
-- The duration of a dotted note with duration @a@ and @n@ dots can be
-- obtained by @a * 'dotFactor' n@.
--
--
-- Examples:
--
-- >>> dotFactor 0
-- 1
--
-- >>> dotFactor 1
-- 3 % 2
--
-- >>> dotFactor 2
-- 7 % 4
--
dotFactor :: Integer  -- ^ number of augmentation dots
          -> Rational -- ^ factor
dotFactor n
  | n < 0 = error "dotFactor not defined on negative n"
  | otherwise = (2 ^ (n + 1) - 1) % (2 ^ n)
| kisp/ksquant2 | DurCalc.hs | gpl-3.0 | 3,214 | 0 | 10 | 963 | 631 | 331 | 300 | 41 | 2 |
module Clock (toString, fromHourMin) where
-- | A time of day: hours 0-23 and minutes 0-59.
data Clock =
  Clock { hour :: Integer
        , minute :: Integer
        } deriving Eq

-- | Render as @HH:MM@, zero-padded.
instance Show Clock where
  show c = zeropad (hour c) ++ ":" ++ zeropad (minute c)

-- | Modular clock arithmetic: wraps at 24 hours / 60 minutes.
instance Num Clock where
  (+) (Clock h1 m1) (Clock h2 m2) =
    Clock { hour = (h1 + h2 + (m1 + m2) `div` 60) `mod` 24
          , minute = (m1 + m2) `mod` 60
          }
  (*) (Clock h1 m1) (Clock h2 m2) =
    Clock { hour = (h1 * h2 * (1 + (m1 * m2) `div` 60)) `mod` 24
          , minute = (m1 * m2) `mod` 60
          }
  abs c = c -- all clock times are positive, return self
  signum c = Clock 1 1 -- positive identity, to satisfy abs x * signum x == x
  -- per: https://hackage.haskell.org/package/base-4.7.0.0/docs/Prelude.html#v:signum
  -- BUG FIX: negate used to yield minute = 60 whenever m == 0 (e.g.
  -- negate 05:00 was the invalid "18:60").  Handle m == 0 separately so the
  -- result is always a valid clock while c + negate c == 00:00 still holds.
  negate (Clock h m)
    | m == 0 = Clock ((24 - h) `mod` 24) 0
    | otherwise = Clock { hour = 23 - h - (m `div` 60)
                        , minute = 60 - m
                        }
  fromInteger n = Clock 0 0 + Clock 0 n

-- We can simply use logic from Clock instances to implement our functions...
toString :: Clock -> String
toString = show

-- | Build a clock from hours and minutes, normalizing out-of-range input.
fromHourMin :: Integer -> Integer -> Clock
fromHourMin h m = Clock 0 0 + Clock h m

-- | Left-pad single-digit numbers with a zero.
zeropad :: (Num a, Ord a, Show a) => a -> String
zeropad n =
  if n < 10
    then '0' : show n
    else show n
| ciderpunx/exercismo | src/Clock.hs | gpl-3.0 | 1,303 | 0 | 15 | 436 | 472 | 261 | 211 | 29 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.OAuth2.UserInfo.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- /See:/ <https://developers.google.com/identity/protocols/oauth2/ Google OAuth2 API Reference> for @oauth2.userinfo.get@.
module Network.Google.Resource.OAuth2.UserInfo.Get
(
-- * REST Resource
UserInfoGetResource
-- * Creating a Request
, userInfoGet
, UserInfoGet
) where
import Network.Google.OAuth2.Types
import Network.Google.Prelude
-- | A resource alias for @oauth2.userinfo.get@ method which the
-- 'UserInfoGet' request conforms to.
type UserInfoGetResource =
     "oauth2" :>
       "v2" :>
         "userinfo" :>
           QueryParam "alt" AltJSON :> Get '[JSON] UserInfo

-- A request value with no parameters of its own; see 'UserInfoGetResource'
-- above for the endpoint it targets.
--
-- /See:/ 'userInfoGet' smart constructor.
data UserInfoGet =
  UserInfoGet'
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'UserInfoGet' with the minimum fields required to make a request.
--
userInfoGet
    :: UserInfoGet
userInfoGet = UserInfoGet'

-- The request is authorized by any of the three scopes listed below and
-- decodes its response as a 'UserInfo'.
instance GoogleRequest UserInfoGet where
        type Rs UserInfoGet = UserInfo
        type Scopes UserInfoGet =
             '["openid",
               "https://www.googleapis.com/auth/userinfo.email",
               "https://www.googleapis.com/auth/userinfo.profile"]
        requestClient UserInfoGet'{}
          = go (Just AltJSON) oAuth2Service
          where go
                  = buildClient (Proxy :: Proxy UserInfoGetResource)
                      mempty
| brendanhay/gogol | gogol-oauth2/gen/Network/Google/Resource/OAuth2/UserInfo/Get.hs | mpl-2.0 | 2,137 | 0 | 11 | 481 | 221 | 137 | 84 | 41 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.SQLAdmin.Types.Product
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.SQLAdmin.Types.Product where
import Network.Google.Prelude
import Network.Google.SQLAdmin.Types.Sum
-- | SslCerts Resource
--
-- /See:/ 'sslCert' smart constructor.
-- All fields except the kind marker are optional; see the per-field
-- lenses below for their meanings.
data SSLCert = SSLCert'
    { _scCommonName :: !(Maybe Text)
    , _scKind :: !Text
    , _scCertSerialNumber :: !(Maybe Text)
    , _scSelfLink :: !(Maybe Text)
    , _scCert :: !(Maybe Text)
    , _scSha1Fingerprint :: !(Maybe Text)
    , _scExpirationTime :: !(Maybe DateTime')
    , _scCreateTime :: !(Maybe DateTime')
    , _scInstance :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'SSLCert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'scCommonName'
--
-- * 'scKind'
--
-- * 'scCertSerialNumber'
--
-- * 'scSelfLink'
--
-- * 'scCert'
--
-- * 'scSha1Fingerprint'
--
-- * 'scExpirationTime'
--
-- * 'scCreateTime'
--
-- * 'scInstance'
sslCert
    :: SSLCert
sslCert =
  -- every optional field starts empty; the kind marker is fixed
  SSLCert'
    { _scCommonName = Nothing
    , _scKind = "sql#sslCert"
    , _scCertSerialNumber = Nothing
    , _scSelfLink = Nothing
    , _scCert = Nothing
    , _scSha1Fingerprint = Nothing
    , _scExpirationTime = Nothing
    , _scCreateTime = Nothing
    , _scInstance = Nothing
    }
-- | User supplied name. Constrained to [a-zA-Z.-_ ]+.
scCommonName :: Lens' SSLCert (Maybe Text)
scCommonName
  = lens _scCommonName (\ s a -> s{_scCommonName = a})

-- | This is always sql#sslCert.
scKind :: Lens' SSLCert Text
scKind = lens _scKind (\ s a -> s{_scKind = a})

-- | Serial number, as extracted from the certificate.
scCertSerialNumber :: Lens' SSLCert (Maybe Text)
scCertSerialNumber
  = lens _scCertSerialNumber
      (\ s a -> s{_scCertSerialNumber = a})

-- | The URI of this resource.
scSelfLink :: Lens' SSLCert (Maybe Text)
scSelfLink
  = lens _scSelfLink (\ s a -> s{_scSelfLink = a})

-- | PEM representation.
scCert :: Lens' SSLCert (Maybe Text)
scCert = lens _scCert (\ s a -> s{_scCert = a})

-- | Sha1 Fingerprint.
scSha1Fingerprint :: Lens' SSLCert (Maybe Text)
scSha1Fingerprint
  = lens _scSha1Fingerprint
      (\ s a -> s{_scSha1Fingerprint = a})

-- | The time when the certificate expires in RFC 3339 format, for example
-- 2012-11-15T16:19:00.094Z.
-- (The 'mapping _DateTime' step exposes the stored 'DateTime'' as 'UTCTime'.)
scExpirationTime :: Lens' SSLCert (Maybe UTCTime)
scExpirationTime
  = lens _scExpirationTime
      (\ s a -> s{_scExpirationTime = a})
      . mapping _DateTime

-- | The time when the certificate was created in RFC 3339 format, for
-- example 2012-11-15T16:19:00.094Z
scCreateTime :: Lens' SSLCert (Maybe UTCTime)
scCreateTime
  = lens _scCreateTime (\ s a -> s{_scCreateTime = a})
      . mapping _DateTime

-- | Name of the database instance.
scInstance :: Lens' SSLCert (Maybe Text)
scInstance
  = lens _scInstance (\ s a -> s{_scInstance = a})
-- Decoding: every field is optional on the wire; a missing "kind"
-- defaults to "sql#sslCert".
instance FromJSON SSLCert where
        parseJSON
          = withObject "SSLCert"
              (\ o ->
                 SSLCert' <$>
                   (o .:? "commonName") <*>
                     (o .:? "kind" .!= "sql#sslCert")
                     <*> (o .:? "certSerialNumber")
                     <*> (o .:? "selfLink")
                     <*> (o .:? "cert")
                     <*> (o .:? "sha1Fingerprint")
                     <*> (o .:? "expirationTime")
                     <*> (o .:? "createTime")
                     <*> (o .:? "instance"))

-- Encoding: 'Nothing' fields are dropped from the object entirely.
instance ToJSON SSLCert where
        toJSON SSLCert'{..}
          = object
              (catMaybes
                 [("commonName" .=) <$> _scCommonName,
                  Just ("kind" .= _scKind),
                  ("certSerialNumber" .=) <$> _scCertSerialNumber,
                  ("selfLink" .=) <$> _scSelfLink,
                  ("cert" .=) <$> _scCert,
                  ("sha1Fingerprint" .=) <$> _scSha1Fingerprint,
                  ("expirationTime" .=) <$> _scExpirationTime,
                  ("createTime" .=) <$> _scCreateTime,
                  ("instance" .=) <$> _scInstance])
-- | Database list response.
--
-- /See:/ 'databasesListResponse' smart constructor.
data DatabasesListResponse = DatabasesListResponse'
    { _dlrKind :: !Text
    , _dlrItems :: !(Maybe [Database])
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'DatabasesListResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dlrKind'
--
-- * 'dlrItems'
databasesListResponse
    :: DatabasesListResponse
databasesListResponse =
  DatabasesListResponse'
    { _dlrKind = "sql#databasesList"
    , _dlrItems = Nothing
    }

-- | This is always sql#databasesList.
dlrKind :: Lens' DatabasesListResponse Text
dlrKind = lens _dlrKind (\ s a -> s{_dlrKind = a})

-- | List of database resources in the instance.
-- NOTE(review): '_Default' appears to let an absent field read back as the
-- empty list -- confirm against the '_Default' prism's definition.
dlrItems :: Lens' DatabasesListResponse [Database]
dlrItems
  = lens _dlrItems (\ s a -> s{_dlrItems = a}) .
      _Default
      . _Coerce

instance FromJSON DatabasesListResponse where
        parseJSON
          = withObject "DatabasesListResponse"
              (\ o ->
                 DatabasesListResponse' <$>
                   (o .:? "kind" .!= "sql#databasesList") <*>
                     (o .:? "items" .!= mempty))

instance ToJSON DatabasesListResponse where
        toJSON DatabasesListResponse'{..}
          = object
              (catMaybes
                 [Just ("kind" .= _dlrKind),
                  ("items" .=) <$> _dlrItems])
-- | Database instance export request.
--
-- /See:/ 'instancesExportRequest' smart constructor.
-- A single optional field, so a zero-cost newtype suffices.
newtype InstancesExportRequest = InstancesExportRequest'
    { _ierExportContext :: Maybe ExportContext
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'InstancesExportRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ierExportContext'
instancesExportRequest
    :: InstancesExportRequest
instancesExportRequest =
  InstancesExportRequest'
    { _ierExportContext = Nothing
    }

-- | Contains details about the export operation.
ierExportContext :: Lens' InstancesExportRequest (Maybe ExportContext)
ierExportContext
  = lens _ierExportContext
      (\ s a -> s{_ierExportContext = a})

instance FromJSON InstancesExportRequest where
        parseJSON
          = withObject "InstancesExportRequest"
              (\ o ->
                 InstancesExportRequest' <$> (o .:? "exportContext"))

instance ToJSON InstancesExportRequest where
        toJSON InstancesExportRequest'{..}
          = object
              (catMaybes
                 [("exportContext" .=) <$> _ierExportContext])
-- | On-premises instance configuration.
--
-- /See:/ 'onPremisesConfiguration' smart constructor.
data OnPremisesConfiguration = OnPremisesConfiguration'
    { _opcKind :: !Text
    , _opcHostPort :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'OnPremisesConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'opcKind'
--
-- * 'opcHostPort'
onPremisesConfiguration
    :: OnPremisesConfiguration
onPremisesConfiguration =
  OnPremisesConfiguration'
    { _opcKind = "sql#onPremisesConfiguration"
    , _opcHostPort = Nothing
    }

-- | This is always sql#onPremisesConfiguration.
opcKind :: Lens' OnPremisesConfiguration Text
opcKind = lens _opcKind (\ s a -> s{_opcKind = a})

-- | The host and port of the on-premises instance in host:port format
opcHostPort :: Lens' OnPremisesConfiguration (Maybe Text)
opcHostPort
  = lens _opcHostPort (\ s a -> s{_opcHostPort = a})

instance FromJSON OnPremisesConfiguration where
        parseJSON
          = withObject "OnPremisesConfiguration"
              (\ o ->
                 OnPremisesConfiguration' <$>
                   (o .:? "kind" .!= "sql#onPremisesConfiguration") <*>
                     (o .:? "hostPort"))

instance ToJSON OnPremisesConfiguration where
        toJSON OnPremisesConfiguration'{..}
          = object
              (catMaybes
                 [Just ("kind" .= _opcKind),
                  ("hostPort" .=) <$> _opcHostPort])
-- | Database instance list operations response.
--
-- /See:/ 'operationsListResponse' smart constructor.
data OperationsListResponse = OperationsListResponse'
    { _olrNextPageToken :: !(Maybe Text)
    , _olrKind :: !Text
    , _olrItems :: !(Maybe [Operation])
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Build an 'OperationsListResponse' with no page token, no items, and the
-- constant kind marker.
--
-- Use 'olrNextPageToken', 'olrKind' and 'olrItems' to adjust the fields.
operationsListResponse
    :: OperationsListResponse
operationsListResponse =
    OperationsListResponse'
    { _olrNextPageToken = Nothing
    , _olrKind = "sql#operationsList"
    , _olrItems = Nothing
    }

-- | The continuation token, used to page through large result sets. Provide
-- this value in a subsequent request to return the next page of results.
olrNextPageToken :: Lens' OperationsListResponse (Maybe Text)
olrNextPageToken = lens _olrNextPageToken setter
  where
    setter s a = s {_olrNextPageToken = a}

-- | This is always sql#operationsList.
olrKind :: Lens' OperationsListResponse Text
olrKind = lens _olrKind setter
  where
    setter s a = s {_olrKind = a}

-- | List of operation resources.
olrItems :: Lens' OperationsListResponse [Operation]
olrItems = base . _Default . _Coerce
  where
    base = lens _olrItems (\s a -> s {_olrItems = a})

-- Decode from JSON; "kind" defaults to its constant and "items" to the
-- empty list when absent.
instance FromJSON OperationsListResponse where
    parseJSON =
        withObject "OperationsListResponse" $ \o ->
            OperationsListResponse'
                <$> o .:? "nextPageToken"
                <*> o .:? "kind" .!= "sql#operationsList"
                <*> o .:? "items" .!= mempty

-- Encode to JSON, omitting the optional fields that are absent.
instance ToJSON OperationsListResponse where
    toJSON OperationsListResponse'{..} =
        object . catMaybes $
            [ ("nextPageToken" .=) <$> _olrNextPageToken
            , Just ("kind" .= _olrKind)
            , ("items" .=) <$> _olrItems
            ]
-- | Database instance import context.
--
-- Describes where an import reads from (a Cloud Storage URI), the file
-- format, and the target database.
--
-- /See:/ 'importContext' smart constructor.
data ImportContext = ImportContext'
    { _icDatabase :: !(Maybe Text)
    , _icKind :: !Text
    , _icCSVImportOptions :: !(Maybe ImportContextCSVImportOptions)
    , _icURI :: !(Maybe Text)
    , _icFileType :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ImportContext' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'icDatabase'
--
-- * 'icKind'
--
-- * 'icCSVImportOptions'
--
-- * 'icURI'
--
-- * 'icFileType'
importContext
    :: ImportContext
importContext =
    ImportContext'
    { _icDatabase = Nothing
    , _icKind = "sql#importContext"
    , _icCSVImportOptions = Nothing
    , _icURI = Nothing
    , _icFileType = Nothing
    }
-- | The database (for example, guestbook) to which the import is made. If
-- fileType is SQL and no database is specified, it is assumed that the
-- database is specified in the file to be imported. If fileType is CSV, it
-- must be specified.
icDatabase :: Lens' ImportContext (Maybe Text)
icDatabase
  = lens _icDatabase (\ s a -> s{_icDatabase = a})
-- | This is always sql#importContext.
icKind :: Lens' ImportContext Text
icKind = lens _icKind (\ s a -> s{_icKind = a})
-- | Options for importing data as CSV.
icCSVImportOptions :: Lens' ImportContext (Maybe ImportContextCSVImportOptions)
icCSVImportOptions
  = lens _icCSVImportOptions
      (\ s a -> s{_icCSVImportOptions = a})
-- | A path to the file in Google Cloud Storage from which the import is
-- made. The URI is in the form gs:\/\/bucketName\/fileName. Compressed
-- gzip files (.gz) are supported when fileType is SQL.
icURI :: Lens' ImportContext (Maybe Text)
icURI = lens _icURI (\ s a -> s{_icURI = a})
-- | The file type for the specified uri. SQL: The file contains SQL
-- statements. CSV: The file contains CSV data.
icFileType :: Lens' ImportContext (Maybe Text)
icFileType
  = lens _icFileType (\ s a -> s{_icFileType = a})
-- Decode from JSON. Optional fields parse to 'Nothing' when absent; a
-- missing "kind" falls back to its constant default. The applicative chain
-- is positional, so its order must match the constructor's field order.
instance FromJSON ImportContext where
        parseJSON
          = withObject "ImportContext"
              (\ o ->
                 ImportContext' <$>
                   (o .:? "database") <*>
                     (o .:? "kind" .!= "sql#importContext")
                     <*> (o .:? "csvImportOptions")
                     <*> (o .:? "uri")
                     <*> (o .:? "fileType"))
-- Encode to JSON; 'Nothing' fields are dropped from the rendered object.
instance ToJSON ImportContext where
        toJSON ImportContext'{..}
          = object
              (catMaybes
                 [("database" .=) <$> _icDatabase,
                  Just ("kind" .= _icKind),
                  ("csvImportOptions" .=) <$> _icCSVImportOptions,
                  ("uri" .=) <$> _icURI,
                  ("fileType" .=) <$> _icFileType])
-- | An Operations resource contains information about database instance
-- operations such as create, delete, and restart. Operations resources are
-- created in response to operations that were initiated; you never create
-- them directly.
--
-- /See:/ 'operation' smart constructor.
data Operation = Operation'
    { _oTargetId :: !(Maybe Text)
    , _oTargetProject :: !(Maybe Text)
    , _oStatus :: !(Maybe Text)
    , _oInsertTime :: !(Maybe DateTime')
    , _oImportContext :: !(Maybe ImportContext)
    , _oStartTime :: !(Maybe DateTime')
    , _oKind :: !Text
    , _oError :: !(Maybe OperationErrors)
    , _oExportContext :: !(Maybe ExportContext)
    , _oUser :: !(Maybe Text)
    , _oSelfLink :: !(Maybe Text)
    , _oName :: !(Maybe Text)
    , _oEndTime :: !(Maybe DateTime')
    , _oOperationType :: !(Maybe Text)
    , _oTargetLink :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Operation' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'oTargetId'
--
-- * 'oTargetProject'
--
-- * 'oStatus'
--
-- * 'oInsertTime'
--
-- * 'oImportContext'
--
-- * 'oStartTime'
--
-- * 'oKind'
--
-- * 'oError'
--
-- * 'oExportContext'
--
-- * 'oUser'
--
-- * 'oSelfLink'
--
-- * 'oName'
--
-- * 'oEndTime'
--
-- * 'oOperationType'
--
-- * 'oTargetLink'
operation
    :: Operation
operation =
    Operation'
    { _oTargetId = Nothing
    , _oTargetProject = Nothing
    , _oStatus = Nothing
    , _oInsertTime = Nothing
    , _oImportContext = Nothing
    , _oStartTime = Nothing
    , _oKind = "sql#operation"
    , _oError = Nothing
    , _oExportContext = Nothing
    , _oUser = Nothing
    , _oSelfLink = Nothing
    , _oName = Nothing
    , _oEndTime = Nothing
    , _oOperationType = Nothing
    , _oTargetLink = Nothing
    }
-- | Name of the database instance related to this operation.
oTargetId :: Lens' Operation (Maybe Text)
oTargetId
  = lens _oTargetId (\ s a -> s{_oTargetId = a})
-- | The project ID of the target instance related to this operation.
oTargetProject :: Lens' Operation (Maybe Text)
oTargetProject
  = lens _oTargetProject
      (\ s a -> s{_oTargetProject = a})
-- | The status of an operation. Valid values are PENDING, RUNNING, DONE,
-- UNKNOWN.
oStatus :: Lens' Operation (Maybe Text)
oStatus = lens _oStatus (\ s a -> s{_oStatus = a})
-- | The time this operation was enqueued in UTC timezone in RFC 3339 format,
-- for example 2012-11-15T16:19:00.094Z.
--
-- The stored 'DateTime'' wire wrapper is hidden behind 'mapping _DateTime',
-- so callers see a plain 'UTCTime'.
oInsertTime :: Lens' Operation (Maybe UTCTime)
oInsertTime
  = lens _oInsertTime (\ s a -> s{_oInsertTime = a}) .
      mapping _DateTime
-- | The context for import operation, if applicable.
oImportContext :: Lens' Operation (Maybe ImportContext)
oImportContext
  = lens _oImportContext
      (\ s a -> s{_oImportContext = a})
-- | The time this operation actually started in UTC timezone in RFC 3339
-- format, for example 2012-11-15T16:19:00.094Z.
oStartTime :: Lens' Operation (Maybe UTCTime)
oStartTime
  = lens _oStartTime (\ s a -> s{_oStartTime = a}) .
      mapping _DateTime
-- | This is always sql#operation.
oKind :: Lens' Operation Text
oKind = lens _oKind (\ s a -> s{_oKind = a})
-- | If errors occurred during processing of this operation, this field will
-- be populated.
oError :: Lens' Operation (Maybe OperationErrors)
oError = lens _oError (\ s a -> s{_oError = a})
-- | The context for export operation, if applicable.
oExportContext :: Lens' Operation (Maybe ExportContext)
oExportContext
  = lens _oExportContext
      (\ s a -> s{_oExportContext = a})
-- | The email address of the user who initiated this operation.
oUser :: Lens' Operation (Maybe Text)
oUser = lens _oUser (\ s a -> s{_oUser = a})
-- | The URI of this resource.
oSelfLink :: Lens' Operation (Maybe Text)
oSelfLink
  = lens _oSelfLink (\ s a -> s{_oSelfLink = a})
-- | An identifier that uniquely identifies the operation. You can use this
-- identifier to retrieve the Operations resource that has information
-- about the operation.
oName :: Lens' Operation (Maybe Text)
oName = lens _oName (\ s a -> s{_oName = a})
-- | The time this operation finished in UTC timezone in RFC 3339 format, for
-- example 2012-11-15T16:19:00.094Z.
oEndTime :: Lens' Operation (Maybe UTCTime)
oEndTime
  = lens _oEndTime (\ s a -> s{_oEndTime = a}) .
      mapping _DateTime
-- | The type of the operation. Valid values are CREATE, DELETE, UPDATE,
-- RESTART, IMPORT, EXPORT, BACKUP_VOLUME, RESTORE_VOLUME, CREATE_USER,
-- DELETE_USER, CREATE_DATABASE, DELETE_DATABASE .
oOperationType :: Lens' Operation (Maybe Text)
oOperationType
  = lens _oOperationType
      (\ s a -> s{_oOperationType = a})
-- | The URI of the instance related to the operation.
oTargetLink :: Lens' Operation (Maybe Text)
oTargetLink
  = lens _oTargetLink (\ s a -> s{_oTargetLink = a})
-- Decode from JSON. The applicative chain is positional: the parser order
-- must match the 15-field constructor order exactly. Only "kind" has a
-- default; every other missing key parses to 'Nothing'.
instance FromJSON Operation where
        parseJSON
          = withObject "Operation"
              (\ o ->
                 Operation' <$>
                   (o .:? "targetId") <*> (o .:? "targetProject") <*>
                     (o .:? "status")
                     <*> (o .:? "insertTime")
                     <*> (o .:? "importContext")
                     <*> (o .:? "startTime")
                     <*> (o .:? "kind" .!= "sql#operation")
                     <*> (o .:? "error")
                     <*> (o .:? "exportContext")
                     <*> (o .:? "user")
                     <*> (o .:? "selfLink")
                     <*> (o .:? "name")
                     <*> (o .:? "endTime")
                     <*> (o .:? "operationType")
                     <*> (o .:? "targetLink"))
-- Encode to JSON; 'Nothing' fields are dropped, "kind" is always emitted.
instance ToJSON Operation where
        toJSON Operation'{..}
          = object
              (catMaybes
                 [("targetId" .=) <$> _oTargetId,
                  ("targetProject" .=) <$> _oTargetProject,
                  ("status" .=) <$> _oStatus,
                  ("insertTime" .=) <$> _oInsertTime,
                  ("importContext" .=) <$> _oImportContext,
                  ("startTime" .=) <$> _oStartTime,
                  Just ("kind" .= _oKind), ("error" .=) <$> _oError,
                  ("exportContext" .=) <$> _oExportContext,
                  ("user" .=) <$> _oUser,
                  ("selfLink" .=) <$> _oSelfLink,
                  ("name" .=) <$> _oName, ("endTime" .=) <$> _oEndTime,
                  ("operationType" .=) <$> _oOperationType,
                  ("targetLink" .=) <$> _oTargetLink])
-- | Database instance settings.
--
-- /See:/ 'settings' smart constructor.
data Settings = Settings'
    { _sStorageAutoResize :: !(Maybe Bool)
    , _sReplicationType :: !(Maybe Text)
    , _sActivationPolicy :: !(Maybe Text)
    , _sSettingsVersion :: !(Maybe (Textual Int64))
    , _sDataDiskSizeGb :: !(Maybe (Textual Int64))
    , _sAuthorizedGaeApplications :: !(Maybe [Text])
    , _sKind :: !Text
    , _sPricingPlan :: !(Maybe Text)
    , _sIPConfiguration :: !(Maybe IPConfiguration)
    , _sMaintenanceWindow :: !(Maybe MaintenanceWindow)
    , _sDatabaseReplicationEnabled :: !(Maybe Bool)
    , _sTier :: !(Maybe Text)
    , _sDatabaseFlags :: !(Maybe [DatabaseFlags])
    , _sDataDiskType :: !(Maybe Text)
    , _sCrashSafeReplicationEnabled :: !(Maybe Bool)
    , _sLocationPreference :: !(Maybe LocationPreference)
    , _sBackupConfiguration :: !(Maybe BackupConfiguration)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Settings' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sStorageAutoResize'
--
-- * 'sReplicationType'
--
-- * 'sActivationPolicy'
--
-- * 'sSettingsVersion'
--
-- * 'sDataDiskSizeGb'
--
-- * 'sAuthorizedGaeApplications'
--
-- * 'sKind'
--
-- * 'sPricingPlan'
--
-- * 'sIPConfiguration'
--
-- * 'sMaintenanceWindow'
--
-- * 'sDatabaseReplicationEnabled'
--
-- * 'sTier'
--
-- * 'sDatabaseFlags'
--
-- * 'sDataDiskType'
--
-- * 'sCrashSafeReplicationEnabled'
--
-- * 'sLocationPreference'
--
-- * 'sBackupConfiguration'
settings
    :: Settings
settings =
    Settings'
    { _sStorageAutoResize = Nothing
    , _sReplicationType = Nothing
    , _sActivationPolicy = Nothing
    , _sSettingsVersion = Nothing
    , _sDataDiskSizeGb = Nothing
    , _sAuthorizedGaeApplications = Nothing
    , _sKind = "sql#settings"
    , _sPricingPlan = Nothing
    , _sIPConfiguration = Nothing
    , _sMaintenanceWindow = Nothing
    , _sDatabaseReplicationEnabled = Nothing
    , _sTier = Nothing
    , _sDatabaseFlags = Nothing
    , _sDataDiskType = Nothing
    , _sCrashSafeReplicationEnabled = Nothing
    , _sLocationPreference = Nothing
    , _sBackupConfiguration = Nothing
    }
-- | Configuration to increase storage size automatically. The default value
-- is false. Applies only to Second Generation instances.
sStorageAutoResize :: Lens' Settings (Maybe Bool)
sStorageAutoResize
  = lens _sStorageAutoResize
      (\ s a -> s{_sStorageAutoResize = a})
-- | The type of replication this instance uses. This can be either
-- ASYNCHRONOUS or SYNCHRONOUS. This property is only applicable to First
-- Generation instances.
sReplicationType :: Lens' Settings (Maybe Text)
sReplicationType
  = lens _sReplicationType
      (\ s a -> s{_sReplicationType = a})
-- | The activation policy specifies when the instance is activated; it is
-- applicable only when the instance state is RUNNABLE. The activation
-- policy cannot be updated together with other settings for Second
-- Generation instances. Valid values: ALWAYS: The instance is on; it is
-- not deactivated by inactivity. NEVER: The instance is off; it is not
-- activated, even if a connection request arrives. ON_DEMAND: The instance
-- responds to incoming requests, and turns itself off when not in use.
-- Instances with PER_USE pricing turn off after 15 minutes of inactivity.
-- Instances with PER_PACKAGE pricing turn off after 12 hours of
-- inactivity.
sActivationPolicy :: Lens' Settings (Maybe Text)
sActivationPolicy
  = lens _sActivationPolicy
      (\ s a -> s{_sActivationPolicy = a})
-- | The version of instance settings. This is a required field for update
-- method to make sure concurrent updates are handled properly. During
-- update, use the most recent settingsVersion value for this instance and
-- do not try to update this value.
--
-- NOTE(review): stored as @Textual Int64@; 'mapping _Coerce' unwraps it so
-- callers see a plain 'Int64'. The 'Textual' wrapper presumably encodes
-- the number as a JSON string on the wire — confirm against 'Textual'.
sSettingsVersion :: Lens' Settings (Maybe Int64)
sSettingsVersion
  = lens _sSettingsVersion
      (\ s a -> s{_sSettingsVersion = a})
      . mapping _Coerce
-- | The size of data disk, in GB. The data disk size minimum is 10GB.
-- Applies only to Second Generation instances.
sDataDiskSizeGb :: Lens' Settings (Maybe Int64)
sDataDiskSizeGb
  = lens _sDataDiskSizeGb
      (\ s a -> s{_sDataDiskSizeGb = a})
      . mapping _Coerce
-- | The App Engine app IDs that can access this instance. This property is
-- only applicable to First Generation instances.
sAuthorizedGaeApplications :: Lens' Settings [Text]
sAuthorizedGaeApplications
  = lens _sAuthorizedGaeApplications
      (\ s a -> s{_sAuthorizedGaeApplications = a})
      . _Default
      . _Coerce
-- | This is always sql#settings.
sKind :: Lens' Settings Text
sKind = lens _sKind (\ s a -> s{_sKind = a})
-- | The pricing plan for this instance. This can be either PER_USE or
-- PACKAGE. Only PER_USE is supported for Second Generation instances.
sPricingPlan :: Lens' Settings (Maybe Text)
sPricingPlan
  = lens _sPricingPlan (\ s a -> s{_sPricingPlan = a})
-- | The settings for IP Management. This allows to enable or disable the
-- instance IP and manage which external networks can connect to the
-- instance. The IPv4 address cannot be disabled for Second Generation
-- instances.
sIPConfiguration :: Lens' Settings (Maybe IPConfiguration)
sIPConfiguration
  = lens _sIPConfiguration
      (\ s a -> s{_sIPConfiguration = a})
-- | The maintenance window for this instance. This specifies when the
-- instance may be restarted for maintenance purposes. Applies only to
-- Second Generation instances.
sMaintenanceWindow :: Lens' Settings (Maybe MaintenanceWindow)
sMaintenanceWindow
  = lens _sMaintenanceWindow
      (\ s a -> s{_sMaintenanceWindow = a})
-- | Configuration specific to read replica instances. Indicates whether
-- replication is enabled or not.
sDatabaseReplicationEnabled :: Lens' Settings (Maybe Bool)
sDatabaseReplicationEnabled
  = lens _sDatabaseReplicationEnabled
      (\ s a -> s{_sDatabaseReplicationEnabled = a})
-- | The tier of service for this instance, for example D1, D2. For more
-- information, see pricing.
sTier :: Lens' Settings (Maybe Text)
sTier = lens _sTier (\ s a -> s{_sTier = a})
-- | The database flags passed to the instance at startup.
sDatabaseFlags :: Lens' Settings [DatabaseFlags]
sDatabaseFlags
  = lens _sDatabaseFlags
      (\ s a -> s{_sDatabaseFlags = a})
      . _Default
      . _Coerce
-- | The type of data disk. Only supported for Second Generation instances.
-- The default type is PD_SSD. Applies only to Second Generation instances.
sDataDiskType :: Lens' Settings (Maybe Text)
sDataDiskType
  = lens _sDataDiskType
      (\ s a -> s{_sDataDiskType = a})
-- | Configuration specific to read replica instances. Indicates whether
-- database flags for crash-safe replication are enabled. This property is
-- only applicable to First Generation instances.
sCrashSafeReplicationEnabled :: Lens' Settings (Maybe Bool)
sCrashSafeReplicationEnabled
  = lens _sCrashSafeReplicationEnabled
      (\ s a -> s{_sCrashSafeReplicationEnabled = a})
-- | The location preference settings. This allows the instance to be located
-- as near as possible to either an App Engine app or GCE zone for better
-- performance. App Engine co-location is only applicable to First
-- Generation instances.
sLocationPreference :: Lens' Settings (Maybe LocationPreference)
sLocationPreference
  = lens _sLocationPreference
      (\ s a -> s{_sLocationPreference = a})
-- | The daily backup configuration for the instance.
sBackupConfiguration :: Lens' Settings (Maybe BackupConfiguration)
sBackupConfiguration
  = lens _sBackupConfiguration
      (\ s a -> s{_sBackupConfiguration = a})
-- Decode from JSON. The applicative chain is positional: the parser order
-- must match the 17-field constructor order exactly. "kind" defaults to
-- its constant; the two list fields default to the empty list.
instance FromJSON Settings where
        parseJSON
          = withObject "Settings"
              (\ o ->
                 Settings' <$>
                   (o .:? "storageAutoResize") <*>
                     (o .:? "replicationType")
                     <*> (o .:? "activationPolicy")
                     <*> (o .:? "settingsVersion")
                     <*> (o .:? "dataDiskSizeGb")
                     <*> (o .:? "authorizedGaeApplications" .!= mempty)
                     <*> (o .:? "kind" .!= "sql#settings")
                     <*> (o .:? "pricingPlan")
                     <*> (o .:? "ipConfiguration")
                     <*> (o .:? "maintenanceWindow")
                     <*> (o .:? "databaseReplicationEnabled")
                     <*> (o .:? "tier")
                     <*> (o .:? "databaseFlags" .!= mempty)
                     <*> (o .:? "dataDiskType")
                     <*> (o .:? "crashSafeReplicationEnabled")
                     <*> (o .:? "locationPreference")
                     <*> (o .:? "backupConfiguration"))
-- Encode to JSON; 'Nothing' fields are dropped, "kind" is always emitted.
instance ToJSON Settings where
        toJSON Settings'{..}
          = object
              (catMaybes
                 [("storageAutoResize" .=) <$> _sStorageAutoResize,
                  ("replicationType" .=) <$> _sReplicationType,
                  ("activationPolicy" .=) <$> _sActivationPolicy,
                  ("settingsVersion" .=) <$> _sSettingsVersion,
                  ("dataDiskSizeGb" .=) <$> _sDataDiskSizeGb,
                  ("authorizedGaeApplications" .=) <$>
                    _sAuthorizedGaeApplications,
                  Just ("kind" .= _sKind),
                  ("pricingPlan" .=) <$> _sPricingPlan,
                  ("ipConfiguration" .=) <$> _sIPConfiguration,
                  ("maintenanceWindow" .=) <$> _sMaintenanceWindow,
                  ("databaseReplicationEnabled" .=) <$>
                    _sDatabaseReplicationEnabled,
                  ("tier" .=) <$> _sTier,
                  ("databaseFlags" .=) <$> _sDatabaseFlags,
                  ("dataDiskType" .=) <$> _sDataDiskType,
                  ("crashSafeReplicationEnabled" .=) <$>
                    _sCrashSafeReplicationEnabled,
                  ("locationPreference" .=) <$> _sLocationPreference,
                  ("backupConfiguration" .=) <$>
                    _sBackupConfiguration])
-- | Database instance IP Mapping.
--
-- /See:/ 'ipMApping' smart constructor.
data IPMApping = IPMApping'
    { _imaIPAddress :: !(Maybe Text)
    , _imaTimeToRetire :: !(Maybe DateTime')
    , _imaType :: !(Maybe Text)
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Build an 'IPMApping' with every field unset.
--
-- Use 'imaIPAddress', 'imaTimeToRetire' and 'imaType' to populate it.
ipMApping
    :: IPMApping
ipMApping =
    IPMApping'
    { _imaIPAddress = Nothing
    , _imaTimeToRetire = Nothing
    , _imaType = Nothing
    }

-- | The IP address assigned.
imaIPAddress :: Lens' IPMApping (Maybe Text)
imaIPAddress = lens _imaIPAddress setter
  where
    setter s a = s {_imaIPAddress = a}

-- | The due time for this IP to be retired in RFC 3339 format, for example
-- 2012-11-15T16:19:00.094Z. This field is only available when the IP is
-- scheduled to be retired.
imaTimeToRetire :: Lens' IPMApping (Maybe UTCTime)
imaTimeToRetire = base . mapping _DateTime
  where
    base = lens _imaTimeToRetire (\s a -> s {_imaTimeToRetire = a})

-- | The type of this IP address. A PRIMARY address is an address that can
-- accept incoming connections. An OUTGOING address is the source address
-- of connections originating from the instance, if supported.
imaType :: Lens' IPMApping (Maybe Text)
imaType = lens _imaType setter
  where
    setter s a = s {_imaType = a}

-- Decode from JSON; all three fields are optional on the wire.
instance FromJSON IPMApping where
    parseJSON =
        withObject "IPMApping" $ \o ->
            IPMApping'
                <$> o .:? "ipAddress"
                <*> o .:? "timeToRetire"
                <*> o .:? "type"

-- Encode to JSON, dropping any field that is absent.
instance ToJSON IPMApping where
    toJSON IPMApping'{..} =
        object . catMaybes $
            [ ("ipAddress" .=) <$> _imaIPAddress
            , ("timeToRetire" .=) <$> _imaTimeToRetire
            , ("type" .=) <$> _imaType
            ]
-- | A database resource inside a Cloud SQL instance.
--
-- /See:/ 'database' smart constructor.
data Database = Database'
    { _dEtag :: !(Maybe Text)
    , _dProject :: !(Maybe Text)
    , _dKind :: !Text
    , _dCollation :: !(Maybe Text)
    , _dSelfLink :: !(Maybe Text)
    , _dName :: !(Maybe Text)
    , _dCharSet :: !(Maybe Text)
    , _dInstance :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Database' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dEtag'
--
-- * 'dProject'
--
-- * 'dKind'
--
-- * 'dCollation'
--
-- * 'dSelfLink'
--
-- * 'dName'
--
-- * 'dCharSet'
--
-- * 'dInstance'
database
    :: Database
database =
    Database'
    { _dEtag = Nothing
    , _dProject = Nothing
    , _dKind = "sql#database"
    , _dCollation = Nothing
    , _dSelfLink = Nothing
    , _dName = Nothing
    , _dCharSet = Nothing
    , _dInstance = Nothing
    }
-- | HTTP 1.1 Entity tag for the resource.
dEtag :: Lens' Database (Maybe Text)
dEtag = lens _dEtag (\ s a -> s{_dEtag = a})
-- | The project ID of the project containing the Cloud SQL database. The
-- Google apps domain is prefixed if applicable.
dProject :: Lens' Database (Maybe Text)
dProject = lens _dProject (\ s a -> s{_dProject = a})
-- | This is always sql#database.
dKind :: Lens' Database Text
dKind = lens _dKind (\ s a -> s{_dKind = a})
-- | The MySQL collation value.
dCollation :: Lens' Database (Maybe Text)
dCollation
  = lens _dCollation (\ s a -> s{_dCollation = a})
-- | The URI of this resource.
dSelfLink :: Lens' Database (Maybe Text)
dSelfLink
  = lens _dSelfLink (\ s a -> s{_dSelfLink = a})
-- | The name of the database in the Cloud SQL instance. This does not
-- include the project ID or instance name.
dName :: Lens' Database (Maybe Text)
dName = lens _dName (\ s a -> s{_dName = a})
-- | The MySQL charset value.
dCharSet :: Lens' Database (Maybe Text)
dCharSet = lens _dCharSet (\ s a -> s{_dCharSet = a})
-- | The name of the Cloud SQL instance. This does not include the project
-- ID.
dInstance :: Lens' Database (Maybe Text)
dInstance
  = lens _dInstance (\ s a -> s{_dInstance = a})
-- Decode from JSON. The applicative chain is positional and must match the
-- constructor's field order; note the wire key is "charset" (lowercase)
-- for the '_dCharSet' field. Only "kind" has a default.
instance FromJSON Database where
        parseJSON
          = withObject "Database"
              (\ o ->
                 Database' <$>
                   (o .:? "etag") <*> (o .:? "project") <*>
                     (o .:? "kind" .!= "sql#database")
                     <*> (o .:? "collation")
                     <*> (o .:? "selfLink")
                     <*> (o .:? "name")
                     <*> (o .:? "charset")
                     <*> (o .:? "instance"))
-- Encode to JSON; 'Nothing' fields are dropped, "kind" is always emitted.
instance ToJSON Database where
        toJSON Database'{..}
          = object
              (catMaybes
                 [("etag" .=) <$> _dEtag,
                  ("project" .=) <$> _dProject,
                  Just ("kind" .= _dKind),
                  ("collation" .=) <$> _dCollation,
                  ("selfLink" .=) <$> _dSelfLink,
                  ("name" .=) <$> _dName, ("charset" .=) <$> _dCharSet,
                  ("instance" .=) <$> _dInstance])
-- | SslCerts create ephemeral certificate request.
--
-- /See:/ 'sslCertsCreateEphemeralRequest' smart constructor.
newtype SSLCertsCreateEphemeralRequest = SSLCertsCreateEphemeralRequest'
    { _sccerPublicKey :: Maybe Text
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Build an 'SSLCertsCreateEphemeralRequest' with no public key set.
--
-- Use 'sccerPublicKey' to supply the key material.
sslCertsCreateEphemeralRequest
    :: SSLCertsCreateEphemeralRequest
sslCertsCreateEphemeralRequest =
    SSLCertsCreateEphemeralRequest' {_sccerPublicKey = Nothing}

-- | PEM encoded public key to include in the signed certificate.
sccerPublicKey :: Lens' SSLCertsCreateEphemeralRequest (Maybe Text)
sccerPublicKey = lens _sccerPublicKey setter
  where
    setter s a = s {_sccerPublicKey = a}

-- Decode from JSON; note the snake_case wire key "public_key".
instance FromJSON SSLCertsCreateEphemeralRequest where
    parseJSON =
        withObject "SSLCertsCreateEphemeralRequest" $ \o ->
            SSLCertsCreateEphemeralRequest' <$> o .:? "public_key"

-- Encode to JSON, omitting the key when it is absent.
instance ToJSON SSLCertsCreateEphemeralRequest where
    toJSON SSLCertsCreateEphemeralRequest'{..} =
        object (catMaybes [("public_key" .=) <$> _sccerPublicKey])
-- | Binary log coordinates.
--
-- /See:/ 'binLogCoordinates' smart constructor.
data BinLogCoordinates = BinLogCoordinates'
    { _blcBinLogPosition :: !(Maybe (Textual Int64))
    , _blcKind :: !Text
    , _blcBinLogFileName :: !(Maybe Text)
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Build a 'BinLogCoordinates' with no position, no file name, and the
-- constant kind marker.
--
-- Use 'blcBinLogPosition', 'blcKind' and 'blcBinLogFileName' to adjust the
-- fields.
binLogCoordinates
    :: BinLogCoordinates
binLogCoordinates =
    BinLogCoordinates'
    { _blcBinLogPosition = Nothing
    , _blcKind = "sql#binLogCoordinates"
    , _blcBinLogFileName = Nothing
    }

-- | Position (offset) within the binary log file.
blcBinLogPosition :: Lens' BinLogCoordinates (Maybe Int64)
blcBinLogPosition = base . mapping _Coerce
  where
    base = lens _blcBinLogPosition (\s a -> s {_blcBinLogPosition = a})

-- | This is always sql#binLogCoordinates.
blcKind :: Lens' BinLogCoordinates Text
blcKind = lens _blcKind setter
  where
    setter s a = s {_blcKind = a}

-- | Name of the binary log file for a Cloud SQL instance.
blcBinLogFileName :: Lens' BinLogCoordinates (Maybe Text)
blcBinLogFileName = lens _blcBinLogFileName setter
  where
    setter s a = s {_blcBinLogFileName = a}

-- Decode from JSON; a missing "kind" falls back to its constant default.
instance FromJSON BinLogCoordinates where
    parseJSON =
        withObject "BinLogCoordinates" $ \o ->
            BinLogCoordinates'
                <$> o .:? "binLogPosition"
                <*> o .:? "kind" .!= "sql#binLogCoordinates"
                <*> o .:? "binLogFileName"

-- Encode to JSON, dropping the optional fields that are absent.
instance ToJSON BinLogCoordinates where
    toJSON BinLogCoordinates'{..} =
        object . catMaybes $
            [ ("binLogPosition" .=) <$> _blcBinLogPosition
            , Just ("kind" .= _blcKind)
            , ("binLogFileName" .=) <$> _blcBinLogFileName
            ]
-- | The name and status of the failover replica. This property is applicable
-- only to Second Generation instances.
--
-- /See:/ 'databaseInstanceFailoverReplica' smart constructor.
data DatabaseInstanceFailoverReplica = DatabaseInstanceFailoverReplica'
    { _difrName :: !(Maybe Text)
    , _difrAvailable :: !(Maybe Bool)
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Build a 'DatabaseInstanceFailoverReplica' with both fields unset.
--
-- Use 'difrName' and 'difrAvailable' to populate it.
databaseInstanceFailoverReplica
    :: DatabaseInstanceFailoverReplica
databaseInstanceFailoverReplica =
    DatabaseInstanceFailoverReplica'
    { _difrName = Nothing
    , _difrAvailable = Nothing
    }

-- | The name of the failover replica. If specified at instance creation, a
-- failover replica is created for the instance. The name doesn\'t include
-- the project ID. This property is applicable only to Second Generation
-- instances.
difrName :: Lens' DatabaseInstanceFailoverReplica (Maybe Text)
difrName = lens _difrName setter
  where
    setter s a = s {_difrName = a}

-- | The availability status of the failover replica. A false status
-- indicates that the failover replica is out of sync. The master can only
-- failover to the failover replica when the status is true.
difrAvailable :: Lens' DatabaseInstanceFailoverReplica (Maybe Bool)
difrAvailable = lens _difrAvailable setter
  where
    setter s a = s {_difrAvailable = a}

-- Decode from JSON; both fields are optional on the wire.
instance FromJSON DatabaseInstanceFailoverReplica where
    parseJSON =
        withObject "DatabaseInstanceFailoverReplica" $ \o ->
            DatabaseInstanceFailoverReplica'
                <$> o .:? "name"
                <*> o .:? "available"

-- Encode to JSON, dropping any field that is absent.
instance ToJSON DatabaseInstanceFailoverReplica where
    toJSON DatabaseInstanceFailoverReplica'{..} =
        object . catMaybes $
            [ ("name" .=) <$> _difrName
            , ("available" .=) <$> _difrAvailable
            ]
-- | Tiers list response.
--
-- /See:/ 'tiersListResponse' smart constructor.
data TiersListResponse = TiersListResponse'
    { _tlrKind :: !Text
    , _tlrItems :: !(Maybe [Tier])
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Build a 'TiersListResponse' with no items and the constant kind marker.
--
-- Use 'tlrKind' and 'tlrItems' to adjust the fields.
tiersListResponse
    :: TiersListResponse
tiersListResponse =
    TiersListResponse'
    { _tlrKind = "sql#tiersList"
    , _tlrItems = Nothing
    }

-- | This is always sql#tiersList.
tlrKind :: Lens' TiersListResponse Text
tlrKind = lens _tlrKind setter
  where
    setter s a = s {_tlrKind = a}

-- | List of tiers.
tlrItems :: Lens' TiersListResponse [Tier]
tlrItems = base . _Default . _Coerce
  where
    base = lens _tlrItems (\s a -> s {_tlrItems = a})

-- Decode from JSON; "kind" defaults to its constant and "items" to the
-- empty list when absent.
instance FromJSON TiersListResponse where
    parseJSON =
        withObject "TiersListResponse" $ \o ->
            TiersListResponse'
                <$> o .:? "kind" .!= "sql#tiersList"
                <*> o .:? "items" .!= mempty

-- Encode to JSON, omitting the item list when it is absent.
instance ToJSON TiersListResponse where
    toJSON TiersListResponse'{..} =
        object . catMaybes $
            [ Just ("kind" .= _tlrKind)
            , ("items" .=) <$> _tlrItems
            ]
-- | User list response.
--
-- /See:/ 'usersListResponse' smart constructor.
data UsersListResponse = UsersListResponse'
    { _ulrNextPageToken :: !(Maybe Text)
    , _ulrKind :: !Text
    , _ulrItems :: !(Maybe [User])
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'UsersListResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ulrNextPageToken'
--
-- * 'ulrKind'
--
-- * 'ulrItems'
usersListResponse
    :: UsersListResponse
usersListResponse =
    UsersListResponse'
    { _ulrNextPageToken = Nothing
    , _ulrKind = "sql#usersList"
    , _ulrItems = Nothing
    }
-- | The continuation token, used to page through large result sets. Provide
-- this value in a subsequent request to return the next page of results.
--
-- NOTE(review): the upstream generated text described "an identifier that
-- uniquely identifies the operation", which does not match this
-- nextPageToken field; reworded to match 'olrNextPageToken'. Confirm
-- against the Cloud SQL Admin API discovery document.
ulrNextPageToken :: Lens' UsersListResponse (Maybe Text)
ulrNextPageToken
  = lens _ulrNextPageToken
      (\ s a -> s{_ulrNextPageToken = a})
-- | This is always sql#usersList.
ulrKind :: Lens' UsersListResponse Text
ulrKind = lens _ulrKind (\ s a -> s{_ulrKind = a})
-- | List of user resources in the instance.
ulrItems :: Lens' UsersListResponse [User]
ulrItems
  = lens _ulrItems (\ s a -> s{_ulrItems = a}) .
      _Default
      . _Coerce
-- Decode from JSON; "kind" defaults to its constant and "items" to the
-- empty list when absent.
instance FromJSON UsersListResponse where
        parseJSON
          = withObject "UsersListResponse"
              (\ o ->
                 UsersListResponse' <$>
                   (o .:? "nextPageToken") <*>
                     (o .:? "kind" .!= "sql#usersList")
                     <*> (o .:? "items" .!= mempty))
-- Encode to JSON; 'Nothing' fields are dropped, "kind" is always emitted.
instance ToJSON UsersListResponse where
        toJSON UsersListResponse'{..}
          = object
              (catMaybes
                 [("nextPageToken" .=) <$> _ulrNextPageToken,
                  Just ("kind" .= _ulrKind),
                  ("items" .=) <$> _ulrItems])
-- | Database instance export context.
--
-- Describes where an export writes to (a Cloud Storage URI), the file
-- format, and which databases are included.
--
-- /See:/ 'exportContext' smart constructor.
data ExportContext = ExportContext'
    { _ecCSVExportOptions :: !(Maybe ExportContextCSVExportOptions)
    , _ecKind :: !Text
    , _ecURI :: !(Maybe Text)
    , _ecFileType :: !(Maybe Text)
    , _ecSQLExportOptions :: !(Maybe ExportContextSQLExportOptions)
    , _ecDatabases :: !(Maybe [Text])
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ExportContext' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ecCSVExportOptions'
--
-- * 'ecKind'
--
-- * 'ecURI'
--
-- * 'ecFileType'
--
-- * 'ecSQLExportOptions'
--
-- * 'ecDatabases'
exportContext
    :: ExportContext
exportContext =
    ExportContext'
    { _ecCSVExportOptions = Nothing
    , _ecKind = "sql#exportContext"
    , _ecURI = Nothing
    , _ecFileType = Nothing
    , _ecSQLExportOptions = Nothing
    , _ecDatabases = Nothing
    }
-- | Options for exporting data as CSV.
ecCSVExportOptions :: Lens' ExportContext (Maybe ExportContextCSVExportOptions)
ecCSVExportOptions
  = lens _ecCSVExportOptions
      (\ s a -> s{_ecCSVExportOptions = a})
-- | This is always sql#exportContext.
ecKind :: Lens' ExportContext Text
ecKind = lens _ecKind (\ s a -> s{_ecKind = a})
-- | The path to the file in Google Cloud Storage where the export will be
-- stored. The URI is in the form gs:\/\/bucketName\/fileName. If the file
-- already exists, the operation fails. If fileType is SQL and the filename
-- ends with .gz, the contents are compressed.
ecURI :: Lens' ExportContext (Maybe Text)
ecURI = lens _ecURI (\ s a -> s{_ecURI = a})
-- | The file type for the specified uri. SQL: The file contains SQL
-- statements. CSV: The file contains CSV data.
ecFileType :: Lens' ExportContext (Maybe Text)
ecFileType
  = lens _ecFileType (\ s a -> s{_ecFileType = a})
-- | Options for exporting data as SQL statements.
ecSQLExportOptions :: Lens' ExportContext (Maybe ExportContextSQLExportOptions)
ecSQLExportOptions
  = lens _ecSQLExportOptions
      (\ s a -> s{_ecSQLExportOptions = a})
-- | Databases (for example, guestbook) from which the export is made. If
-- fileType is SQL and no database is specified, all databases are
-- exported. If fileType is CSV, you can optionally specify at most one
-- database to export. If csvExportOptions.selectQuery also specifies the
-- database, this field will be ignored.
--
-- The stored @Maybe [Text]@ is presented as a plain list: '_Default'
-- treats an absent field as the empty list.
ecDatabases :: Lens' ExportContext [Text]
ecDatabases
  = lens _ecDatabases (\ s a -> s{_ecDatabases = a}) .
      _Default
      . _Coerce
instance FromJSON ExportContext where
parseJSON
= withObject "ExportContext"
(\ o ->
ExportContext' <$>
(o .:? "csvExportOptions") <*>
(o .:? "kind" .!= "sql#exportContext")
<*> (o .:? "uri")
<*> (o .:? "fileType")
<*> (o .:? "sqlExportOptions")
<*> (o .:? "databases" .!= mempty))
instance ToJSON ExportContext where
toJSON ExportContext'{..}
= object
(catMaybes
[("csvExportOptions" .=) <$> _ecCSVExportOptions,
Just ("kind" .= _ecKind), ("uri" .=) <$> _ecURI,
("fileType" .=) <$> _ecFileType,
("sqlExportOptions" .=) <$> _ecSQLExportOptions,
("databases" .=) <$> _ecDatabases])
-- | Database instance operation errors list wrapper.
--
-- /See:/ 'operationErrors' smart constructor.
data OperationErrors = OperationErrors'
    { _oeKind :: !Text
    , _oeErrors :: !(Maybe [OperationError])
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'OperationErrors' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'oeKind'
--
-- * 'oeErrors'
operationErrors :: OperationErrors
operationErrors =
  OperationErrors'
    { _oeKind = "sql#operationErrors"
    , _oeErrors = Nothing
    }

-- | This is always sql#operationErrors.
oeKind :: Lens' OperationErrors Text
oeKind = lens _oeKind (\rec v -> rec {_oeKind = v})

-- | The list of errors encountered while processing this operation.
oeErrors :: Lens' OperationErrors [OperationError]
oeErrors =
  lens _oeErrors (\rec v -> rec {_oeErrors = v}) . _Default . _Coerce

instance FromJSON OperationErrors where
  parseJSON =
    withObject "OperationErrors" $ \o ->
      OperationErrors'
        <$> (o .:? "kind" .!= "sql#operationErrors")
        <*> (o .:? "errors" .!= mempty)

instance ToJSON OperationErrors where
  toJSON OperationErrors'{..} =
    object . catMaybes $
      [ Just ("kind" .= _oeKind)
      , ("errors" .=) <$> _oeErrors
      ]
-- | SslCerts list response.
--
-- /See:/ 'sslCertsListResponse' smart constructor.
data SSLCertsListResponse = SSLCertsListResponse'
    { _sclrKind :: !Text
    , _sclrItems :: !(Maybe [SSLCert])
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'SSLCertsListResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sclrKind'
--
-- * 'sclrItems'
sslCertsListResponse :: SSLCertsListResponse
sslCertsListResponse =
  SSLCertsListResponse'
    { _sclrKind = "sql#sslCertsList"
    , _sclrItems = Nothing
    }

-- | This is always sql#sslCertsList.
sclrKind :: Lens' SSLCertsListResponse Text
sclrKind = lens _sclrKind (\rec v -> rec {_sclrKind = v})

-- | List of client certificates for the instance.
sclrItems :: Lens' SSLCertsListResponse [SSLCert]
sclrItems =
  lens _sclrItems (\rec v -> rec {_sclrItems = v}) . _Default . _Coerce

instance FromJSON SSLCertsListResponse where
  parseJSON =
    withObject "SSLCertsListResponse" $ \o ->
      SSLCertsListResponse'
        <$> (o .:? "kind" .!= "sql#sslCertsList")
        <*> (o .:? "items" .!= mempty)

instance ToJSON SSLCertsListResponse where
  toJSON SSLCertsListResponse'{..} =
    object . catMaybes $
      [ Just ("kind" .= _sclrKind)
      , ("items" .=) <$> _sclrItems
      ]
-- | SslCerts insert request.
--
-- /See:/ 'sslCertsInsertRequest' smart constructor.
newtype SSLCertsInsertRequest = SSLCertsInsertRequest'
    { _scirCommonName :: Maybe Text
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'SSLCertsInsertRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'scirCommonName'
sslCertsInsertRequest :: SSLCertsInsertRequest
sslCertsInsertRequest =
  SSLCertsInsertRequest' {_scirCommonName = Nothing}

-- | User supplied name. Must be a distinct name from the other certificates
-- for this instance. New certificates will not be usable until the
-- instance is restarted.
scirCommonName :: Lens' SSLCertsInsertRequest (Maybe Text)
scirCommonName =
  lens _scirCommonName (\rec v -> rec {_scirCommonName = v})

instance FromJSON SSLCertsInsertRequest where
  parseJSON =
    withObject "SSLCertsInsertRequest" $ \o ->
      SSLCertsInsertRequest' <$> (o .:? "commonName")

instance ToJSON SSLCertsInsertRequest where
  toJSON SSLCertsInsertRequest'{..} =
    object (catMaybes [("commonName" .=) <$> _scirCommonName])
-- | IP Management configuration.
--
-- /See:/ 'ipConfiguration' smart constructor.
data IPConfiguration = IPConfiguration'
    { _icAuthorizedNetworks :: !(Maybe [ACLEntry])
    , _icRequireSSL :: !(Maybe Bool)
    , _icIPv4Enabled :: !(Maybe Bool)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'IPConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'icAuthorizedNetworks'
--
-- * 'icRequireSSL'
--
-- * 'icIPv4Enabled'
ipConfiguration :: IPConfiguration
ipConfiguration =
  IPConfiguration'
    { _icAuthorizedNetworks = Nothing
    , _icRequireSSL = Nothing
    , _icIPv4Enabled = Nothing
    }

-- | The list of external networks that are allowed to connect to the
-- instance using the IP. In CIDR notation, also known as \'slash\'
-- notation (e.g. 192.168.100.0\/24).
icAuthorizedNetworks :: Lens' IPConfiguration [ACLEntry]
icAuthorizedNetworks =
  lens _icAuthorizedNetworks (\rec v -> rec {_icAuthorizedNetworks = v})
    . _Default
    . _Coerce

-- | Whether the mysqld should default to \'REQUIRE X509\' for users
-- connecting over IP.
icRequireSSL :: Lens' IPConfiguration (Maybe Bool)
icRequireSSL = lens _icRequireSSL (\rec v -> rec {_icRequireSSL = v})

-- | Whether the instance should be assigned an IP address or not.
icIPv4Enabled :: Lens' IPConfiguration (Maybe Bool)
icIPv4Enabled = lens _icIPv4Enabled (\rec v -> rec {_icIPv4Enabled = v})

instance FromJSON IPConfiguration where
  parseJSON =
    withObject "IPConfiguration" $ \o ->
      IPConfiguration'
        <$> (o .:? "authorizedNetworks" .!= mempty)
        <*> (o .:? "requireSsl")
        <*> (o .:? "ipv4Enabled")

instance ToJSON IPConfiguration where
  toJSON IPConfiguration'{..} =
    object . catMaybes $
      [ ("authorizedNetworks" .=) <$> _icAuthorizedNetworks
      , ("requireSsl" .=) <$> _icRequireSSL
      , ("ipv4Enabled" .=) <$> _icIPv4Enabled
      ]
-- | Maintenance window. This specifies when a v2 Cloud SQL instance should
-- preferably be restarted for system maintenance purposes.
--
-- /See:/ 'maintenanceWindow' smart constructor.
data MaintenanceWindow = MaintenanceWindow'
    { _mwKind :: !Text
    , _mwDay :: !(Maybe (Textual Int32))
    , _mwHour :: !(Maybe (Textual Int32))
    , _mwUpdateTrack :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'MaintenanceWindow' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mwKind'
--
-- * 'mwDay'
--
-- * 'mwHour'
--
-- * 'mwUpdateTrack'
maintenanceWindow
    :: MaintenanceWindow
maintenanceWindow =
    MaintenanceWindow'
    { _mwKind = "sql#maintenanceWindow"
    , _mwDay = Nothing
    , _mwHour = Nothing
    , _mwUpdateTrack = Nothing
    }

-- | This is always sql#maintenanceWindow.
mwKind :: Lens' MaintenanceWindow Text
mwKind = lens _mwKind (\ s a -> s{_mwKind = a})

-- | day of week (1-7), starting on Monday.
mwDay :: Lens' MaintenanceWindow (Maybe Int32)
mwDay
  = lens _mwDay (\ s a -> s{_mwDay = a}) .
      mapping _Coerce

-- | hour of day - 0 to 23.
mwHour :: Lens' MaintenanceWindow (Maybe Int32)
mwHour
  = lens _mwHour (\ s a -> s{_mwHour = a}) .
      mapping _Coerce

-- | Update track of the maintenance window. NOTE(review): the allowed
-- values are not visible in this module — presumably \"canary\" or
-- \"stable\"; confirm against the Cloud SQL Admin API reference.
mwUpdateTrack :: Lens' MaintenanceWindow (Maybe Text)
mwUpdateTrack
  = lens _mwUpdateTrack
      (\ s a -> s{_mwUpdateTrack = a})

-- Decoder: the '<*>' chain fills the constructor positionally and must
-- match the field order of the data declaration.
instance FromJSON MaintenanceWindow where
        parseJSON
          = withObject "MaintenanceWindow"
              (\ o ->
                 MaintenanceWindow' <$>
                   (o .:? "kind" .!= "sql#maintenanceWindow") <*>
                     (o .:? "day")
                     <*> (o .:? "hour")
                     <*> (o .:? "updateTrack"))

instance ToJSON MaintenanceWindow where
        toJSON MaintenanceWindow'{..}
          = object
              (catMaybes
                 [Just ("kind" .= _mwKind), ("day" .=) <$> _mwDay,
                  ("hour" .=) <$> _mwHour,
                  ("updateTrack" .=) <$> _mwUpdateTrack])
-- | Options for importing data as CSV.
--
-- /See:/ 'importContextCSVImportOptions' smart constructor.
data ImportContextCSVImportOptions = ImportContextCSVImportOptions'
    { _iccioColumns :: !(Maybe [Text])
    , _iccioTable :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'ImportContextCSVImportOptions' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'iccioColumns'
--
-- * 'iccioTable'
importContextCSVImportOptions :: ImportContextCSVImportOptions
importContextCSVImportOptions =
  ImportContextCSVImportOptions'
    { _iccioColumns = Nothing
    , _iccioTable = Nothing
    }

-- | The columns to which CSV data is imported. If not specified, all columns
-- of the database table are loaded with CSV data.
iccioColumns :: Lens' ImportContextCSVImportOptions [Text]
iccioColumns =
  lens _iccioColumns (\rec v -> rec {_iccioColumns = v})
    . _Default
    . _Coerce

-- | The table to which CSV data is imported.
iccioTable :: Lens' ImportContextCSVImportOptions (Maybe Text)
iccioTable = lens _iccioTable (\rec v -> rec {_iccioTable = v})

instance FromJSON ImportContextCSVImportOptions where
  parseJSON =
    withObject "ImportContextCSVImportOptions" $ \o ->
      ImportContextCSVImportOptions'
        <$> (o .:? "columns" .!= mempty)
        <*> (o .:? "table")

instance ToJSON ImportContextCSVImportOptions where
  toJSON ImportContextCSVImportOptions'{..} =
    object . catMaybes $
      [ ("columns" .=) <$> _iccioColumns
      , ("table" .=) <$> _iccioTable
      ]
-- | Options for exporting data as CSV.
--
-- /See:/ 'exportContextCSVExportOptions' smart constructor.
newtype ExportContextCSVExportOptions = ExportContextCSVExportOptions'
    { _ecceoSelectQuery :: Maybe Text
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'ExportContextCSVExportOptions' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ecceoSelectQuery'
exportContextCSVExportOptions :: ExportContextCSVExportOptions
exportContextCSVExportOptions =
  ExportContextCSVExportOptions' {_ecceoSelectQuery = Nothing}

-- | The select query used to extract the data.
ecceoSelectQuery :: Lens' ExportContextCSVExportOptions (Maybe Text)
ecceoSelectQuery =
  lens _ecceoSelectQuery (\rec v -> rec {_ecceoSelectQuery = v})

instance FromJSON ExportContextCSVExportOptions where
  parseJSON =
    withObject "ExportContextCSVExportOptions" $ \o ->
      ExportContextCSVExportOptions' <$> (o .:? "selectQuery")

instance ToJSON ExportContextCSVExportOptions where
  toJSON ExportContextCSVExportOptions'{..} =
    object (catMaybes [("selectQuery" .=) <$> _ecceoSelectQuery])
-- | A Cloud SQL user resource.
--
-- /See:/ 'user' smart constructor.
data User = User'
    { _uEtag :: !(Maybe Text)
    , _uProject :: !(Maybe Text)
    , _uKind :: !Text
    , _uName :: !(Maybe Text)
    , _uPassword :: !(Maybe Text)
    , _uHost :: !(Maybe Text)
    , _uInstance :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'User' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'uEtag'
--
-- * 'uProject'
--
-- * 'uKind'
--
-- * 'uName'
--
-- * 'uPassword'
--
-- * 'uHost'
--
-- * 'uInstance'
user
    :: User
user =
    User'
    { _uEtag = Nothing
    , _uProject = Nothing
    , _uKind = "sql#user"
    , _uName = Nothing
    , _uPassword = Nothing
    , _uHost = Nothing
    , _uInstance = Nothing
    }

-- | HTTP 1.1 Entity tag for the resource.
uEtag :: Lens' User (Maybe Text)
uEtag = lens _uEtag (\ s a -> s{_uEtag = a})

-- | The project ID of the project containing the Cloud SQL database. The
-- Google apps domain is prefixed if applicable. Can be omitted for update
-- since it is already specified on the URL.
uProject :: Lens' User (Maybe Text)
uProject = lens _uProject (\ s a -> s{_uProject = a})

-- | This is always sql#user.
uKind :: Lens' User Text
uKind = lens _uKind (\ s a -> s{_uKind = a})

-- | The name of the user in the Cloud SQL instance. Can be omitted for
-- update since it is already specified on the URL.
uName :: Lens' User (Maybe Text)
uName = lens _uName (\ s a -> s{_uName = a})

-- | The password for the user.
uPassword :: Lens' User (Maybe Text)
uPassword
  = lens _uPassword (\ s a -> s{_uPassword = a})

-- | The host name from which the user can connect. For insert operations,
-- host defaults to an empty string. For update operations, host is
-- specified as part of the request URL. The host name cannot be updated
-- after insertion.
uHost :: Lens' User (Maybe Text)
uHost = lens _uHost (\ s a -> s{_uHost = a})

-- | The name of the Cloud SQL instance. This does not include the project
-- ID. Can be omitted for update since it is already specified on the URL.
uInstance :: Lens' User (Maybe Text)
uInstance
  = lens _uInstance (\ s a -> s{_uInstance = a})

-- Decoder. NOTE: the '<*>' chain fills the 'User'' constructor
-- positionally; keep it in the same order as the field declarations.
instance FromJSON User where
        parseJSON
          = withObject "User"
              (\ o ->
                 User' <$>
                   (o .:? "etag") <*> (o .:? "project") <*>
                     (o .:? "kind" .!= "sql#user")
                     <*> (o .:? "name")
                     <*> (o .:? "password")
                     <*> (o .:? "host")
                     <*> (o .:? "instance"))

-- Encoder: 'Nothing' fields are omitted; "kind" is always emitted.
instance ToJSON User where
        toJSON User'{..}
          = object
              (catMaybes
                 [("etag" .=) <$> _uEtag,
                  ("project" .=) <$> _uProject,
                  Just ("kind" .= _uKind), ("name" .=) <$> _uName,
                  ("password" .=) <$> _uPassword,
                  ("host" .=) <$> _uHost,
                  ("instance" .=) <$> _uInstance])
-- | A Cloud SQL instance resource.
--
-- /See:/ 'databaseInstance' smart constructor.
data DatabaseInstance = DatabaseInstance'
    { _datBackendType :: !(Maybe Text)
    , _datMaxDiskSize :: !(Maybe (Textual Int64))
    , _datOnPremisesConfiguration :: !(Maybe OnPremisesConfiguration)
    , _datEtag :: !(Maybe Text)
    , _datState :: !(Maybe Text)
    , _datIPv6Address :: !(Maybe Text)
    , _datServerCaCert :: !(Maybe SSLCert)
    , _datDatabaseVersion :: !(Maybe Text)
    , _datProject :: !(Maybe Text)
    , _datSettings :: !(Maybe Settings)
    , _datKind :: !Text
    , _datConnectionName :: !(Maybe Text)
    , _datCurrentDiskSize :: !(Maybe (Textual Int64))
    , _datInstanceType :: !(Maybe Text)
    , _datReplicaNames :: !(Maybe [Text])
    , _datSelfLink :: !(Maybe Text)
    , _datFailoverReplica :: !(Maybe DatabaseInstanceFailoverReplica)
    , _datName :: !(Maybe Text)
    , _datMasterInstanceName :: !(Maybe Text)
    , _datReplicaConfiguration :: !(Maybe ReplicaConfiguration)
    , _datRegion :: !(Maybe Text)
    , _datServiceAccountEmailAddress :: !(Maybe Text)
    , _datIPAddresses :: !(Maybe [IPMApping])
    , _datSuspensionReason :: !(Maybe [Text])
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'DatabaseInstance' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'datBackendType'
--
-- * 'datMaxDiskSize'
--
-- * 'datOnPremisesConfiguration'
--
-- * 'datEtag'
--
-- * 'datState'
--
-- * 'datIPv6Address'
--
-- * 'datServerCaCert'
--
-- * 'datDatabaseVersion'
--
-- * 'datProject'
--
-- * 'datSettings'
--
-- * 'datKind'
--
-- * 'datConnectionName'
--
-- * 'datCurrentDiskSize'
--
-- * 'datInstanceType'
--
-- * 'datReplicaNames'
--
-- * 'datSelfLink'
--
-- * 'datFailoverReplica'
--
-- * 'datName'
--
-- * 'datMasterInstanceName'
--
-- * 'datReplicaConfiguration'
--
-- * 'datRegion'
--
-- * 'datServiceAccountEmailAddress'
--
-- * 'datIPAddresses'
--
-- * 'datSuspensionReason'
databaseInstance
    :: DatabaseInstance
databaseInstance =
    DatabaseInstance'
    { _datBackendType = Nothing
    , _datMaxDiskSize = Nothing
    , _datOnPremisesConfiguration = Nothing
    , _datEtag = Nothing
    , _datState = Nothing
    , _datIPv6Address = Nothing
    , _datServerCaCert = Nothing
    , _datDatabaseVersion = Nothing
    , _datProject = Nothing
    , _datSettings = Nothing
    , _datKind = "sql#instance"
    , _datConnectionName = Nothing
    , _datCurrentDiskSize = Nothing
    , _datInstanceType = Nothing
    , _datReplicaNames = Nothing
    , _datSelfLink = Nothing
    , _datFailoverReplica = Nothing
    , _datName = Nothing
    , _datMasterInstanceName = Nothing
    , _datReplicaConfiguration = Nothing
    , _datRegion = Nothing
    , _datServiceAccountEmailAddress = Nothing
    , _datIPAddresses = Nothing
    , _datSuspensionReason = Nothing
    }

-- | FIRST_GEN: Basic Cloud SQL instance that runs in a Google-managed
-- container. SECOND_GEN: A newer Cloud SQL backend that runs in a Compute
-- Engine VM. EXTERNAL: A MySQL server that is not managed by Google.
datBackendType :: Lens' DatabaseInstance (Maybe Text)
datBackendType
  = lens _datBackendType
      (\ s a -> s{_datBackendType = a})

-- | The maximum disk size of the instance in bytes.
datMaxDiskSize :: Lens' DatabaseInstance (Maybe Int64)
datMaxDiskSize
  = lens _datMaxDiskSize
      (\ s a -> s{_datMaxDiskSize = a})
      . mapping _Coerce

-- | Configuration specific to on-premises instances.
datOnPremisesConfiguration :: Lens' DatabaseInstance (Maybe OnPremisesConfiguration)
datOnPremisesConfiguration
  = lens _datOnPremisesConfiguration
      (\ s a -> s{_datOnPremisesConfiguration = a})

-- | HTTP 1.1 Entity tag for the resource.
datEtag :: Lens' DatabaseInstance (Maybe Text)
datEtag = lens _datEtag (\ s a -> s{_datEtag = a})

-- | The current serving state of the Cloud SQL instance. This can be one of
-- the following. RUNNABLE: The instance is running, or is ready to run
-- when accessed. SUSPENDED: The instance is not available, for example due
-- to problems with billing. PENDING_CREATE: The instance is being created.
-- MAINTENANCE: The instance is down for maintenance. FAILED: The instance
-- creation failed. UNKNOWN_STATE: The state of the instance is unknown.
datState :: Lens' DatabaseInstance (Maybe Text)
datState = lens _datState (\ s a -> s{_datState = a})

-- | The IPv6 address assigned to the instance. This property is applicable
-- only to First Generation instances.
datIPv6Address :: Lens' DatabaseInstance (Maybe Text)
datIPv6Address
  = lens _datIPv6Address
      (\ s a -> s{_datIPv6Address = a})

-- | SSL configuration.
datServerCaCert :: Lens' DatabaseInstance (Maybe SSLCert)
datServerCaCert
  = lens _datServerCaCert
      (\ s a -> s{_datServerCaCert = a})

-- | The database engine type and version. The databaseVersion can not be
-- changed after instance creation. Can be MYSQL_5_5, MYSQL_5_6 or
-- MYSQL_5_7. Defaults to MYSQL_5_6. MYSQL_5_7 is applicable only to Second
-- Generation instances.
datDatabaseVersion :: Lens' DatabaseInstance (Maybe Text)
datDatabaseVersion
  = lens _datDatabaseVersion
      (\ s a -> s{_datDatabaseVersion = a})

-- | The project ID of the project containing the Cloud SQL instance. The
-- Google apps domain is prefixed if applicable.
datProject :: Lens' DatabaseInstance (Maybe Text)
datProject
  = lens _datProject (\ s a -> s{_datProject = a})

-- | The user settings.
datSettings :: Lens' DatabaseInstance (Maybe Settings)
datSettings
  = lens _datSettings (\ s a -> s{_datSettings = a})

-- | This is always sql#instance.
datKind :: Lens' DatabaseInstance Text
datKind = lens _datKind (\ s a -> s{_datKind = a})

-- | Connection name of the Cloud SQL instance used in connection strings.
datConnectionName :: Lens' DatabaseInstance (Maybe Text)
datConnectionName
  = lens _datConnectionName
      (\ s a -> s{_datConnectionName = a})

-- | The current disk usage of the instance in bytes. This property has been
-- deprecated. Users should use the
-- \"cloudsql.googleapis.com\/database\/disk\/bytes_used\" metric in Cloud
-- Monitoring API instead. Please see
-- https:\/\/groups.google.com\/d\/msg\/google-cloud-sql-announce\/I_7-F9EBhT0\/BtvFtdFeAgAJ
-- for details.
datCurrentDiskSize :: Lens' DatabaseInstance (Maybe Int64)
datCurrentDiskSize
  = lens _datCurrentDiskSize
      (\ s a -> s{_datCurrentDiskSize = a})
      . mapping _Coerce

-- | The instance type. This can be one of the following. CLOUD_SQL_INSTANCE:
-- A Cloud SQL instance that is not replicating from a master.
-- ON_PREMISES_INSTANCE: An instance running on the customer\'s premises.
-- READ_REPLICA_INSTANCE: A Cloud SQL instance configured as a
-- read-replica.
datInstanceType :: Lens' DatabaseInstance (Maybe Text)
datInstanceType
  = lens _datInstanceType
      (\ s a -> s{_datInstanceType = a})

-- | The replicas of the instance.
datReplicaNames :: Lens' DatabaseInstance [Text]
datReplicaNames
  = lens _datReplicaNames
      (\ s a -> s{_datReplicaNames = a})
      . _Default
      . _Coerce

-- | The URI of this resource.
datSelfLink :: Lens' DatabaseInstance (Maybe Text)
datSelfLink
  = lens _datSelfLink (\ s a -> s{_datSelfLink = a})

-- | The name and status of the failover replica. This property is applicable
-- only to Second Generation instances.
datFailoverReplica :: Lens' DatabaseInstance (Maybe DatabaseInstanceFailoverReplica)
datFailoverReplica
  = lens _datFailoverReplica
      (\ s a -> s{_datFailoverReplica = a})

-- | Name of the Cloud SQL instance. This does not include the project ID.
datName :: Lens' DatabaseInstance (Maybe Text)
datName = lens _datName (\ s a -> s{_datName = a})

-- | The name of the instance which will act as master in the replication
-- setup.
datMasterInstanceName :: Lens' DatabaseInstance (Maybe Text)
datMasterInstanceName
  = lens _datMasterInstanceName
      (\ s a -> s{_datMasterInstanceName = a})

-- | Configuration specific to read-replicas replicating from on-premises
-- masters.
datReplicaConfiguration :: Lens' DatabaseInstance (Maybe ReplicaConfiguration)
datReplicaConfiguration
  = lens _datReplicaConfiguration
      (\ s a -> s{_datReplicaConfiguration = a})

-- | The geographical region. Can be us-central (FIRST_GEN instances only),
-- us-central1 (SECOND_GEN instances only), asia-east1 or europe-west1.
-- Defaults to us-central or us-central1 depending on the instance type
-- (First Generation or Second Generation). The region can not be changed
-- after instance creation.
datRegion :: Lens' DatabaseInstance (Maybe Text)
datRegion
  = lens _datRegion (\ s a -> s{_datRegion = a})

-- | The service account email address assigned to the instance. This
-- property is applicable only to Second Generation instances.
datServiceAccountEmailAddress :: Lens' DatabaseInstance (Maybe Text)
datServiceAccountEmailAddress
  = lens _datServiceAccountEmailAddress
      (\ s a -> s{_datServiceAccountEmailAddress = a})

-- | The assigned IP addresses for the instance.
datIPAddresses :: Lens' DatabaseInstance [IPMApping]
datIPAddresses
  = lens _datIPAddresses
      (\ s a -> s{_datIPAddresses = a})
      . _Default
      . _Coerce

-- | If the instance state is SUSPENDED, the reason for the suspension.
datSuspensionReason :: Lens' DatabaseInstance [Text]
datSuspensionReason
  = lens _datSuspensionReason
      (\ s a -> s{_datSuspensionReason = a})
      . _Default
      . _Coerce

-- Decoder. NOTE: this 24-step '<*>' chain fills the 'DatabaseInstance''
-- constructor positionally; its order must match the field order of the
-- data declaration exactly — reordering any step silently transposes
-- fields without a type error (most fields share the type 'Maybe Text').
instance FromJSON DatabaseInstance where
        parseJSON
          = withObject "DatabaseInstance"
              (\ o ->
                 DatabaseInstance' <$>
                   (o .:? "backendType") <*> (o .:? "maxDiskSize") <*>
                     (o .:? "onPremisesConfiguration")
                     <*> (o .:? "etag")
                     <*> (o .:? "state")
                     <*> (o .:? "ipv6Address")
                     <*> (o .:? "serverCaCert")
                     <*> (o .:? "databaseVersion")
                     <*> (o .:? "project")
                     <*> (o .:? "settings")
                     <*> (o .:? "kind" .!= "sql#instance")
                     <*> (o .:? "connectionName")
                     <*> (o .:? "currentDiskSize")
                     <*> (o .:? "instanceType")
                     <*> (o .:? "replicaNames" .!= mempty)
                     <*> (o .:? "selfLink")
                     <*> (o .:? "failoverReplica")
                     <*> (o .:? "name")
                     <*> (o .:? "masterInstanceName")
                     <*> (o .:? "replicaConfiguration")
                     <*> (o .:? "region")
                     <*> (o .:? "serviceAccountEmailAddress")
                     <*> (o .:? "ipAddresses" .!= mempty)
                     <*> (o .:? "suspensionReason" .!= mempty))

-- Encoder: 'Nothing' fields are omitted from the object; "kind" is
-- always emitted.
instance ToJSON DatabaseInstance where
        toJSON DatabaseInstance'{..}
          = object
              (catMaybes
                 [("backendType" .=) <$> _datBackendType,
                  ("maxDiskSize" .=) <$> _datMaxDiskSize,
                  ("onPremisesConfiguration" .=) <$>
                    _datOnPremisesConfiguration,
                  ("etag" .=) <$> _datEtag, ("state" .=) <$> _datState,
                  ("ipv6Address" .=) <$> _datIPv6Address,
                  ("serverCaCert" .=) <$> _datServerCaCert,
                  ("databaseVersion" .=) <$> _datDatabaseVersion,
                  ("project" .=) <$> _datProject,
                  ("settings" .=) <$> _datSettings,
                  Just ("kind" .= _datKind),
                  ("connectionName" .=) <$> _datConnectionName,
                  ("currentDiskSize" .=) <$> _datCurrentDiskSize,
                  ("instanceType" .=) <$> _datInstanceType,
                  ("replicaNames" .=) <$> _datReplicaNames,
                  ("selfLink" .=) <$> _datSelfLink,
                  ("failoverReplica" .=) <$> _datFailoverReplica,
                  ("name" .=) <$> _datName,
                  ("masterInstanceName" .=) <$> _datMasterInstanceName,
                  ("replicaConfiguration" .=) <$>
                    _datReplicaConfiguration,
                  ("region" .=) <$> _datRegion,
                  ("serviceAccountEmailAddress" .=) <$>
                    _datServiceAccountEmailAddress,
                  ("ipAddresses" .=) <$> _datIPAddresses,
                  ("suspensionReason" .=) <$> _datSuspensionReason])
-- | Database instance clone context.
--
-- /See:/ 'cloneContext' smart constructor.
data CloneContext = CloneContext'
    { _ccDestinationInstanceName :: !(Maybe Text)
    , _ccBinLogCoordinates :: !(Maybe BinLogCoordinates)
    , _ccKind :: !Text
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'CloneContext' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ccDestinationInstanceName'
--
-- * 'ccBinLogCoordinates'
--
-- * 'ccKind'
cloneContext
    :: CloneContext
cloneContext =
    CloneContext'
    { _ccDestinationInstanceName = Nothing
    , _ccBinLogCoordinates = Nothing
    , _ccKind = "sql#cloneContext"
    }

-- | Name of the Cloud SQL instance to be created as a clone.
ccDestinationInstanceName :: Lens' CloneContext (Maybe Text)
ccDestinationInstanceName
  = lens _ccDestinationInstanceName
      (\ s a -> s{_ccDestinationInstanceName = a})

-- | Binary log coordinates, if specified, identify the position up to
-- which the source instance should be cloned. If not specified, the source
-- instance is cloned up to the most recent binary log coordinates.
ccBinLogCoordinates :: Lens' CloneContext (Maybe BinLogCoordinates)
ccBinLogCoordinates
  = lens _ccBinLogCoordinates
      (\ s a -> s{_ccBinLogCoordinates = a})

-- | This is always sql#cloneContext.
ccKind :: Lens' CloneContext Text
ccKind = lens _ccKind (\ s a -> s{_ccKind = a})

-- Decoder: the '<*>' chain fills the constructor positionally and must
-- match the field order of the data declaration.
instance FromJSON CloneContext where
        parseJSON
          = withObject "CloneContext"
              (\ o ->
                 CloneContext' <$>
                   (o .:? "destinationInstanceName") <*>
                     (o .:? "binLogCoordinates")
                     <*> (o .:? "kind" .!= "sql#cloneContext"))

instance ToJSON CloneContext where
        toJSON CloneContext'{..}
          = object
              (catMaybes
                 [("destinationInstanceName" .=) <$>
                    _ccDestinationInstanceName,
                  ("binLogCoordinates" .=) <$> _ccBinLogCoordinates,
                  Just ("kind" .= _ccKind)])
-- | A Google Cloud SQL service flag resource.
--
-- /See:/ 'flag' smart constructor.
data Flag = Flag'
    { _fMaxValue :: !(Maybe (Textual Int64))
    , _fKind :: !Text
    , _fAppliesTo :: !(Maybe [Text])
    , _fName :: !(Maybe Text)
    , _fAllowedStringValues :: !(Maybe [Text])
    , _fType :: !(Maybe Text)
    , _fMinValue :: !(Maybe (Textual Int64))
    , _fRequiresRestart :: !(Maybe Bool)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'Flag' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'fMaxValue'
--
-- * 'fKind'
--
-- * 'fAppliesTo'
--
-- * 'fName'
--
-- * 'fAllowedStringValues'
--
-- * 'fType'
--
-- * 'fMinValue'
--
-- * 'fRequiresRestart'
flag
    :: Flag
flag =
    Flag'
    { _fMaxValue = Nothing
    , _fKind = "sql#flag"
    , _fAppliesTo = Nothing
    , _fName = Nothing
    , _fAllowedStringValues = Nothing
    , _fType = Nothing
    , _fMinValue = Nothing
    , _fRequiresRestart = Nothing
    }

-- | For INTEGER flags, the maximum allowed value.
fMaxValue :: Lens' Flag (Maybe Int64)
fMaxValue
  = lens _fMaxValue (\ s a -> s{_fMaxValue = a}) .
      mapping _Coerce

-- | This is always sql#flag.
fKind :: Lens' Flag Text
fKind = lens _fKind (\ s a -> s{_fKind = a})

-- | The database version this flag applies to. Can be MYSQL_5_5, MYSQL_5_6,
-- or MYSQL_5_7. MYSQL_5_7 is applicable only to Second Generation
-- instances.
fAppliesTo :: Lens' Flag [Text]
fAppliesTo
  = lens _fAppliesTo (\ s a -> s{_fAppliesTo = a}) .
      _Default
      . _Coerce

-- | This is the name of the flag. Flag names always use underscores, not
-- hyphens, e.g. max_allowed_packet
fName :: Lens' Flag (Maybe Text)
fName = lens _fName (\ s a -> s{_fName = a})

-- | For STRING flags, a list of strings that the value can be set to.
fAllowedStringValues :: Lens' Flag [Text]
fAllowedStringValues
  = lens _fAllowedStringValues
      (\ s a -> s{_fAllowedStringValues = a})
      . _Default
      . _Coerce

-- | The type of the flag. Flags are typed to being BOOLEAN, STRING, INTEGER
-- or NONE. NONE is used for flags which do not take a value, such as
-- skip_grant_tables.
fType :: Lens' Flag (Maybe Text)
fType = lens _fType (\ s a -> s{_fType = a})

-- | For INTEGER flags, the minimum allowed value.
fMinValue :: Lens' Flag (Maybe Int64)
fMinValue
  = lens _fMinValue (\ s a -> s{_fMinValue = a}) .
      mapping _Coerce

-- | Indicates whether changing this flag will trigger a database restart.
-- Only applicable to Second Generation instances.
fRequiresRestart :: Lens' Flag (Maybe Bool)
fRequiresRestart
  = lens _fRequiresRestart
      (\ s a -> s{_fRequiresRestart = a})

-- Decoder. NOTE: the '<*>' chain fills the 'Flag'' constructor
-- positionally; keep it in the same order as the field declarations.
instance FromJSON Flag where
        parseJSON
          = withObject "Flag"
              (\ o ->
                 Flag' <$>
                   (o .:? "maxValue") <*> (o .:? "kind" .!= "sql#flag")
                     <*> (o .:? "appliesTo" .!= mempty)
                     <*> (o .:? "name")
                     <*> (o .:? "allowedStringValues" .!= mempty)
                     <*> (o .:? "type")
                     <*> (o .:? "minValue")
                     <*> (o .:? "requiresRestart"))

instance ToJSON Flag where
        toJSON Flag'{..}
          = object
              (catMaybes
                 [("maxValue" .=) <$> _fMaxValue,
                  Just ("kind" .= _fKind),
                  ("appliesTo" .=) <$> _fAppliesTo,
                  ("name" .=) <$> _fName,
                  ("allowedStringValues" .=) <$> _fAllowedStringValues,
                  ("type" .=) <$> _fType,
                  ("minValue" .=) <$> _fMinValue,
                  ("requiresRestart" .=) <$> _fRequiresRestart])
-- | Instance failover request.
--
-- /See:/ 'instancesFailoverRequest' smart constructor.
newtype InstancesFailoverRequest = InstancesFailoverRequest'
    { _ifrFailoverContext :: Maybe FailoverContext
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'InstancesFailoverRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ifrFailoverContext'
instancesFailoverRequest :: InstancesFailoverRequest
instancesFailoverRequest =
  InstancesFailoverRequest' {_ifrFailoverContext = Nothing}

-- | Failover Context.
ifrFailoverContext :: Lens' InstancesFailoverRequest (Maybe FailoverContext)
ifrFailoverContext =
  lens _ifrFailoverContext (\rec v -> rec {_ifrFailoverContext = v})

instance FromJSON InstancesFailoverRequest where
  parseJSON =
    withObject "InstancesFailoverRequest" $ \o ->
      InstancesFailoverRequest' <$> (o .:? "failoverContext")

instance ToJSON InstancesFailoverRequest where
  toJSON InstancesFailoverRequest'{..} =
    object (catMaybes [("failoverContext" .=) <$> _ifrFailoverContext])
-- | A database instance backup run resource.
--
-- /See:/ 'backupRun' smart constructor.
data BackupRun = BackupRun'
    { _brStatus :: !(Maybe Text)
    , _brStartTime :: !(Maybe DateTime')
    , _brKind :: !Text
    , _brError :: !(Maybe OperationError)
    , _brWindowStartTime :: !(Maybe DateTime')
    , _brSelfLink :: !(Maybe Text)
    , _brEndTime :: !(Maybe DateTime')
    , _brId :: !(Maybe (Textual Int64))
    , _brType :: !(Maybe Text)
    , _brEnQueuedTime :: !(Maybe DateTime')
    , _brDescription :: !(Maybe Text)
    , _brInstance :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Constructs a 'BackupRun' with every optional field unset and the
-- \"kind\" discriminator preset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'brStatus'
--
-- * 'brStartTime'
--
-- * 'brKind'
--
-- * 'brError'
--
-- * 'brWindowStartTime'
--
-- * 'brSelfLink'
--
-- * 'brEndTime'
--
-- * 'brId'
--
-- * 'brType'
--
-- * 'brEnQueuedTime'
--
-- * 'brDescription'
--
-- * 'brInstance'
backupRun
    :: BackupRun
backupRun =
    BackupRun'
        { _brStatus = Nothing
        , _brStartTime = Nothing
        , _brKind = "sql#backupRun"
        , _brError = Nothing
        , _brWindowStartTime = Nothing
        , _brSelfLink = Nothing
        , _brEndTime = Nothing
        , _brId = Nothing
        , _brType = Nothing
        , _brEnQueuedTime = Nothing
        , _brDescription = Nothing
        , _brInstance = Nothing
        }

-- | The status of this run.
brStatus :: Lens' BackupRun (Maybe Text)
brStatus = lens _brStatus (\ st val -> st {_brStatus = val})

-- | The time the backup operation actually started in UTC timezone in RFC
-- 3339 format, for example 2012-11-15T16:19:00.094Z.
brStartTime :: Lens' BackupRun (Maybe UTCTime)
brStartTime =
    lens _brStartTime (\ st val -> st {_brStartTime = val}) .
        mapping _DateTime

-- | This is always sql#backupRun.
brKind :: Lens' BackupRun Text
brKind = lens _brKind (\ st val -> st {_brKind = val})

-- | Information about why the backup operation failed. This is only present
-- if the run has the FAILED status.
brError :: Lens' BackupRun (Maybe OperationError)
brError = lens _brError (\ st val -> st {_brError = val})

-- | The start time of the backup window during which this the backup was
-- attempted in RFC 3339 format, for example 2012-11-15T16:19:00.094Z.
brWindowStartTime :: Lens' BackupRun (Maybe UTCTime)
brWindowStartTime =
    lens _brWindowStartTime (\ st val -> st {_brWindowStartTime = val}) .
        mapping _DateTime

-- | The URI of this resource.
brSelfLink :: Lens' BackupRun (Maybe Text)
brSelfLink = lens _brSelfLink (\ st val -> st {_brSelfLink = val})

-- | The time the backup operation completed in UTC timezone in RFC 3339
-- format, for example 2012-11-15T16:19:00.094Z.
brEndTime :: Lens' BackupRun (Maybe UTCTime)
brEndTime =
    lens _brEndTime (\ st val -> st {_brEndTime = val}) . mapping _DateTime

-- | A unique identifier for this backup run. Note that this is unique only
-- within the scope of a particular Cloud SQL instance.
brId :: Lens' BackupRun (Maybe Int64)
brId = lens _brId (\ st val -> st {_brId = val}) . mapping _Coerce

-- | The type of this run; can be either \"AUTOMATED\" or \"ON_DEMAND\".
brType :: Lens' BackupRun (Maybe Text)
brType = lens _brType (\ st val -> st {_brType = val})

-- | The time the run was enqueued in UTC timezone in RFC 3339 format, for
-- example 2012-11-15T16:19:00.094Z.
brEnQueuedTime :: Lens' BackupRun (Maybe UTCTime)
brEnQueuedTime =
    lens _brEnQueuedTime (\ st val -> st {_brEnQueuedTime = val}) .
        mapping _DateTime

-- | The description of this run, only applicable to on-demand backups.
brDescription :: Lens' BackupRun (Maybe Text)
brDescription = lens _brDescription (\ st val -> st {_brDescription = val})

-- | Name of the database instance.
brInstance :: Lens' BackupRun (Maybe Text)
brInstance = lens _brInstance (\ st val -> st {_brInstance = val})

instance FromJSON BackupRun where
    parseJSON =
        withObject "BackupRun" $ \ obj ->
            BackupRun' <$> (obj .:? "status")
                       <*> (obj .:? "startTime")
                       <*> (obj .:? "kind" .!= "sql#backupRun")
                       <*> (obj .:? "error")
                       <*> (obj .:? "windowStartTime")
                       <*> (obj .:? "selfLink")
                       <*> (obj .:? "endTime")
                       <*> (obj .:? "id")
                       <*> (obj .:? "type")
                       <*> (obj .:? "enqueuedTime")
                       <*> (obj .:? "description")
                       <*> (obj .:? "instance")

instance ToJSON BackupRun where
    toJSON BackupRun'{..} =
        object . catMaybes $
            [ ("status" .=) <$> _brStatus
            , ("startTime" .=) <$> _brStartTime
            , Just ("kind" .= _brKind)
            , ("error" .=) <$> _brError
            , ("windowStartTime" .=) <$> _brWindowStartTime
            , ("selfLink" .=) <$> _brSelfLink
            , ("endTime" .=) <$> _brEndTime
            , ("id" .=) <$> _brId
            , ("type" .=) <$> _brType
            , ("enqueuedTime" .=) <$> _brEnQueuedTime
            , ("description" .=) <$> _brDescription
            , ("instance" .=) <$> _brInstance
            ]
-- | An entry for an Access Control list.
--
-- /See:/ 'aclEntry' smart constructor.
data ACLEntry = ACLEntry'
    { _aeKind :: !Text
    , _aeValue :: !(Maybe Text)
    , _aeName :: !(Maybe Text)
    , _aeExpirationTime :: !(Maybe DateTime')
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Constructs an 'ACLEntry' with every optional field unset and the
-- \"kind\" discriminator preset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aeKind'
--
-- * 'aeValue'
--
-- * 'aeName'
--
-- * 'aeExpirationTime'
aclEntry
    :: ACLEntry
aclEntry =
    ACLEntry'
        { _aeKind = "sql#aclEntry"
        , _aeValue = Nothing
        , _aeName = Nothing
        , _aeExpirationTime = Nothing
        }

-- | This is always sql#aclEntry.
aeKind :: Lens' ACLEntry Text
aeKind = lens _aeKind (\ st val -> st {_aeKind = val})

-- | The whitelisted value for the access control list.
aeValue :: Lens' ACLEntry (Maybe Text)
aeValue = lens _aeValue (\ st val -> st {_aeValue = val})

-- | An optional label to identify this entry.
aeName :: Lens' ACLEntry (Maybe Text)
aeName = lens _aeName (\ st val -> st {_aeName = val})

-- | The time when this access control entry expires in RFC 3339 format, for
-- example 2012-11-15T16:19:00.094Z.
aeExpirationTime :: Lens' ACLEntry (Maybe UTCTime)
aeExpirationTime =
    lens _aeExpirationTime (\ st val -> st {_aeExpirationTime = val}) .
        mapping _DateTime

instance FromJSON ACLEntry where
    parseJSON =
        withObject "ACLEntry" $ \ obj ->
            ACLEntry' <$> (obj .:? "kind" .!= "sql#aclEntry")
                      <*> (obj .:? "value")
                      <*> (obj .:? "name")
                      <*> (obj .:? "expirationTime")

instance ToJSON ACLEntry where
    toJSON ACLEntry'{..} =
        object . catMaybes $
            [ Just ("kind" .= _aeKind)
            , ("value" .=) <$> _aeValue
            , ("name" .=) <$> _aeName
            , ("expirationTime" .=) <$> _aeExpirationTime
            ]
-- | MySQL flags for Cloud SQL instances.
--
-- /See:/ 'databaseFlags' smart constructor.
data DatabaseFlags = DatabaseFlags'
    { _dfValue :: !(Maybe Text)
    , _dfName :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Constructs a 'DatabaseFlags' with every optional field unset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dfValue'
--
-- * 'dfName'
databaseFlags
    :: DatabaseFlags
databaseFlags = DatabaseFlags' {_dfValue = Nothing, _dfName = Nothing}

-- | The value of the flag. Booleans should be set to on for true and off for
-- false. This field must be omitted if the flag doesn\'t take a value.
dfValue :: Lens' DatabaseFlags (Maybe Text)
dfValue = lens _dfValue (\ st val -> st {_dfValue = val})

-- | The name of the flag. These flags are passed at instance startup, so
-- include both MySQL server options and MySQL system variables. Flags
-- should be specified with underscores, not hyphens. For more information,
-- see Configuring MySQL Flags in the Google Cloud SQL documentation, as
-- well as the official MySQL documentation for server options and system
-- variables.
dfName :: Lens' DatabaseFlags (Maybe Text)
dfName = lens _dfName (\ st val -> st {_dfName = val})

instance FromJSON DatabaseFlags where
    parseJSON =
        withObject "DatabaseFlags" $ \ obj ->
            DatabaseFlags' <$> (obj .:? "value") <*> (obj .:? "name")

instance ToJSON DatabaseFlags where
    toJSON DatabaseFlags'{..} =
        object . catMaybes $
            [("value" .=) <$> _dfValue, ("name" .=) <$> _dfName]
-- | A Google Cloud SQL service tier resource.
--
-- /See:/ 'tier' smart constructor.
data Tier = Tier'
    { _tKind :: !Text
    , _tTier :: !(Maybe Text)
    , _tRegion :: !(Maybe [Text])
    , _tDiskQuota :: !(Maybe (Textual Int64))
    , _tRAM :: !(Maybe (Textual Int64))
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Constructs a 'Tier' with every optional field unset and the \"kind\"
-- discriminator preset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tKind'
--
-- * 'tTier'
--
-- * 'tRegion'
--
-- * 'tDiskQuota'
--
-- * 'tRAM'
tier
    :: Tier
tier =
    Tier'
        { _tKind = "sql#tier"
        , _tTier = Nothing
        , _tRegion = Nothing
        , _tDiskQuota = Nothing
        , _tRAM = Nothing
        }

-- | This is always sql#tier.
tKind :: Lens' Tier Text
tKind = lens _tKind (\ st val -> st {_tKind = val})

-- | An identifier for the service tier, for example D1, D2 etc. For related
-- information, see Pricing.
tTier :: Lens' Tier (Maybe Text)
tTier = lens _tTier (\ st val -> st {_tTier = val})

-- | The applicable regions for this tier.
tRegion :: Lens' Tier [Text]
tRegion =
    lens _tRegion (\ st val -> st {_tRegion = val}) . _Default . _Coerce

-- | The maximum disk size of this tier in bytes.
tDiskQuota :: Lens' Tier (Maybe Int64)
tDiskQuota =
    lens _tDiskQuota (\ st val -> st {_tDiskQuota = val}) . mapping _Coerce

-- | The maximum RAM usage of this tier in bytes.
tRAM :: Lens' Tier (Maybe Int64)
tRAM = lens _tRAM (\ st val -> st {_tRAM = val}) . mapping _Coerce

-- NOTE: the capitalized \"DiskQuota\" and \"RAM\" keys below match the
-- actual wire format of the tiers resource; do not lowercase them.
instance FromJSON Tier where
    parseJSON =
        withObject "Tier" $ \ obj ->
            Tier' <$> (obj .:? "kind" .!= "sql#tier")
                  <*> (obj .:? "tier")
                  <*> (obj .:? "region" .!= mempty)
                  <*> (obj .:? "DiskQuota")
                  <*> (obj .:? "RAM")

instance ToJSON Tier where
    toJSON Tier'{..} =
        object . catMaybes $
            [ Just ("kind" .= _tKind)
            , ("tier" .=) <$> _tTier
            , ("region" .=) <$> _tRegion
            , ("DiskQuota" .=) <$> _tDiskQuota
            , ("RAM" .=) <$> _tRAM
            ]
-- | Read-replica configuration specific to MySQL databases.
--
-- /See:/ 'mySQLReplicaConfiguration' smart constructor.
data MySQLReplicaConfiguration = MySQLReplicaConfiguration'
    { _msqlrcVerifyServerCertificate :: !(Maybe Bool)
    , _msqlrcKind :: !Text
    , _msqlrcClientKey :: !(Maybe Text)
    , _msqlrcUsername :: !(Maybe Text)
    , _msqlrcSSLCipher :: !(Maybe Text)
    , _msqlrcMasterHeartbeatPeriod :: !(Maybe (Textual Int64))
    , _msqlrcConnectRetryInterval :: !(Maybe (Textual Int32))
    , _msqlrcClientCertificate :: !(Maybe Text)
    , _msqlrcCaCertificate :: !(Maybe Text)
    , _msqlrcDumpFilePath :: !(Maybe Text)
    , _msqlrcPassword :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Constructs a 'MySQLReplicaConfiguration' with every optional field
-- unset and the \"kind\" discriminator preset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'msqlrcVerifyServerCertificate'
--
-- * 'msqlrcKind'
--
-- * 'msqlrcClientKey'
--
-- * 'msqlrcUsername'
--
-- * 'msqlrcSSLCipher'
--
-- * 'msqlrcMasterHeartbeatPeriod'
--
-- * 'msqlrcConnectRetryInterval'
--
-- * 'msqlrcClientCertificate'
--
-- * 'msqlrcCaCertificate'
--
-- * 'msqlrcDumpFilePath'
--
-- * 'msqlrcPassword'
mySQLReplicaConfiguration
    :: MySQLReplicaConfiguration
mySQLReplicaConfiguration =
    MySQLReplicaConfiguration'
        { _msqlrcVerifyServerCertificate = Nothing
        , _msqlrcKind = "sql#mysqlReplicaConfiguration"
        , _msqlrcClientKey = Nothing
        , _msqlrcUsername = Nothing
        , _msqlrcSSLCipher = Nothing
        , _msqlrcMasterHeartbeatPeriod = Nothing
        , _msqlrcConnectRetryInterval = Nothing
        , _msqlrcClientCertificate = Nothing
        , _msqlrcCaCertificate = Nothing
        , _msqlrcDumpFilePath = Nothing
        , _msqlrcPassword = Nothing
        }

-- | Whether or not to check the master\'s Common Name value in the
-- certificate that it sends during the SSL handshake.
msqlrcVerifyServerCertificate :: Lens' MySQLReplicaConfiguration (Maybe Bool)
msqlrcVerifyServerCertificate =
    lens _msqlrcVerifyServerCertificate
        (\ st val -> st {_msqlrcVerifyServerCertificate = val})

-- | This is always sql#mysqlReplicaConfiguration.
msqlrcKind :: Lens' MySQLReplicaConfiguration Text
msqlrcKind = lens _msqlrcKind (\ st val -> st {_msqlrcKind = val})

-- | PEM representation of the slave\'s private key. The corresponsing public
-- key is encoded in the client\'s certificate.
msqlrcClientKey :: Lens' MySQLReplicaConfiguration (Maybe Text)
msqlrcClientKey =
    lens _msqlrcClientKey (\ st val -> st {_msqlrcClientKey = val})

-- | The username for the replication connection.
msqlrcUsername :: Lens' MySQLReplicaConfiguration (Maybe Text)
msqlrcUsername =
    lens _msqlrcUsername (\ st val -> st {_msqlrcUsername = val})

-- | A list of permissible ciphers to use for SSL encryption.
msqlrcSSLCipher :: Lens' MySQLReplicaConfiguration (Maybe Text)
msqlrcSSLCipher =
    lens _msqlrcSSLCipher (\ st val -> st {_msqlrcSSLCipher = val})

-- | Interval in milliseconds between replication heartbeats.
msqlrcMasterHeartbeatPeriod :: Lens' MySQLReplicaConfiguration (Maybe Int64)
msqlrcMasterHeartbeatPeriod =
    lens _msqlrcMasterHeartbeatPeriod
        (\ st val -> st {_msqlrcMasterHeartbeatPeriod = val})
        . mapping _Coerce

-- | Seconds to wait between connect retries. MySQL\'s default is 60 seconds.
msqlrcConnectRetryInterval :: Lens' MySQLReplicaConfiguration (Maybe Int32)
msqlrcConnectRetryInterval =
    lens _msqlrcConnectRetryInterval
        (\ st val -> st {_msqlrcConnectRetryInterval = val})
        . mapping _Coerce

-- | PEM representation of the slave\'s x509 certificate.
msqlrcClientCertificate :: Lens' MySQLReplicaConfiguration (Maybe Text)
msqlrcClientCertificate =
    lens _msqlrcClientCertificate
        (\ st val -> st {_msqlrcClientCertificate = val})

-- | PEM representation of the trusted CA\'s x509 certificate.
msqlrcCaCertificate :: Lens' MySQLReplicaConfiguration (Maybe Text)
msqlrcCaCertificate =
    lens _msqlrcCaCertificate (\ st val -> st {_msqlrcCaCertificate = val})

-- | Path to a SQL dump file in Google Cloud Storage from which the slave
-- instance is to be created. The URI is in the form
-- gs:\/\/bucketName\/fileName. Compressed gzip files (.gz) are also
-- supported. Dumps should have the binlog co-ordinates from which
-- replication should begin. This can be accomplished by setting
-- --master-data to 1 when using mysqldump.
msqlrcDumpFilePath :: Lens' MySQLReplicaConfiguration (Maybe Text)
msqlrcDumpFilePath =
    lens _msqlrcDumpFilePath (\ st val -> st {_msqlrcDumpFilePath = val})

-- | The password for the replication connection.
msqlrcPassword :: Lens' MySQLReplicaConfiguration (Maybe Text)
msqlrcPassword =
    lens _msqlrcPassword (\ st val -> st {_msqlrcPassword = val})

instance FromJSON MySQLReplicaConfiguration where
    parseJSON =
        withObject "MySQLReplicaConfiguration" $ \ obj ->
            MySQLReplicaConfiguration'
                <$> (obj .:? "verifyServerCertificate")
                <*> (obj .:? "kind" .!= "sql#mysqlReplicaConfiguration")
                <*> (obj .:? "clientKey")
                <*> (obj .:? "username")
                <*> (obj .:? "sslCipher")
                <*> (obj .:? "masterHeartbeatPeriod")
                <*> (obj .:? "connectRetryInterval")
                <*> (obj .:? "clientCertificate")
                <*> (obj .:? "caCertificate")
                <*> (obj .:? "dumpFilePath")
                <*> (obj .:? "password")

instance ToJSON MySQLReplicaConfiguration where
    toJSON MySQLReplicaConfiguration'{..} =
        object . catMaybes $
            [ ("verifyServerCertificate" .=) <$>
                  _msqlrcVerifyServerCertificate
            , Just ("kind" .= _msqlrcKind)
            , ("clientKey" .=) <$> _msqlrcClientKey
            , ("username" .=) <$> _msqlrcUsername
            , ("sslCipher" .=) <$> _msqlrcSSLCipher
            , ("masterHeartbeatPeriod" .=) <$> _msqlrcMasterHeartbeatPeriod
            , ("connectRetryInterval" .=) <$> _msqlrcConnectRetryInterval
            , ("clientCertificate" .=) <$> _msqlrcClientCertificate
            , ("caCertificate" .=) <$> _msqlrcCaCertificate
            , ("dumpFilePath" .=) <$> _msqlrcDumpFilePath
            , ("password" .=) <$> _msqlrcPassword
            ]
-- | SslCertDetail.
--
-- /See:/ 'sslCertDetail' smart constructor.
data SSLCertDetail = SSLCertDetail'
    { _scdCertInfo :: !(Maybe SSLCert)
    , _scdCertPrivateKey :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Constructs an 'SSLCertDetail' with every optional field unset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'scdCertInfo'
--
-- * 'scdCertPrivateKey'
sslCertDetail
    :: SSLCertDetail
sslCertDetail =
    SSLCertDetail' {_scdCertInfo = Nothing, _scdCertPrivateKey = Nothing}

-- | The public information about the cert.
scdCertInfo :: Lens' SSLCertDetail (Maybe SSLCert)
scdCertInfo = lens _scdCertInfo (\ st val -> st {_scdCertInfo = val})

-- | The private key for the client cert, in pem format. Keep private in
-- order to protect your security.
scdCertPrivateKey :: Lens' SSLCertDetail (Maybe Text)
scdCertPrivateKey =
    lens _scdCertPrivateKey (\ st val -> st {_scdCertPrivateKey = val})

instance FromJSON SSLCertDetail where
    parseJSON =
        withObject "SSLCertDetail" $ \ obj ->
            SSLCertDetail' <$> (obj .:? "certInfo")
                           <*> (obj .:? "certPrivateKey")

instance ToJSON SSLCertDetail where
    toJSON SSLCertDetail'{..} =
        object . catMaybes $
            [ ("certInfo" .=) <$> _scdCertInfo
            , ("certPrivateKey" .=) <$> _scdCertPrivateKey
            ]
-- | Database instance restore backup request.
--
-- /See:/ 'instancesRestoreBackupRequest' smart constructor.
newtype InstancesRestoreBackupRequest = InstancesRestoreBackupRequest'
    { _irbrRestoreBackupContext :: Maybe RestoreBackupContext
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Constructs an 'InstancesRestoreBackupRequest' with every optional
-- field unset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'irbrRestoreBackupContext'
instancesRestoreBackupRequest
    :: InstancesRestoreBackupRequest
instancesRestoreBackupRequest =
    InstancesRestoreBackupRequest' {_irbrRestoreBackupContext = Nothing}

-- | Parameters required to perform the restore backup operation.
irbrRestoreBackupContext :: Lens' InstancesRestoreBackupRequest (Maybe RestoreBackupContext)
irbrRestoreBackupContext =
    lens _irbrRestoreBackupContext
        (\ st val -> st {_irbrRestoreBackupContext = val})

instance FromJSON InstancesRestoreBackupRequest where
    parseJSON =
        withObject "InstancesRestoreBackupRequest" $ \ obj ->
            InstancesRestoreBackupRequest' <$>
                (obj .:? "restoreBackupContext")

instance ToJSON InstancesRestoreBackupRequest where
    toJSON InstancesRestoreBackupRequest'{..} =
        object . catMaybes $
            [("restoreBackupContext" .=) <$> _irbrRestoreBackupContext]
-- | Backup run list results.
--
-- /See:/ 'backupRunsListResponse' smart constructor.
data BackupRunsListResponse = BackupRunsListResponse'
    { _brlrNextPageToken :: !(Maybe Text)
    , _brlrKind :: !Text
    , _brlrItems :: !(Maybe [BackupRun])
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Constructs a 'BackupRunsListResponse' with every optional field unset
-- and the \"kind\" discriminator preset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'brlrNextPageToken'
--
-- * 'brlrKind'
--
-- * 'brlrItems'
backupRunsListResponse
    :: BackupRunsListResponse
backupRunsListResponse =
    BackupRunsListResponse'
        { _brlrNextPageToken = Nothing
        , _brlrKind = "sql#backupRunsList"
        , _brlrItems = Nothing
        }

-- | The continuation token, used to page through large result sets. Provide
-- this value in a subsequent request to return the next page of results.
brlrNextPageToken :: Lens' BackupRunsListResponse (Maybe Text)
brlrNextPageToken =
    lens _brlrNextPageToken (\ st val -> st {_brlrNextPageToken = val})

-- | This is always sql#backupRunsList.
brlrKind :: Lens' BackupRunsListResponse Text
brlrKind = lens _brlrKind (\ st val -> st {_brlrKind = val})

-- | A list of backup runs in reverse chronological order of the enqueued
-- time.
brlrItems :: Lens' BackupRunsListResponse [BackupRun]
brlrItems =
    lens _brlrItems (\ st val -> st {_brlrItems = val}) . _Default . _Coerce

instance FromJSON BackupRunsListResponse where
    parseJSON =
        withObject "BackupRunsListResponse" $ \ obj ->
            BackupRunsListResponse' <$> (obj .:? "nextPageToken")
                                    <*> (obj .:? "kind" .!= "sql#backupRunsList")
                                    <*> (obj .:? "items" .!= mempty)

instance ToJSON BackupRunsListResponse where
    toJSON BackupRunsListResponse'{..} =
        object . catMaybes $
            [ ("nextPageToken" .=) <$> _brlrNextPageToken
            , Just ("kind" .= _brlrKind)
            , ("items" .=) <$> _brlrItems
            ]
-- | Database instance operation error.
--
-- /See:/ 'operationError' smart constructor.
data OperationError = OperationError'
    { _opeKind :: !Text
    , _opeCode :: !(Maybe Text)
    , _opeMessage :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Constructs an 'OperationError' with every optional field unset and the
-- \"kind\" discriminator preset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'opeKind'
--
-- * 'opeCode'
--
-- * 'opeMessage'
operationError
    :: OperationError
operationError =
    OperationError'
        { _opeKind = "sql#operationError"
        , _opeCode = Nothing
        , _opeMessage = Nothing
        }

-- | This is always sql#operationError.
opeKind :: Lens' OperationError Text
opeKind = lens _opeKind (\ st val -> st {_opeKind = val})

-- | Identifies the specific error that occurred.
opeCode :: Lens' OperationError (Maybe Text)
opeCode = lens _opeCode (\ st val -> st {_opeCode = val})

-- | Additional information about the error encountered.
opeMessage :: Lens' OperationError (Maybe Text)
opeMessage = lens _opeMessage (\ st val -> st {_opeMessage = val})

instance FromJSON OperationError where
    parseJSON =
        withObject "OperationError" $ \ obj ->
            OperationError' <$> (obj .:? "kind" .!= "sql#operationError")
                            <*> (obj .:? "code")
                            <*> (obj .:? "message")

instance ToJSON OperationError where
    toJSON OperationError'{..} =
        object . catMaybes $
            [ Just ("kind" .= _opeKind)
            , ("code" .=) <$> _opeCode
            , ("message" .=) <$> _opeMessage
            ]
-- | Database Instance truncate log context.
--
-- /See:/ 'truncateLogContext' smart constructor.
data TruncateLogContext = TruncateLogContext'
    { _tlcKind :: !Text
    , _tlcLogType :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Constructs a 'TruncateLogContext' with every optional field unset and
-- the \"kind\" discriminator preset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tlcKind'
--
-- * 'tlcLogType'
truncateLogContext
    :: TruncateLogContext
truncateLogContext =
    TruncateLogContext'
        { _tlcKind = "sql#truncateLogContext"
        , _tlcLogType = Nothing
        }

-- | This is always sql#truncateLogContext.
tlcKind :: Lens' TruncateLogContext Text
tlcKind = lens _tlcKind (\ st val -> st {_tlcKind = val})

-- | The type of log to truncate. Valid values are MYSQL_GENERAL_TABLE and
-- MYSQL_SLOW_TABLE.
tlcLogType :: Lens' TruncateLogContext (Maybe Text)
tlcLogType = lens _tlcLogType (\ st val -> st {_tlcLogType = val})

instance FromJSON TruncateLogContext where
    parseJSON =
        withObject "TruncateLogContext" $ \ obj ->
            TruncateLogContext'
                <$> (obj .:? "kind" .!= "sql#truncateLogContext")
                <*> (obj .:? "logType")

instance ToJSON TruncateLogContext where
    toJSON TruncateLogContext'{..} =
        object . catMaybes $
            [ Just ("kind" .= _tlcKind)
            , ("logType" .=) <$> _tlcLogType
            ]
-- | Database instance clone request.
--
-- /See:/ 'instancesCloneRequest' smart constructor.
newtype InstancesCloneRequest = InstancesCloneRequest'
    { _icrCloneContext :: Maybe CloneContext
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Constructs an 'InstancesCloneRequest' with every optional field unset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'icrCloneContext'
instancesCloneRequest
    :: InstancesCloneRequest
instancesCloneRequest = InstancesCloneRequest' {_icrCloneContext = Nothing}

-- | Contains details about the clone operation.
icrCloneContext :: Lens' InstancesCloneRequest (Maybe CloneContext)
icrCloneContext =
    lens _icrCloneContext (\ st val -> st {_icrCloneContext = val})

instance FromJSON InstancesCloneRequest where
    parseJSON =
        withObject "InstancesCloneRequest" $ \ obj ->
            InstancesCloneRequest' <$> (obj .:? "cloneContext")

instance ToJSON InstancesCloneRequest where
    toJSON InstancesCloneRequest'{..} =
        object . catMaybes $
            [("cloneContext" .=) <$> _icrCloneContext]
-- | Read-replica configuration for connecting to the master.
--
-- /See:/ 'replicaConfiguration' smart constructor.
data ReplicaConfiguration = ReplicaConfiguration'
    { _rcFailoverTarget :: !(Maybe Bool)
    , _rcKind :: !Text
    , _rcMysqlReplicaConfiguration :: !(Maybe MySQLReplicaConfiguration)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Constructs a 'ReplicaConfiguration' with every optional field unset
-- and the \"kind\" discriminator preset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rcFailoverTarget'
--
-- * 'rcKind'
--
-- * 'rcMysqlReplicaConfiguration'
replicaConfiguration
    :: ReplicaConfiguration
replicaConfiguration =
    ReplicaConfiguration'
        { _rcFailoverTarget = Nothing
        , _rcKind = "sql#replicaConfiguration"
        , _rcMysqlReplicaConfiguration = Nothing
        }

-- | Specifies if the replica is the failover target. If the field is set to
-- true the replica will be designated as a failover replica. In case the
-- master instance fails, the replica instance will be promoted as the new
-- master instance. Only one replica can be specified as failover target,
-- and the replica has to be in different zone with the master instance.
rcFailoverTarget :: Lens' ReplicaConfiguration (Maybe Bool)
rcFailoverTarget =
    lens _rcFailoverTarget (\ st val -> st {_rcFailoverTarget = val})

-- | This is always sql#replicaConfiguration.
rcKind :: Lens' ReplicaConfiguration Text
rcKind = lens _rcKind (\ st val -> st {_rcKind = val})

-- | MySQL specific configuration when replicating from a MySQL on-premises
-- master. Replication configuration information such as the username,
-- password, certificates, and keys are not stored in the instance
-- metadata. The configuration information is used only to set up the
-- replication connection and is stored by MySQL in a file named
-- master.info in the data directory.
rcMysqlReplicaConfiguration :: Lens' ReplicaConfiguration (Maybe MySQLReplicaConfiguration)
rcMysqlReplicaConfiguration =
    lens _rcMysqlReplicaConfiguration
        (\ st val -> st {_rcMysqlReplicaConfiguration = val})

instance FromJSON ReplicaConfiguration where
    parseJSON =
        withObject "ReplicaConfiguration" $ \ obj ->
            ReplicaConfiguration'
                <$> (obj .:? "failoverTarget")
                <*> (obj .:? "kind" .!= "sql#replicaConfiguration")
                <*> (obj .:? "mysqlReplicaConfiguration")

instance ToJSON ReplicaConfiguration where
    toJSON ReplicaConfiguration'{..} =
        object . catMaybes $
            [ ("failoverTarget" .=) <$> _rcFailoverTarget
            , Just ("kind" .= _rcKind)
            , ("mysqlReplicaConfiguration" .=) <$>
                  _rcMysqlReplicaConfiguration
            ]
-- | Database instance failover context.
--
-- /See:/ 'failoverContext' smart constructor.
data FailoverContext = FailoverContext'
    { _fcSettingsVersion :: !(Maybe (Textual Int64))
    , _fcKind :: !Text
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Constructs a 'FailoverContext' with every optional field unset and the
-- \"kind\" discriminator preset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'fcSettingsVersion'
--
-- * 'fcKind'
failoverContext
    :: FailoverContext
failoverContext =
    FailoverContext'
        { _fcSettingsVersion = Nothing
        , _fcKind = "sql#failoverContext"
        }

-- | The current settings version of this instance. Request will be rejected
-- if this version doesn\'t match the current settings version.
fcSettingsVersion :: Lens' FailoverContext (Maybe Int64)
fcSettingsVersion =
    lens _fcSettingsVersion (\ st val -> st {_fcSettingsVersion = val}) .
        mapping _Coerce

-- | This is always sql#failoverContext.
fcKind :: Lens' FailoverContext Text
fcKind = lens _fcKind (\ st val -> st {_fcKind = val})

instance FromJSON FailoverContext where
    parseJSON =
        withObject "FailoverContext" $ \ obj ->
            FailoverContext'
                <$> (obj .:? "settingsVersion")
                <*> (obj .:? "kind" .!= "sql#failoverContext")

instance ToJSON FailoverContext where
    toJSON FailoverContext'{..} =
        object . catMaybes $
            [ ("settingsVersion" .=) <$> _fcSettingsVersion
            , Just ("kind" .= _fcKind)
            ]
-- | SslCert insert response.
--
-- /See:/ 'sslCertsInsertResponse' smart constructor.
data SSLCertsInsertResponse = SSLCertsInsertResponse'
    { _scirServerCaCert :: !(Maybe SSLCert)
    , _scirOperation :: !(Maybe Operation)
    , _scirKind :: !Text
    , _scirClientCert :: !(Maybe SSLCertDetail)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Constructs an 'SSLCertsInsertResponse' with every optional field unset
-- and the \"kind\" discriminator preset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'scirServerCaCert'
--
-- * 'scirOperation'
--
-- * 'scirKind'
--
-- * 'scirClientCert'
sslCertsInsertResponse
    :: SSLCertsInsertResponse
sslCertsInsertResponse =
    SSLCertsInsertResponse'
        { _scirServerCaCert = Nothing
        , _scirOperation = Nothing
        , _scirKind = "sql#sslCertsInsert"
        , _scirClientCert = Nothing
        }

-- | The server Certificate Authority\'s certificate. If this is missing you
-- can force a new one to be generated by calling resetSslConfig method on
-- instances resource.
scirServerCaCert :: Lens' SSLCertsInsertResponse (Maybe SSLCert)
scirServerCaCert =
    lens _scirServerCaCert (\ st val -> st {_scirServerCaCert = val})

-- | The operation to track the ssl certs insert request.
scirOperation :: Lens' SSLCertsInsertResponse (Maybe Operation)
scirOperation =
    lens _scirOperation (\ st val -> st {_scirOperation = val})

-- | This is always sql#sslCertsInsert.
scirKind :: Lens' SSLCertsInsertResponse Text
scirKind = lens _scirKind (\ st val -> st {_scirKind = val})

-- | The new client certificate and private key. The new certificate will not
-- work until the instance is restarted for First Generation instances.
scirClientCert :: Lens' SSLCertsInsertResponse (Maybe SSLCertDetail)
scirClientCert =
    lens _scirClientCert (\ st val -> st {_scirClientCert = val})

instance FromJSON SSLCertsInsertResponse where
    parseJSON =
        withObject "SSLCertsInsertResponse" $ \ obj ->
            SSLCertsInsertResponse'
                <$> (obj .:? "serverCaCert")
                <*> (obj .:? "operation")
                <*> (obj .:? "kind" .!= "sql#sslCertsInsert")
                <*> (obj .:? "clientCert")

instance ToJSON SSLCertsInsertResponse where
    toJSON SSLCertsInsertResponse'{..} =
        object . catMaybes $
            [ ("serverCaCert" .=) <$> _scirServerCaCert
            , ("operation" .=) <$> _scirOperation
            , Just ("kind" .= _scirKind)
            , ("clientCert" .=) <$> _scirClientCert
            ]
-- | Database instances list response.
--
-- /See:/ 'instancesListResponse' smart constructor.
data InstancesListResponse = InstancesListResponse'
    { _ilrNextPageToken :: !(Maybe Text)
    , _ilrKind :: !Text
    , _ilrItems :: !(Maybe [DatabaseInstance])
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Constructs an 'InstancesListResponse' with every optional field unset
-- and the \"kind\" discriminator preset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ilrNextPageToken'
--
-- * 'ilrKind'
--
-- * 'ilrItems'
instancesListResponse
    :: InstancesListResponse
instancesListResponse =
    InstancesListResponse'
        { _ilrNextPageToken = Nothing
        , _ilrKind = "sql#instancesList"
        , _ilrItems = Nothing
        }

-- | The continuation token, used to page through large result sets. Provide
-- this value in a subsequent request to return the next page of results.
ilrNextPageToken :: Lens' InstancesListResponse (Maybe Text)
ilrNextPageToken =
    lens _ilrNextPageToken (\ st val -> st {_ilrNextPageToken = val})

-- | This is always sql#instancesList.
ilrKind :: Lens' InstancesListResponse Text
ilrKind = lens _ilrKind (\ st val -> st {_ilrKind = val})

-- | List of database instance resources.
ilrItems :: Lens' InstancesListResponse [DatabaseInstance]
ilrItems =
    lens _ilrItems (\ st val -> st {_ilrItems = val}) . _Default . _Coerce

instance FromJSON InstancesListResponse where
    parseJSON =
        withObject "InstancesListResponse" $ \ obj ->
            InstancesListResponse'
                <$> (obj .:? "nextPageToken")
                <*> (obj .:? "kind" .!= "sql#instancesList")
                <*> (obj .:? "items" .!= mempty)

instance ToJSON InstancesListResponse where
    toJSON InstancesListResponse'{..} =
        object . catMaybes $
            [ ("nextPageToken" .=) <$> _ilrNextPageToken
            , Just ("kind" .= _ilrKind)
            , ("items" .=) <$> _ilrItems
            ]
-- | Database instance backup configuration.
--
-- /See:/ 'backupConfiguration' smart constructor.
data BackupConfiguration = BackupConfiguration'
    { _bcEnabled :: !(Maybe Bool)
    , _bcStartTime :: !(Maybe Text)
    , _bcKind :: !Text
    , _bcBinaryLogEnabled :: !(Maybe Bool)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Constructs a 'BackupConfiguration' with every optional field unset and
-- the \"kind\" discriminator preset.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'bcEnabled'
--
-- * 'bcStartTime'
--
-- * 'bcKind'
--
-- * 'bcBinaryLogEnabled'
backupConfiguration
    :: BackupConfiguration
backupConfiguration =
    BackupConfiguration'
        { _bcEnabled = Nothing
        , _bcStartTime = Nothing
        , _bcKind = "sql#backupConfiguration"
        , _bcBinaryLogEnabled = Nothing
        }

-- | Whether this configuration is enabled.
bcEnabled :: Lens' BackupConfiguration (Maybe Bool)
bcEnabled = lens _bcEnabled (\ st val -> st {_bcEnabled = val})

-- | Start time for the daily backup configuration in UTC timezone in the 24
-- hour format - HH:MM.
bcStartTime :: Lens' BackupConfiguration (Maybe Text)
bcStartTime = lens _bcStartTime (\ st val -> st {_bcStartTime = val})

-- | This is always sql#backupConfiguration.
bcKind :: Lens' BackupConfiguration Text
bcKind = lens _bcKind (\ st val -> st {_bcKind = val})

-- | Whether binary log is enabled. If backup configuration is disabled,
-- binary log must be disabled as well.
bcBinaryLogEnabled :: Lens' BackupConfiguration (Maybe Bool)
bcBinaryLogEnabled =
    lens _bcBinaryLogEnabled (\ st val -> st {_bcBinaryLogEnabled = val})

instance FromJSON BackupConfiguration where
    parseJSON =
        withObject "BackupConfiguration" $ \ obj ->
            BackupConfiguration'
                <$> (obj .:? "enabled")
                <*> (obj .:? "startTime")
                <*> (obj .:? "kind" .!= "sql#backupConfiguration")
                <*> (obj .:? "binaryLogEnabled")

instance ToJSON BackupConfiguration where
    toJSON BackupConfiguration'{..} =
        object . catMaybes $
            [ ("enabled" .=) <$> _bcEnabled
            , ("startTime" .=) <$> _bcStartTime
            , Just ("kind" .= _bcKind)
            , ("binaryLogEnabled" .=) <$> _bcBinaryLogEnabled
            ]
-- | Database instance import request.
--
-- /See:/ 'instancesImportRequest' smart constructor.
newtype InstancesImportRequest = InstancesImportRequest'
    { _iirImportContext :: Maybe ImportContext
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'InstancesImportRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'iirImportContext'
instancesImportRequest
    :: InstancesImportRequest
instancesImportRequest =
  InstancesImportRequest' {_iirImportContext = Nothing}

-- | Contains details about the import operation.
iirImportContext :: Lens' InstancesImportRequest (Maybe ImportContext)
iirImportContext =
  lens _iirImportContext (\ record v -> record {_iirImportContext = v})

instance FromJSON InstancesImportRequest where
  parseJSON =
    withObject "InstancesImportRequest" $ \ obj ->
      InstancesImportRequest' <$> obj .:? "importContext"

instance ToJSON InstancesImportRequest where
  toJSON InstancesImportRequest' {..} =
    object (catMaybes [("importContext" .=) <$> _iirImportContext])
-- | Preferred location. This specifies where a Cloud SQL instance should
-- preferably be located, either in a specific Compute Engine zone, or
-- co-located with an App Engine application. Note that if the preferred
-- location is not available, the instance will be located as close as
-- possible within the region. Only one location may be specified.
--
-- /See:/ 'locationPreference' smart constructor.
data LocationPreference = LocationPreference'
    { _lpKind :: !Text
    , _lpFollowGaeApplication :: !(Maybe Text)
    , _lpZone :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'LocationPreference' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lpKind'
--
-- * 'lpFollowGaeApplication'
--
-- * 'lpZone'
locationPreference
    :: LocationPreference
locationPreference = LocationPreference'
    { _lpKind = "sql#locationPreference"
    , _lpFollowGaeApplication = Nothing
    , _lpZone = Nothing
    }

-- | This is always sql#locationPreference.
lpKind :: Lens' LocationPreference Text
lpKind = lens _lpKind (\ record v -> record {_lpKind = v})

-- | The AppEngine application to follow, it must be in the same region as
-- the Cloud SQL instance.
lpFollowGaeApplication :: Lens' LocationPreference (Maybe Text)
lpFollowGaeApplication =
  lens _lpFollowGaeApplication
       (\ record v -> record {_lpFollowGaeApplication = v})

-- | The preferred Compute Engine zone (e.g. us-central1-a, us-central1-b,
-- etc.).
lpZone :: Lens' LocationPreference (Maybe Text)
lpZone = lens _lpZone (\ record v -> record {_lpZone = v})

instance FromJSON LocationPreference where
  parseJSON =
    withObject "LocationPreference" $ \ obj ->
      LocationPreference'
        <$> obj .:? "kind" .!= "sql#locationPreference"
        <*> obj .:? "followGaeApplication"
        <*> obj .:? "zone"

instance ToJSON LocationPreference where
  toJSON LocationPreference' {..} =
    object $
      catMaybes
        [ Just ("kind" .= _lpKind)
        , ("followGaeApplication" .=) <$> _lpFollowGaeApplication
        , ("zone" .=) <$> _lpZone
        ]
-- | Flags list response.
--
-- /See:/ 'flagsListResponse' smart constructor.
data FlagsListResponse = FlagsListResponse'
    { _flrKind :: !Text
    , _flrItems :: !(Maybe [Flag])
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'FlagsListResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'flrKind'
--
-- * 'flrItems'
flagsListResponse
    :: FlagsListResponse
flagsListResponse = FlagsListResponse'
    { _flrKind = "sql#flagsList"
    , _flrItems = Nothing
    }

-- | This is always sql#flagsList.
flrKind :: Lens' FlagsListResponse Text
flrKind = lens _flrKind (\ record v -> record {_flrKind = v})

-- | List of flags.
flrItems :: Lens' FlagsListResponse [Flag]
flrItems =
  lens _flrItems (\ record v -> record {_flrItems = v}) . _Default . _Coerce

instance FromJSON FlagsListResponse where
  parseJSON =
    withObject "FlagsListResponse" $ \ obj ->
      FlagsListResponse'
        <$> obj .:? "kind" .!= "sql#flagsList"
        <*> obj .:? "items" .!= mempty

instance ToJSON FlagsListResponse where
  toJSON FlagsListResponse' {..} =
    object $
      catMaybes
        [ Just ("kind" .= _flrKind)
        , ("items" .=) <$> _flrItems
        ]
-- | Instance truncate log request.
--
-- /See:/ 'instancesTruncateLogRequest' smart constructor.
newtype InstancesTruncateLogRequest = InstancesTruncateLogRequest'
    { _itlrTruncateLogContext :: Maybe TruncateLogContext
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'InstancesTruncateLogRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'itlrTruncateLogContext'
instancesTruncateLogRequest
    :: InstancesTruncateLogRequest
instancesTruncateLogRequest =
  InstancesTruncateLogRequest' {_itlrTruncateLogContext = Nothing}

-- | Contains details about the truncate log operation.
itlrTruncateLogContext :: Lens' InstancesTruncateLogRequest (Maybe TruncateLogContext)
itlrTruncateLogContext =
  lens _itlrTruncateLogContext
       (\ record v -> record {_itlrTruncateLogContext = v})

instance FromJSON InstancesTruncateLogRequest where
  parseJSON =
    withObject "InstancesTruncateLogRequest" $ \ obj ->
      InstancesTruncateLogRequest' <$> obj .:? "truncateLogContext"

instance ToJSON InstancesTruncateLogRequest where
  toJSON InstancesTruncateLogRequest' {..} =
    object
      (catMaybes [("truncateLogContext" .=) <$> _itlrTruncateLogContext])
-- | Options for exporting data as SQL statements.
--
-- /See:/ 'exportContextSQLExportOptions' smart constructor.
data ExportContextSQLExportOptions = ExportContextSQLExportOptions'
    { _ecsqleoSchemaOnly :: !(Maybe Bool)
    , _ecsqleoTables :: !(Maybe [Text])
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'ExportContextSQLExportOptions' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ecsqleoSchemaOnly'
--
-- * 'ecsqleoTables'
exportContextSQLExportOptions
    :: ExportContextSQLExportOptions
exportContextSQLExportOptions = ExportContextSQLExportOptions'
    { _ecsqleoSchemaOnly = Nothing
    , _ecsqleoTables = Nothing
    }

-- | Export only schemas.
ecsqleoSchemaOnly :: Lens' ExportContextSQLExportOptions (Maybe Bool)
ecsqleoSchemaOnly =
  lens _ecsqleoSchemaOnly (\ record v -> record {_ecsqleoSchemaOnly = v})

-- | Tables to export, or that were exported, from the specified database. If
-- you specify tables, specify one and only one database.
ecsqleoTables :: Lens' ExportContextSQLExportOptions [Text]
ecsqleoTables =
  lens _ecsqleoTables (\ record v -> record {_ecsqleoTables = v})
    . _Default
    . _Coerce

instance FromJSON ExportContextSQLExportOptions where
  parseJSON =
    withObject "ExportContextSQLExportOptions" $ \ obj ->
      ExportContextSQLExportOptions'
        <$> obj .:? "schemaOnly"
        <*> obj .:? "tables" .!= mempty

instance ToJSON ExportContextSQLExportOptions where
  toJSON ExportContextSQLExportOptions' {..} =
    object $
      catMaybes
        [ ("schemaOnly" .=) <$> _ecsqleoSchemaOnly
        , ("tables" .=) <$> _ecsqleoTables
        ]
-- | Database instance restore from backup context.
--
-- /See:/ 'restoreBackupContext' smart constructor.
data RestoreBackupContext = RestoreBackupContext'
    { _rbcInstanceId :: !(Maybe Text)
    , _rbcBackupRunId :: !(Maybe (Textual Int64))
    , _rbcKind :: !Text
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'RestoreBackupContext' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rbcInstanceId'
--
-- * 'rbcBackupRunId'
--
-- * 'rbcKind'
restoreBackupContext
    :: RestoreBackupContext
restoreBackupContext = RestoreBackupContext'
    { _rbcInstanceId = Nothing
    , _rbcBackupRunId = Nothing
    , _rbcKind = "sql#restoreBackupContext"
    }

-- | The ID of the instance that the backup was taken from.
rbcInstanceId :: Lens' RestoreBackupContext (Maybe Text)
rbcInstanceId =
  lens _rbcInstanceId (\ record v -> record {_rbcInstanceId = v})

-- | The ID of the backup run to restore from.
rbcBackupRunId :: Lens' RestoreBackupContext (Maybe Int64)
rbcBackupRunId =
  lens _rbcBackupRunId (\ record v -> record {_rbcBackupRunId = v})
    . mapping _Coerce

-- | This is always sql#restoreBackupContext.
rbcKind :: Lens' RestoreBackupContext Text
rbcKind = lens _rbcKind (\ record v -> record {_rbcKind = v})

instance FromJSON RestoreBackupContext where
  parseJSON =
    withObject "RestoreBackupContext" $ \ obj ->
      RestoreBackupContext'
        <$> obj .:? "instanceId"
        <*> obj .:? "backupRunId"
        <*> obj .:? "kind" .!= "sql#restoreBackupContext"

instance ToJSON RestoreBackupContext where
  toJSON RestoreBackupContext' {..} =
    object $
      catMaybes
        [ ("instanceId" .=) <$> _rbcInstanceId
        , ("backupRunId" .=) <$> _rbcBackupRunId
        , Just ("kind" .= _rbcKind)
        ]
| rueshyna/gogol | gogol-sqladmin/gen/Network/Google/SQLAdmin/Types/Product.hs | mpl-2.0 | 130,397 | 0 | 34 | 33,569 | 24,672 | 14,191 | 10,481 | 2,741 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.SQL.Projects.Instances.StartExternalSync
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Start External primary instance migration.
--
-- /See:/ <https://developers.google.com/cloud-sql/ Cloud SQL Admin API Reference> for @sql.projects.instances.startExternalSync@.
module Network.Google.Resource.SQL.Projects.Instances.StartExternalSync
(
-- * REST Resource
ProjectsInstancesStartExternalSyncResource
-- * Creating a Request
, projectsInstancesStartExternalSync
, ProjectsInstancesStartExternalSync
-- * Request Lenses
, pisesXgafv
, pisesUploadProtocol
, pisesProject
, pisesAccessToken
, pisesUploadType
, pisesSyncMode
, pisesSkipVerification
, pisesCallback
, pisesInstance
) where
import Network.Google.Prelude
import Network.Google.SQLAdmin.Types
-- | A resource alias for @sql.projects.instances.startExternalSync@ method which the
-- 'ProjectsInstancesStartExternalSync' request conforms to.
--
-- Encodes @POST \/v1\/projects\/{project}\/instances\/{instance}\/startExternalSync@
-- with the listed (all optional) query parameters; the JSON response body is
-- decoded as an 'Operation'.
type ProjectsInstancesStartExternalSyncResource =
     "v1" :>
       "projects" :>
         Capture "project" Text :>
           "instances" :>
             Capture "instance" Text :>
               "startExternalSync" :>
                 QueryParam "$.xgafv" Xgafv :>
                   QueryParam "upload_protocol" Text :>
                     QueryParam "access_token" Text :>
                       QueryParam "uploadType" Text :>
                         QueryParam "syncMode"
                           ProjectsInstancesStartExternalSyncSyncMode
                           :>
                           QueryParam "skipVerification" Bool :>
                             QueryParam "callback" Text :>
                               QueryParam "alt" AltJSON :>
                                 Post '[JSON] Operation
-- | Start External primary instance migration.
--
-- /See:/ 'projectsInstancesStartExternalSync' smart constructor.
data ProjectsInstancesStartExternalSync =
  ProjectsInstancesStartExternalSync'
    { _pisesXgafv :: !(Maybe Xgafv)
    , _pisesUploadProtocol :: !(Maybe Text)
    , _pisesProject :: !Text
    , _pisesAccessToken :: !(Maybe Text)
    , _pisesUploadType :: !(Maybe Text)
    , _pisesSyncMode :: !(Maybe ProjectsInstancesStartExternalSyncSyncMode)
    , _pisesSkipVerification :: !(Maybe Bool)
    , _pisesCallback :: !(Maybe Text)
    , _pisesInstance :: !Text
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'ProjectsInstancesStartExternalSync' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pisesXgafv'
--
-- * 'pisesUploadProtocol'
--
-- * 'pisesProject'
--
-- * 'pisesAccessToken'
--
-- * 'pisesUploadType'
--
-- * 'pisesSyncMode'
--
-- * 'pisesSkipVerification'
--
-- * 'pisesCallback'
--
-- * 'pisesInstance'
projectsInstancesStartExternalSync
    :: Text -- ^ 'pisesProject'
    -> Text -- ^ 'pisesInstance'
    -> ProjectsInstancesStartExternalSync
projectsInstancesStartExternalSync projectId instanceId =
  ProjectsInstancesStartExternalSync'
    { _pisesXgafv = Nothing
    , _pisesUploadProtocol = Nothing
    , _pisesProject = projectId
    , _pisesAccessToken = Nothing
    , _pisesUploadType = Nothing
    , _pisesSyncMode = Nothing
    , _pisesSkipVerification = Nothing
    , _pisesCallback = Nothing
    , _pisesInstance = instanceId
    }
-- | V1 error format.
pisesXgafv :: Lens' ProjectsInstancesStartExternalSync (Maybe Xgafv)
pisesXgafv = lens _pisesXgafv (\ record v -> record {_pisesXgafv = v})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pisesUploadProtocol :: Lens' ProjectsInstancesStartExternalSync (Maybe Text)
pisesUploadProtocol =
  lens _pisesUploadProtocol (\ record v -> record {_pisesUploadProtocol = v})

-- | ID of the project that contains the instance.
pisesProject :: Lens' ProjectsInstancesStartExternalSync Text
pisesProject = lens _pisesProject (\ record v -> record {_pisesProject = v})

-- | OAuth access token.
pisesAccessToken :: Lens' ProjectsInstancesStartExternalSync (Maybe Text)
pisesAccessToken =
  lens _pisesAccessToken (\ record v -> record {_pisesAccessToken = v})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pisesUploadType :: Lens' ProjectsInstancesStartExternalSync (Maybe Text)
pisesUploadType =
  lens _pisesUploadType (\ record v -> record {_pisesUploadType = v})

-- | External sync mode.
pisesSyncMode :: Lens' ProjectsInstancesStartExternalSync (Maybe ProjectsInstancesStartExternalSyncSyncMode)
pisesSyncMode =
  lens _pisesSyncMode (\ record v -> record {_pisesSyncMode = v})

-- | Whether to skip the verification step (VESS).
pisesSkipVerification :: Lens' ProjectsInstancesStartExternalSync (Maybe Bool)
pisesSkipVerification =
  lens _pisesSkipVerification
       (\ record v -> record {_pisesSkipVerification = v})

-- | JSONP
pisesCallback :: Lens' ProjectsInstancesStartExternalSync (Maybe Text)
pisesCallback =
  lens _pisesCallback (\ record v -> record {_pisesCallback = v})

-- | Cloud SQL instance ID. This does not include the project ID.
pisesInstance :: Lens' ProjectsInstancesStartExternalSync Text
pisesInstance =
  lens _pisesInstance (\ record v -> record {_pisesInstance = v})
instance GoogleRequest ProjectsInstancesStartExternalSync where
  type Rs ProjectsInstancesStartExternalSync = Operation
  type Scopes ProjectsInstancesStartExternalSync =
    '["https://www.googleapis.com/auth/cloud-platform",
      "https://www.googleapis.com/auth/sqlservice.admin"]
  -- NOTE: the argument order below must line up exactly with the
  -- captures and query parameters of
  -- 'ProjectsInstancesStartExternalSyncResource'.
  requestClient ProjectsInstancesStartExternalSync' {..} =
    go
      _pisesProject
      _pisesInstance
      _pisesXgafv
      _pisesUploadProtocol
      _pisesAccessToken
      _pisesUploadType
      _pisesSyncMode
      _pisesSkipVerification
      _pisesCallback
      (Just AltJSON)
      sQLAdminService
    where
      go =
        buildClient
          (Proxy :: Proxy ProjectsInstancesStartExternalSyncResource)
          mempty
| brendanhay/gogol | gogol-sqladmin/gen/Network/Google/Resource/SQL/Projects/Instances/StartExternalSync.hs | mpl-2.0 | 6,814 | 0 | 21 | 1,609 | 945 | 547 | 398 | 147 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AdSense.Accounts.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists all accounts available to this user.
--
-- /See:/ <http://code.google.com/apis/adsense/management/ AdSense Management API Reference> for @adsense.accounts.list@.
module Network.Google.Resource.AdSense.Accounts.List
(
-- * REST Resource
AccountsListResource
-- * Creating a Request
, accountsList
, AccountsList
-- * Request Lenses
, alXgafv
, alUploadProtocol
, alAccessToken
, alUploadType
, alPageToken
, alPageSize
, alCallback
) where
import Network.Google.AdSense.Types
import Network.Google.Prelude
-- | A resource alias for @adsense.accounts.list@ method which the
-- 'AccountsList' request conforms to.
--
-- Encodes @GET \/v2\/accounts@ with the listed (all optional) query
-- parameters; the JSON response body is decoded as a 'ListAccountsResponse'.
type AccountsListResource =
     "v2" :>
       "accounts" :>
         QueryParam "$.xgafv" Xgafv :>
           QueryParam "upload_protocol" Text :>
             QueryParam "access_token" Text :>
               QueryParam "uploadType" Text :>
                 QueryParam "pageToken" Text :>
                   QueryParam "pageSize" (Textual Int32) :>
                     QueryParam "callback" Text :>
                       QueryParam "alt" AltJSON :>
                         Get '[JSON] ListAccountsResponse
-- | Lists all accounts available to this user.
--
-- /See:/ 'accountsList' smart constructor.
data AccountsList =
  AccountsList'
    { _alXgafv :: !(Maybe Xgafv)
    , _alUploadProtocol :: !(Maybe Text)
    , _alAccessToken :: !(Maybe Text)
    , _alUploadType :: !(Maybe Text)
    , _alPageToken :: !(Maybe Text)
    , _alPageSize :: !(Maybe (Textual Int32))
    , _alCallback :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'AccountsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'alXgafv'
--
-- * 'alUploadProtocol'
--
-- * 'alAccessToken'
--
-- * 'alUploadType'
--
-- * 'alPageToken'
--
-- * 'alPageSize'
--
-- * 'alCallback'
accountsList :: AccountsList
accountsList = AccountsList'
  { _alXgafv = Nothing
  , _alUploadProtocol = Nothing
  , _alAccessToken = Nothing
  , _alUploadType = Nothing
  , _alPageToken = Nothing
  , _alPageSize = Nothing
  , _alCallback = Nothing
  }
-- | V1 error format.
alXgafv :: Lens' AccountsList (Maybe Xgafv)
alXgafv = lens _alXgafv (\ record v -> record {_alXgafv = v})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
alUploadProtocol :: Lens' AccountsList (Maybe Text)
alUploadProtocol =
  lens _alUploadProtocol (\ record v -> record {_alUploadProtocol = v})

-- | OAuth access token.
alAccessToken :: Lens' AccountsList (Maybe Text)
alAccessToken =
  lens _alAccessToken (\ record v -> record {_alAccessToken = v})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
alUploadType :: Lens' AccountsList (Maybe Text)
alUploadType = lens _alUploadType (\ record v -> record {_alUploadType = v})

-- | A page token, received from a previous \`ListAccounts\` call. Provide
-- this to retrieve the subsequent page. When paginating, all other
-- parameters provided to \`ListAccounts\` must match the call that
-- provided the page token.
alPageToken :: Lens' AccountsList (Maybe Text)
alPageToken = lens _alPageToken (\ record v -> record {_alPageToken = v})

-- | The maximum number of accounts to include in the response, used for
-- paging. If unspecified, at most 10000 accounts will be returned. The
-- maximum value is 10000; values above 10000 will be coerced to 10000.
alPageSize :: Lens' AccountsList (Maybe Int32)
alPageSize =
  lens _alPageSize (\ record v -> record {_alPageSize = v})
    . mapping _Coerce

-- | JSONP
alCallback :: Lens' AccountsList (Maybe Text)
alCallback = lens _alCallback (\ record v -> record {_alCallback = v})
instance GoogleRequest AccountsList where
  type Rs AccountsList = ListAccountsResponse
  type Scopes AccountsList =
    '["https://www.googleapis.com/auth/adsense",
      "https://www.googleapis.com/auth/adsense.readonly"]
  -- NOTE: argument order must line up exactly with the query parameters
  -- of 'AccountsListResource'.
  requestClient AccountsList' {..} =
    go
      _alXgafv
      _alUploadProtocol
      _alAccessToken
      _alUploadType
      _alPageToken
      _alPageSize
      _alCallback
      (Just AltJSON)
      adSenseService
    where
      go = buildClient (Proxy :: Proxy AccountsListResource) mempty
| brendanhay/gogol | gogol-adsense/gen/Network/Google/Resource/AdSense/Accounts/List.hs | mpl-2.0 | 5,130 | 0 | 17 | 1,225 | 809 | 470 | 339 | 112 | 1 |
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE PatternSynonyms #-}
module Dyno.DirectCollocation.Integrate
( withIntegrator
) where
import GHC.Generics ( Generic )
import qualified Control.Concurrent as CC
import Data.Proxy ( Proxy(..) )
import Data.Singletons.TypeLits (KnownNat, natVal, withKnownNat, pattern SNat)
import Data.Singletons.Prelude.Num (PNum((+)), (%+))
import Data.Vector ( Vector )
import qualified Data.Foldable as F
import Casadi.Matrix ( CMatrix )
import Casadi.MX ( MX )
import Casadi.SX ( SX )
import Casadi.Viewable ( Viewable )
import Dyno.View.View ( View(..), J, S, JV, JTuple(..), splitJV, catJV, jfill, fmapJ )
import Dyno.View.Fun ( Fun, callSym, expandFun, toFun )
import Dyno.View.JVec ( JVec(..), jreplicate )
import Dyno.View.HList ( (:*:)(..) )
import Dyno.View.M ( vcat, vsplit )
import qualified Dyno.View.M as M
import Dyno.Vectorize ( Vectorize(..), Id(..), None, unId, vzipWith )
import Dyno.TypeVecs ( Vec )
import qualified Dyno.TypeVecs as TV
import Dyno.LagrangePolynomials ( lagrangeDerivCoeffs )
import Dyno.Solvers ( Solver )
import Dyno.Nlp ( NlpIn(..), NlpOut(..) )
import Dyno.NlpSolver ( callNlpsol, toNlpSol )
import Dyno.DirectCollocation.Types ( CollStage(..), CollPoint(..) )
import Dyno.DirectCollocation.Quadratures ( QuadratureRoots, mkTaus, interpolate, timesFromTaus )
-- | Scalar symbolic (SX) expression type used by the user-supplied DAE residual.
type Sxe = S SX
-- | NLP decision variables: one 'CollStage' per interval plus the final state.
data IntegratorX x z n deg a =
  IntegratorX
  { ixStages :: J (JVec n (CollStage (JV x) (JV z) (JV None) (JV None) deg)) a -- ^ per-interval collocation stages
  , ixXf :: J (JV x) a -- ^ terminal state
  } deriving (Generic)
-- | NLP parameters: total integration time, user parameters, and the
-- control at every collocation point of every stage.
data IntegratorP u p n deg a =
  IntegratorP
  { ipTf :: S a -- ^ total integration time (see 'withIntegrator': dt = tf \/ n)
  , ipParm :: J (JV p) a
  , ipU :: J (JVec n (JVec deg (JV u))) a
  } deriving (Generic)
-- | NLP constraint residuals: dynamics defects at each collocation point
-- plus state continuity between consecutive stages.
data IntegratorG x r n deg a =
  IntegratorG
  { igCollPoints :: J (JVec n (JVec deg (JV r))) a
  , igContinuity :: J (JVec n (JV x)) a
  } deriving (Generic)
instance (Vectorize x, Vectorize z, KnownNat n, KnownNat deg)
         => View (IntegratorX x z n deg)
instance (Vectorize u, Vectorize p, KnownNat n, KnownNat deg)
         => View (IntegratorP u p n deg)
instance (Vectorize x, Vectorize r, KnownNat n, KnownNat deg)
         => View (IntegratorG x r n deg)
-- todo: code duplication
-- | Weighted sum of a vector of views: each element is scaled by the
-- corresponding coefficient and the results are summed.
dot :: forall x deg a b. (View x, CMatrix a, Real b, KnownNat deg)
       => Vec deg b -> Vec deg (J x a) -> J x a
dot cks xs = F.sum (TV.unVec scaled)
  where
    scaled :: Vec deg (J x a)
    scaled = TV.tvzipWith (\c v -> realToFrac c * v) cks xs
-- todo: code duplication
-- | One derivative per collocation point: row @j@ of the coefficient
-- matrix dotted with the stage values.
interpolateXDots' :: (Real b, View x, CMatrix a, KnownNat deg)
                  => Vec deg (Vec deg b) -> Vec deg (J x a) -> Vec deg (J x a)
interpolateXDots' cjks xs = fmap (\row -> dot row xs) cjks
-- | Like 'interpolateXDots'', but takes the @deg + 1@ points including the
-- interval start and drops the derivative at the first point.
interpolateXDots ::
  forall b deg x a
  . (Real b, KnownNat deg, View x, CMatrix a)
  => Vec (deg + 1) (Vec (deg + 1) b)
  -> Vec (deg + 1) (J x a)
  -> Vec deg (J x a)
interpolateXDots cjks xs =
  -- withKnownNat brings the (deg + 1) KnownNat instance into scope
  -- so interpolateXDots' and tvtail can be used at length deg + 1
  withKnownNat (SNat @deg %+ SNat @1) $
  TV.tvtail $ interpolateXDots' cjks xs
-- return dynamics constraints, outputs, and interpolated state
-- | For one collocation stage: evaluate the DAE residual at every
-- collocation point and interpolate the state at the end of the interval.
dynStageConstraints' ::
  forall x z u p r deg . (KnownNat deg, View x, View z, View u, View p, View r)
  => Vec (deg + 1) (Vec (deg + 1) Double) -> Vec deg Double
  -> Fun (S :*: J p :*: J x :*: J (CollPoint x z u)) (J r)
  -> (J x :*: J (JVec deg (JTuple x z)) :*: J (JVec deg u) :*: S :*: J p :*: J (JVec deg (JV Id))) MX
  -> (J (JVec deg r) :*: J x) MX
dynStageConstraints' cijs taus dynFun (x0 :*: xzs' :*: us' :*: h :*: p :*: stageTimes') =
  cat (JVec dynConstrs) :*: xnext
  where
    -- unpack per-collocation-point (x, z) pairs and controls
    xzs = fmap split (unJVec (split xzs')) :: Vec deg (JTuple x z MX)
    us = unJVec (split us') :: Vec deg (J u MX)
    -- interpolated final state
    xnext :: J x MX
    xnext = interpolate taus x0 xs
    stageTimes = unJVec $ split stageTimes'
    -- dae constraints (dynamics)
    dynConstrs :: Vec deg (J r MX)
    dynConstrs = TV.tvzipWith4 applyDae xdots xzs us stageTimes
    -- evaluate the DAE residual at a single collocation point
    applyDae :: J x MX -> JTuple x z MX -> J u MX -> S MX -> J r MX
    applyDae x' (JTuple x z) u t = r
      where
        r = callSym dynFun (t :*: p :*: x' :*: collPoint)
        collPoint = cat (CollPoint x z u)
    -- state derivatives, maybe these could be useful as outputs
    -- (the 1/h factor presumably rescales the Lagrange derivative from the
    -- unit interval to the step length h -- confirm against Dyno.View.M.ms)
    xdots :: Vec deg (J x MX)
    xdots = fmap (`M.ms` (1/h)) $ interpolateXDots cijs (x0 TV.<| xs)
    xs :: Vec deg (J x MX)
    xs = fmap (\(JTuple x _) -> x) xzs
-- dynamics residual and outputs
-- | Adapt a 6-argument DAE residual to operate on a packed 'CollPoint'.
dynamicsFunction' ::
  forall x z u p r a . (View x, View z, View u, Viewable a)
  => (J x a -> J x a -> J z a -> J u a -> J p a -> S a -> J r a)
  -> (S :*: J p :*: J x :*: J (CollPoint x z u)) a
  -> J r a
dynamicsFunction' dae (t :*: parm :*: xdot :*: cp) =
  case split cp of
    CollPoint x z u -> dae xdot x z u parm t
-- | Build a collocation-based integrator. The NLP solver is constructed
-- once up front; the returned closure solves one integration problem per
-- call, taking the initial state, the controls (either a single constant
-- @u@ broadcast everywhere, or a full per-stage\/per-point schedule), the
-- parameters, and the duration, and returns the final state or a solver
-- error message.
withIntegrator ::
  forall x z u p r deg n .
  (KnownNat n, KnownNat deg, Vectorize x, Vectorize p, Vectorize u, Vectorize z, Vectorize r)
  => Proxy n
  -> Proxy deg
  -> QuadratureRoots
  -> x Double
  -> (x Sxe -> x Sxe -> z Sxe -> u Sxe -> p Sxe -> Sxe -> r Sxe)
  -> Solver
  -> IO (x Double -> Either (u Double) (Vec n (Vec deg (u Double))) -> p Double -> Double -> IO (Either String (x Double)))
withIntegrator _ _ roots initialX dae solver = do
  let -- the collocation points
      taus :: Vec deg Double
      taus = mkTaus roots
      n = fromIntegral (natVal (Proxy :: Proxy n))
      -- coefficients for getting xdot by lagrange interpolating polynomials
      cijs :: Vec (deg + 1) (Vec (deg + 1) Double)
      cijs =
        withKnownNat (SNat @deg %+ SNat @1) $
        lagrangeDerivCoeffs (0 TV.<| taus)
  -- wrap the user DAE residual as a casadi Fun over flattened views
  dynFun <- flip (toFun "dynamics") mempty $ dynamicsFunction' $
            \x0 x1 x2 x3 x4 x5 ->
            let r = dae (vsplit x0) (vsplit x1) (vsplit x2) (vsplit x3)
                        (vsplit x4) (unId (vsplit x5))
            in vcat r
  dynStageConFun <- toFun "dynamicsStageCon" (dynStageConstraints' cijs taus dynFun) mempty
  -- let callDynStageConFun = callSym dynStageConFun
  callDynStageConFun <- callSym <$> expandFun dynStageConFun
  let fg :: J (IntegratorX x z n deg) MX
         -> J (IntegratorP u p n deg) MX
         -> (S MX, J (IntegratorG x r n deg) MX)
      fg = getFgIntegrator taus callDynStageConFun
      scaleX = Nothing
      scaleG = Nothing
-- , nlpScaleX' = Just $ cat $ fillCollTraj
-- (fromMaybe (fill 1) (ocpXScale ocp))
-- (fromMaybe (fill 1) (ocpZScale ocp))
-- (fromMaybe (fill 1) (ocpUScale ocp))
-- (fromMaybe (fill 1) (ocpPScale ocp))
-- (fromMaybe 1 (ocpTScale ocp))
--
-- , nlpScaleG' = Just $ cat $ fillCollConstraints
-- (fromMaybe (fill 1) (ocpXScale ocp))
-- (fromMaybe (fill 1) (ocpResidualScale ocp))
  -- pack the runtime inputs into the NLP parameter vector; a single
  -- Left u is replicated to every stage and collocation point
  let toParams :: Either (u Double) (Vec n (Vec deg (u Double)))
                  -> p Double
                  -> Double
                  -> J (IntegratorP u p n deg) (Vector Double)
      toParams us p tf =
        cat $
        IntegratorP
          { ipTf = catJV (Id tf)
          , ipParm = catJV p
          , ipU = case us of
              Left u -> jreplicate (jreplicate (catJV u))
              Right us' -> cat $ JVec $ fmap (cat . JVec . fmap catJV) us'
          }
  -- bounds: only the first stage's initial state is pinned to x0,
  -- everything else is left unbounded
  let toBounds :: x Double -> J (IntegratorX x z n deg) (Vector (Maybe Double))
      toBounds x0 =
        cat $
        IntegratorX
          { ixStages = cat $ JVec $ TV.mkVec' $ take n $ xs0 : repeat (jfill Nothing)
          , ixXf = jfill Nothing
          }
        where
          xs0 :: J (CollStage (JV x) (JV z) (JV None) (JV None) deg) (Vector (Maybe Double))
          xs0 = cat $ CollStage (catJV (fmap Just x0)) (jfill Nothing) (jfill Nothing) (jfill Nothing)
  nlpSol <- toNlpSol "collocation_integrator" solver fg scaleX scaleG Nothing Nothing
  -- constant initial guess built from the supplied reference state
  let initialGuess = cat $
                     IntegratorX
                     { ixStages = jreplicate $ cat $
                                  CollStage initialX' (jreplicate point) (jfill 0) (jfill 0)
                     , ixXf = initialX'
                     }
        where
          initialX' :: J (JV x) (Vector Double)
          initialX' = catJV initialX
          point = cat $ CollPoint initialX' (jfill 0) (jfill 0)
  -- the previous solution is stored here and reused as the warm-start
  -- guess for the next call
  initialGuessMVar <- CC.newMVar initialGuess
  let doAnIntegration :: x Double -> Either (u Double) (Vec n (Vec deg (u Double))) -> p Double -> Double -> IO (Either String (x Double))
      doAnIntegration x0 us p tf = do
        xguess <- CC.readMVar initialGuessMVar
        let bx = toBounds x0
            inputs =
              NlpIn
              { nlpX0 = xguess
              , nlpBG = jfill (Just 0, Just 0)
              , nlpP = toParams us p tf
              , nlpBX = fmapJ (\x -> (x, x)) bx
              , nlpLamX0 = Nothing
              , nlpLamG0 = Nothing
              }
        (_, ret) <- callNlpsol nlpSol inputs
        case ret of
          Left msg -> return $ Left $ "failed solving with error: \"" ++ msg ++ "\""
          Right nlpOut -> do
            let xtopt = xOpt nlpOut
            -- cache the solution so the next solve warm-starts from it
            _ <- CC.swapMVar initialGuessMVar xtopt
            return $ Right $ splitJV (ixXf (split xtopt))
  return doAnIntegration
-- | Build the NLP objective (identically zero -- this is a pure
-- feasibility problem) and the constraint vector: dynamics defects at
-- every collocation point, plus continuity between each stage's
-- interpolated end state and the next stage's start (the last stage
-- matches against 'ixXf').
getFgIntegrator ::
  forall x z u p r n deg .
  (KnownNat deg, KnownNat n, Vectorize x, Vectorize z, Vectorize u, Vectorize p, Vectorize r)
  => Vec deg Double
  -> ((J (JV x) :*: J (JVec deg (JTuple (JV x) (JV z))) :*: J (JVec deg (JV u)) :*: S :*: J (JV p) :*: J (JVec deg (JV Id))) MX -> (J (JVec deg (JV r)) :*: J (JV x)) MX)
  -> J (IntegratorX x z n deg) MX
  -> J (IntegratorP u p n deg) MX
  -> (S MX, J (IntegratorG x r n deg) MX)
getFgIntegrator taus stageFun ix' ip' = (0, cat g)
  where
    ix = split ix'
    ip = split ip'
    xf = ixXf ix
    tf = ipTf ip
    parm = ipParm ip
    stages = unJVec (split (ixStages ix)) :: Vec n (J (CollStage (JV x) (JV z) (JV None) (JV None) deg) MX)
    spstages :: Vec n (CollStage (JV x) (JV z) (JV None) (JV None) deg MX)
    spstages = fmap split stages
    us :: Vec n (J (JVec deg (JV u)) MX)
    us = unJVec $ split $ ipU ip
    -- timestep
    dt = tf / fromIntegral n
    n = natVal (Proxy :: Proxy n)
    -- times at each collocation point
    times :: Vec n (Vec deg (S MX))
    times = fmap snd $ timesFromTaus 0 (fmap realToFrac taus) dt
    times' :: Vec n (J (JVec deg (JV Id)) MX)
    times' = fmap (cat . JVec) times
    -- initial point at each stage
    x0s :: Vec n (J (JV x) MX)
    x0s = fmap (\(CollStage x0' _ _ _) -> x0') spstages
    -- final point at each stage (for matching constraint)
    xfs :: Vec n (J (JV x) MX)
    xfs = TV.tvshiftl x0s xf
    g = IntegratorG
        { igCollPoints = cat $ JVec dcs
        , igContinuity = cat $ JVec integratorMatchingConstraints
        }
    integratorMatchingConstraints :: Vec n (J (JV x) MX) -- todo: THIS SHOULD BE A NONLINEAR FUNCTION
    integratorMatchingConstraints = vzipWith (-) interpolatedXs xfs
    dcs :: Vec n (J (JVec deg (JV r)) MX)
    interpolatedXs :: Vec n (J (JV x) MX)
    (dcs, interpolatedXs) = TV.tvunzip $ TV.tvzipWith3 fff spstages us times'
    -- run the stage function on one collocation stage; the :*: ordering
    -- must match dynStageConstraints' exactly
    fff :: CollStage (JV x) (JV z) (JV None) (JV None) deg MX
        -> J (JVec deg (JV u)) MX
        -> J (JVec deg (JV Id)) MX
        -> (J (JVec deg (JV r)) MX, J (JV x) MX)
    fff (CollStage x0' xzs' _ _) us' stageTimes = (dc, interpolatedX')
      where
        dc :*: interpolatedX' = stageFun (x0' :*: xzs :*: us' :*: dt :*: parm :*: stageTimes)
        xzs :: J (JVec deg (JTuple (JV x) (JV z))) MX
        xzs = cat $ JVec $ fmap toTuple $ unJVec $ split xzs'
        toTuple xzu = cat (JTuple x z)
          where
            CollPoint x z _ = split xzu
| ghorn/dynobud | dynobud/src/Dyno/DirectCollocation/Integrate.hs | lgpl-3.0 | 11,814 | 0 | 23 | 3,434 | 4,831 | 2,504 | 2,327 | 237 | 3 |
-- CIS 194 Homework 2
module HW02_Log where
import Control.Applicative
-- | Severity of a log message; 'Error' carries an integer severity level.
data MessageType = Info
                 | Warning
                 | Error Int
  deriving (Show, Eq)
-- | Integer timestamp of a log message.
type TimeStamp = Int
-- | A parsed log line, or 'Unknown' for lines that could not be parsed.
data LogMessage = LogMessage MessageType TimeStamp String
                | Unknown String
  deriving (Show, Eq)
-- | Binary tree of 'LogMessage's.
data MessageTree = Leaf
                 | Node MessageTree LogMessage MessageTree
  deriving (Show, Eq)
-- | @testParse p n f@ tests the log file parser @p@ by running it
-- on the contents of file @f@ and keeping the first @n@ messages.
testParse :: (String -> [LogMessage])
          -> Int
          -> FilePath
          -> IO [LogMessage]
testParse parse n file = do
  contents <- readFile file
  return (take n (parse contents))
-- | @testWhatWentWrong p w f@ tests the log file parser @p@ and
-- warning message extractor @w@ by running them on the contents of
-- the log file @f@.
testWhatWentWrong :: (String -> [LogMessage])
                  -> ([LogMessage] -> [String])
                  -> FilePath
                  -> IO [String]
testWhatWentWrong parse whatWentWrong file = do
  contents <- readFile file
  return (whatWentWrong (parse contents))
| haroldcarr/learn-haskell-coq-ml-etc | haskell/course/2014-06-upenn/cis194/src/HW02_Log.hs | unlicense | 1,088 | 0 | 9 | 334 | 231 | 129 | 102 | 24 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Custom.Codegen where
import Data.Word
import Data.String
import Data.List
import Data.Function
import qualified Data.Map as Map
import Control.Monad.State
import Control.Applicative
import LLVM.General.AST
import LLVM.General.AST.Global
import qualified LLVM.General.AST as AST
import qualified LLVM.General.AST.Constant as C
import qualified LLVM.General.AST.Attribute as A
import qualified LLVM.General.AST.CallingConvention as CC
import qualified LLVM.General.AST.IntegerPredicate as IP
-------------------------------------------------------------------------------
-- Module Level
-------------------------------------------------------------------------------
-- | Module-construction monad: State over an 'AST.Module' that
-- accumulates top-level definitions.
newtype LLVM a = LLVM { unLLVM :: State AST.Module a }
  deriving (Functor, Applicative, Monad, MonadState AST.Module )

-- | Run an 'LLVM' action against an initial module, returning the
-- final, extended module.
runLLVM :: AST.Module -> LLVM a -> AST.Module
runLLVM = flip (execState . unLLVM)

-- | A default module carrying only the given name.
emptyModule :: String -> AST.Module
emptyModule label = defaultModule { moduleName = label }

-- | Append one top-level definition to the module under construction.
addDefn :: Definition -> LLVM ()
addDefn d = do
  defs <- gets moduleDefinitions
  modify $ \s -> s { moduleDefinitions = defs ++ [d] }

-- | Emit a function definition with return type, name, typed
-- parameters and body blocks.
define :: Type -> String -> [(Type, Name)] -> [BasicBlock] -> LLVM ()
define retty label argtys body = addDefn $
  GlobalDefinition $ functionDefaults {
    name        = Name label
  , parameters  = ([Parameter ty nm [] | (ty, nm) <- argtys], False)
  , returnType  = retty
  , basicBlocks = body
  }

-- | Emit an external declaration.
--
-- NOTE(review): this is byte-for-byte identical to 'define' (it even
-- stores the supplied body); external declarations conventionally use
-- an empty 'basicBlocks' list — confirm intent with the author.
external :: Type -> String -> [(Type, Name)] -> [BasicBlock] -> LLVM ()
external retty label argtys body = addDefn $
  GlobalDefinition $ functionDefaults {
    name        = Name label
  , parameters  = ([Parameter ty nm [] | (ty, nm) <- argtys], False)
  , returnType  = retty
  , basicBlocks = body
  }

---------------------------------------------------------------------------------
-- Types
-------------------------------------------------------------------------------

-- | Shorthand for LLVM's 64-bit integer type.
int64 :: Type
int64 = IntegerType 64

-------------------------------------------------------------------------------
-- Names
-------------------------------------------------------------------------------

-- | Name supply: maps a base name to the next numeric suffix to use.
type Names = Map.Map String Int
-- | Draw a unique name from the supply: the first request for a base
-- name returns it verbatim; subsequent requests append an increasing
-- numeric suffix. Returns the chosen name and the updated supply.
uniqueName :: String -> Map.Map String Int -> (String, Map.Map String Int)
uniqueName nm ns = maybe firstUse suffixed (Map.lookup nm ns)
  where
    firstUse    = (nm, Map.insert nm 1 ns)
    suffixed ix = (nm ++ show ix, Map.insert nm (ix + 1) ns)
-- | Allow string literals to stand for LLVM 'Name's.
instance IsString Name where
  fromString = Name . fromString

-------------------------------------------------------------------------------
-- Codegen State
-------------------------------------------------------------------------------

-- | Local symbol table: variable name -> operand holding its value.
type SymbolTable = [(String, Operand)]

-- | State threaded through function-body generation.
data CodegenState
  = CodegenState {
    currentBlock :: Name                     -- Name of the active block to append to
  , blocks       :: Map.Map Name BlockState  -- Blocks for function
  , symtab       :: SymbolTable              -- Function scope symbol table
  , blockCount   :: Int                      -- Count of basic blocks
  , count        :: Word                     -- Count of unnamed instructions
  , names        :: Names                    -- Name Supply
  } deriving Show

-- | One basic block under construction.
data BlockState
  = BlockState {
    idx   :: Int                            -- Block index
  , stack :: [Named Instruction]            -- Stack of instructions
  , term  :: Maybe (Named Terminator)       -- Block terminator
  } deriving Show

-------------------------------------------------------------------------------
-- Codegen Operations
-------------------------------------------------------------------------------

-- | Function-body construction monad over 'CodegenState'.
newtype Codegen a = Codegen { runCodegen :: State CodegenState a }
  deriving (Functor, Applicative, Monad, MonadState CodegenState )

-- | Order blocks by their creation index so emission order is stable.
sortBlocks :: [(Name, BlockState)] -> [(Name, BlockState)]
sortBlocks = sortBy (compare `on` (idx . snd))

-- | Materialise all accumulated blocks as 'BasicBlock's.
createBlocks :: CodegenState -> [BasicBlock]
createBlocks m = map makeBlock $ sortBlocks $ Map.toList (blocks m)

-- | Convert one finished block; calls 'error' if the block was never
-- given a terminator (a programming error in the caller).
makeBlock :: (Name, BlockState) -> BasicBlock
makeBlock (l, (BlockState _ s t)) = BasicBlock l s (maketerm t)
  where
    maketerm (Just x) = x
    maketerm Nothing = error $ "Block has no terminator: " ++ (show l)

-- | Name of the function entry block.
entryBlockName :: String
entryBlockName = "entry"

-- | A fresh block with the given index, no instructions, no terminator.
emptyBlock :: Int -> BlockState
emptyBlock i = BlockState i [] Nothing

-- | Initial codegen state: only the (empty) entry block registered.
emptyCodegen :: CodegenState
emptyCodegen = CodegenState (Name entryBlockName) Map.empty [] 1 0 Map.empty

-- | Run a 'Codegen' action from the empty state, returning the final state.
execCodegen :: Codegen a -> CodegenState
execCodegen m = execState (runCodegen m) emptyCodegen
-- | Next unnamed-instruction counter value.
fresh :: Codegen Word
fresh = do
  i <- gets count
  modify $ \s -> s { count = 1 + i }
  return $ i + 1

-- | Append an instruction to the current block, naming its result
-- with a fresh unnamed register, and return that result as an operand.
instr :: Instruction -> Codegen (Operand)
instr ins = do
  n <- fresh
  let ref = (UnName n)
  blk <- current
  let i = stack blk
  -- note: appending with (++) keeps source order; stack is small per block
  modifyBlock (blk { stack = i ++ [ref := ins] } )
  return $ local ref

-- | Set (overwrite) the terminator of the current block.
terminator :: Named Terminator -> Codegen (Named Terminator)
terminator trm = do
  blk <- current
  modifyBlock (blk { term = Just trm })
  return trm

-------------------------------------------------------------------------------
-- Block Stack
-------------------------------------------------------------------------------

-- | Name of the block currently being appended to.
entry :: Codegen Name
entry = gets currentBlock

-- | Create a new, empty block with a uniquified version of the given
-- name; does not switch to it (see 'setBlock').
addBlock :: String -> Codegen Name
addBlock bname = do
  bls <- gets blocks
  ix <- gets blockCount
  nms <- gets names
  let new = emptyBlock ix
      (qname, supply) = uniqueName bname nms
  modify $ \s -> s { blocks = Map.insert (Name qname) new bls
                   , blockCount = ix + 1
                   , names = supply
                   }
  return (Name qname)

-- | Make the named block the current insertion point.
setBlock :: Name -> Codegen Name
setBlock bname = do
  modify $ \s -> s { currentBlock = bname }
  return bname

-- | Name of the current insertion block.
getBlock :: Codegen Name
getBlock = gets currentBlock

-- | Replace the current block's state wholesale.
modifyBlock :: BlockState -> Codegen ()
modifyBlock new = do
  active <- gets currentBlock
  modify $ \s -> s { blocks = Map.insert active new (blocks s) }

-- | Fetch the current block's state; 'error' if the current-block
-- name is not registered (internal invariant violation).
current :: Codegen BlockState
current = do
  c <- gets currentBlock
  blks <- gets blocks
  case Map.lookup c blks of
    Just x -> return x
    Nothing -> error $ "No such block: " ++ show c
-------------------------------------------------------------------------------
-- Symbol Table
-------------------------------------------------------------------------------

-- | Bind a variable name to an operand (shadows earlier bindings,
-- since 'getvar' takes the first match).
assign :: String -> Operand -> Codegen ()
assign var x = do
  lcls <- gets symtab
  modify $ \s -> s { symtab = [(var, x)] ++ lcls }

-- | Look up a variable; 'error' when unbound.
getvar :: String -> Codegen Operand
getvar var = do
  syms <- gets symtab
  case lookup var syms of
    Just x  -> return x
    Nothing -> error $ "Local variable not in scope: " ++ show var

-------------------------------------------------------------------------------

-- References

-- | Operand referring to a function-local value.
local ::  Name -> Operand
local = LocalReference

-- | Constant referring to a global.
global ::  Name -> C.Constant
global = C.GlobalReference

-- | Operand referring to an external (global) symbol.
externf :: Name -> Operand
externf = ConstantOperand . C.GlobalReference

-- Arithmetic and Constants

iadd :: Operand -> Operand -> Codegen Operand
iadd a b = instr $ Add False False a b []

isub :: Operand -> Operand -> Codegen Operand
isub a b = instr $ Sub False False a b []

-- NOTE(review): Mul sets both nsw/nuw flags to True while Add/Sub use
-- False False — confirm the wrap-flag asymmetry is intentional.
imul :: Operand -> Operand -> Codegen Operand
imul a b = instr $ Mul True True a b []

idiv :: Operand -> Operand -> Codegen Operand
idiv a b = instr $ SDiv False a b []

-- | Integer comparison with the given predicate.
icmp :: IP.IntegerPredicate -> Operand -> Operand -> Codegen Operand
icmp cond a b = instr $ ICmp cond a b []

{--
fadd :: Operand -> Operand -> Codegen Operand
fadd a b = instr $ FAdd a b []

fsub :: Operand -> Operand -> Codegen Operand
fsub a b = instr $ FSub a b []

fmul :: Operand -> Operand -> Codegen Operand
fmul a b = instr $ FMul a b []

fdiv :: Operand -> Operand -> Codegen Operand
fdiv a b = instr $ FDiv a b []

fcmp :: FP.FloatingPointPredicate -> Operand -> Operand -> Codegen Operand
fcmp cond a b = instr $ FCmp cond a b []
--}

-- | Wrap a constant as an operand.
cons :: C.Constant -> Operand
cons = ConstantOperand

{--
uitofp :: Type -> Operand -> Codegen Operand
uitofp ty a = instr $ UIToFP a ty []
--}

-- | Attach empty attribute lists to call arguments.
toArgs :: [Operand] -> [(Operand, [A.ParameterAttribute])]
toArgs = map (\x -> (x, []))

-- Effects

-- | Direct call with the C calling convention.
call :: Operand -> [Operand] -> Codegen Operand
call fn args = instr $ Call False CC.C [] (Right fn) (toArgs args) [] []

-- | Stack allocation of one value of the given type.
alloca :: Type -> Codegen Operand
alloca ty = instr $ Alloca ty Nothing 0 []

store :: Operand -> Operand -> Codegen Operand
store ptr val = instr $ Store False ptr val Nothing 0 []

load :: Operand -> Codegen Operand
load ptr = instr $ Load False ptr Nothing 0 []

-- Control Flow

-- | Unconditional branch terminator.
br :: Name -> Codegen (Named Terminator)
br val = terminator $ Do $ Br val []

-- | Conditional branch terminator.
cbr :: Operand -> Name -> Name -> Codegen (Named Terminator)
cbr cond tr fl = terminator $ Do $ CondBr cond tr fl []

-- | Return terminator.
ret :: Operand -> Codegen (Named Terminator)
ret val = terminator $ Do $ Ret (Just val) []
| eigengo/hwsexp | core/main/Custom/Codegen.hs | apache-2.0 | 8,731 | 0 | 13 | 1,777 | 2,529 | 1,346 | 1,183 | 177 | 2 |
module Synthax.Lexer
( names
, opNames
, symbol
, identifier
, reserved
, reservedOp
, parens
, integer
, float
, semiSep1
) where
import Prelude
import Text.Parsec
import qualified Text.Parsec.Token as Token
import Text.Parsec.Language
import Text.Parsec.String
-- | Reserved keywords of the Synthax language.
names :: [String]
names = words "Source Code Module Gain Crossfade Filter Let"

-- | Reserved operator names.
opNames :: [String]
opNames = words "<<< >>>"

-- | Token parser configured for Synthax: C-style block comments,
-- '#' line comments, letter-initial identifiers with underscores
-- allowed after the first character.
lexer :: Token.TokenParser a
lexer = Token.makeTokenParser emptyDef
  { Token.commentStart = "/*"
  , Token.commentEnd = "*/"
  , Token.commentLine = "#"
  , Token.identStart = letter
  , Token.identLetter = letter <|> char '_'
  , Token.reservedNames = names
  , Token.reservedOpNames = opNames
  }

-- Thin wrappers exposing the configured token sub-parsers.

identifier :: Parser String
identifier = Token.identifier lexer

symbol :: String -> Parser String
symbol = Token.symbol lexer

reserved :: String -> Parser ()
reserved = Token.reserved lexer

reservedOp :: String -> Parser ()
reservedOp = Token.reservedOp lexer

parens :: Parser a -> Parser a
parens = Token.parens lexer

integer :: Parser Integer
integer = Token.integer lexer

float :: Parser Double
float = Token.float lexer

semiSep1 :: Parser a -> Parser [a]
semiSep1 = Token.semiSep1 lexer
| burz/sonada | Synthax/Lexer.hs | apache-2.0 | 1,211 | 0 | 9 | 222 | 361 | 200 | 161 | 45 | 1 |
-- Per-item values used to weight the input counts.
v = [1, 5, 10, 50, 100, 500 ]
-- | Dot product of two lists (sum of pairwise products).
--
-- Generalized from the original explicit recursion: 'zipWith'
-- truncates to the shorter list instead of crashing on unequal
-- lengths, and empty inputs yield 0.
acm :: Num a => [a] -> [a] -> a
acm xs ys = sum (zipWith (*) xs ys)
-- Returns 1 when the weighted sum of the counts reaches 1000,
-- otherwise 0.
ans counts values
  | acm counts values >= 1000 = 1
  | otherwise                 = 0
-- Reads one whitespace-separated line of integer counts, weights them
-- by 'v', and prints 1 if the total reaches 1000, else 0.
main = do
  l <- getLine
  let i = map read $ words l :: [Int]
      o = ans i v
  print o
| a143753/AOJ | 0296.hs | apache-2.0 | 255 | 0 | 11 | 103 | 169 | 87 | 82 | 13 | 2 |
{-# LANGUAGE FlexibleContexts, ScopedTypeVariables, CPP #-}
{-| Utility functions. -}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Utils
( debug
, debugFn
, debugXy
, sepSplit
, findFirst
, stdDev
, if'
, select
, applyIf
, commaJoin
, ensureQuoted
, tryRead
, readMaybe
, formatTable
, printTable
, parseUnit
, parseUnitAssumeBinary
, plural
, niceSort
, niceSortKey
, exitIfBad
, exitErr
, exitWhen
, exitUnless
, logWarningIfBad
, rStripSpace
, newUUID
, isUUID
, getCurrentTime
, getCurrentTimeUSec
, clockTimeToString
, clockTimeToCTime
, clockTimeToUSec
, cTimeToClockTime
, diffClockTimes
, chompPrefix
, warn
, wrap
, trim
, defaultHead
, exitIfEmpty
, splitEithers
, recombineEithers
, resolveAddr
, monadicThe
, setOwnerAndGroupFromNames
, setOwnerWGroupR
, formatOrdinal
, tryAndLogIOError
, withDefaultOnIOError
, lockFile
, FStat
, nullFStat
, getFStat
, getFStatSafe
, needsReload
, watchFile
, watchFileBy
, safeRenameFile
, FilePermissions(..)
, ensurePermissions
, ordNub
, isSubsequenceOf
, frequency
) where
import Control.Concurrent
import Control.Exception (try, bracket)
import Control.Monad
import Control.Monad.Error
import Control.Monad.Fail (MonadFail)
import qualified Data.Attoparsec.ByteString as A
import qualified Data.ByteString.UTF8 as UTF8
import Data.Char (toUpper, isAlphaNum, isDigit, isSpace)
import qualified Data.Either as E
import Data.Function (on)
import Data.IORef
import Data.List
import qualified Data.Map as M
import Data.Maybe (fromMaybe)
import qualified Data.Set as S
import Foreign.C.Types (CTime(..))
import Numeric (showOct)
import System.Directory (renameFile, createDirectoryIfMissing)
import System.FilePath.Posix (takeDirectory)
import System.INotify
import System.Posix.Types
import Debug.Trace
import Network.Socket
import Ganeti.BasicTypes
import Ganeti.Compat
import qualified Ganeti.ConstantUtils as ConstantUtils
import Ganeti.Logging
import Ganeti.Runtime
import System.IO
import System.Exit
import System.Posix.Files
import System.Posix.IO
import System.Time (ClockTime(..), getClockTime, TimeDiff(..))
import qualified System.Time as STime
-- * Debug functions
-- | Trace-print a value and return it unchanged. Debugging only:
-- breaks referential transparency.
debug :: Show a => a -> a
debug x = traceShow x x

-- | Trace-print @fn x@ (forcing it) before returning @x@ itself.
debugFn :: Show b => (a -> b) -> a -> a
debugFn fn x = seq (debug (fn x)) x

-- | Trace-print the first argument, then return the second.
debugXy :: Show a => a -> b -> b
debugXy x y = debug x `seq` y
-- * Miscellaneous
-- | Apply the function when the condition holds; otherwise return the
-- value untouched.
applyIf :: Bool -> (a -> a) -> a -> a
applyIf cond fn val
  | cond      = fn val
  | otherwise = val
-- | Join a list of strings with commas (empty list yields "").
commaJoin :: [String] -> String
commaJoin fields = concat (intersperse "," fields)
-- | Split a list on a separator element.
--
-- The empty list yields @[]@; a trailing separator yields a final
-- empty chunk (e.g. @sepSplit ',' "a,"@ is @["a", ""]@).
sepSplit :: Eq a => a -> [a] -> [[a]]
sepSplit sep input
  | null input = []
  | otherwise  = chunks input
  where
    chunks str =
      case break (== sep) str of
        (chunk, [])       -> [chunk]
        (chunk, _:remain) -> chunk : chunks remain
-- | Finds the first unused element in a set starting from a given base.
--
-- The set is split around @base@: if @base@ is absent it is returned
-- directly; otherwise the elements above @base@ are scanned for the
-- first gap in the ascending sequence, falling back to one past the
-- set's maximum when that tail is contiguous.
findFirst :: (Ord a, Enum a) => a -> S.Set a -> a
findFirst base xs =
  case S.splitMember base xs of
    (_, False, _) -> base
    -- zip candidates [succ base ..] against actual members; the first
    -- position where candidate < member marks a hole
    (_, True, ys) -> fromMaybe (succ base) $
      (fmap fst . find (uncurry (<)) . zip [succ base..] . S.toAscList $ ys)
      `mplus` fmap (succ . fst) (S.maxView ys)
-- | Pick the singular form for a count of exactly 1, the plural form
-- otherwise.
plural :: Int -> String -> String -> String
plural cnt sing plur = if cnt == 1 then sing else plur
-- | Wrap a value in single quotes unless it consists solely of
-- alphanumeric characters and dots (empty strings pass unquoted).
ensureQuoted :: String -> String
ensureQuoted v
  | all plainChar v = v
  | otherwise       = "'" ++ v ++ "'"
  where plainChar c = isAlphaNum c || c == '.'
-- * Mathematical functions
-- Simple and slow statistical functions, please replace with better
-- versions
-- | Population standard deviation.
--
-- Length and sum are computed in one strict fold (kept single-pass
-- for performance, as in the original); a second fold accumulates the
-- squared deviations.
stdDev :: [Double] -> Double
stdDev lst = sqrt (sumSqDev / n)
  where
    (cnt, total) = foldl' tally (0 :: Int, 0) lst
    tally (c, s) e = let c' = c + 1
                         s' = s + e
                     in c' `seq` s' `seq` (c', s')
    n = fromIntegral cnt :: Double
    mean = total / n
    sumSqDev = foldl' (\acc e -> let d = e - mean in acc + d * d) 0.0 lst
-- * Logical functions
-- Avoid syntactic sugar and enhance readability. These functions are proposed
-- by some for inclusion in the Prelude, and at the moment they are present
-- (with various definitions) in the utility-ht package. Some rationale and
-- discussion is available at <http://www.haskell.org/haskellwiki/If-then-else>
-- | \"if\" as a function, rather than as syntactic sugar.
if' :: Bool -- ^ condition
-> a -- ^ \"then\" result
-> a -- ^ \"else\" result
-> a -- ^ \"then\" or "else" result depending on the condition
if' True x _ = x
if' _ _ y = y
-- * Parsing utility functions
-- | Interpret the result list of a 'reads'-style parse: exactly one
-- full parse succeeds; leftover input or ambiguity/failure 'fail's in
-- the given monad.
parseChoices :: MonadFail m => String -> String -> [(a, String)] -> m a
parseChoices name s results =
  case results of
    [(v, "")] -> return v
    [(_, e)] ->
      fail $ name ++ ": leftover characters when parsing '"
             ++ s ++ "': '" ++ e ++ "'"
    _ -> fail $ name ++ ": cannot parse string '" ++ s ++ "'"

-- | Safe 'read': parses via 'reads' and reports problems through the
-- monad's 'fail' (e.g. 'Nothing' in 'Maybe').
tryRead :: (MonadFail m, Read a) => String -> String -> m a
tryRead name txt = parseChoices name txt (reads txt)
-- | Parse a string using the 'Read' instance, succeeding only when
-- there is exactly one parse that consumes the whole input.
--
-- /Backport from Text.Read introduced in base-4.6.0.0/
readMaybe :: Read a => String -> Maybe a
readMaybe s =
  case reads s of
    [(val, rest)] | null rest -> Just val
    _                         -> Nothing
-- | Format a table of strings to maintain consistent length.
--
-- Each column is padded to its widest entry; columns flagged 'True'
-- in @numpos@ are right-aligned (padding before the value), the rest
-- left-aligned. Assumes all rows have the same number of columns.
formatTable :: [[String]] -> [Bool] -> [[String]]
formatTable vals numpos =
  let vtrans = transpose vals -- transpose, so that we work on rows
                              -- rather than columns
      mlens = map (maximum . map length) vtrans
      expnd = map (\(flds, isnum, ml) ->
                     map (\val ->
                            let delta = ml - length val
                                filler = replicate delta ' '
                            in if delta > 0
                                 then if isnum
                                        then filler ++ val
                                        else val ++ filler
                                 else val
                         ) flds
                  ) (zip3 vtrans numpos mlens)
  in transpose expnd
-- | Render a header plus rows as one printable string: each line is
-- the given prefix, a space, and the space-joined padded fields.
printTable :: String -> [String] -> [[String]] -> [Bool] -> String
printTable lp header rows isnum =
  unlines [ lp ++ ' ' : unwords fields
          | fields <- formatTable (header:rows) isnum ]
-- | Converts a unit (e.g. m or GB) into a scaling factor.
--
-- The result multiplies a value in the given unit into MiB. Lowercase
-- single letters and *IB suffixes are binary (1024-based); uppercase
-- single letters and *B suffixes are SI (1000-based), unless
-- @noDecimal@ forces binary kilos everywhere.
parseUnitValue :: (MonadFail m) => Bool -> String -> m Rational
parseUnitValue noDecimal unit
  -- binary conversions first
  | null unit = return 1
  | unit == "m" || upper == "MIB" = return 1
  | unit == "g" || upper == "GIB" = return kbBinary
  | unit == "t" || upper == "TIB" = return $ kbBinary * kbBinary
  -- SI conversions
  | unit == "M" || upper == "MB" = return mbFactor
  | unit == "G" || upper == "GB" = return $ mbFactor * kbDecimal
  | unit == "T" || upper == "TB" = return $ mbFactor * kbDecimal * kbDecimal
  | otherwise = fail $ "Unknown unit '" ++ unit ++ "'"
  where upper = map toUpper unit
        kbBinary = 1024 :: Rational
        kbDecimal = if noDecimal then kbBinary else 1000
        decToBin = kbDecimal / kbBinary -- factor for 1K conversion
        mbFactor = decToBin * decToBin -- twice the factor for just 1K
-- | Tries to extract number and scale from the given string.
--
-- Input must be in the format NUMBER+ SPACE* [UNIT]. If no unit is
-- specified, it defaults to MiB. Return value is always an integral
-- value in MiB; if the first argument is True, all kilos are binary.
parseUnitEx :: (MonadFail m, Integral a, Read a) => Bool -> String -> m a
parseUnitEx noDecimal str =
  -- TODO: enhance this by splitting the unit parsing code out and
  -- accepting floating-point numbers
  case (reads str::[(Int, String)]) of
    [(v, suffix)] ->
      -- unit may be separated from the number by spaces
      let unit = dropWhile (== ' ') suffix
      in do
        scaling <- parseUnitValue noDecimal unit
        return $ truncate (fromIntegral v * scaling)
    _ -> fail $ "Can't parse string '" ++ str ++ "'"
-- | Tries to extract number and scale from the given string.
--
-- Input must be in the format NUMBER+ SPACE* [UNIT]. If no unit is
-- specified, it defaults to MiB. Return value is always an integral
-- value in MiB; SI suffixes use decimal kilos.
parseUnit :: (MonadFail m, Integral a, Read a) => String -> m a
parseUnit = parseUnitEx False

-- | Tries to extract a number and scale from a given string, taking
-- all kilos to be binary.
parseUnitAssumeBinary :: (MonadFail m, Integral a, Read a) => String -> m a
parseUnitAssumeBinary = parseUnitEx True
-- | Unwraps a 'Result', exiting the program if it is a 'Bad' value,
-- otherwise returning the actual contained value.
exitIfBad :: String -> Result a -> IO a
exitIfBad msg (Bad s) = exitErr (msg ++ ": " ++ s)
exitIfBad _ (Ok v) = return v

-- | Prints the message to stderr and exits with status 1; never
-- returns normally ('exitWith' throws 'ExitCode').
exitErr :: String -> IO a
exitErr errmsg = do
  hPutStrLn stderr $ "Error: " ++ errmsg
  exitWith (ExitFailure 1)

-- | Exits with the given error message when the condition is true.
exitWhen :: Bool -> String -> IO ()
exitWhen True msg = exitErr msg
exitWhen False _ = return ()

-- | Exits with an error message /unless/ the given boolean condition
-- is true, the opposite of 'exitWhen'.
exitUnless :: Bool -> String -> IO ()
exitUnless cond = exitWhen (not cond)
-- | Unwraps a 'Result', logging a warning message and then returning a default
-- value if it is a 'Bad' value, otherwise returning the actual contained value.
logWarningIfBad :: String -> a -> Result a -> IO a
logWarningIfBad msg defVal (Bad s) = do
  logWarning $ msg ++ ": " ++ s
  return defVal
logWarningIfBad _ _ (Ok v) = return v
-- | Try an IO interaction, log errors and unfold as a 'Result'.
-- Only 'IOError's are caught; other exceptions propagate.
tryAndLogIOError :: IO a -> String -> (a -> Result b) -> IO (Result b)
tryAndLogIOError io msg okfn =
 try io >>= either
   (\ e -> do
       let combinedmsg = msg ++ ": " ++ show (e :: IOError)
       logError combinedmsg
       return . Bad $ combinedmsg)
   (return . okfn)
-- | Run an IO action, substituting the given default value whenever
-- the action throws an 'IOError' (other exceptions propagate).
withDefaultOnIOError :: a -> IO a -> IO a
withDefaultOnIOError dflt action = do
  res <- try action
  case res of
    Left err -> (err :: IOError) `seq` return dflt
    Right v  -> return v
-- | Print a warning to stderr (prefixed with \"Warning: \"), but do
-- not exit.
warn :: String -> IO ()
warn = hPutStrLn stderr . (++) "Warning: "
-- | Helper for 'niceSort': breaks a string into alternating maximal
-- digit / non-digit runs, digits as @Left Integer@ and the rest as
-- @Right String@. The accumulator is kept reversed and flipped at the
-- end; the returned remainder is always empty.
extractKey :: [Either Integer String] -- ^ Accumulated key parts, reversed
           -> String                  -- ^ Remaining input
           -> ([Either Integer String], String)
extractKey acc [] = (reverse acc, [])
extractKey acc str@(c:_)
  | isDigit c = let (digits, remain) = span isDigit str
                in extractKey (Left (read digits) : acc) remain
  | otherwise = let (other, remain) = span (not . isDigit) str
                in extractKey (Right other : acc) remain

-- | Sort strings so that embedded numbers compare numerically.
--
-- Given @["a1", "a10", "a11", "a2"]@ this yields the logical order
-- @["a1", "a2", "a10", "a11"]@. Each name is decomposed into digit
-- and non-digit groups (see 'extractKey'); the built-in 'Either'
-- ordering on those groups then gives the desired comparison.
niceSort :: [String] -> [String]
niceSort = niceSortKey id

-- | Key-projecting version of 'niceSort': sorts arbitrary values by
-- the nice-sort order of a derived string, using a stable
-- decorate-sort-undecorate so only the key is compared.
niceSortKey :: (a -> String) -> [a] -> [a]
niceSortKey keyfn xs =
  map snd (sortOn fst [ (fst (extractKey [] (keyfn x)), x) | x <- xs ])
-- | Strip trailing whitespace characters (including newline).
--
-- Uses 'dropWhileEnd' instead of the original double-'reverse', which
-- is both clearer and avoids materialising two reversed copies.
rStripSpace :: String -> String
rStripSpace = dropWhileEnd isSpace
-- | Returns a random UUID.
-- This is a Linux-specific method as it uses the /proc filesystem.
newUUID :: IO String
newUUID = do
  contents <- readFile ConstantUtils.randomUuidFile
  -- take 128 bounds how much of the lazily-read file is consumed;
  -- ($!) forces the trimmed result before returning
  return $! rStripSpace $ take 128 contents
-- | Parser that doesn't fail on a valid UUIDs (same as
-- "Ganeti.Constants.uuidRegex"): 8-4-4-4-12 lowercase hex groups
-- separated by dashes.
uuidCheckParser :: A.Parser ()
uuidCheckParser = do
  -- Not using Attoparsec.Char8 because "all attempts to use characters
  -- above code point U+00FF will give wrong answers" and we don't
  -- want such things to be accepted as UUIDs.
  let lowerHex = A.satisfy (\c -> (48 <= c && c <= 57) ||  -- 0-9
                                  (97 <= c && c <= 102))   -- a-f
      hx n = A.count n lowerHex
      d = A.word8 45 -- '-'
  void $ hx 8 >> d >> hx 4 >> d >> hx 4 >> d >> hx 4 >> d >> hx 12

-- | Checks if the string is a valid UUID as in "Ganeti.Constants.uuidRegex".
isUUID :: String -> Bool
isUUID =
  isRight . A.parseOnly (uuidCheckParser <* A.endOfInput) . UTF8.fromString
-- | Returns the current time as an 'Integer' representing the number
-- of seconds from the Unix epoch.
getCurrentTime :: IO Integer
getCurrentTime = do
  TOD ctime _ <- getClockTime
  return ctime

-- | Returns the current time as an 'Integer' representing the number
-- of microseconds from the Unix epoch (hence the need for 'Integer').
getCurrentTimeUSec :: IO Integer
getCurrentTimeUSec = liftM clockTimeToUSec getClockTime

-- | Convert a ClockTime into a (seconds-only) timestamp.
clockTimeToString :: ClockTime -> String
clockTimeToString (TOD t _) = show t

-- | Convert a ClockTime into a (seconds-only) 'EpochTime' (AKA @time_t@).
clockTimeToCTime :: ClockTime -> EpochTime
clockTimeToCTime (TOD secs _) = fromInteger secs

-- | Convert a ClockTime to the number of microseconds since the epoch.
clockTimeToUSec :: ClockTime -> Integer
clockTimeToUSec (TOD ctime pico) =
  -- pico: 10^-12, micro: 10^-6, so we have to shift seconds left and
  -- picoseconds right
  ctime * 1000000 + pico `div` 1000000

-- | Convert a (seconds-only) 'EpochTime' (AKA @time_t@) into a ClockTime.
cTimeToClockTime :: EpochTime -> ClockTime
cTimeToClockTime (CTime timet) = TOD (toInteger timet) 0

-- | A version of `diffClockTimes` that works around ghc bug #2519:
-- normalises a negative picosecond component by borrowing one second.
diffClockTimes :: ClockTime -> ClockTime -> TimeDiff
diffClockTimes t1 t2 =
  let delta = STime.diffClockTimes t1 t2
      secondInPicoseconds = 1000000000000
  in if tdPicosec delta < 0
       then delta { tdSec = tdSec delta - 1
                  , tdPicosec = tdPicosec delta + secondInPicoseconds
                  }
       else delta
{-| Strip a prefix from a string, allowing the last character of the prefix
(which is assumed to be a separator) to be absent from the string if the string
terminates there.
\>>> chompPrefix \"foo:bar:\" \"a:b:c\"
Nothing
\>>> chompPrefix \"foo:bar:\" \"foo:bar:baz\"
Just \"baz\"
\>>> chompPrefix \"foo:bar:\" \"foo:bar:\"
Just \"\"
\>>> chompPrefix \"foo:bar:\" \"foo:bar\"
Just \"\"
\>>> chompPrefix \"foo:bar:\" \"foo:barbaz\"
Nothing
-}
chompPrefix :: String -> String -> Maybe String
chompPrefix pfx str
  -- full prefix present: strip it
  | pfx `isPrefixOf` str = Just (drop (length pfx) str)
  -- string equals the prefix minus its trailing separator: treat as
  -- an empty remainder (drop past the end yields "")
  | str == init pfx      = Just ""
  | otherwise            = Nothing
-- | Breaks a string in lines with length \<= maxWidth.
--
-- NOTE: The split is OK if:
--
-- * It doesn't break a word, i.e. the next line begins with space
--   (@isSpace . head $ rest@) or the current line ends with space
--   (@null revExtra@);
--
-- * It breaks a very big word that doesn't fit anyway (@null revLine@).
wrap :: Int      -- ^ maxWidth
     -> String   -- ^ string that needs wrapping
     -> [String] -- ^ string \"broken\" in lines
wrap maxWidth = filter (not . null) . map trim . wrap0
  where wrap0 :: String -> [String]
        wrap0 text
          | length text <= maxWidth = [text]
          | isSplitOK = line : wrap0 rest
          | otherwise = line' : wrap0 rest'
          -- line/rest: naive split at maxWidth; line'/rest': split moved
          -- back to the last space so a word is not broken
          where (line, rest) = splitAt maxWidth text
                (revExtra, revLine) = break isSpace . reverse $ line
                (line', rest') = (reverse revLine, reverse revExtra ++ rest)
                isSplitOK =
                  null revLine || null revExtra || startsWithSpace rest
                startsWithSpace (x:_) = isSpace x
                startsWithSpace _ = False
-- | Removes surrounding whitespace. Should only be used on small
-- strings.
--
-- Uses 'dropWhileEnd' for the right side instead of the original
-- reverse/dropWhile/reverse pipeline.
trim :: String -> String
trim = dropWhileEnd isSpace . dropWhile isSpace
-- | Total replacement for 'head': returns the first element, or the
-- supplied default when the list is empty.
defaultHead :: a -> [a] -> a
defaultHead dflt lst = case lst of
  []      -> dflt
  (y : _) -> y
-- | A 'head' version in the I/O monad, for validating parameters
-- without which we cannot continue: exits the program (via 'exitErr')
-- with the given message on an empty list.
exitIfEmpty :: String -> [a] -> IO a
exitIfEmpty _ (x:_) = return x
exitIfEmpty s [] = exitErr s
-- | Obtain the single distinct element of a list in an arbitrary
-- monad: succeeds iff the list is non-empty and all elements are
-- equal; otherwise 'fail's with the given message.
monadicThe :: (Eq a, MonadFail m) => String -> [a] -> m a
monadicThe msg [] = fail msg
monadicThe msg (y:ys)
  | all (== y) ys = return y
  | otherwise     = fail msg
-- | Split an 'Either' list into separate \"left\" and \"right\" lists
-- plus a \"trail\" of tags ('False' for 'Left', 'True' for 'Right')
-- that allows 'recombineEithers' to reconstruct the original.
--
-- All three result lists come out reversed relative to the input;
-- their combined length equals the input length.
splitEithers :: [Either a b] -> ([a], [b], [Bool])
splitEithers = foldl' step ([], [], [])
  where
    step (ls, rs, tr) (Left v)  = (v : ls, rs, False : tr)
    step (ls, rs, tr) (Right v) = (ls, v : rs, True : tr)
-- | Recombines two \"left\" and \"right\" lists using a \"trail\"
-- list into a single 'Either' list.
--
-- This is the counterpart to 'splitEithers'. It does the opposite
-- transformation, and the output list will be the reverse of the
-- input lists. Since 'splitEithers' also reverses the lists, calling
-- these together will result in the original list.
--
-- Mismatches in the structure of the lists (e.g. inconsistent
-- lengths) are represented via 'Bad'; normally this function should
-- not fail, if lists are passed as generated by 'splitEithers'.
recombineEithers :: (Show a, Show b) =>
                    [a] -> [b] -> [Bool] -> Result [Either a b]
recombineEithers lefts rights trail =
  foldM recombiner ([], lefts, rights) trail >>= checker
  -- checker: after the fold both source lists must be exhausted
  where checker (eithers, [], []) = Ok eithers
        checker (_, lefts', rights') =
          Bad $ "Inconsistent results after recombination, l'=" ++
                show lefts' ++ ", r'=" ++ show rights'
        -- recombiner: False consumes a left, True consumes a right
        recombiner (es, l:ls, rs) False = Ok (Left l:es, ls, rs)
        recombiner (es, ls, r:rs) True = Ok (Right r:es, ls, rs)
        recombiner (_, ls, rs) t = Bad $ "Inconsistent trail log: l=" ++
                                   show ls ++ ", r=" ++ show rs ++ ",t=" ++
                                   show t
-- | Default hints for the resolver: numeric-only host and service
-- lookups (no DNS round-trips).
resolveAddrHints :: Maybe AddrInfo
resolveAddrHints =
  Just defaultHints { addrFlags = [AI_NUMERICHOST, AI_NUMERICSERV] }

-- | Resolves a numeric address, returning the family and socket
-- address of the first result.
resolveAddr :: Int -> String -> IO (Result (Family, SockAddr))
resolveAddr port str = do
  resolved <- getAddrInfo resolveAddrHints (Just str) (Just (show port))
  return $ case resolved of
             [] -> Bad "Invalid results from lookup?"
             best:_ -> Ok (addrFamily best, addrAddress best)
-- | Set the owner and the group of a file (given as names, not numeric id).
-- Exits the program (via 'exitIfBad') if the runtime users/groups
-- cannot be resolved.
setOwnerAndGroupFromNames :: FilePath -> GanetiDaemon -> GanetiGroup -> IO ()
setOwnerAndGroupFromNames filename daemon dGroup = do
  -- TODO: it would be nice to rework this (or getEnts) so that runtimeEnts
  -- is read only once per daemon startup, and then cached for further usage.
  runtimeEnts <- runResultT getEnts
  ents <- exitIfBad "Can't find required user/groups" runtimeEnts
  -- note: we use directly ! as lookup failures shouldn't happen, due
  -- to the map construction
  let uid = reUserToUid ents M.! daemon
  let gid = reGroupToGid ents M.! dGroup
  setOwnerAndGroup filename uid gid
-- | Resets permissions so that the owner can read/write and the group only
-- read. All other permissions are cleared (mode 0640).
setOwnerWGroupR :: FilePath -> IO ()
setOwnerWGroupR path = setFileMode path mode
  where mode = foldl unionFileModes nullFileMode
               [ownerReadMode, ownerWriteMode, groupReadMode]
-- | Formats an integral number as an English ordinal (1st, 2nd, 3rd,
-- 4th, 11th, 21st, ...).
--
-- Fixed: the teens exception now applies in every hundred (the
-- original only special-cased 10 < num < 20, so e.g. 111 rendered as
-- \"111st\" instead of \"111th\").
formatOrdinal :: (Integral a, Show a) => a -> String
formatOrdinal num
  | rem100 >= 11 && rem100 <= 13 = suffix "th"  -- 11th/12th/13th in any hundred
  | rem10 == 1 = suffix "st"
  | rem10 == 2 = suffix "nd"
  | rem10 == 3 = suffix "rd"
  | otherwise = suffix "th"
  where rem100 = num `mod` 100
        rem10 = num `mod` 10
        suffix s = show num ++ s
-- | Attempt, in a non-blocking way, to obtain a lock on a given file; report
-- back success.
-- Returns the file descriptor so that the lock can be released by closing.
--
-- NOTE(review): if 'setLock' throws (lock already held), the freshly
-- opened descriptor is not closed on that path — confirm whether the
-- caller handles this or a bracket is needed.
lockFile :: FilePath -> IO (Result Fd)
lockFile path = runResultT . liftIO $ do
  handle <- openFile path WriteMode
  fd <- handleToFd handle
  setLock fd (WriteLock, AbsoluteSeek, 0, 0)
  return fd
-- | File stat identifier: modification time, inode and size, used to
-- detect on-disk changes cheaply.
type FStat = (EpochTime, FileID, FileOffset)

-- | Null 'FStat' value (matches no real file).
nullFStat :: FStat
nullFStat = (-1, -1, -1)

-- | Computes the file cache data from a FileStatus structure.
buildFileStatus :: FileStatus -> FStat
buildFileStatus ofs =
  let modt = modificationTime ofs
      inum = fileID ofs
      fsize = fileSize ofs
  in (modt, inum, fsize)

-- | Wrapper over 'buildFileStatus'. This reads the data from the
-- filesystem and then builds our cache structure.
getFStat :: FilePath -> IO FStat
getFStat p = liftM buildFileStatus (getFileStatus p)

-- | Safe version of 'getFStat', that ignores IOErrors and returns
-- 'nullFStat' instead (e.g. for missing files).
getFStatSafe :: FilePath -> IO FStat
getFStatSafe fpath = liftM (either (const nullFStat) id)
                       ((try $ getFStat fpath) :: IO (Either IOError FStat))
-- | Check if the file needs reloading: returns the fresh 'FStat' when it
-- differs from the cached one, or 'Nothing' when the file is unchanged.
needsReload :: FStat -> FilePath -> IO (Maybe FStat)
needsReload oldstat path = do
  newstat <- getFStat path
  if newstat == oldstat
    then return Nothing
    else return (Just newstat)
-- | Until the given point in time (useconds since the epoch), wait
-- for the output of a given method to change and return the new value;
-- make use of the promise that the output only changes if the reference
-- has a value different than the given one.
--
-- Polls the IORef every 100ms; @read_fn@ is only re-run once the ref is
-- observed to differ from @base@, and its result is accepted only when
-- @check@ holds.  Past the deadline @read_fn@'s value is returned as-is.
watchFileEx :: (Eq b) => Integer -> b -> IORef b -> (a -> Bool) -> IO a -> IO a
watchFileEx endtime base ref check read_fn = do
  current <- getCurrentTimeUSec
  if current > endtime then read_fn else do
    val <- readIORef ref
    if val /= base
      then do
        new <- read_fn
        if check new then return new else do
          logDebug "Observed change not relevant"
          threadDelay 100000
          watchFileEx endtime val ref check read_fn
      else do
        threadDelay 100000
        watchFileEx endtime base ref check read_fn
-- | Within the given timeout (in seconds), wait for for the output
-- of the given method to satisfy a given predicate and return the new value;
-- make use of the promise that the method will only change its value, if
-- the given file changes on disk. If the file does not exist on disk, return
-- immediately.
--
-- Registers inotify watches (Modify/Delete) on the file and stores the
-- file's 'FStat' fingerprint in an IORef that 'watchFileEx' polls.  The
-- 'Ignored' event re-registers the watch (it fires when the watched path
-- is removed/replaced).
watchFileBy :: FilePath -> Int -> (a -> Bool) -> IO a -> IO a
watchFileBy fpath timeout check read_fn = do
  current <- getCurrentTimeUSec
  let endtime = current + fromIntegral timeout * 1000000
  fstat <- getFStatSafe fpath
  ref <- newIORef fstat
  -- bracket guarantees the inotify descriptor is torn down on exit
  bracket initINotify killINotify $ \inotify -> do
    let do_watch e = do
          logDebug $ "Notified of change in " ++ fpath
                       ++ "; event: " ++ show e
          when (e == Ignored)
            (addWatch inotify [Modify, Delete]
              (toInotifyPath fpath) do_watch >> return ())
          fstat' <- getFStatSafe fpath
          writeIORef ref fstat'
    _ <- addWatch inotify [Modify, Delete] (toInotifyPath fpath) do_watch
    newval <- read_fn
    if check newval
      then do
        logDebug $ "File " ++ fpath ++ " changed during setup of inotify"
        return newval
      else watchFileEx endtime fstat ref check read_fn
-- | Within the given timeout (in seconds), wait for for the output
-- of the given method to change and return the new value; make use of
-- the promise that the method will only change its value, if
-- the given file changes on disk. If the file does not exist on disk, return
-- immediately.
watchFile :: Eq a => FilePath -> Int -> a -> IO a -> IO a
watchFile fpath timeout old = watchFileBy fpath timeout (/= old)
-- | Type describing ownership and permissions of newly generated
-- directories and files. All parameters are optional, with nothing
-- meaning that the default value should be left untouched.
data FilePermissions = FilePermissions { fpOwner :: Maybe GanetiDaemon
                                         -- ^ Desired owner, by daemon name.
                                       , fpGroup :: Maybe GanetiGroup
                                         -- ^ Desired group, by group name.
                                       , fpPermissions :: FileMode
                                         -- ^ Exact permission bits to enforce.
                                       }
-- | Ensure that a given file or directory has the permissions, and
-- possibly ownerships, as required.
--
-- Each of the three adjustments (owner, group, mode) is attempted
-- independently and its IOError, if any, captured; all collected errors
-- are reported together in the final 'Bad' result.  Chown calls pass
-- (-1) for the id that should stay unchanged.
ensurePermissions :: FilePath -> FilePermissions -> IO (Result ())
ensurePermissions fpath perms = do
  -- Fetch the list of entities
  runtimeEnts <- runResultT getEnts
  ents <- exitIfBad "Can't determine user/group ids" runtimeEnts
  -- Get the existing file properties
  eitherFileStatus <- try $ getFileStatus fpath
                      :: IO (Either IOError FileStatus)
  -- And see if any modifications are needed
  (flip $ either (return . Bad . show)) eitherFileStatus $ \fstat -> do
    ownertry <- case fpOwner perms of
      Nothing -> return $ Right ()
      Just owner -> try $ do
        let ownerid = reUserToUid ents M.! owner
        unless (ownerid == fileOwner fstat) $ do
          logDebug $ "Changing owner of " ++ fpath ++ " to " ++ show owner
          setOwnerAndGroup fpath ownerid (-1)
    grouptry <- case fpGroup perms of
      Nothing -> return $ Right ()
      Just grp -> try $ do
        let groupid = reGroupToGid ents M.! grp
        unless (groupid == fileGroup fstat) $ do
          logDebug $ "Changing group of " ++ fpath ++ " to " ++ show grp
          setOwnerAndGroup fpath (-1) groupid
    let fp = fpPermissions perms
    permtry <- if fileMode fstat == fp
                 then return $ Right ()
                 else try $ do
                   logInfo $ "Changing permissions of " ++ fpath ++ " to "
                               ++ showOct fp ""
                   setFileMode fpath fp
    let errors = E.lefts ([ownertry, grouptry, permtry] :: [Either IOError ()])
    if null errors
      then return $ Ok ()
      else return . Bad $ show errors
-- | Safely rename a file, creating the target directory, if needed.
--
-- First tries a plain rename; only if that raises an IOError does it
-- create the destination's parent directory (applying @perms@ to it)
-- and retry.  The result of 'ensurePermissions' on the directory is
-- deliberately ignored here; only IOErrors make this return 'Bad'.
safeRenameFile :: FilePermissions -> FilePath -> FilePath -> IO (Result ())
safeRenameFile perms from to = do
  directtry <- try $ renameFile from to
  case (directtry :: Either IOError ()) of
    Right () -> return $ Ok ()
    Left _ -> do
      result <- try $ do
        let dir = takeDirectory to
        createDirectoryIfMissing True dir
        _ <- ensurePermissions dir perms
        renameFile from to
      return $ either (Bad . show) Ok (result :: Either IOError ())
-- | Removes duplicates, preserving order.  Unlike 'nub' this needs an
-- 'Ord' instance, which allows an O(n log n) set-based implementation.
ordNub :: (Ord a) => [a] -> [a]
ordNub = walk S.empty
  where
    walk _ [] = []
    walk seen (y:ys)
      | y `S.member` seen = walk seen ys
      | otherwise         = y : walk (S.insert y seen) ys
{-# ANN frequency "HLint: ignore Use alternative" #-}
-- | Returns a list of tuples of elements and the number of times they occur
-- in a list.  The input is sorted first, so the output pairs appear in
-- ascending order of the element.
frequency :: Ord t => [t] -> [(Int, t)]
frequency xs = [ (length grp, g) | grp@(g:_) <- group (sort xs) ]
| ganeti/ganeti | src/Ganeti/Utils.hs | bsd-2-clause | 30,303 | 0 | 24 | 7,566 | 6,986 | 3,677 | 3,309 | 497 | 5 |
module Shader where
import Graphics.GL
import Control.Monad
import Control.Monad.Trans
import Foreign
import Foreign.C.String
import qualified Data.ByteString as BS
import qualified Data.Text.Encoding as Text
import qualified Data.Text.IO as Text
import Linear
import Data.Foldable
-- | Handle to a linked GL program object (as produced by glCreateProgram).
newtype GLProgram = GLProgram { unGLProgram :: GLuint }
-- | Location of a named vertex attribute within a program.
newtype AttributeLocation = AttributeLocation { unAttributeLocation :: GLint }
-- | Location of a named uniform within a program.
newtype UniformLocation = UniformLocation { unUniformLocation :: GLint }
-- | Allocate temporary storage for one 'Storable' value, run the given
-- action on the pointer (typically an FFI call using it as an
-- out-parameter, whose own result is discarded), and return whatever
-- the action stored there.
overPtr :: (MonadIO m, Storable a) => (Ptr a -> IO b) -> m a
overPtr act = liftIO $ alloca $ \ptr -> do
  _ <- act ptr
  peek ptr
-- | Make the given program the active one for subsequent GL calls.
useProgram :: MonadIO m => GLProgram -> m ()
useProgram (GLProgram program) = glUseProgram (fromIntegral program)
-- | Upload a 4x4 float matrix to the given uniform location.
-- The matrix is transposed on the CPU ('transpose') before upload and
-- GL_FALSE is passed for the transpose flag of glUniformMatrix4fv
-- (presumably converting the 'M44' row layout to what GL expects --
-- verify against the Linear conventions used by callers).
uniformM44 :: UniformLocation -> M44 GLfloat -> IO ()
uniformM44 uniform matrix = do
  let mvpUniformLoc = fromIntegral (unUniformLocation uniform)
  withArray (concatMap toList (transpose matrix)) (\matrixPtr ->
    glUniformMatrix4fv mvpUniformLoc 1 GL_FALSE matrixPtr)
---------------
-- Load shaders
---------------
-- | Compile the vertex and fragment shaders at the given paths, link
-- them into a fresh program, and return its handle.  Link/compile info
-- logs are printed to stdout; failures do NOT abort -- the (possibly
-- unusable) program handle is returned regardless.
createShaderProgram :: FilePath -> FilePath -> IO GLProgram
createShaderProgram vertexShaderPath fragmentShaderPath =
  do vertexShader <- glCreateShader GL_VERTEX_SHADER
     compileShader vertexShaderPath vertexShader
     fragmentShader <- glCreateShader GL_FRAGMENT_SHADER
     compileShader fragmentShaderPath fragmentShader
     shaderProg <- glCreateProgram
     glAttachShader shaderProg vertexShader
     glAttachShader shaderProg fragmentShader
     glLinkProgram shaderProg
     linked <- overPtr (glGetProgramiv shaderProg GL_LINK_STATUS)
     -- On link failure, fetch and print the program info log.
     when (linked == fromIntegral GL_FALSE)
          (do maxLength <- overPtr (glGetProgramiv shaderProg GL_INFO_LOG_LENGTH)
              logLines <- allocaArray
                            (fromIntegral maxLength)
                            (\p ->
                               alloca (\lenP ->
                                         do glGetProgramInfoLog shaderProg maxLength lenP p
                                            len <- peek lenP
                                            peekCStringLen (p,fromIntegral len)))
              putStrLn logLines)
     return (GLProgram shaderProg)
  where compileShader path shader =
          do src <- Text.readFile path
             -- Hand the UTF-8 encoded source to GL as a single string.
             BS.useAsCString
               (Text.encodeUtf8 src)
               (\ptr ->
                  withArray [ptr]
                            (\srcs ->
                               glShaderSource shader 1 srcs nullPtr))
             glCompileShader shader
             -- NOTE(review): `when True` always runs -- the info log is
             -- fetched unconditionally (it may contain warnings even on
             -- successful compiles), and printed only when non-empty.
             when True
                  (do maxLength <- overPtr (glGetShaderiv shader GL_INFO_LOG_LENGTH)
                      logLines <- allocaArray
                                    (fromIntegral maxLength)
                                    (\p ->
                                       alloca (\lenP ->
                                                 do glGetShaderInfoLog shader maxLength lenP p
                                                    len <- peek lenP
                                                    peekCStringLen (p,fromIntegral len)))
                      when (length logLines > 0)
                           (do putStrLn ("In " ++ path ++ ":")
                               putStrLn logLines)
                  )
-- | Look up the location of a named vertex attribute in a linked program.
-- Raises an 'error' when the attribute is not active in the program
-- (glGetAttribLocation returns -1 in that case, e.g. for a name that
-- does not exist or was optimized out).
--
-- Fix: corrected the typo \"Coudn't\" in the error message.
getShaderAttribute :: GLProgram -> String -> IO AttributeLocation
getShaderAttribute (GLProgram prog) attributeName = do
  location <- withCString attributeName $ \attributeNameCString ->
    glGetAttribLocation prog attributeNameCString
  when (location == -1) $ error $ "Couldn't bind attribute: " ++ attributeName
  return (AttributeLocation location)
-- | Look up the location of a named uniform in a linked program.
-- Raises an 'error' when the uniform is not active in the program
-- (glGetUniformLocation returns -1 in that case).
--
-- Fix: corrected the typo \"Coudn't\" in the error message.
getShaderUniform :: GLProgram -> String -> IO UniformLocation
getShaderUniform (GLProgram prog) uniformName = do
  location <- withCString uniformName $ \uniformNameCString ->
    glGetUniformLocation prog uniformNameCString
  when (location == -1) $ error $ "Couldn't bind uniform: " ++ uniformName
  return (UniformLocation location)
-- | Drain the GL error queue: repeatedly calls glGetError, printing a
-- short description of each pending error to stdout, until the queue
-- reports GL_NO_ERROR.  Unknown error codes are skipped silently.
glGetErrors :: IO ()
glGetErrors = do
  code <- glGetError
  case code of
    GL_NO_ERROR -> return ()
    e -> do
      case e of
        GL_INVALID_ENUM -> putStrLn "* Invalid Enum"
        GL_INVALID_VALUE -> putStrLn "* Invalid Value"
        GL_INVALID_OPERATION -> putStrLn "* Invalid Operation"
        GL_INVALID_FRAMEBUFFER_OPERATION -> putStrLn "* Invalid Framebuffer Operation"
        GL_OUT_OF_MEMORY -> putStrLn "* OOM"
        GL_STACK_UNDERFLOW -> putStrLn "* Stack underflow"
        GL_STACK_OVERFLOW -> putStrLn "* Stack overflow"
        _ -> return ()
      -- keep draining until the queue is empty
      glGetErrors
| lukexi/halive | demo/Shader.hs | bsd-2-clause | 4,741 | 0 | 24 | 1,608 | 1,100 | 536 | 564 | 94 | 9 |
module Blockchain.Data.Wire (
Message(..),
Capability(..),
obj2WireMessage,
wireMessage2Obj
) where
import Data.Functor
import Data.List
import Data.Word
import Network.Haskoin.Crypto
import Numeric
import Text.PrettyPrint.ANSI.Leijen hiding ((<$>))
import qualified Blockchain.Colors as CL
import Blockchain.Data.BlockDB
import Blockchain.Data.DataDefs
import Blockchain.Data.Peer
import Blockchain.Data.RLP
import Blockchain.Data.SignedTransaction
import Blockchain.Format
import Blockchain.SHA
import Blockchain.Util
import Debug.Trace
-- | A sub-protocol capability advertised in the Hello handshake: the
-- protocol family together with its version number.
data Capability = ETH Integer | SHH Integer deriving (Show)
-- | Build a 'Capability' from its version number and wire name.
-- Only \"eth\" and \"shh\" are recognised; anything else is a protocol
-- violation and aborts via 'error'.
name2Cap::Integer->String->Capability
name2Cap ver capName =
  case capName of
    "eth" -> ETH ver
    "shh" -> SHH ver
    _     -> error $ "Unknown capability string: " ++ capName
{-capValue::Capability->String
capValue ETH = "eth"
capValue SHH = "shh"-}
-- | A capability is serialized as a two-element RLP array:
-- [name, version].
instance RLPSerializable Capability where
  rlpEncode (ETH qqqq) = RLPArray [rlpEncode "eth", rlpEncode qqqq]
  rlpEncode (SHH qqqq) = RLPArray [rlpEncode "shh", rlpEncode qqqq]
  rlpDecode (RLPArray [name, qqqq]) = name2Cap (rlpDecode qqqq) $ rlpDecode name
  rlpDecode x = error $ "wrong format given to rlpDecode for Capability: " ++ show (pretty x)
-- | Why a peer connection is being (or was) closed.  The constructor
-- order mirrors the devp2p disconnect codes 0x00 - 0x0c; see
-- 'terminationReasonToNumber' / 'numberToTerminationReason' for the
-- mapping used on the wire.
data TerminationReason =
  DisconnectRequested
  | TCPSubSystemError
  | BreachOfProtocol
  | UselessPeer
  | TooManyPeers
  | AlreadyConnected
  | IncompatibleP2PProtocolVersion
  | NullNodeIdentityReceived
  | ClientQuitting
  | UnexpectedIdentity
  | ConnectedToSelf
  | PingTimeout
  | OtherSubprotocolReason deriving (Show)
-- | Decode a devp2p disconnect code into a 'TerminationReason'.
-- Codes outside the protocol-defined range 0x00 - 0x0c abort via 'error'.
numberToTerminationReason::Integer->TerminationReason
numberToTerminationReason code =
  case code of
    0x00 -> DisconnectRequested
    0x01 -> TCPSubSystemError
    0x02 -> BreachOfProtocol
    0x03 -> UselessPeer
    0x04 -> TooManyPeers
    0x05 -> AlreadyConnected
    0x06 -> IncompatibleP2PProtocolVersion
    0x07 -> NullNodeIdentityReceived
    0x08 -> ClientQuitting
    0x09 -> UnexpectedIdentity
    0x0a -> ConnectedToSelf
    0x0b -> PingTimeout
    0x0c -> OtherSubprotocolReason
    _ -> error "numberToTerminationReasion called with unsupported number"

-- | Encode a 'TerminationReason' as its devp2p disconnect code; the
-- total inverse of 'numberToTerminationReason' on the supported range.
terminationReasonToNumber::TerminationReason->Integer
terminationReasonToNumber reason =
  case reason of
    DisconnectRequested -> 0x00
    TCPSubSystemError -> 0x01
    BreachOfProtocol -> 0x02
    UselessPeer -> 0x03
    TooManyPeers -> 0x04
    AlreadyConnected -> 0x05
    IncompatibleP2PProtocolVersion -> 0x06
    NullNodeIdentityReceived -> 0x07
    ClientQuitting -> 0x08
    UnexpectedIdentity -> 0x09
    ConnectedToSelf -> 0x0a
    PingTimeout -> 0x0b
    OtherSubprotocolReason -> 0x0c
-- | One devp2p / eth wire-protocol message.  The mapping between
-- constructors and packet type bytes lives in 'obj2WireMessage' and
-- 'wireMessage2Obj'.
data Message =
  Hello { version::Int, clientId::String, capability::[Capability], port::Int, nodeId::Word512 } |
  Disconnect TerminationReason |
  Ping |
  Pong |
  GetPeers |
  Peers [Peer] |
  Status { protocolVersion::Int, networkID::String, totalDifficulty::Int, latestHash::SHA, genesisHash:: SHA } |
  QqqqStatus Int |
  Transactions [SignedTransaction] |
  GetBlocks [SHA] |
  Blocks [Block] |
  BlockHashes [SHA] |
  GetBlockHashes { parentSHAs::[SHA], numChildItems::Integer } |
  GetTransactions |
  NewBlockPacket Block Integer |
  PacketCount Integer |
  QqqqPacket |
  WhisperProtocolVersion Int deriving (Show)
-- | Human-readable, colorized rendering of wire messages, used for
-- logging/tracing peer traffic.
instance Format Message where
  format Hello{version=ver, clientId=c, capability=cap, port=p, nodeId=n} =
    CL.blue "Hello" ++
      "    version: " ++ show ver ++ "\n" ++
      "    cliendId: " ++ show c ++ "\n" ++
      "    capability: " ++ intercalate ", " (show <$> cap) ++ "\n" ++
      "    port: " ++ show p ++ "\n" ++
      "    nodeId: " ++ take 20 (padZeros 64 (showHex n "")) ++ "...."
  format (Disconnect reason) = CL.blue "Disconnect" ++ "(" ++ show reason ++ ")"
  format Ping = CL.blue "Ping"
  format Pong = CL.blue "Pong"
  format GetPeers = CL.blue "GetPeers"
  format (Peers peers) = CL.blue "Peers: " ++ intercalate ", " (format <$> peers)
  format Status{ protocolVersion=ver, networkID=nID, totalDifficulty=d, latestHash=lh, genesisHash=gh } =
    CL.blue "Status" ++
      "    protocolVersion: " ++ show ver ++ "\n" ++
      "    networkID: " ++ show nID ++ "\n" ++
      "    totalDifficulty: " ++ show d ++ "\n" ++
      "    latestHash: " ++ show (pretty lh) ++ "\n" ++
      "    genesisHash: " ++ show (pretty gh)
  format (QqqqStatus ver) =
    CL.blue "QqqqStatus " ++
      "    protocolVersion: " ++ show ver
  format (Transactions transactions) =
    CL.blue "Transactions:\n    " ++ tab (intercalate "\n    " (format <$> transactions))
  --Short version
  format (BlockHashes shas) =
    CL.blue "BlockHashes " ++ "(" ++ show (length shas) ++ " new hashes)"
  --Long version
  {-  format (BlockHashes shas) =
    CL.blue "BlockHashes:" ++
    tab ("\n" ++ intercalate "\n    " (show . pretty <$> shas))-}
  format (GetBlocks shas) =
    CL.blue "GetBlocks:" ++
    tab ("\n" ++ intercalate "\n    " (show . pretty <$> shas))
  format (Blocks blocks) = CL.blue "Blocks:" ++ tab("\n" ++ intercalate "\n    " (format <$> blocks))
  format (GetBlockHashes pSHAs numChild) =
    CL.blue "GetBlockHashes" ++ " (max: " ++ show numChild ++ "):\n    " ++
    intercalate ",\n    " (show . pretty <$> pSHAs)
  format (NewBlockPacket block d) = CL.blue "NewBlockPacket" ++ " (" ++ show d ++ ")" ++ tab ("\n" ++ format block)
  format (PacketCount c) =
    CL.blue "PacketCount:" ++ show c
  format QqqqPacket = CL.blue "QqqqPacket"
  format GetTransactions = CL.blue "GetTransactions"
  format (WhisperProtocolVersion ver) = CL.blue "WhisperProtocolVersion " ++ show ver
-- | Decode a wire packet (type byte + RLP payload) into a 'Message'.
-- Partial: unknown packet types or payloads of the wrong shape fall
-- through to the final equation and abort via 'error'.
obj2WireMessage::Word8->RLPObject->Message
obj2WireMessage 0x0 (RLPArray [ver, cId, RLPArray cap, p, nId]) =
  Hello (fromInteger $ rlpDecode ver) (rlpDecode cId) (rlpDecode <$> cap) (fromInteger $ rlpDecode p) $ rlp2Word512 nId
obj2WireMessage 0x1 (RLPArray [reason]) =
  Disconnect (numberToTerminationReason $ rlpDecode reason)
-- Ping is accepted both as an empty array and as a singleton empty array
-- (both encodings have been observed on the wire).
obj2WireMessage 0x2 (RLPArray []) = Ping
obj2WireMessage 0x2 (RLPArray [RLPArray []]) = Ping
obj2WireMessage 0x3 (RLPArray []) = Pong
obj2WireMessage 0x4 (RLPArray []) = GetPeers
obj2WireMessage 0x5 (RLPArray peers) = Peers $ rlpDecode <$> peers
obj2WireMessage 0x10 (RLPArray [ver, nID, d, lh, gh]) =
    Status {
      protocolVersion=fromInteger $ rlpDecode ver,
      networkID = rlpDecode nID,
      totalDifficulty = fromInteger $ rlpDecode d,
      latestHash=rlpDecode lh,
      genesisHash=rlpDecode gh
      }
obj2WireMessage 0x10 (RLPArray [ver]) =
    QqqqStatus $ fromInteger $ rlpDecode ver
obj2WireMessage 0x11 (RLPArray []) = GetTransactions
obj2WireMessage 0x12 (RLPArray transactions) =
    Transactions $ rlpDecode <$> transactions
-- GetBlockHashes: all items but the last are parent hashes; the last is
-- the maximum number of hashes requested.
obj2WireMessage 0x13 (RLPArray items) =
    GetBlockHashes (rlpDecode <$> init items) $ rlpDecode $ last items
obj2WireMessage 0x14 (RLPArray items) =
    BlockHashes $ rlpDecode <$> items
obj2WireMessage 0x15 (RLPArray items) =
    GetBlocks $ rlpDecode <$> items
obj2WireMessage 0x16 (RLPArray blocks) =
    Blocks $ rlpDecode <$> blocks
obj2WireMessage 0x17 (RLPArray [block, td]) =
    NewBlockPacket (rlpDecode block) (rlpDecode td)
obj2WireMessage 0x18 (RLPArray [c]) =
    PacketCount $ rlpDecode c
obj2WireMessage 0x19 (RLPArray []) =
    QqqqPacket
obj2WireMessage 0x20 (RLPArray [ver]) =
    WhisperProtocolVersion $ fromInteger $ rlpDecode ver
obj2WireMessage x y = error ("Missing case in obj2WireMessage: " ++ show x ++ ", " ++ show (pretty y))
-- | Serialize a 'Message' into its wire packet type byte plus RLP
-- payload; the inverse of 'obj2WireMessage'.
wireMessage2Obj::Message->(Word8, RLPObject)
wireMessage2Obj Hello { version = ver,
                        clientId = cId,
                        capability = cap,
                        port = p,
                        nodeId = nId } =
  (0x0, RLPArray [
           rlpEncode $ toInteger ver,
           rlpEncode cId,
           RLPArray $ rlpEncode <$> cap,
           rlpEncode $ toInteger p,
           word5122RLP nId
          ])
-- Bug fix: Disconnect is packet type 0x1 (0x0 is Hello).  The previous
-- code emitted 0x0, which peers would mis-parse as a malformed Hello;
-- 0x1 matches both 'obj2WireMessage' and the devp2p specification.
wireMessage2Obj (Disconnect reason) = (0x1, RLPArray [rlpEncode $ terminationReasonToNumber reason])
wireMessage2Obj Ping = (0x2, RLPArray [])
wireMessage2Obj Pong = (0x3, RLPArray [])
wireMessage2Obj GetPeers = (0x4, RLPArray [])
wireMessage2Obj (Peers peers) = (0x5, RLPArray $ rlpEncode <$> peers)
wireMessage2Obj (Status ver nID d lh gh) =
  (0x10, RLPArray [rlpEncode $ toInteger ver, rlpEncode nID, rlpEncode $ toInteger d, rlpEncode lh, rlpEncode gh])
wireMessage2Obj (QqqqStatus ver) = (0x10, RLPArray [rlpEncode $ toInteger ver])
wireMessage2Obj GetTransactions = (0x11, RLPArray [])
wireMessage2Obj (Transactions transactions) = (0x12, RLPArray (rlpEncode <$> transactions))
-- Parent hashes first, then the maximum number of children requested.
wireMessage2Obj (GetBlockHashes pSHAs numChildren) =
  (0x13, RLPArray $ (rlpEncode <$> pSHAs) ++ [rlpEncode numChildren])
wireMessage2Obj (BlockHashes shas) =
  (0x14, RLPArray (rlpEncode <$> shas))
wireMessage2Obj (GetBlocks shas) =
  (0x15, RLPArray (rlpEncode <$> shas))
wireMessage2Obj (Blocks blocks) =
  (0x16, RLPArray (rlpEncode <$> blocks))
wireMessage2Obj (NewBlockPacket block d) =
  (0x17, RLPArray [rlpEncode block, rlpEncode d])
wireMessage2Obj (PacketCount c) =
  (0x18, RLPArray [rlpEncode c])
wireMessage2Obj QqqqPacket =
  (0x19, RLPArray [])
wireMessage2Obj (WhisperProtocolVersion ver) =
  (0x20, RLPArray [rlpEncode $ toInteger ver])
| kejace/ethereum-client-haskell | src/Blockchain/Data/Wire.hs | bsd-3-clause | 9,629 | 0 | 22 | 1,709 | 2,877 | 1,510 | 1,367 | 210 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
-- | Provides common combinators for concurrency in Javascript.
--
-- The emulated threading Javascript threading model provided by
-- Sunroof is based on cooperative multithreading
-- (since Javascript is not multithreaded).
module Language.Sunroof.Concurrent
( loop
, forkJS
, threadDelay
, yield
) where
import Language.Sunroof.Types
import Language.Sunroof.Classes
import Language.Sunroof.JS.Number ( JSNumber )
import Language.Sunroof.JS.Browser ( window, setTimeout )
import Language.Sunroof.Utils
-- -------------------------------------------------------------
-- General Concurrent Combinators.
-- -------------------------------------------------------------
-- | @loop x f@ executes the function @f@ repeatedly.
-- After each iteration the result value of the function
-- is fed back as input of the next iteration.
-- The initial value supplied for the first iteration is @x@.
-- This loop will never terminate.
--
-- Implemented as a self-referential continuation ('fixJS'); the 'yield'
-- between iterations hands control back to the Javascript event loop so
-- the (cooperatively scheduled) page stays responsive.
loop :: (Sunroof a) => a -> (a -> JSB a) -> JSB ()
loop start m = do
  f <- fixJS $ \ f -> continuation $ \ a -> do
    a' <- m a
    yield -- stop after every loop for pause
    goto f a'
  goto f start -- and call the looping function
-- | Fork of the given computation in a different thread.
-- Compiled to a @setTimeout(..., 0)@, i.e. the computation is queued to
-- run on the next turn of the Javascript event loop.
forkJS :: (SunroofThread t1) => JS t1 () -> JS t2 ()
forkJS m = do
  _ <- window # setTimeout (\() -> blockableJS m) 0
  return ()
-- | Delay the execution of all instructions after this one by
-- the given amount of milliseconds.
-- Captures the current continuation ('callcc') and schedules it with
-- @setTimeout@; 'done' abandons the current (pre-timeout) control path.
threadDelay :: JSNumber -> JSB ()
threadDelay n = callcc $ \ o -> do
  _ <- window # setTimeout (\x -> goto o x) n
  done
-- | Give another thread time to execute.
yield :: JSB ()
yield = threadDelay 0
| ku-fpg/sunroof-compiler | Language/Sunroof/Concurrent.hs | bsd-3-clause | 1,754 | 0 | 15 | 364 | 345 | 189 | 156 | 28 | 1 |
module Graph where
import qualified Data.Map as Map
import qualified Data.Set as Set
-- | The type of graph whose vertices are of type n: every vertex maps
-- to the set of vertices it has an edge to.
type Graph n = Map.Map n (Set.Set n)

-- | The graph with no vertices and no edges.
empty :: Graph n
empty = Map.empty

-- | Merge two graphs, unioning the neighbour sets of vertices that are
-- present in both.
union :: (Eq n, Ord n) => Graph n -> Graph n -> Graph n
union = Map.unionWith Set.union

-- | The complete graph over the given vertex set: each vertex is linked
-- to every other vertex, but not to itself.
clique :: (Eq n, Ord n) => Set.Set n -> Graph n
clique set = Map.fromSet linksOf set
  where linksOf v = Set.delete v set

-- | All vertices of the graph (in ascending order).
vertices :: (Eq n, Ord n) => Graph n -> [n]
vertices = Map.keys

-- | The degree of the specified vertex: the size of its recorded
-- neighbour set.  The vertex must be present in the graph.
degree :: (Eq n, Ord n) => Graph n -> n -> Int
degree g v = Set.size (g Map.! v)

-- | Drop a vertex together with every edge that mentions it.
removeVertex :: (Eq n, Ord n) => Graph n -> n -> Graph n
removeVertex g v = Map.map (Set.delete v) (Map.delete v g)

-- | The set of sources: vertices that appear in no neighbour set.
sources :: (Eq n, Ord n) => Graph n -> [n]
sources g = Set.toList (Map.keysSet g `Set.difference` targets)
  where targets = Map.foldl' Set.union Set.empty g

-- | The neighbours of a vertex, as a list.  The vertex must be present.
neighbors :: (Eq n, Ord n) => Graph n -> n -> [n]
neighbors g v = Set.toList (g Map.! v)
| koba-e964/hayashii-mcc | Graph.hs | bsd-3-clause | 1,043 | 0 | 10 | 225 | 470 | 245 | 225 | 20 | 1 |
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
{-# LANGUAGE ScopedTypeVariables, ExistentialQuantification, RankNTypes, OverloadedStrings #-}
-----------------------------------------------------------------------------
-- |
-- Module : System.Taffybar.Widget.Workspaces
-- Copyright : (c) Ivan A. Malison
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Ivan A. Malison
-- Stability : unstable
-- Portability : unportable
-----------------------------------------------------------------------------
module System.Taffybar.Widget.Workspaces where
import Control.Applicative
import Control.Arrow ((&&&))
import Control.Concurrent
import qualified Control.Concurrent.MVar as MV
import Control.Exception.Enclosed (catchAny)
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Trans.Class
import Control.Monad.Trans.Maybe
import Control.Monad.Trans.Reader
import Control.RateLimit
import Data.Default (Default(..))
import qualified Data.Foldable as F
import Data.GI.Base.ManagedPtr (unsafeCastTo)
import Data.Int
import Data.List (intersect, sortBy, (\\))
import qualified Data.Map as M
import Data.Maybe
import qualified Data.MultiMap as MM
import Data.Ord
import qualified Data.Set as Set
import qualified Data.Text as T
import Data.Time.Units
import Data.Tuple.Select
import Data.Tuple.Sequence
import qualified GI.Gdk.Enums as Gdk
import qualified GI.Gdk.Structs.EventScroll as Gdk
import qualified GI.GdkPixbuf.Objects.Pixbuf as Gdk
import qualified GI.Gtk as Gtk
import Prelude
import StatusNotifier.Tray (scalePixbufToSize)
import System.Log.Logger
import System.Taffybar.Context
import System.Taffybar.Information.EWMHDesktopInfo
import System.Taffybar.Information.SafeX11
import System.Taffybar.Information.X11DesktopInfo
import System.Taffybar.Util
import System.Taffybar.Widget.Generic.AutoSizeImage (autoSizeImage)
import System.Taffybar.Widget.Util
import System.Taffybar.WindowIcon
import Text.Printf
-- | The display state of a workspace, derived from EWMH information in
-- 'buildWorkspaceData'; its lower-cased 'show' form doubles as the CSS
-- class applied to the workspace widget.
data WorkspaceState
  = Active   -- ^ The workspace reported first by getVisibleWorkspaces.
  | Visible  -- ^ Visible (e.g. on another monitor) but not active.
  | Hidden   -- ^ Has windows but is not currently visible.
  | Empty    -- ^ Contains no windows.
  | Urgent   -- ^ Contains an urgent window (when that option is enabled).
  deriving (Show, Eq)
-- | Render any showable value as a lower-cased CSS class name.
getCSSClass :: (Show s) => s -> T.Text
getCSSClass state = T.toLower (T.pack (show state))
-- | Every CSS class name a workspace widget may carry, one per
-- 'WorkspaceState' constructor.
cssWorkspaceStates :: [T.Text]
cssWorkspaceStates = getCSSClass <$> [Active, Visible, Hidden, Empty, Urgent]
-- | Snapshot of the X11 properties of a single window, as gathered by
-- 'getWindowData'.
data WindowData = WindowData
  { windowId :: X11Window
  , windowTitle :: String
  , windowClass :: String
  , windowUrgent :: Bool
  , windowActive :: Bool
  , windowMinimized :: Bool
  } deriving (Show, Eq)
-- | The two kinds of refresh a widget controller can receive: a full
-- workspace snapshot, or just a list of windows whose icons changed.
data WidgetUpdate = WorkspaceUpdate Workspace | IconUpdate [X11Window]
-- | Snapshot of one workspace: its index, name, display state and the
-- windows it currently holds.
data Workspace = Workspace
  { workspaceIdx :: WorkspaceId
  , workspaceName :: String
  , workspaceState :: WorkspaceState
  , windows :: [WindowData]
  } deriving (Show, Eq)
-- | Shared environment for the workspaces widget: the mutable
-- controller/workspace maps, the container widget, the configuration
-- and the enclosing taffybar context.
data WorkspacesContext = WorkspacesContext
  { controllersVar :: MV.MVar (M.Map WorkspaceId WWC)
  , workspacesVar :: MV.MVar (M.Map WorkspaceId Workspace)
  , workspacesWidget :: Gtk.Box
  , workspacesConfig :: WorkspacesConfig
  , taffyContext :: Context
  }
-- | IO computations that run with access to a 'WorkspacesContext'.
type WorkspacesIO a = ReaderT WorkspacesContext IO a
-- | Run a taffybar-level action inside 'WorkspacesIO' by extracting the
-- embedded 'Context'.
liftContext :: TaffyIO a -> WorkspacesIO a
liftContext action = asks taffyContext >>= lift . runReaderT action
-- | Run an X11 property query inside 'WorkspacesIO', falling back to
-- the supplied default (per 'runX11Def') when the query cannot run.
liftX11Def :: a -> X11Property a -> WorkspacesIO a
liftX11Def dflt prop = liftContext $ runX11Def dflt prop
-- | Apply the CSS class for the workspace's current state to the
-- widget, removing whichever of the other state classes it carried.
setWorkspaceWidgetStatusClass ::
     (MonadIO m, Gtk.IsWidget a) => Workspace -> a -> m ()
setWorkspaceWidgetStatusClass workspace widget =
  updateWidgetClasses
    widget
    [getCSSClass $ workspaceState workspace]
    cssWorkspaceStates
-- | Reconcile a widget's style classes: ensure every class in @toAdd@
-- is present and remove any class in @toRemove@ that is not also being
-- added.  Membership is checked first so GTK is only called on actual
-- changes.
updateWidgetClasses ::
     (Foldable t1, Foldable t, Gtk.IsWidget a, MonadIO m)
  => a
  -> t1 T.Text
  -> t T.Text
  -> m ()
updateWidgetClasses widget toAdd toRemove = do
  context <- Gtk.widgetGetStyleContext widget
  let hasClass = Gtk.styleContextHasClass context
      addIfMissing klass =
        hasClass klass >>= (`when` Gtk.styleContextAddClass context klass) . not
      removeIfPresent klass = unless (klass `elem` toAdd) $
        hasClass klass >>= (`when` Gtk.styleContextRemoveClass context klass)
  mapM_ removeIfPresent toRemove
  mapM_ addIfMissing toAdd
-- | Interface implemented by every per-workspace widget controller:
-- expose the underlying widget and react to updates.  'updateWidgetX11'
-- (updates that arrive on the X11 event thread) defaults to a no-op.
class WorkspaceWidgetController wc where
  getWidget :: wc -> WorkspacesIO Gtk.Widget
  updateWidget :: wc -> WidgetUpdate -> WorkspacesIO wc
  updateWidgetX11 :: wc -> WidgetUpdate -> WorkspacesIO wc
  updateWidgetX11 cont _ = return cont
-- | Existential wrapper so controllers of different concrete types can
-- live in one map; it simply delegates the class methods.
data WWC = forall a. WorkspaceWidgetController a => WWC a
instance WorkspaceWidgetController WWC where
  getWidget (WWC wc) = getWidget wc
  updateWidget (WWC wc) update = WWC <$> updateWidget wc update
  updateWidgetX11 (WWC wc) update = WWC <$> updateWidgetX11 wc update
-- | Builds the controller for one workspace.
type ControllerConstructor = Workspace -> WorkspacesIO WWC
-- | A constructor that wraps/decorates another constructor.
type ParentControllerConstructor =
  ControllerConstructor -> ControllerConstructor
-- | Produces a window's icon pixbuf at the requested size, if any.
type WindowIconPixbufGetter =
  Int32 -> WindowData -> TaffyIO (Maybe Gdk.Pixbuf)
-- | Configuration for the workspaces widget.  See
-- 'defaultWorkspacesConfig' for the values used when a field is left at
-- its default.
data WorkspacesConfig =
  WorkspacesConfig
  { widgetBuilder :: ControllerConstructor
    -- ^ How to build the controller/widget for each workspace.
  , widgetGap :: Int
    -- ^ Pixel spacing between workspace widgets.
  , maxIcons :: Maybe Int
    -- ^ Upper bound on displayed window icons (Nothing = unlimited).
  , minIcons :: Int
    -- ^ Minimum number of icon slots to reserve.
  , getWindowIconPixbuf :: WindowIconPixbufGetter
    -- ^ Source of per-window icon pixbufs.
  , labelSetter :: Workspace -> WorkspacesIO String
    -- ^ Computes the label text for a workspace.
  , showWorkspaceFn :: Workspace -> Bool
    -- ^ Predicate deciding which workspaces are shown (see 'hideEmpty').
  , borderWidth :: Int
  , updateEvents :: [String]
    -- ^ X11 property names that trigger a full refresh.
  , updateRateLimitMicroseconds :: Integer
  , iconSort :: [WindowData] -> WorkspacesIO [WindowData]
    -- ^ Ordering applied to window icons.
  , urgentWorkspaceState :: Bool
    -- ^ Whether urgent windows put their workspace in the Urgent state.
  }
-- | Sensible defaults: button controller, all workspaces shown, icons
-- sorted by position, refresh on every EWMH property except _NET_WM_ICON.
defaultWorkspacesConfig :: WorkspacesConfig
defaultWorkspacesConfig =
  WorkspacesConfig
  { widgetBuilder = buildButtonController defaultBuildContentsController
  , widgetGap = 0
  , maxIcons = Nothing
  , minIcons = 0
  , getWindowIconPixbuf = defaultGetWindowIconPixbuf
  , labelSetter = return . workspaceName
  , showWorkspaceFn = const True
  , borderWidth = 2
  , iconSort = sortWindowsByPosition
  , updateEvents = allEWMHProperties \\ [ewmhWMIcon]
  , updateRateLimitMicroseconds = 100000
  , urgentWorkspaceState = False
  }
instance Default WorkspacesConfig where
  def = defaultWorkspacesConfig
-- | Predicate suitable for 'showWorkspaceFn': show every workspace
-- except those in the 'Empty' state.
hideEmpty :: Workspace -> Bool
hideEmpty ws = workspaceState ws /= Empty
-- | Log a message under this module's logger name.
wLog :: MonadIO m => Priority -> String -> m ()
wLog l s = liftIO $ logM "System.Taffybar.Widget.Workspaces" l s
-- | Atomically apply a 'WorkspacesIO' transformation to the MVar's
-- contents and return the new value.
updateVar :: MV.MVar a -> (a -> WorkspacesIO a) -> WorkspacesIO a
updateVar var modify = do
  ctx <- ask
  lift $ MV.modifyMVar var $ fmap (\a -> (a, a)) . flip runReaderT ctx . modify
-- | Refresh the cached workspace map from X11 and return it.
updateWorkspacesVar :: WorkspacesIO (M.Map WorkspaceId Workspace)
updateWorkspacesVar = do
  workspacesRef <- asks workspacesVar
  updateVar workspacesRef buildWorkspaceData
-- | Group the given windows by the workspace each one reports itself on.
getWorkspaceToWindows ::
     [X11Window] -> X11Property (MM.MultiMap WorkspaceId X11Window)
getWorkspaceToWindows =
  foldM
    (\theMap window ->
       MM.insert <$> getWorkspace window <*> pure window <*> pure theMap)
    MM.empty
-- | Collect the per-window properties into a 'WindowData' record.
-- Urgency and active status are derived from the caller-supplied lists
-- rather than queried again per window.
getWindowData :: Maybe X11Window
              -> [X11Window]
              -> X11Window
              -> X11Property WindowData
getWindowData activeWindow urgentWindows window = do
  wTitle <- getWindowTitle window
  wClass <- getWindowClass window
  wMinimized <- getWindowMinimized window
  return
    WindowData
    { windowId = window
    , windowTitle = wTitle
    , windowClass = wClass
    , windowUrgent = window `elem` urgentWindows
    , windowActive = Just window == activeWindow
    , windowMinimized = wMinimized
    }
-- | Build a fresh snapshot of every workspace from EWMH/X11 state.  The
-- previous map argument is ignored; on X11 failure 'liftX11Def' yields
-- an empty map.
buildWorkspaceData :: M.Map WorkspaceId Workspace
                   -> WorkspacesIO (M.Map WorkspaceId Workspace)
buildWorkspaceData _ = ask >>= \context -> liftX11Def M.empty $ do
  names <- getWorkspaceNames
  wins <- getWindows
  workspaceToWindows <- getWorkspaceToWindows wins
  urgentWindows <- filterM isWindowUrgent wins
  activeWindow <- getActiveWindow
  -- NOTE(review): partial pattern -- this errors if
  -- getVisibleWorkspaces ever returns an empty list.
  active:visible <- getVisibleWorkspaces
  let getWorkspaceState idx ws
        | idx == active = Active
        | idx `elem` visible = Visible
        | urgentWorkspaceState (workspacesConfig context) &&
          not (null (ws `intersect` urgentWindows)) =
          Urgent
        | null ws = Empty
        | otherwise = Hidden
  foldM
    (\theMap (idx, name) -> do
       let ws = MM.lookup idx workspaceToWindows
       windowInfos <- mapM (getWindowData activeWindow urgentWindows) ws
       return $
         M.insert
           idx
           Workspace
           { workspaceIdx = idx
           , workspaceName = name
           , workspaceState = getWorkspaceState idx ws
           , windows = windowInfos
           }
           theMap)
    M.empty
    names
-- | Add every known controller's widget to the top-level container and
-- make everything visible.
addWidgetsToTopLevel :: WorkspacesIO ()
addWidgetsToTopLevel = do
  WorkspacesContext
    { controllersVar = controllersRef
    , workspacesWidget = cont
    } <- ask
  controllersMap <- lift $ MV.readMVar controllersRef
  -- Elems returns elements in ascending order of their keys so this will always
  -- add the widgets in the correct order
  mapM_ addWidget $ M.elems controllersMap
  lift $ Gtk.widgetShowAll cont
-- | Reparent one controller's widget into the workspaces container,
-- detaching it from any previous parent first.
addWidget :: WWC -> WorkspacesIO ()
addWidget controller = do
  cont <- asks workspacesWidget
  workspaceWidget <- getWidget controller
  lift $ do
    -- XXX: This hbox exists to (hopefully) prevent the issue where workspace
    -- widgets appear out of order, in the switcher, by acting as an empty
    -- place holder when the actual widget is hidden.
    hbox <- Gtk.boxNew Gtk.OrientationHorizontal 0
    void $ Gtk.widgetGetParent workspaceWidget >>=
         traverse (unsafeCastTo Gtk.Box) >>=
         traverse (flip Gtk.containerRemove workspaceWidget)
    Gtk.containerAdd hbox workspaceWidget
    Gtk.containerAdd cont hbox
-- | Build the workspaces widget: create the container and context,
-- populate the per-workspace controllers, and subscribe to the X11
-- property/configure events that drive updates.  All subscriptions are
-- released when the container is unrealized.
workspacesNew :: WorkspacesConfig -> TaffyIO Gtk.Widget
workspacesNew cfg = ask >>= \tContext -> lift $ do
  cont <- Gtk.boxNew Gtk.OrientationHorizontal $ fromIntegral (widgetGap cfg)
  controllersRef <- MV.newMVar M.empty
  workspacesRef <- MV.newMVar M.empty
  let context =
        WorkspacesContext
        { controllersVar = controllersRef
        , workspacesVar = workspacesRef
        , workspacesWidget = cont
        , workspacesConfig = cfg
        , taffyContext = tContext
        }
  -- This will actually create all the widgets
  runReaderT updateAllWorkspaceWidgets context
  updateHandler <- onWorkspaceUpdate context
  iconHandler <- onIconsChanged context
  let doUpdate = lift . updateHandler
      handleConfigureEvents e@(ConfigureEvent {}) = doUpdate e
      handleConfigureEvents _ = return ()
  (workspaceSubscription, iconSubscription, geometrySubscription) <-
    flip runReaderT tContext $ sequenceT
         ( subscribeToPropertyEvents (updateEvents cfg) $ doUpdate
         , subscribeToPropertyEvents [ewmhWMIcon] (lift . onIconChanged iconHandler)
         , subscribeToAll handleConfigureEvents
         )
  let doUnsubscribe = flip runReaderT tContext $
        mapM_ unsubscribe
              [ iconSubscription
              , workspaceSubscription
              , geometrySubscription
              ]
  _ <- Gtk.onWidgetUnrealize cont doUnsubscribe
  _ <- widgetSetClassGI cont "workspaces"
  Gtk.toWidget cont
-- | Refresh the workspace state, add/remove controller widgets to match it,
-- push a 'WorkspaceUpdate' into every controller, and finally apply the
-- configured show/hide policy.
updateAllWorkspaceWidgets :: WorkspacesIO ()
updateAllWorkspaceWidgets = do
  wLog DEBUG "Updating workspace widgets"
  workspacesMap <- updateWorkspacesVar
  wLog DEBUG $ printf "Workspaces: %s" $ show workspacesMap
  wLog DEBUG "Adding and removing widgets"
  updateWorkspaceControllers
  let updateController' idx controller =
        -- Controllers for workspaces missing from the map are left as-is.
        maybe (return controller)
              (updateWidget controller . WorkspaceUpdate) $
              M.lookup idx workspacesMap
      logUpdateController i =
        wLog DEBUG $ printf "Updating %s workspace widget" $ show i
      updateController i cont = logUpdateController i >>
                                updateController' i cont
  wLog DEBUG "Done updating individual widget"
  doWidgetUpdate updateController
  wLog DEBUG "Showing and hiding controllers"
  setControllerWidgetVisibility
-- | Show or hide each workspace's controller widget according to the
-- configured 'showWorkspaceFn' predicate.
setControllerWidgetVisibility :: WorkspacesIO ()
setControllerWidgetVisibility = do
  ctx@WorkspacesContext
    { workspacesVar = workspacesRef
    , controllersVar = controllersRef
    , workspacesConfig = cfg
    } <- ask
  lift $ do
    workspacesMap <- MV.readMVar workspacesRef
    controllersMap <- MV.readMVar controllersRef
    forM_ (M.elems workspacesMap) $ \ws ->
      let action = if showWorkspaceFn cfg ws
                   then Gtk.widgetShow
                   else Gtk.widgetHide
      in
        -- A workspace without a registered controller is simply skipped.
        traverse (flip runReaderT ctx . getWidget)
                 (M.lookup (workspaceIdx ws) controllersMap) >>=
        maybe (return ()) action
-- | Run an update action over every (workspace id, controller) pair and
-- store the returned controllers back into 'controllersVar'.  The whole
-- traversal runs inside 'MV.modifyMVar_', so concurrent updates serialize.
doWidgetUpdate :: (WorkspaceId -> WWC -> WorkspacesIO WWC) -> WorkspacesIO ()
doWidgetUpdate updateController = do
  c@WorkspacesContext { controllersVar = controllersRef } <- ask
  lift $ MV.modifyMVar_ controllersRef $ \controllers -> do
    wLog DEBUG "Updating controllers ref"
    controllersList <-
      mapM
      (\(idx, controller) -> do
         newController <- runReaderT (updateController idx controller) c
         return (idx, newController)) $
      M.toList controllers
    return $ M.fromList controllersList
-- | Reconcile the controller map with the current workspace set: build
-- controllers for newly appeared workspaces, drop those for vanished ones,
-- then clear and repopulate the container so widget order matches.
updateWorkspaceControllers :: WorkspacesIO ()
updateWorkspaceControllers = do
  WorkspacesContext
    { controllersVar = controllersRef
    , workspacesVar = workspacesRef
    , workspacesWidget = cont
    , workspacesConfig = cfg
    } <- ask
  workspacesMap <- lift $ MV.readMVar workspacesRef
  controllersMap <- lift $ MV.readMVar controllersRef
  let newWorkspacesSet = M.keysSet workspacesMap
      existingWorkspacesSet = M.keysSet controllersMap
  -- Nothing to do unless the workspace id set actually changed.
  when (existingWorkspacesSet /= newWorkspacesSet) $ do
    let addWorkspaces = Set.difference newWorkspacesSet existingWorkspacesSet
        removeWorkspaces = Set.difference existingWorkspacesSet newWorkspacesSet
        builder = widgetBuilder cfg
    _ <- updateVar controllersRef $ \controllers -> do
      let oldRemoved = F.foldl (flip M.delete) controllers removeWorkspaces
          buildController idx = builder <$> M.lookup idx workspacesMap
          buildAndAddController theMap idx =
            maybe (return theMap) (>>= return . flip (M.insert idx) theMap)
                    (buildController idx)
      foldM buildAndAddController oldRemoved $ Set.toList addWorkspaces
    -- Clear the container and repopulate it
    lift $ Gtk.containerForeach cont (Gtk.containerRemove cont)
    addWidgetsToTopLevel
-- | Rate limit an action using the configured update interval
-- ('updateRateLimitMicroseconds'), limiting per invocation.
rateLimitFn
  :: forall req resp.
     WorkspacesContext
  -> (req -> IO resp)
  -> ResultsCombiner req resp
  -> IO (req -> IO resp)
rateLimitFn context = generateRateLimitedFunction (PerInvocation rate)
  where
    rate :: Microsecond
    rate =
      fromMicroseconds (updateRateLimitMicroseconds (workspacesConfig context))
-- | Build the event handler used for workspace property events.  The actual
-- widget refresh is rate-limited and runs on the GUI thread; the returned
-- wrapper just logs the event and forks the rate-limited update.
onWorkspaceUpdate :: WorkspacesContext -> IO (Event -> IO ())
onWorkspaceUpdate context = do
  rateLimited <- rateLimitFn context doUpdate combineRequests
  let withLog event = do
        case event of
          PropertyEvent _ _ _ _ _ atom _ _ ->
            wLog DEBUG $ printf "Event %s" $ show atom
          _ -> return ()
        void $ forkIO $ rateLimited event
  return withLog
  where
    -- Coalesce pending requests: only the most recent event is kept.
    combineRequests _ b = Just (b, const ((), ()))
    doUpdate _ = postGUIASync $ runReaderT updateAllWorkspaceWidgets context
-- | Invoke the given handler with the window whose icon property changed;
-- events other than 'PropertyEvent' are ignored.
onIconChanged :: (Set.Set X11Window -> IO ()) -> Event -> IO ()
onIconChanged handler event =
  case event of
    PropertyEvent { ev_window = wid } -> do
      wLog DEBUG $ printf "Icon changed event %s" $ show wid
      handler $ Set.singleton wid
    _ -> return ()
-- | Build a rate-limited handler that refreshes icons for a set of windows.
-- Coalesced requests union their window sets, so no window update is lost.
onIconsChanged :: WorkspacesContext -> IO (Set.Set X11Window -> IO ())
onIconsChanged context = rateLimitFn context onIconsChanged' combineRequests
  where
    combineRequests windows1 windows2 =
      Just (Set.union windows1 windows2, const ((), ()))
    onIconsChanged' wids = do
      wLog DEBUG $ printf "Icon update execute %s" $ show wids
      postGUIASync $ flip runReaderT context $
        doWidgetUpdate
          (\idx c ->
             wLog DEBUG (printf "Updating %s icons." $ show idx) >>
             updateWidget c (IconUpdate $ Set.toList wids))
-- | Send the controller an initial 'WorkspaceUpdate' for its workspace and
-- wrap the result as a 'WWC'.
initializeWWC ::
  WorkspaceWidgetController a => a -> Workspace -> ReaderT WorkspacesContext IO WWC
initializeWWC controller ws = do
  initialized <- updateWidget controller (WorkspaceUpdate ws)
  return (WWC initialized)
-- | A WrappingController can be used to wrap some child widget with another
-- arbitrary widget.
data WrappingController = WrappingController
  { wrappedWidget :: Gtk.Widget      -- ^ The outer widget handed to GTK.
  , wrappedController :: WWC         -- ^ The controller that receives updates.
  }

instance WorkspaceWidgetController WrappingController where
  getWidget = lift . Gtk.toWidget . wrappedWidget
  -- Updates are forwarded to the wrapped controller; the wrapper itself
  -- never changes.
  updateWidget wc update = do
    updated <- updateWidget (wrappedController wc) update
    return wc { wrappedController = updated }
-- | A controller composed of several child controllers laid out in a box.
data WorkspaceContentsController = WorkspaceContentsController
  { containerWidget :: Gtk.Widget    -- ^ The padded outer widget.
  , contentsControllers :: [WWC]     -- ^ Child controllers, in layout order.
  }

-- | Build a contents controller from the given child constructors: the
-- children are packed into a horizontal box with CSS class "contents",
-- wrapped in a pad box.
buildContentsController :: [ControllerConstructor] -> ControllerConstructor
buildContentsController constructors ws = do
  controllers <- mapM ($ ws) constructors
  ctx <- ask
  tempController <- lift $ do
    cons <- Gtk.boxNew Gtk.OrientationHorizontal 0
    mapM_ (flip runReaderT ctx . getWidget >=> Gtk.containerAdd cons) controllers
    outerBox <- Gtk.toWidget cons >>= buildPadBox
    _ <- widgetSetClassGI cons "contents"
    widget <- Gtk.toWidget outerBox
    return
      WorkspaceContentsController
      { containerWidget = widget
      , contentsControllers = controllers
      }
  initializeWWC tempController ws
-- | The default contents: a label followed by the window icons.
defaultBuildContentsController :: ControllerConstructor
defaultBuildContentsController =
  buildContentsController [buildLabelController, buildIconController]

-- | Wrap the widget produced by a constructor in an event box (with the
-- given CSS class) aligned to the bottom-left corner of its allocation.
bottomLeftAlignedBoxWrapper :: T.Text -> ControllerConstructor -> ControllerConstructor
bottomLeftAlignedBoxWrapper boxClass constructor ws = do
  controller <- constructor ws
  widget <- getWidget controller
  ebox <- Gtk.eventBoxNew
  _ <- widgetSetClassGI ebox boxClass
  Gtk.widgetSetHalign ebox Gtk.AlignStart
  Gtk.widgetSetValign ebox Gtk.AlignEnd
  Gtk.containerAdd ebox widget
  wrapped <- Gtk.toWidget ebox
  let wrappingController = WrappingController
        { wrappedWidget = wrapped
        , wrappedController = controller
        }
  initializeWWC wrappingController ws

-- | Icons with the workspace label overlaid in the bottom-left corner.
buildLabelOverlayController :: ControllerConstructor
buildLabelOverlayController =
  buildOverlayContentsController
    [buildIconController]
    [bottomLeftAlignedBoxWrapper "overlay-box" buildLabelController]
-- | Like 'buildContentsController', but the second group of constructors is
-- stacked on top of the first using a 'Gtk.Overlay'.
buildOverlayContentsController ::
  [ControllerConstructor] -> [ControllerConstructor] -> ControllerConstructor
buildOverlayContentsController mainConstructors overlayConstructors ws = do
  controllers <- mapM ($ ws) mainConstructors
  overlayControllers <- mapM ($ ws) overlayConstructors
  ctx <- ask
  tempController <- lift $ do
    mainContents <- Gtk.boxNew Gtk.OrientationHorizontal 0
    mapM_ (flip runReaderT ctx . getWidget >=> Gtk.containerAdd mainContents)
          controllers
    outerBox <- Gtk.toWidget mainContents >>= buildPadBox
    _ <- widgetSetClassGI mainContents "contents"
    overlay <- Gtk.overlayNew
    Gtk.containerAdd overlay outerBox
    mapM_ (flip runReaderT ctx . getWidget >=>
           Gtk.overlayAddOverlay overlay) overlayControllers
    widget <- Gtk.toWidget overlay
    return
      WorkspaceContentsController
      { containerWidget = widget
      -- Both groups receive updates through 'contentsControllers'.
      , contentsControllers = controllers ++ overlayControllers
      }
  initializeWWC tempController ws
instance WorkspaceWidgetController WorkspaceContentsController where
  getWidget = return . containerWidget
  -- On workspace updates, refresh the CSS status class on the container
  -- itself, then forward the update to every child controller.
  updateWidget cc update = do
    WorkspacesContext {} <- ask
    case update of
      WorkspaceUpdate newWorkspace ->
        lift $ setWorkspaceWidgetStatusClass newWorkspace $ containerWidget cc
      _ -> return ()
    newControllers <- mapM (`updateWidget` update) $ contentsControllers cc
    return cc {contentsControllers = newControllers}
  updateWidgetX11 cc update = do
    newControllers <- mapM (`updateWidgetX11` update) $ contentsControllers cc
    return cc {contentsControllers = newControllers}
-- | Shows the workspace's label text (markup set by 'labelSetter').
newtype LabelController = LabelController { label :: Gtk.Label }

buildLabelController :: ControllerConstructor
buildLabelController ws = do
  tempController <- lift $ do
    lbl <- Gtk.labelNew Nothing
    _ <- widgetSetClassGI lbl "workspace-label"
    return LabelController { label = lbl }
  initializeWWC tempController ws

instance WorkspaceWidgetController LabelController where
  getWidget = lift . Gtk.toWidget . label
  updateWidget lc (WorkspaceUpdate newWorkspace) = do
    WorkspacesContext { workspacesConfig = cfg } <- ask
    labelText <- labelSetter cfg newWorkspace
    lift $ do
      -- labelText is interpreted as Pango markup, not plain text.
      Gtk.labelSetMarkup (label lc) $ T.pack labelText
      setWorkspaceWidgetStatusClass newWorkspace $ label lc
    return lc
  -- Icon updates and other events do not affect the label.
  updateWidget lc _ = return lc
-- | One window-icon cell: an image inside a clickable event box, plus the
-- window it currently represents and an action to redraw the image.
data IconWidget = IconWidget
  { iconContainer :: Gtk.EventBox
  , iconImage :: Gtk.Image
  , iconWindow :: MV.MVar (Maybe WindowData)
  , iconForceUpdate :: IO ()
  }

-- | Resolve the pixbuf for an icon widget at the given size.  When no
-- window is associated and @transparentOnNone@ is set, a transparent
-- placeholder pixbuf is produced instead of 'Nothing'.
getPixbufForIconWidget :: Bool
                       -> MV.MVar (Maybe WindowData)
                       -> Int32
                       -> WorkspacesIO (Maybe Gdk.Pixbuf)
getPixbufForIconWidget transparentOnNone dataVar size = do
  ctx <- ask
  let tContext = taffyContext ctx
      getPBFromData = getWindowIconPixbuf $ workspacesConfig ctx
      getPB' = runMaybeT $
               MaybeT (lift $ MV.readMVar dataVar) >>= MaybeT . getPBFromData size
      getPB = if transparentOnNone
              then maybeTCombine getPB' (Just <$> pixBufFromColor size 0)
              else getPB'
  lift $ runReaderT getPB tContext
-- | Construct an 'IconWidget' for the given workspace.  Clicking it focuses
-- the associated window, or switches to the workspace when the cell is
-- currently empty.
buildIconWidget :: Bool -> Workspace -> WorkspacesIO IconWidget
buildIconWidget transparentOnNone ws = do
  ctx <- ask
  lift $ do
    windowVar <- MV.newMVar Nothing
    img <- Gtk.imageNew
    -- autoSizeImage re-renders the pixbuf whenever the allocation changes.
    refreshImage <-
      autoSizeImage img
        (flip runReaderT ctx . getPixbufForIconWidget transparentOnNone windowVar)
        Gtk.OrientationHorizontal
    ebox <- Gtk.eventBoxNew
    _ <- widgetSetClassGI img "window-icon"
    _ <- widgetSetClassGI ebox "window-icon-container"
    Gtk.containerAdd ebox img
    _ <-
      Gtk.onWidgetButtonPressEvent ebox $
      const $ liftIO $ do
        info <- MV.readMVar windowVar
        case info of
          Just updatedInfo ->
            flip runReaderT ctx $
            liftX11Def () $ focusWindow $ windowId updatedInfo
          _ -> liftIO $ void $ switch ctx (workspaceIdx ws)
        return True
    return
      IconWidget
      { iconContainer = ebox
      , iconImage = img
      , iconWindow = windowVar
      , iconForceUpdate = refreshImage
      }
-- | Controller holding the row of window icons for one workspace.
data IconController = IconController
  { iconsContainer :: Gtk.Box
  , iconImages :: [IconWidget]
  , iconWorkspace :: Workspace
  }

buildIconController :: ControllerConstructor
buildIconController ws = do
  tempController <-
    lift $ do
      hbox <- Gtk.boxNew Gtk.OrientationHorizontal 0
      return
        IconController
        {iconsContainer = hbox, iconImages = [], iconWorkspace = ws}
  initializeWWC tempController ws

instance WorkspaceWidgetController IconController where
  getWidget = lift . Gtk.toWidget . iconsContainer
  updateWidget ic (WorkspaceUpdate newWorkspace) = do
    newImages <- updateImages ic newWorkspace
    return ic { iconImages = newImages, iconWorkspace = newWorkspace }
  updateWidget ic (IconUpdate updatedIcons) =
    updateWindowIconsById ic updatedIcons >> return ic
-- | Refresh exactly those icon widgets whose currently-displayed window is
-- one of the given X11 windows; all other widgets are left untouched.
updateWindowIconsById ::
  IconController -> [X11Window] -> WorkspacesIO ()
updateWindowIconsById ic windowIds = forM_ (iconImages ic) refreshIfAffected
  where
    refreshIfAffected widget = do
      winData <- lift (MV.readMVar (iconWindow widget))
      let affected = maybe False ((`elem` windowIds) . windowId) winData
      when affected $ updateIconWidget ic widget winData
-- | Post-process a getter so any pixbuf it yields is scaled to the
-- requested size along the horizontal orientation.
scaledWindowIconPixbufGetter :: WindowIconPixbufGetter -> WindowIconPixbufGetter
scaledWindowIconPixbufGetter getter size =
  getter size >=>
  lift . traverse (scalePixbufToSize size Gtk.OrientationHorizontal)

-- | Ignore the requested size and always fetch/scale at a fixed size.
constantScaleWindowIconPixbufGetter ::
  Int32 -> WindowIconPixbufGetter -> WindowIconPixbufGetter
constantScaleWindowIconPixbufGetter constantSize getter =
  const $ scaledWindowIconPixbufGetter getter constantSize
-- | Guard a getter so any exception is logged and treated as "no icon"
-- instead of propagating.
handleIconGetterException :: WindowIconPixbufGetter -> WindowIconPixbufGetter
handleIconGetterException getter =
  \size windowData -> catchAny (getter size windowData) $ \e -> do
    wLog WARNING $ printf "Failed to get window icon for %s: %s" (show windowData) (show e)
    return Nothing

-- | Icon from the window's _NET_WM_ICON (EWMH) property.
getWindowIconPixbufFromEWMH :: WindowIconPixbufGetter
getWindowIconPixbufFromEWMH = handleIconGetterException $ \size windowData ->
  runX11Def Nothing (getIconPixBufFromEWMH size $ windowId windowData)

-- | Icon looked up from the window class via the icon theme.
getWindowIconPixbufFromClass :: WindowIconPixbufGetter
getWindowIconPixbufFromClass = handleIconGetterException $ \size windowData ->
  lift $ getWindowIconFromClasses size (windowClass windowData)

-- | Icon from the desktop entry matching the window class.
getWindowIconPixbufFromDesktopEntry :: WindowIconPixbufGetter
getWindowIconPixbufFromDesktopEntry = handleIconGetterException $ \size windowData ->
  getWindowIconFromDesktopEntryByClasses size (windowClass windowData)

-- | Chrome-specific icon data; note this one is not exception-guarded.
getWindowIconPixbufFromChrome :: WindowIconPixbufGetter
getWindowIconPixbufFromChrome _ windowData =
  getPixBufFromChromeData $ windowId windowData
-- | The default getter: try each source in turn, then scale the result.
defaultGetWindowIconPixbuf :: WindowIconPixbufGetter
defaultGetWindowIconPixbuf =
  scaledWindowIconPixbufGetter unscaledDefaultGetWindowIconPixbuf

-- | Source priority: desktop entry, then icon theme by class, then EWMH.
unscaledDefaultGetWindowIconPixbuf :: WindowIconPixbufGetter
unscaledDefaultGetWindowIconPixbuf =
  getWindowIconPixbufFromDesktopEntry <|||>
  getWindowIconPixbufFromClass <|||>
  getWindowIconPixbufFromEWMH

-- | Convenience wrapper over 'addCustomIconsAndFallback' that loads the
-- fallback icon from a file path.
addCustomIconsToDefaultWithFallbackByPath
  :: (WindowData -> Maybe FilePath)
  -> FilePath
  -> WindowIconPixbufGetter
addCustomIconsToDefaultWithFallbackByPath getCustomIconPath fallbackPath =
  addCustomIconsAndFallback
    getCustomIconPath
    (const $ lift $ getPixbufFromFilePath fallbackPath)
    unscaledDefaultGetWindowIconPixbuf

-- | Priority: per-window custom icon, then the supplied default getter,
-- then the fallback; the final result is scaled to the requested size.
addCustomIconsAndFallback
  :: (WindowData -> Maybe FilePath)
  -> (Int32 -> TaffyIO (Maybe Gdk.Pixbuf))
  -> WindowIconPixbufGetter
  -> WindowIconPixbufGetter
addCustomIconsAndFallback getCustomIconPath fallback defaultGetter =
  scaledWindowIconPixbufGetter $
  getCustomIcon <|||> defaultGetter <|||> (\s _ -> fallback s)
  where
    getCustomIcon :: Int32 -> WindowData -> TaffyIO (Maybe Gdk.Pixbuf)
    getCustomIcon _ wdata =
      lift $
      maybe (return Nothing) getPixbufFromFilePath $ getCustomIconPath wdata
-- | Order windows by (minimized, on-screen position).  Windows whose
-- geometry cannot be read get a huge sentinel position so they sort last
-- within their minimized/non-minimized group.
sortWindowsByPosition :: [WindowData] -> WorkspacesIO [WindowData]
sortWindowsByPosition wins = do
  let getGeometryWorkspaces w = getDisplay >>= liftIO . (`safeGetGeometry` w)
      getGeometries = mapM
                      (forkM return
                       ((((sel2 &&& sel3) <$>) .) getGeometryWorkspaces) .
                       windowId)
                      wins
  windowGeometries <- liftX11Def [] getGeometries
  let getLeftPos wd =
        fromMaybe (999999999, 99999999) $ lookup (windowId wd) windowGeometries
      compareWindowData a b =
        compare
          (windowMinimized a, getLeftPos a)
          (windowMinimized b, getLeftPos b)
  return $ sortBy compareWindowData wins
-- | Bring the icon widgets in line with the workspace's window list:
-- reuse existing widgets, create new ones as needed (respecting
-- 'minIcons'/'maxIcons'), and clear any leftovers by giving them no window.
updateImages :: IconController -> Workspace -> WorkspacesIO [IconWidget]
updateImages ic ws = do
  WorkspacesContext {workspacesConfig = cfg} <- ask
  sortedWindows <- iconSort cfg $ windows ws
  wLog DEBUG $ printf "Updating images for %s" (show ws)
  let updateIconWidget' getImageAction wdata = do
        iconWidget <- getImageAction
        _ <- updateIconWidget ic iconWidget wdata
        return iconWidget
      existingImages = map return $ iconImages ic
      buildAndAddIconWidget transparentOnNone = do
        iw <- buildIconWidget transparentOnNone ws
        lift $ Gtk.containerAdd (iconsContainer ic) $ iconContainer iw
        return iw
      -- Existing widgets first, then transparent placeholders up to
      -- minIcons, then an endless supply of regular widgets.
      infiniteImages =
        existingImages ++
        replicate (minIcons cfg - length existingImages)
                  (buildAndAddIconWidget True) ++
        repeat (buildAndAddIconWidget False)
      windowCount = length $ windows ws
      maxNeeded = maybe windowCount (min windowCount) $ maxIcons cfg
      newImagesNeeded = length existingImages < max (minIcons cfg) maxNeeded
      -- XXX: Only one of the two things being zipped can be an infinite list,
      -- which is why this newImagesNeeded contortion is needed.
      imgSrcs =
        if newImagesNeeded
          then infiniteImages
          else existingImages
      getImgs = maybe imgSrcs (`take` imgSrcs) $ maxIcons cfg
      justWindows = map Just sortedWindows
      -- Surplus widgets receive Nothing, which blanks them out.
      windowDatas =
        if newImagesNeeded
          then justWindows ++
               replicate (minIcons cfg - length justWindows) Nothing
          else justWindows ++ repeat Nothing
  newImgs <-
    zipWithM updateIconWidget' getImgs windowDatas
  when newImagesNeeded $ lift $ Gtk.widgetShowAll $ iconsContainer ic
  return newImgs
-- | CSS status-class name for a window: minimized wins over active, active
-- over urgent, otherwise "normal".  Always lower case.
getWindowStatusString :: WindowData -> T.Text
getWindowStatusString wd = T.toLower $ T.pack statusName
  where
    statusName
      | windowMinimized wd = "minimized"
      | windowActive wd = show Active
      | windowUrgent wd = show Urgent
      | otherwise = "normal"
-- | Every status class an icon widget may carry; used to clear stale
-- classes before applying the current one.
possibleStatusStrings :: [T.Text]
possibleStatusStrings =
  map
    (T.toLower . T.pack)
    [show Active, show Urgent, "minimized", "normal", "inactive"]
-- | Point an icon widget at a (possibly absent) window: store the window
-- data, refresh the tooltip, redraw the image and swap the CSS status class.
updateIconWidget
  :: IconController
  -> IconWidget
  -> Maybe WindowData
  -> WorkspacesIO ()
updateIconWidget _ IconWidget
                   { iconContainer = iconButton
                   , iconWindow = windowRef
                   , iconForceUpdate = updateIcon
                   } windowData = do
  let statusString = maybe "inactive" getWindowStatusString windowData :: T.Text
      title = T.pack . windowTitle <$> windowData
      setIconWidgetProperties =
        updateWidgetClasses iconButton [statusString] possibleStatusStrings
  void $ updateVar windowRef $ const $ return windowData
  Gtk.widgetSetTooltipText iconButton title
  lift $ updateIcon >> setIconWidgetProperties
-- | Wraps a contents controller in an event box that reacts to clicks
-- (switch to the workspace) and scrolling (cycle workspaces).
data WorkspaceButtonController = WorkspaceButtonController
  { button :: Gtk.EventBox
  , buttonWorkspace :: Workspace
  , contentsController :: WWC
  }

buildButtonController :: ParentControllerConstructor
buildButtonController contentsBuilder workspace = do
  cc <- contentsBuilder workspace
  workspacesRef <- asks workspacesVar
  ctx <- ask
  widget <- getWidget cc
  lift $ do
    ebox <- Gtk.eventBoxNew
    Gtk.containerAdd ebox widget
    Gtk.eventBoxSetVisibleWindow ebox False
    _ <-
      Gtk.onWidgetScrollEvent ebox $ \scrollEvent -> do
        dir <- Gdk.getEventScrollDirection scrollEvent
        workspaces <- liftIO $ MV.readMVar workspacesRef
        -- Up/Left cycle one direction, Down/Right the other; the bound is
        -- the highest workspace index.
        let switchOne a =
              liftIO $
              flip runReaderT ctx $
              liftX11Def
                ()
                (switchOneWorkspace a (length (M.toList workspaces) - 1)) >>
              return True
        case dir of
          Gdk.ScrollDirectionUp -> switchOne True
          Gdk.ScrollDirectionLeft -> switchOne True
          Gdk.ScrollDirectionDown -> switchOne False
          Gdk.ScrollDirectionRight -> switchOne False
          _ -> return False
    _ <- Gtk.onWidgetButtonPressEvent ebox $ const $ switch ctx $ workspaceIdx workspace
    return $
      WWC
        WorkspaceButtonController
        { button = ebox, buttonWorkspace = workspace, contentsController = cc }
-- | Switch to the given workspace; always reports the event as handled.
switch :: (MonadIO m) => WorkspacesContext -> WorkspaceId -> m Bool
switch ctx idx =
  liftIO (runReaderT (liftX11Def () (switchToWorkspace idx)) ctx) >>
  return True
instance WorkspaceWidgetController WorkspaceButtonController
  where
    getWidget wbc = lift $ Gtk.toWidget $ button wbc
    -- The button itself is inert; updates go to the wrapped contents.
    updateWidget wbc update = do
      newContents <- updateWidget (contentsController wbc) update
      return wbc { contentsController = newContents }
| teleshoes/taffybar | src/System/Taffybar/Widget/Workspaces.hs | bsd-3-clause | 32,155 | 0 | 28 | 7,020 | 7,976 | 4,004 | 3,972 | -1 | -1 |
{-# LANGUAGE MultiParamTypeClasses, TypeSynonymInstances, FlexibleInstances,
BangPatterns, ConstraintKinds #-}
module Numeric.DSDE.SDESolver where
import Numeric.DSDE.SDE.GeometricBrownian
import Numeric.DSDE.RNG
import Numeric.DSDE.SDE
import qualified System.Random.MWC as M
-- | The Euler-Maruyama solving method. Order 1/2.
data EulerMaruyama = EulerMaruyama

-- | The Milstein solving method. Order 1.
data Milstein = Milstein

-- | Type class describing a method of solving SDE problems.
-- Defined by the next value produced in a solving sequence.
class SDESolver a where
  -- | One step: given the solver, an SDE, a random generator, current time
  -- t_i, current value w_i and step size deltat, produce w_{i+1}.
  w_iplus1 :: (Monad m, SDE sde, RNGGen rng m p, Parameter p) =>
    a -> sde p -> rng -> p -> p -> p -> m p
  -- | Human-readable name of the method.
  solverName :: a -> String
instance SDESolver EulerMaruyama where
  {-# INLINE w_iplus1 #-}
  {-# SPECIALIZE w_iplus1 :: EulerMaruyama -> GeometricBrownian Double -> M.GenIO -> Double -> Double -> Double -> IO Double #-}
  -- Explicit step: w + f*dt + g*dB, with dB = sqrt(dt) * rand where rand
  -- is one draw from the generator.
  w_iplus1 _ !sde !rng !t_i !w_i !deltat = getRand rng >>= \rand -> return $
    w_i
    + f sde t_i w_i * deltat
    + g sde t_i w_i * deltaB rand
    where deltaB r = sqrt deltat * r

  solverName _ = "Euler-Maruyama"
instance SDESolver Milstein where
  -- INLINE added for consistency with the EulerMaruyama instance above.
  {-# INLINE w_iplus1 #-}
  -- Euler-Maruyama increment plus the second-order correction
  -- g/2 * partgoverparty * (dB^2 - dt); 'partgoverparty' is presumably
  -- the partial derivative of g with respect to y — confirm against the
  -- SDE class definition.
  w_iplus1 _ !sde !rng !t_i !w_i !deltat = getRand rng >>= \rand -> return $
    w_i
    + f sde t_i w_i * deltat
    + g' * deltaB rand
    + g'/2 * partgoverparty sde t_i w_i * (deltaB rand^^(2 :: Integer) - deltat)
    where
      deltaB r = sqrt deltat * r
      g' = g sde t_i w_i

  solverName _ = "Milstein"
| davnils/sde-solver | src/Numeric/DSDE/SDESolver.hs | bsd-3-clause | 1,664 | 0 | 16 | 497 | 398 | 203 | 195 | 31 | 0 |
{-# LANGUAGE TypeSynonymInstances #-}
module Kwil.Lexer where
import Data.Char
import Data.List
import Data.Maybe (listToMaybe)
import Control.Monad.Trans.State.Lazy
import Control.Monad
-- | Lexical tokens produced by the lexer.  Identifiers are split by the
-- case of their first character; 'EndOfFile' terminates every token stream.
data Token = LowerCaseID String
           | UpperCaseID String
           | Equals
           | LBracket
           | RBracket
           | LParen
           | RParen
           | ClassDot -- ::
           | QualifierDot -- .
           | CaseSeparator
           | LambdaSymbol
           | ArrowSymbol
           | Semicolon
           | Number Integer
           | If
           | Then
           | Else
           | Data
           | Case
           | Of
           | Otherwise
           | EndOfFile deriving (Read, Show, Eq)
-- | Characters permitted inside an identifier: letters, digits and '_'.
isIDChar :: Char -> Bool
isIDChar c = isAlphaNum c || c == '_'
-- Here's a list of reserved words that
-- cannot be used in names
-- NOTE(review): order matters — 'parseReserved' takes the first entry that
-- is a prefix of the remaining input, so "::" must stay ahead of ".".
-- Keyword matching has no word-boundary check; confirm that inputs like
-- "iffy" (which would lex as 'If' followed by "fy") cannot occur.
reservedTokens = [(";", Semicolon),
                  ("{", LBracket),
                  ("}", RBracket),
                  ("(", LParen),
                  (")", RParen),
                  ("::", ClassDot),
                  (".", QualifierDot),
                  ("=", Equals),
                  ("|", CaseSeparator),
                  ("\\", LambdaSymbol),
                  ("->", ArrowSymbol),
                  ("if", If),
                  ("then", Then),
                  ("else", Else),
                  ("data", Data),
                  ("case", Case),
                  ("of", Of),
                  ("otherwise", Otherwise)]
-- | A token paired with the source location where it started.
data KToken = KToken {
  token :: Token,
  location :: Location
}

instance Show KToken where
  -- Only the token is rendered; the location is ignored.
  show (KToken token location) = show token

-- | A position in the input: file name plus line and column (both start
-- at 1, see 'parseFile').
data Location = Loc {
  filename :: String,
  line :: Integer,
  col :: Integer
}
-- advance the column by the given number of steps
moveRight steps loc = loc {col = col loc + toInteger steps}
-- when newlining, we return to the start of the line
moveDown steps loc = loc {col=1, line=line loc + toInteger steps}
-- | Like 'span', but report the length of the matching prefix instead of
-- the prefix itself: (number of leading matches, remainder of the list).
dropCount p xs = (length prefix, rest)
  where (prefix, rest) = span p xs
-- | The lexer state: remaining input plus the current cursor location.
data ReaderData = ReaderData String Location
-- | A state-passing computation over 'ReaderData' — a hand-rolled State
-- monad specialised to the lexer.
data InputReader a = Reader { runReader :: ReaderData -> (a, ReaderData) }
instance Functor InputReader where
  -- map over the result, threading the state through unchanged
  fmap f reader = Reader $ \rData ->
    let (result,state) = runReader reader rData in
    (f result, state)

instance Applicative InputReader where
  pure a = Reader $ \rData -> (a, rData)
  -- run the function computation first, then the argument computation
  rF <*> rA = Reader $ \rData ->
    let (f, newState) = runReader rF rData
        (a, newState') = runReader rA newState in (f a, newState')

instance Monad InputReader where
  (>>=) (Reader firstOp) nextOpDecider = Reader $ \rData ->
    -- run through the reader, and use it to find the next state
    let (result, newState) = firstOp rData
        nextOp = nextOpDecider result in
    runReader nextOp newState
-- just get one character but don't edit the data;
-- yields Nothing at end of input
peek = Reader $ \rData@(ReaderData input loc) -> (listToMaybe input, rData)
-- | Consume one character and advance the cursor: a newline moves to the
-- start of the next line, anything else moves one column right.  Callers
-- are expected to 'peek' first; at end of input this now raises a
-- descriptive error instead of an opaque pattern-match failure.
takeOne :: InputReader Char
takeOne = Reader $ \(ReaderData input loc) ->
  case input of
    [] -> error "Kwil.Lexer.takeOne: unexpected end of input"
    (i:is)
      | i == '\n' -> (i, ReaderData is (moveDown 1 loc))
      | otherwise -> (i, ReaderData is (moveRight 1 loc))
-- just get the current location
curLoc :: InputReader Location
curLoc = Reader $ \rData@(ReaderData _ loc) -> (loc, rData)

-- just get the current input
curInput :: InputReader String
curInput = Reader $ \rData@(ReaderData input pos) -> (input, rData)
-- | Pair an AST token with the location at which it started.
aToken :: Token -> Location -> InputReader KToken
aToken t loc = pure (KToken t loc)
-- take N characters, returning them in order
takeN = flip replicateM takeOne
-- takeWhile for our reader... take values that match our preditcate
takeWhileR :: (Char -> Bool) -> InputReader String
takeWhileR predicate = do
  mTop <- peek
  case mTop of
    Just top ->
      if predicate top then do
        takeOne -- take the top character off
        rest <- takeWhileR predicate
        return $ top:rest
      else
        return []
    -- end of input: nothing more to take
    Nothing -> return []
-- | Lex an entire input string; the second argument is the file name used
-- in the locations attached to each token.  The final reader state is
-- discarded.
parseFile :: String -> String -> [KToken]
parseFile input fname =
  let initialLocation = Loc fname 1 1
      rData = ReaderData input initialLocation
      -- run the reader itself
      (tokens, endState) = runReader parseKTokens rData in
  tokens
-- | Lex tokens until 'EndOfFile', which is included as the final element.
parseKTokens :: InputReader [KToken]
parseKTokens = do
  token <- parseKTokenM
  case token of
    KToken EndOfFile loc -> return [token]
    _ -> do
      tokens <- parseKTokens
      return (token:tokens)
-- | Lex a single token, producing 'EndOfFile' when the input is exhausted.
parseKTokenM :: InputReader KToken
parseKTokenM = do
  maybeFirstChar <- peek
  loc <- curLoc
  case maybeFirstChar of
    Nothing -> aToken EndOfFile loc
    Just _ -> parseReserved
-- | Try reserved symbols/keywords first (first match in 'reservedTokens'
-- whose text is a prefix of the input), falling back to identifier,
-- number or whitespace handling.
parseReserved :: InputReader KToken
parseReserved = do
  input <- curInput
  -- check for each reserved token whether the token representation
  -- is a prefix of the input
  case find (flip isPrefixOf input . fst) reservedTokens of
    Just (tokenRepr, astToken) -> do
      tokenLoc <- curLoc
      takeN (length tokenRepr) -- pop the tokens
      aToken astToken tokenLoc
    Nothing -> parseIDs
-- | Dispatch on the first character: lower-case identifier, upper-case
-- identifier, number, or skipped whitespace.  Any other character is a
-- lexical error.
parseIDs = do
  (Just fstChar) <- peek -- we've previously checked for non-emptiness
  loc <- curLoc
  if isLower fstChar then do
    lowerID <- takeWhileR isIDChar
    aToken (LowerCaseID lowerID) loc
  else if isUpper fstChar then do
    upperID <- takeWhileR isIDChar
    aToken (UpperCaseID upperID) loc
  else if isDigit fstChar then do
    digits <- takeWhileR isDigit
    aToken (Number (read digits)) loc
  else if isSpace fstChar then do
    -- skip the whitespace run, then retry from the top
    parseSpaces
    parseKTokenM
  else error $ " Lexer token error : token was <<" ++ (fstChar:">>")

-- consume a run of whitespace characters
parseSpaces = takeWhileR isSpace
| rtpg/kwil | Kwil/Lexer.hs | bsd-3-clause | 5,667 | 0 | 16 | 1,637 | 1,611 | 866 | 745 | 147 | 5 |
{-# LANGUAGE
FlexibleInstances
, TypeSynonymInstances
#-}
module Data.String.ToString (ToString (..)) where
import Data.CaseInsensitive (CI, foldedCase)
import qualified Data.ByteString.Lazy.UTF8 as LBU
import qualified Data.ByteString.UTF8 as SBU
import qualified Data.Text as ST
import qualified Data.Text.Lazy as LT
-- | Types that can be converted to a 'String'.
class ToString a where
  toString :: a -> String

instance ToString String where
  toString = id

-- ByteStrings are decoded as UTF-8.
instance ToString SBU.ByteString where
  toString = SBU.toString

instance ToString LBU.ByteString where
  toString = LBU.toString

instance ToString ST.Text where
  toString = ST.unpack

instance ToString LT.Text where
  toString = LT.unpack

-- Case-insensitive values are rendered via their folded form.
instance ToString s => ToString (CI s) where
  toString = toString . foldedCase
| silkapp/tostring | src/Data/String/ToString.hs | bsd-3-clause | 783 | 0 | 7 | 154 | 195 | 115 | 80 | 23 | 0 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Duration.UK.Corpus
( corpus
, negativeCorpus
) where
import Prelude
import Data.String
import Duckling.Duration.Types
import Duckling.Locale
import Duckling.Resolve
import Duckling.Testing.Types
import Duckling.TimeGrain.Types (Grain(..))
-- | Test context pinned to the Ukrainian locale.
context :: Context
context = testContext {locale = makeLocale UK Nothing}

corpus :: Corpus
corpus = (context, testOptions, allExamples)

-- | Phrases that must NOT be parsed as durations.
negativeCorpus :: NegativeCorpus
negativeCorpus = (context, testOptions, examples)
  where
    examples =
      [ "в дні"
      , "секретар"
      , "хвилини"
      ]
-- | Positive examples: each 'DurationData' value paired with Ukrainian
-- phrases that must resolve to it.
allExamples :: [Example]
allExamples = concat
  [ examples (DurationData 1 Second)
             [ "1 секунда"
             , "одна сек"
             ]
  , examples (DurationData 2 Minute)
             [ "2 хв"
             , "дві хвилини"
             ]
  , examples (DurationData 30 Day)
             [ "30 днів"
             ]
  , examples (DurationData 7 Week)
             [ "сім тижнів"
             ]
  , examples (DurationData 1 Month)
             [ "1 місяць"
             ]
  , examples (DurationData 2 Year)
             [ "2 роки"
             ]
  , examples (DurationData 30 Minute)
             [ "півгодини"
             , "1/2 години"
             ]
  , examples (DurationData 12 Hour)
             [ "пів дня"
             ]
  , examples (DurationData 90 Minute)
             [ "півтори години"
             ]
  , examples (DurationData 27 Month)
             [ "2 роки і 3 місяці"
             , "2 роки, 3 місяці"
             ]
  , examples (DurationData 31719604 Second)
             [ "1 рік, 2 дня, 3 години і 4 секунди"
             ]
  ]
| facebookincubator/duckling | Duckling/Duration/UK/Corpus.hs | bsd-3-clause | 2,019 | 0 | 9 | 636 | 381 | 220 | 161 | 49 | 1 |
-- JavaScript Contract Compiler
module Main where
import System.Console.GetOpt
import System.Environment
import System.Directory
import System.FilePath
import System.Exit
import Control.Monad
import BrownPLT.JavaScript.Contracts
import Paths_JsContracts -- created by Cabal
import BrownPLT.JavaScript.Parser (parseJavaScriptFromFile)
import Data.List
-- | Command-line flags accepted by jscc.  The derived 'Ord' fixes the order
-- produced by the 'sort' in 'main', which the flag-consuming helpers
-- (getDebugMode, getNamespace, ...) rely on when matching list heads.
data Flag
  = Help
  | Release
  | Debug
  | Namespace String
  | Interface String
  | NoExport
  deriving (Eq,Ord,Show)
-- | GetOpt descriptors for every 'Flag'.
options :: [ OptDescr Flag ]
options =
  [ Option ['h'] ["help"] (NoArg Help)
      "display this help message"
  , Option ['r'] ["release"] (NoArg Release)
      "encapsulate, ignoring all contracts"
  , Option ['d'] ["debug"] (NoArg Debug)
      "enable contracts and encapsulate (default)"
  , Option ['n'] ["namespace"] (ReqArg Namespace "NAMESPACE")
      "exports names to the namespace"
  , Option [] ["no-export"] (NoArg NoExport)
      "do not export names to the global object"
  , Option ['i'] ["interface"] (ReqArg Interface "PATH")
      "path to the interface; uses module.jsi by default"
  ]

-- usage text printed for --help
usage = usageInfo
  "Usage: jscc [options] module.js\nOptions:\n" options
-- | Entry point: parse flags (sorted so each helper can peel its flag off
-- the head of the list), then compile the single module argument in either
-- debug (contracts enabled) or release mode.
main = do
  args <- getArgs
  dataDir <- getDataDir
  let (opts', nonOpts, errors) = getOpt Permute options args
  let opts = sort opts'
  unless (null errors) $ do
    mapM_ putStrLn errors
    fail "jscc terminated"
  checkHelp opts
  -- Each helper consumes its flag (if present) and returns the rest.
  (isDebugMode, opts) <- getDebugMode opts
  (namespace, opts) <- getNamespace opts
  (ifacePath, opts) <- getInterfacePath opts nonOpts
  (isExport, opts) <- getExportGlobals opts
  when (not $ null opts) $ do
    putStrLn $ "spurious arguments: " ++ (show opts)
    fail "jscc terminated"
  case nonOpts of
    [implPath] -> do
      checkFile implPath
      rawImpl <- readFile implPath
      let boilerplatePath = dataDir </> "contracts.js"
      rawBoilerplate <- readFile boilerplatePath
      interface <- parseInterface ifacePath
      let result = if isDebugMode
                     then compileFormatted rawImpl implPath rawBoilerplate
                            isExport interface
                     else compileRelease rawImpl implPath rawBoilerplate
                            isExport interface namespace
      putStrLn result
      return ()
    otherwise -> do
      putStrLn "expected a single filename.js"
      fail "jscc terminated"
-- | Abort the program with a helpful message unless the file exists.
checkFile :: FilePath -> IO ()
checkFile path = do
  exists <- doesFileExist path
  unless exists $ do
    putStrLn $ "could not find the file: " ++ path
    exitFailure
-- Each getter expects the sorted flag list, strips its flag (if present)
-- from the head, and returns the parsed value plus the remaining flags.
getDebugMode (Release:rest) = return (False,rest)
getDebugMode (Debug:rest) = return (True,rest)
getDebugMode rest = return (True,rest) -- debug is the default

getNamespace ((Namespace s):rest) = return (Just s, rest)
getNamespace rest = return (Nothing,rest)

-- --help short-circuits the program after printing usage
checkHelp (Help:_) = do
  putStrLn usage
  exitSuccess
checkHelp _ = return ()

getExportGlobals (NoExport:rest) = return (False, rest)
getExportGlobals rest = return (True, rest)
-- | Determine the interface file: an explicit -i flag wins; otherwise the
-- implementation path with its extension replaced by ".jsi".  Either way
-- the file must exist.
getInterfacePath :: [Flag] -> [String] -> IO (FilePath,[Flag])
getInterfacePath (Interface path:rest) _ = do
  checkFile path
  return (path,rest)
getInterfacePath rest (implPath:_) = do
  let path = addExtension (dropExtension implPath) "jsi"
  checkFile path
  return (path,rest)
getInterfacePath _ [] = do
  putStrLn "Invalid arguments (use -h for help)"
  exitFailure
| brownplt/javascript-contracts | src/Jscc.hs | bsd-3-clause | 3,294 | 0 | 16 | 721 | 996 | 499 | 497 | 95 | 3 |
{-|
Module : Numeric.ER.ShowHTML
Description : Misc facilities for HTML rendering.
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : portable
-}
module Numeric.ER.ShowHTML where
import qualified Text.Html as H
import Text.Regex
{-|
Render HTML is a way that can be inlined in
Javascript strings etc.
-}
{-|
    Render HTML in a way that can be inlined in
    Javascript strings etc.
-}
showHTML ::
    (H.HTML t) =>
    t -> String
showHTML v =
    escapeNewLines $
    renderHtmlNoHeader $
    H.toHtml v
    where
--    stripHeader s =
--        (splitRegex (mkRegex "-->") s) !! 1
    -- append a backslash line-continuation wherever a line does not
    -- already end with one
    escapeNewLines s =
        (subRegex (mkRegex "([^\\])$") s "\\1\\\\")
-- table with each cell in its own row
abovesTable attrs cells =
    H.table H.! attrs H.<< (H.aboves $ map (H.td H.<<) cells)

-- table with all cells in a single row
besidesTable attrs cells =
    H.table H.! attrs H.<< (H.aboves [H.besides $ map (H.td H.<<) cells])

-- | Render the HTML elements directly, without the header comment that
-- the stock renderer would prepend.
renderHtmlNoHeader :: H.Html -> String
renderHtmlNoHeader theHtml =
    foldr (.) id (map (H.renderHtml' 0)
    (H.getHtmlElements theHtml)) "\n"
-- | Render any 'Show'-able value as HTML via its 'show' representation.
toHtmlDefault :: (Show a) => a -> H.Html
toHtmlDefault = H.toHtml . show
-- 'Nothing' renders as the literal text "[Nothing]"; 'Just' delegates to
-- the wrapped value's own rendering.
instance (H.HTML a) => H.HTML (Maybe a) where
    toHtml = maybe (H.toHtml "[Nothing]") H.toHtml
| michalkonecny/polypaver | src/Numeric/ER/ShowHTML.hs | bsd-3-clause | 1,285 | 0 | 12 | 349 | 336 | 179 | 157 | 25 | 1 |
module BuildParseTests (test) where
import Blaze.ByteString.Builder (Builder, toByteString)
import Data.ByteString (ByteString)
import Data.Attoparsec.ByteString (Parser, parseOnly, endOfInput)
import Test.Framework (Test, testGroup)
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Network.SPDY.Frames
import Network.SPDY.Internal.Deserialize
import Network.SPDY.Internal.Serialize
import Instances ()
-- | Round-trip properties: serializing any frame-level value and
-- parsing it back must reproduce the original, for every wire type.
test :: Test
test = testGroup "Build-parse tests" roundTrips
  where
    roundTrips =
      [ testProperty "Raw frame header" prop_buildParseRawFrameHeader
      , testProperty "DataLength" prop_buildParseDataLength
      , testProperty "StreamID" prop_buildParseStreamID
      , testProperty "HeaderCount" prop_buildParseHeaderCount
      , testProperty "HeaderName" prop_buildParseHeaderName
      , testProperty "HeaderValue" prop_buildParseHeaderValue
      , testProperty "Priority" prop_buildParsePriority
      , testProperty "Slot" prop_buildParseSlot
      , testProperty "TerminationStatus" prop_buildParseTerminationStatus
      , testProperty "SettingID" prop_buildParseSettingID
      , testProperty "SettingValue" prop_buildParseSettingValue
      , testProperty "SettingIDAndFlags" prop_buildParseSettingIDAndFlags
      , testProperty "PingID" prop_buildParsePingID
      , testProperty "GoAwayStatus" prop_buildParseGoAwayStatus
      , testProperty "DeltaWindowSize" prop_buildParseDeltaWindowSize
      , testProperty "Slot16" prop_buildParseSlot16
      , testProperty "Proof" prop_buildParseProof
      , testProperty "Certificate" prop_buildParseCertificate
      ]
-- Each property below checks that building a value with its builder
-- (or 'toBuilder' for 'ToBuilder' instances) and then parsing the
-- resulting bytes yields the original value.
prop_buildParseRawFrameHeader :: RawFrameHeader -> Bool
prop_buildParseRawFrameHeader = prop_buildParse rawHeaderBuilder parseFrameHeader
prop_buildParseDataLength :: DataLength -> Bool
prop_buildParseDataLength = prop_buildParse toBuilder parseDataLength
prop_buildParseStreamID :: StreamID -> Bool
prop_buildParseStreamID = prop_buildParse toBuilder parseStreamID
prop_buildParsePriority :: Priority -> Bool
prop_buildParsePriority = prop_buildParse toBuilder parsePriority
prop_buildParseSlot :: Slot -> Bool
prop_buildParseSlot = prop_buildParse toBuilder parseSlot
prop_buildParseHeaderCount :: HeaderCount -> Bool
prop_buildParseHeaderCount = prop_buildParse toBuilder parseHeaderCount
prop_buildParseHeaderName :: HeaderName -> Bool
prop_buildParseHeaderName = prop_buildParse toBuilder parseHeaderName
prop_buildParseHeaderValue :: HeaderValue -> Bool
prop_buildParseHeaderValue = prop_buildParse toBuilder parseHeaderValue
prop_buildParseTerminationStatus :: TerminationStatus -> Bool
prop_buildParseTerminationStatus = prop_buildParse toBuilder parseTerminationStatus
prop_buildParseSettingID :: SettingID -> Bool
prop_buildParseSettingID = prop_buildParse toBuilder parseSettingID
prop_buildParseSettingValue :: SettingValue -> Bool
prop_buildParseSettingValue = prop_buildParse toBuilder parseSettingValue
prop_buildParseSettingIDAndFlags :: SettingIDAndFlags -> Bool
prop_buildParseSettingIDAndFlags = prop_buildParse toBuilder parseSettingIDAndFlags
prop_buildParsePingID :: PingID -> Bool
prop_buildParsePingID = prop_buildParse toBuilder parsePingID
prop_buildParseGoAwayStatus :: GoAwayStatus -> Bool
prop_buildParseGoAwayStatus = prop_buildParse toBuilder parseGoAwayStatus
prop_buildParseDeltaWindowSize :: DeltaWindowSize -> Bool
prop_buildParseDeltaWindowSize = prop_buildParse toBuilder parseDeltaWindowSize
prop_buildParseSlot16 :: Slot16 -> Bool
prop_buildParseSlot16 = prop_buildParse toBuilder parseSlot16
prop_buildParseProof :: Proof -> Bool
prop_buildParseProof = prop_buildParse toBuilder parseProof
prop_buildParseCertificate :: Certificate -> Bool
prop_buildParseCertificate = prop_buildParse toBuilder parseCertificate
-- | Build-then-parse round trip through a 'Builder' and a 'Parser';
-- the parser must consume the entire input.
prop_buildParse :: Eq a => (a -> Builder) -> Parser a -> a -> Bool
prop_buildParse builderFor parser =
  prop_serializeParse (toByteString . builderFor) (parseOnly parseAll)
  where parseAll = parser <* endOfInput
-- | Serialize a value, run the bytes through the given parser, and
-- check that the decoded result equals the original.  A parse
-- failure counts as a property failure.
prop_serializeParse :: Eq a => (a -> ByteString) -> (ByteString -> Either String a) -> a -> Bool
prop_serializeParse toBytes fromBytes value =
  case fromBytes (toBytes value) of
    Left _        -> False
    Right decoded -> value == decoded
| kcharter/spdy-base | test/BuildParseTests.hs | bsd-3-clause | 4,180 | 0 | 10 | 522 | 770 | 410 | 360 | 73 | 1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE PartialTypeSignatures #-}
module Language.CodeGen.LLVM where
import Prelude hiding (init)
import Control.Monad.State
import Data.Foldable(toList)
import Data.Sequence (Seq,(<|),(|>),(><))
import qualified Data.Sequence as S
import LLVM.General.AST (Module,Definition,BasicBlock,Named,Instruction)
import qualified LLVM.General.AST as L
import qualified LLVM.General.AST.Linkage as Linkage
import qualified LLVM.General.AST.Visibility as Visibility
import qualified LLVM.General.AST.CallingConvention as CallingConvention
import qualified LLVM.General.AST.Type as Type
import qualified LLVM.General.AST.Constant as Constant
import qualified LLVM.General.AST.Float as Float
import Language.SimpleExpression (SimpleExpr)
import qualified Language.SimpleExpression as Simple
import Language.Expression (Expr)
import qualified Language.Expression as Full
import Language.Function
-- | Binary path component addressing a slot inside nested pairs:
-- 'One' selects the first component, 'Two' the second.
data Two = One | Two
-- Rendered as "1"/"2" so a whole path can be concatenated into an
-- LLVM register name (see 'initName' and 'loopName').
instance Show Two where
  show One = "1"
  show Two = "2"
-- | Ground expressions over 'Double', with variables addressed by a
-- path of 'Two's into the (nested-pair) loop state.
data GroundExpr a where
  Var :: Seq Two -> GroundExpr Double
  Const :: Double -> GroundExpr Double
  App :: GroundExpr (a -> b) -> GroundExpr a -> GroundExpr b
  Fun :: Fun (a -> b) -> GroundExpr (a -> b)
deriving instance Show (GroundExpr a)
-- | One state slot: its path, an optional initial value, and the
-- expression recomputed for it on every loop iteration.
data Assignment where
  Assignment :: Seq Two -> Maybe Double -> GroundExpr Double -> Assignment
deriving instance Show Assignment
type Assignments = Seq Assignment
-- | Compile an optimized signal network down to an LLVM module.
--
-- NOTE(review): only the @LoopD i (Arr (Function g))@ result shape of
-- 'optimize' is handled; any other constructor makes this 'case' fail
-- at runtime.  Also, @S.Const@, @E.Var@ and @inj@ do not resolve
-- against the imports visible in this file (@S@ is "Data.Sequence";
-- no @E@ is imported) -- presumably @Simple.Const@ / @Full.Var@ were
-- intended.  Confirm against the build.
compile :: SynArrow SimpleExpr Function b Double -> Module
compile f = case optimize f of
  LoopD i (Arr (Function g)) -> modul "test" $ assignments (inj (S.Const 0) i) $ g E.Var
-- | Compile a network and pretty-print the module as LLVM assembly,
-- returning @Left err@ if llvm-general rejects the AST.
--
-- NOTE(review): 'runExceptT' and @L.withContext@ are not provided by
-- the imports shown in this file (@withContext@ normally lives in
-- LLVM.General.Context, 'runExceptT' in Control.Monad.Except) --
-- verify the real import list.
compile' :: SynArrow SimpleExpr Function b Double -> IO (Either String String)
compile' s = L.withContext $ \ctx ->
  runExceptT $ L.withModuleFromAST ctx (compile s) $ \m ->
    L.moduleLLVMAssembly m
-- | Wrap the generated @main@ function in a module with the given
-- name; the module contains nothing else.
modul :: String -> Assignments -> Module
modul name asgns =
  L.defaultModule
    { L.moduleName        = name
    , L.moduleDefinitions = [mainFunction asgns]
    }
-- | The single @main@ function of the module: an @init@ basic block
-- that sets up the state registers, followed by an infinite @loop@
-- block (see 'mainBody').
--
-- NOTE(review): the arguments below are positional fields of
-- llvm-general's 'L.Function' constructor (linkage, visibility,
-- calling convention, return type @i32@, name @main@, empty
-- non-vararg parameter list, ...); their exact order and count are
-- tied to the llvm-general version this project pins -- confirm
-- before rearranging.
mainFunction :: Assignments -> Definition
mainFunction ass =
  L.GlobalDefinition $ L.Function Linkage.Internal
    Visibility.Default
    Nothing
    CallingConvention.Fast
    []
    (Type.IntegerType 32)
    (L.Name "main")
    ([],False)
    []
    Nothing
    Nothing
    0
    Nothing
    Nothing
    $ mainBody ass
-- | Run the instruction generator and package its output as two
-- basic blocks: @init@ runs once, then control stays in @loop@
-- forever (both terminators branch to @loop@).
mainBody :: Assignments -> [BasicBlock]
mainBody as =
  [ basicBlock "init" (initBlock gen)
  , basicBlock "loop" (loopBlock gen)
  ]
  where
    gen = execState (genInstructions as) empty
    -- Every block falls through into the (infinite) loop block.
    basicBlock label insts =
      L.BasicBlock (L.Name label) (toList insts) (L.Do (L.Br (L.Name "loop") []))
-- | Emit LLVM instructions for every assignment: an initialization
-- into the @init@ block and the recomputation of the slot's
-- expression into the @loop@ block.
--
-- NOTE(review): 'loopExpr' only covers 'Var', 'Const' and the binary
-- 'Add'/'Mult'/'Sub' cases -- expressions containing 'Div' or any
-- unary function ('Abs', 'Signum', 'Sin', 'Cos'), all of which
-- 'lowerExpr' can produce, crash with a pattern-match failure here.
-- Also note the integer 'L.Add'/'L.Mul'/'L.Sub' instructions applied
-- to double operands; LLVM's floating-point forms are
-- @fadd@/@fmul@/@fsub@ -- confirm intent.
genInstructions :: Seq Assignment -> Gen ()
genInstructions as = do
  forM_ as $ \(Assignment to init e) -> do
    -- Initial value defaults to 0 when none is given.  The add of
    -- 0 and the constant looks like a stand-in for a plain "move".
    case init of
      Just c -> initInst $ initName to L.:= L.Add False False (L.ConstantOperand (Constant.Float (Float.Double 0))) (L.ConstantOperand (Constant.Float (Float.Double c))) []
      Nothing -> initInst $ initName to L.:= L.Add False False (L.ConstantOperand (Constant.Float (Float.Double 0))) (L.ConstantOperand (Constant.Float (Float.Double 0))) []
    lastName <- loopExpr e
    loopInst $ loopName to L.:= L.Add False False (L.ConstantOperand (Constant.Float (Float.Double 0))) (L.LocalReference Type.double lastName) []
  where
    -- Generate the loop-block computation of one expression and
    -- return the register holding its value.
    loopExpr :: GroundExpr Double -> Gen L.Name
    loopExpr expr = case expr of
      Var n -> do
        -- A variable becomes a phi node: its init-block value on the
        -- first iteration, its loop-block value afterwards.
        let lx = loopName n
            ix = initName n
        loopInst $ lx L.:= L.Phi Type.double [ (L.LocalReference Type.double ix, L.Name "init")
                                             , (L.LocalReference Type.double lx, L.Name "loop")
                                             ] []
        return lx
      Const c -> do
        x <- fresh
        loopInst $ x L.:= L.Add False False (L.ConstantOperand (Constant.Float (Float.Double c))) (L.ConstantOperand (Constant.Float (Float.Double c))) []
        return x
      GFun2 Add e1 e2 -> app2 L.Add e1 e2
      GFun2 Mult e1 e2 -> app2 L.Mul e1 e2
      GFun2 Sub e1 e2 -> app2 L.Sub e1 e2
    -- Emit both operand computations, then combine them with the
    -- given two-operand instruction constructor.
    app2 :: Op2 -> GroundExpr Double -> GroundExpr Double -> Gen L.Name
    app2 f e1 e2 = do
      x1 <- loopExpr e1
      x2 <- loopExpr e2
      x3 <- fresh
      loopInst $ x3 L.:= f False False (L.LocalReference Type.double x1) (L.LocalReference Type.double x2) []
      return x3
-- | Shape shared by llvm-general's two-operand arithmetic
-- constructors ('L.Add', 'L.Mul', 'L.Sub'); the two 'Bool's are
-- presumably the nsw/nuw wrap flags -- confirm against the
-- llvm-general version in use.
type Op2 = Bool -> Bool -> L.Operand -> L.Operand -> L.InstructionMetadata -> Instruction
-- | Register name used in the @init@ block for a state-slot path,
-- e.g. @i12@ for the path [One, Two].
initName :: Seq Two -> L.Name
initName ids = L.Name ('i' : concatMap show (toList ids))
-- | Register name used in the @loop@ block for a state-slot path,
-- e.g. @l12@ for the path [One, Two].
loopName :: Seq Two -> L.Name
loopName ids = L.Name ('l' : concatMap show (toList ids))
-- | Accumulator for code generation: the instruction sequences of
-- the two basic blocks plus a counter for fresh unnamed registers.
data CodeGenState = CodeGenState
  { initBlock :: Seq (Named Instruction)  -- ^ instructions of the @init@ block
  , loopBlock :: Seq (Named Instruction)  -- ^ instructions of the @loop@ block
  , nextVariable :: Word                  -- ^ next fresh register number (see 'fresh')
  }
-- | Code-generation state with no instructions and counter zero.
empty :: CodeGenState
empty = CodeGenState { initBlock = S.empty, loopBlock = S.empty, nextVariable = 0 }
-- | The code-generation monad: state over 'CodeGenState'.
type Gen = State CodeGenState

-- | Append an instruction to the @init@ block.
initInst :: Named Instruction -> Gen ()
initInst instr = modify $ \s -> s { initBlock = initBlock s |> instr }

-- | Append an instruction to the @loop@ block.
loopInst :: Named Instruction -> Gen ()
loopInst instr = modify $ \s -> s { loopBlock = loopBlock s |> instr }
-- | Allocate the next unnamed LLVM register.
fresh :: Gen L.Name
fresh = do
  n <- gets nextVariable
  modify $ \s -> s { nextVariable = n + 1 }
  return (L.UnName n)
-- | Flatten an initial-state description and an optimized expression
-- into one 'Assignment' per leaf state slot, each addressed by its
-- path of 'Two's.
assignments :: SimpleExpr e -> Expr e -> Seq Assignment
assignments prod0 = go S.empty prod0 . Full.optimizeExpr
  where
    -- Walk both trees in lockstep, extending the path at each
    -- injection pair.
    go :: Seq Two -> SimpleExpr e -> Expr e -> Seq Assignment
    go to prod expr = case (prod,expr) of
      (Simple.Inj p1 p2,Full.Inj e1 e2) ->
        go (to |> One) p1 e1 >< go (to |> Two) p2 e2
      (Simple.Const c,e1) -> ass to (Just c) e1
      (Simple.Unit,e1) -> ass to Nothing e1
      -- The state and expression trees are assumed to have the same
      -- shape; any mismatch is a programming error upstream.
      _ -> error "cannot happen"
    -- Singleton sequence via Seq's 'return'; fails hard if the
    -- expression still contains non-ground constructs.
    ass to p expr = case lowerExpr expr of
      Just e -> return $ Assignment to p e
      Nothing -> error "Expr is not ground."
-- Shorthands for (curried) applications of named functions, both on
-- the full expression type ('Fun1'/'Fun2') and on 'GroundExpr'
-- ('GFun1'/'GFun2').
pattern Fun1 f e1 = Full.App (Full.Fun f) e1
pattern GFun1 f e1 = App (Fun f) e1
pattern Fun2 f e1 e2 = Full.App (Full.App (Full.Fun f) e1) e2
pattern GFun2 f e1 e2 = App (App (Fun f) e1) e2
-- | Translate a full expression into a 'GroundExpr' over 'Double',
-- turning projection chains into variable paths.  Returns 'Nothing'
-- for anything that is not ground (unapplied functions, injections,
-- unrecognized applications).
--
-- NOTE(review): this accepts 'Div' and the unary functions, but
-- 'genInstructions' cannot yet emit code for them -- see the note
-- there.
lowerExpr :: Expr a -> Maybe (GroundExpr Double)
lowerExpr = go S.empty
  where
    -- @addr@ accumulates the projections seen so far, innermost
    -- projection first.
    go :: Seq Two -> Expr a -> Maybe (GroundExpr Double)
    go addr expr = case expr of
      Full.Proj1 e1 -> go (One <| addr) e1
      Full.Proj2 e1 -> go (Two <| addr) e1
      Full.Var -> Just $ Var addr
      Full.Const d -> Just $ Const d
      Fun2 Add e1 e2 -> GFun2 Add <$> go addr e1 <*> go addr e2
      Fun2 Mult e1 e2 -> GFun2 Mult <$> go addr e1 <*> go addr e2
      Fun2 Sub e1 e2 -> GFun2 Sub <$> go addr e1 <*> go addr e2
      Fun2 Div e1 e2 -> GFun2 Div <$> go addr e1 <*> go addr e2
      Fun1 Abs e1 -> GFun1 Abs <$> go addr e1
      Fun1 Signum e1 -> GFun1 Signum <$> go addr e1
      Fun1 Sin e1 -> GFun1 Sin <$> go addr e1
      Fun1 Cos e1 -> GFun1 Cos <$> go addr e1
      Full.Fun _ -> Nothing
      Full.Inj _ _ -> Nothing
      Full.App _ _ -> Nothing
| svenkeidel/hsynth | src/Language/CodeGen/LLVM.hs | bsd-3-clause | 7,098 | 0 | 22 | 1,770 | 2,768 | 1,391 | 1,377 | 163 | 15 |
{-# OPTIONS_GHC -w #-}
module Parser where
import AST
import Lexer
-- parser produced by Happy Version 1.18.10
-- Machine-generated by Happy from this parser's grammar file; do not
-- edit by hand -- regenerate from the .y source instead.  One
-- HappyAbsSynN constructor per distinct semantic-value type of the
-- grammar's nonterminals, plus wrappers for terminals and the error
-- token.
data HappyAbsSyn t4 t5 t6 t7 t8 t9 t10 t11 t12 t13 t14 t15 t16 t17 t18
  = HappyTerminal (Token)
  | HappyErrorToken Int
  | HappyAbsSyn4 t4
  | HappyAbsSyn5 t5
  | HappyAbsSyn6 t6
  | HappyAbsSyn7 t7
  | HappyAbsSyn8 t8
  | HappyAbsSyn9 t9
  | HappyAbsSyn10 t10
  | HappyAbsSyn11 t11
  | HappyAbsSyn12 t12
  | HappyAbsSyn13 t13
  | HappyAbsSyn14 t14
  | HappyAbsSyn15 t15
  | HappyAbsSyn16 t16
  | HappyAbsSyn17 t17
  | HappyAbsSyn18 t18
action_0 (19) = happyShift action_3
action_0 (4) = happyGoto action_4
action_0 (5) = happyGoto action_2
action_0 _ = happyFail
action_1 (19) = happyShift action_3
action_1 (5) = happyGoto action_2
action_1 _ = happyFail
action_2 (19) = happyShift action_9
action_2 (6) = happyGoto action_8
action_2 _ = happyReduce_4
action_3 (22) = happyShift action_6
action_3 (59) = happyShift action_7
action_3 (18) = happyGoto action_5
action_3 _ = happyFail
action_4 (61) = happyAccept
action_4 _ = happyFail
action_5 (57) = happyShift action_11
action_5 _ = happyFail
action_6 _ = happyReduce_55
action_7 _ = happyReduce_54
action_8 _ = happyReduce_1
action_9 (22) = happyShift action_6
action_9 (59) = happyShift action_7
action_9 (18) = happyGoto action_10
action_9 _ = happyFail
action_10 (57) = happyShift action_13
action_10 _ = happyFail
action_11 (20) = happyShift action_12
action_11 _ = happyFail
action_12 (21) = happyShift action_15
action_12 _ = happyFail
action_13 (7) = happyGoto action_14
action_13 _ = happyReduce_5
action_14 (20) = happyShift action_20
action_14 (22) = happyShift action_6
action_14 (33) = happyShift action_21
action_14 (34) = happyShift action_22
action_14 (59) = happyShift action_7
action_14 (8) = happyGoto action_17
action_14 (11) = happyGoto action_18
action_14 (18) = happyGoto action_19
action_14 _ = happyReduce_8
action_15 (31) = happyShift action_16
action_15 _ = happyFail
action_16 (22) = happyShift action_27
action_16 _ = happyFail
action_17 (58) = happyShift action_26
action_17 _ = happyFail
action_18 (22) = happyShift action_6
action_18 (59) = happyShift action_7
action_18 (18) = happyGoto action_25
action_18 _ = happyFail
action_19 _ = happyReduce_16
action_20 (22) = happyShift action_6
action_20 (33) = happyShift action_21
action_20 (34) = happyShift action_22
action_20 (59) = happyShift action_7
action_20 (11) = happyGoto action_24
action_20 (18) = happyGoto action_19
action_20 _ = happyFail
action_21 (55) = happyShift action_23
action_21 _ = happyReduce_15
action_22 _ = happyReduce_14
action_23 (56) = happyShift action_32
action_23 _ = happyFail
action_24 (22) = happyShift action_6
action_24 (59) = happyShift action_7
action_24 (18) = happyGoto action_31
action_24 _ = happyFail
action_25 (52) = happyShift action_30
action_25 _ = happyFail
action_26 (19) = happyShift action_9
action_26 (6) = happyGoto action_29
action_26 _ = happyReduce_4
action_27 (53) = happyShift action_28
action_27 _ = happyFail
action_28 (32) = happyShift action_34
action_28 _ = happyFail
action_29 _ = happyReduce_3
action_30 _ = happyReduce_6
action_31 (53) = happyShift action_33
action_31 _ = happyFail
action_32 _ = happyReduce_13
action_33 (22) = happyShift action_6
action_33 (33) = happyShift action_21
action_33 (34) = happyShift action_22
action_33 (59) = happyShift action_7
action_33 (9) = happyGoto action_36
action_33 (11) = happyGoto action_37
action_33 (18) = happyGoto action_19
action_33 _ = happyReduce_10
action_34 (55) = happyShift action_35
action_34 _ = happyFail
action_35 (56) = happyShift action_40
action_35 _ = happyFail
action_36 (54) = happyShift action_39
action_36 _ = happyFail
action_37 (22) = happyShift action_6
action_37 (59) = happyShift action_7
action_37 (18) = happyGoto action_38
action_37 _ = happyFail
action_38 (50) = happyShift action_44
action_38 (10) = happyGoto action_43
action_38 _ = happyReduce_12
action_39 (57) = happyShift action_42
action_39 _ = happyFail
action_40 (22) = happyShift action_6
action_40 (59) = happyShift action_7
action_40 (18) = happyGoto action_41
action_40 _ = happyFail
action_41 (54) = happyShift action_47
action_41 _ = happyFail
action_42 (7) = happyGoto action_46
action_42 _ = happyReduce_5
action_43 _ = happyReduce_9
action_44 (22) = happyShift action_6
action_44 (33) = happyShift action_21
action_44 (34) = happyShift action_22
action_44 (59) = happyShift action_7
action_44 (11) = happyGoto action_45
action_44 (18) = happyGoto action_19
action_44 _ = happyFail
action_45 (22) = happyShift action_6
action_45 (59) = happyShift action_7
action_45 (18) = happyGoto action_56
action_45 _ = happyFail
action_46 (22) = happyShift action_6
action_46 (25) = happyShift action_52
action_46 (27) = happyShift action_53
action_46 (28) = happyShift action_54
action_46 (33) = happyShift action_21
action_46 (34) = happyShift action_22
action_46 (57) = happyShift action_55
action_46 (59) = happyShift action_7
action_46 (11) = happyGoto action_18
action_46 (12) = happyGoto action_49
action_46 (13) = happyGoto action_50
action_46 (18) = happyGoto action_51
action_46 _ = happyReduce_25
action_47 (57) = happyShift action_48
action_47 _ = happyFail
action_48 (7) = happyGoto action_67
action_48 _ = happyReduce_5
action_49 (22) = happyShift action_6
action_49 (25) = happyShift action_52
action_49 (27) = happyShift action_53
action_49 (28) = happyShift action_54
action_49 (57) = happyShift action_55
action_49 (59) = happyShift action_7
action_49 (12) = happyGoto action_49
action_49 (13) = happyGoto action_66
action_49 (18) = happyGoto action_59
action_49 _ = happyReduce_25
action_50 (24) = happyShift action_65
action_50 _ = happyFail
action_51 (49) = happyShift action_63
action_51 (55) = happyShift action_64
action_51 _ = happyReduce_16
action_52 (53) = happyShift action_62
action_52 _ = happyFail
action_53 (53) = happyShift action_61
action_53 _ = happyFail
action_54 (53) = happyShift action_60
action_54 _ = happyFail
action_55 (22) = happyShift action_6
action_55 (25) = happyShift action_52
action_55 (27) = happyShift action_53
action_55 (28) = happyShift action_54
action_55 (57) = happyShift action_55
action_55 (59) = happyShift action_7
action_55 (12) = happyGoto action_49
action_55 (13) = happyGoto action_58
action_55 (18) = happyGoto action_59
action_55 _ = happyReduce_25
action_56 (50) = happyShift action_44
action_56 (10) = happyGoto action_57
action_56 _ = happyReduce_12
action_57 _ = happyReduce_11
action_58 (58) = happyShift action_84
action_58 _ = happyFail
action_59 (49) = happyShift action_63
action_59 (55) = happyShift action_64
action_59 _ = happyFail
action_60 (22) = happyShift action_6
action_60 (23) = happyShift action_72
action_60 (30) = happyShift action_73
action_60 (35) = happyShift action_74
action_60 (36) = happyShift action_75
action_60 (37) = happyShift action_76
action_60 (53) = happyShift action_77
action_60 (59) = happyShift action_7
action_60 (60) = happyShift action_78
action_60 (14) = happyGoto action_83
action_60 (15) = happyGoto action_70
action_60 (18) = happyGoto action_71
action_60 _ = happyFail
action_61 (22) = happyShift action_6
action_61 (23) = happyShift action_72
action_61 (30) = happyShift action_73
action_61 (35) = happyShift action_74
action_61 (36) = happyShift action_75
action_61 (37) = happyShift action_76
action_61 (53) = happyShift action_77
action_61 (59) = happyShift action_7
action_61 (60) = happyShift action_78
action_61 (14) = happyGoto action_82
action_61 (15) = happyGoto action_70
action_61 (18) = happyGoto action_71
action_61 _ = happyFail
action_62 (22) = happyShift action_6
action_62 (23) = happyShift action_72
action_62 (30) = happyShift action_73
action_62 (35) = happyShift action_74
action_62 (36) = happyShift action_75
action_62 (37) = happyShift action_76
action_62 (53) = happyShift action_77
action_62 (59) = happyShift action_7
action_62 (60) = happyShift action_78
action_62 (14) = happyGoto action_81
action_62 (15) = happyGoto action_70
action_62 (18) = happyGoto action_71
action_62 _ = happyFail
action_63 (22) = happyShift action_6
action_63 (23) = happyShift action_72
action_63 (30) = happyShift action_73
action_63 (35) = happyShift action_74
action_63 (36) = happyShift action_75
action_63 (37) = happyShift action_76
action_63 (53) = happyShift action_77
action_63 (59) = happyShift action_7
action_63 (60) = happyShift action_78
action_63 (14) = happyGoto action_80
action_63 (15) = happyGoto action_70
action_63 (18) = happyGoto action_71
action_63 _ = happyFail
action_64 (22) = happyShift action_6
action_64 (23) = happyShift action_72
action_64 (30) = happyShift action_73
action_64 (35) = happyShift action_74
action_64 (36) = happyShift action_75
action_64 (37) = happyShift action_76
action_64 (53) = happyShift action_77
action_64 (59) = happyShift action_7
action_64 (60) = happyShift action_78
action_64 (14) = happyGoto action_79
action_64 (15) = happyGoto action_70
action_64 (18) = happyGoto action_71
action_64 _ = happyFail
action_65 (22) = happyShift action_6
action_65 (23) = happyShift action_72
action_65 (30) = happyShift action_73
action_65 (35) = happyShift action_74
action_65 (36) = happyShift action_75
action_65 (37) = happyShift action_76
action_65 (53) = happyShift action_77
action_65 (59) = happyShift action_7
action_65 (60) = happyShift action_78
action_65 (14) = happyGoto action_69
action_65 (15) = happyGoto action_70
action_65 (18) = happyGoto action_71
action_65 _ = happyFail
action_66 _ = happyReduce_24
action_67 (22) = happyShift action_6
action_67 (25) = happyShift action_52
action_67 (27) = happyShift action_53
action_67 (28) = happyShift action_54
action_67 (33) = happyShift action_21
action_67 (34) = happyShift action_22
action_67 (57) = happyShift action_55
action_67 (59) = happyShift action_7
action_67 (11) = happyGoto action_18
action_67 (12) = happyGoto action_49
action_67 (13) = happyGoto action_68
action_67 (18) = happyGoto action_51
action_67 _ = happyReduce_25
action_68 (58) = happyShift action_108
action_68 _ = happyFail
action_69 (38) = happyShift action_85
action_69 (39) = happyShift action_86
action_69 (40) = happyShift action_87
action_69 (41) = happyShift action_88
action_69 (42) = happyShift action_89
action_69 (43) = happyShift action_90
action_69 (44) = happyShift action_91
action_69 (45) = happyShift action_92
action_69 (46) = happyShift action_93
action_69 (47) = happyShift action_94
action_69 (48) = happyShift action_95
action_69 (52) = happyShift action_107
action_69 _ = happyFail
action_70 (51) = happyShift action_105
action_70 (55) = happyShift action_106
action_70 _ = happyReduce_41
action_71 _ = happyReduce_43
action_72 (22) = happyShift action_6
action_72 (33) = happyShift action_104
action_72 (59) = happyShift action_7
action_72 (18) = happyGoto action_103
action_72 _ = happyFail
action_73 _ = happyReduce_44
action_74 _ = happyReduce_38
action_75 _ = happyReduce_39
action_76 (22) = happyShift action_6
action_76 (23) = happyShift action_72
action_76 (30) = happyShift action_73
action_76 (35) = happyShift action_74
action_76 (36) = happyShift action_75
action_76 (37) = happyShift action_76
action_76 (53) = happyShift action_77
action_76 (59) = happyShift action_7
action_76 (60) = happyShift action_78
action_76 (14) = happyGoto action_102
action_76 (15) = happyGoto action_70
action_76 (18) = happyGoto action_71
action_76 _ = happyFail
action_77 (22) = happyShift action_6
action_77 (23) = happyShift action_72
action_77 (30) = happyShift action_73
action_77 (35) = happyShift action_74
action_77 (36) = happyShift action_75
action_77 (37) = happyShift action_76
action_77 (53) = happyShift action_77
action_77 (59) = happyShift action_7
action_77 (60) = happyShift action_78
action_77 (14) = happyGoto action_101
action_77 (15) = happyGoto action_70
action_77 (18) = happyGoto action_71
action_77 _ = happyFail
action_78 _ = happyReduce_37
action_79 (38) = happyShift action_85
action_79 (39) = happyShift action_86
action_79 (40) = happyShift action_87
action_79 (41) = happyShift action_88
action_79 (42) = happyShift action_89
action_79 (43) = happyShift action_90
action_79 (44) = happyShift action_91
action_79 (45) = happyShift action_92
action_79 (46) = happyShift action_93
action_79 (47) = happyShift action_94
action_79 (48) = happyShift action_95
action_79 (56) = happyShift action_100
action_79 _ = happyFail
action_80 (38) = happyShift action_85
action_80 (39) = happyShift action_86
action_80 (40) = happyShift action_87
action_80 (41) = happyShift action_88
action_80 (42) = happyShift action_89
action_80 (43) = happyShift action_90
action_80 (44) = happyShift action_91
action_80 (45) = happyShift action_92
action_80 (46) = happyShift action_93
action_80 (47) = happyShift action_94
action_80 (48) = happyShift action_95
action_80 (52) = happyShift action_99
action_80 _ = happyFail
action_81 (38) = happyShift action_85
action_81 (39) = happyShift action_86
action_81 (40) = happyShift action_87
action_81 (41) = happyShift action_88
action_81 (42) = happyShift action_89
action_81 (43) = happyShift action_90
action_81 (44) = happyShift action_91
action_81 (45) = happyShift action_92
action_81 (46) = happyShift action_93
action_81 (47) = happyShift action_94
action_81 (48) = happyShift action_95
action_81 (54) = happyShift action_98
action_81 _ = happyFail
action_82 (38) = happyShift action_85
action_82 (39) = happyShift action_86
action_82 (40) = happyShift action_87
action_82 (41) = happyShift action_88
action_82 (42) = happyShift action_89
action_82 (43) = happyShift action_90
action_82 (44) = happyShift action_91
action_82 (45) = happyShift action_92
action_82 (46) = happyShift action_93
action_82 (47) = happyShift action_94
action_82 (48) = happyShift action_95
action_82 (54) = happyShift action_97
action_82 _ = happyFail
action_83 (38) = happyShift action_85
action_83 (39) = happyShift action_86
action_83 (40) = happyShift action_87
action_83 (41) = happyShift action_88
action_83 (42) = happyShift action_89
action_83 (43) = happyShift action_90
action_83 (44) = happyShift action_91
action_83 (45) = happyShift action_92
action_83 (46) = happyShift action_93
action_83 (47) = happyShift action_94
action_83 (48) = happyShift action_95
action_83 (54) = happyShift action_96
action_83 _ = happyFail
action_84 _ = happyReduce_17
action_85 (22) = happyShift action_6
action_85 (23) = happyShift action_72
action_85 (30) = happyShift action_73
action_85 (35) = happyShift action_74
action_85 (36) = happyShift action_75
action_85 (37) = happyShift action_76
action_85 (53) = happyShift action_77
action_85 (59) = happyShift action_7
action_85 (60) = happyShift action_78
action_85 (14) = happyGoto action_131
action_85 (15) = happyGoto action_70
action_85 (18) = happyGoto action_71
action_85 _ = happyFail
action_86 (22) = happyShift action_6
action_86 (23) = happyShift action_72
action_86 (30) = happyShift action_73
action_86 (35) = happyShift action_74
action_86 (36) = happyShift action_75
action_86 (37) = happyShift action_76
action_86 (53) = happyShift action_77
action_86 (59) = happyShift action_7
action_86 (60) = happyShift action_78
action_86 (14) = happyGoto action_130
action_86 (15) = happyGoto action_70
action_86 (18) = happyGoto action_71
action_86 _ = happyFail
action_87 (22) = happyShift action_6
action_87 (23) = happyShift action_72
action_87 (30) = happyShift action_73
action_87 (35) = happyShift action_74
action_87 (36) = happyShift action_75
action_87 (37) = happyShift action_76
action_87 (53) = happyShift action_77
action_87 (59) = happyShift action_7
action_87 (60) = happyShift action_78
action_87 (14) = happyGoto action_129
action_87 (15) = happyGoto action_70
action_87 (18) = happyGoto action_71
action_87 _ = happyFail
action_88 (22) = happyShift action_6
action_88 (23) = happyShift action_72
action_88 (30) = happyShift action_73
action_88 (35) = happyShift action_74
action_88 (36) = happyShift action_75
action_88 (37) = happyShift action_76
action_88 (53) = happyShift action_77
action_88 (59) = happyShift action_7
action_88 (60) = happyShift action_78
action_88 (14) = happyGoto action_128
action_88 (15) = happyGoto action_70
action_88 (18) = happyGoto action_71
action_88 _ = happyFail
action_89 (22) = happyShift action_6
action_89 (23) = happyShift action_72
action_89 (30) = happyShift action_73
action_89 (35) = happyShift action_74
action_89 (36) = happyShift action_75
action_89 (37) = happyShift action_76
action_89 (53) = happyShift action_77
action_89 (59) = happyShift action_7
action_89 (60) = happyShift action_78
action_89 (14) = happyGoto action_127
action_89 (15) = happyGoto action_70
action_89 (18) = happyGoto action_71
action_89 _ = happyFail
action_90 (22) = happyShift action_6
action_90 (23) = happyShift action_72
action_90 (30) = happyShift action_73
action_90 (35) = happyShift action_74
action_90 (36) = happyShift action_75
action_90 (37) = happyShift action_76
action_90 (53) = happyShift action_77
action_90 (59) = happyShift action_7
action_90 (60) = happyShift action_78
action_90 (14) = happyGoto action_126
action_90 (15) = happyGoto action_70
action_90 (18) = happyGoto action_71
action_90 _ = happyFail
action_91 (22) = happyShift action_6
action_91 (23) = happyShift action_72
action_91 (30) = happyShift action_73
action_91 (35) = happyShift action_74
action_91 (36) = happyShift action_75
action_91 (37) = happyShift action_76
action_91 (53) = happyShift action_77
action_91 (59) = happyShift action_7
action_91 (60) = happyShift action_78
action_91 (14) = happyGoto action_125
action_91 (15) = happyGoto action_70
action_91 (18) = happyGoto action_71
action_91 _ = happyFail
action_92 (22) = happyShift action_6
action_92 (23) = happyShift action_72
action_92 (30) = happyShift action_73
action_92 (35) = happyShift action_74
action_92 (36) = happyShift action_75
action_92 (37) = happyShift action_76
action_92 (53) = happyShift action_77
action_92 (59) = happyShift action_7
action_92 (60) = happyShift action_78
action_92 (14) = happyGoto action_124
action_92 (15) = happyGoto action_70
action_92 (18) = happyGoto action_71
action_92 _ = happyFail
action_93 (22) = happyShift action_6
action_93 (23) = happyShift action_72
action_93 (30) = happyShift action_73
action_93 (35) = happyShift action_74
action_93 (36) = happyShift action_75
action_93 (37) = happyShift action_76
action_93 (53) = happyShift action_77
action_93 (59) = happyShift action_7
action_93 (60) = happyShift action_78
action_93 (14) = happyGoto action_123
action_93 (15) = happyGoto action_70
action_93 (18) = happyGoto action_71
action_93 _ = happyFail
action_94 (22) = happyShift action_6
action_94 (23) = happyShift action_72
action_94 (30) = happyShift action_73
action_94 (35) = happyShift action_74
action_94 (36) = happyShift action_75
action_94 (37) = happyShift action_76
action_94 (53) = happyShift action_77
action_94 (59) = happyShift action_7
action_94 (60) = happyShift action_78
action_94 (14) = happyGoto action_122
action_94 (15) = happyGoto action_70
action_94 (18) = happyGoto action_71
action_94 _ = happyFail
action_95 (22) = happyShift action_6
action_95 (23) = happyShift action_72
action_95 (30) = happyShift action_73
action_95 (35) = happyShift action_74
action_95 (36) = happyShift action_75
action_95 (37) = happyShift action_76
action_95 (53) = happyShift action_77
action_95 (59) = happyShift action_7
action_95 (60) = happyShift action_78
action_95 (14) = happyGoto action_121
action_95 (15) = happyGoto action_70
action_95 (18) = happyGoto action_71
action_95 _ = happyFail
action_96 (52) = happyShift action_120
action_96 _ = happyFail
action_97 (22) = happyShift action_6
action_97 (25) = happyShift action_52
action_97 (27) = happyShift action_53
action_97 (28) = happyShift action_54
action_97 (57) = happyShift action_55
action_97 (59) = happyShift action_7
action_97 (12) = happyGoto action_119
action_97 (18) = happyGoto action_59
action_97 _ = happyFail
action_98 (22) = happyShift action_6
action_98 (25) = happyShift action_52
action_98 (27) = happyShift action_53
action_98 (28) = happyShift action_54
action_98 (57) = happyShift action_55
action_98 (59) = happyShift action_7
action_98 (12) = happyGoto action_118
action_98 (18) = happyGoto action_59
action_98 _ = happyFail
action_99 _ = happyReduce_22
action_100 (49) = happyShift action_117
action_100 _ = happyFail
action_101 (38) = happyShift action_85
action_101 (39) = happyShift action_86
action_101 (40) = happyShift action_87
action_101 (41) = happyShift action_88
action_101 (42) = happyShift action_89
action_101 (43) = happyShift action_90
action_101 (44) = happyShift action_91
action_101 (45) = happyShift action_92
action_101 (46) = happyShift action_93
action_101 (47) = happyShift action_94
action_101 (48) = happyShift action_95
action_101 (54) = happyShift action_116
action_101 _ = happyFail
action_102 _ = happyReduce_40
action_103 (53) = happyShift action_115
action_103 _ = happyFail
action_104 (55) = happyShift action_114
action_104 _ = happyFail
action_105 (22) = happyShift action_6
action_105 (29) = happyShift action_113
action_105 (59) = happyShift action_7
action_105 (18) = happyGoto action_112
action_105 _ = happyFail
action_106 (22) = happyShift action_6
action_106 (23) = happyShift action_72
action_106 (30) = happyShift action_73
action_106 (35) = happyShift action_74
action_106 (36) = happyShift action_75
action_106 (37) = happyShift action_76
action_106 (53) = happyShift action_77
action_106 (59) = happyShift action_7
action_106 (60) = happyShift action_78
action_106 (14) = happyGoto action_111
action_106 (15) = happyGoto action_70
action_106 (18) = happyGoto action_71
action_106 _ = happyFail
action_107 (58) = happyShift action_110
action_107 _ = happyFail
action_108 (58) = happyShift action_109
action_108 _ = happyFail
action_109 _ = happyReduce_2
action_110 (20) = happyShift action_20
action_110 (8) = happyGoto action_138
action_110 _ = happyReduce_8
action_111 (38) = happyShift action_85
action_111 (39) = happyShift action_86
action_111 (40) = happyShift action_87
action_111 (41) = happyShift action_88
action_111 (42) = happyShift action_89
action_111 (43) = happyShift action_90
action_111 (44) = happyShift action_91
action_111 (45) = happyShift action_92
action_111 (46) = happyShift action_93
action_111 (47) = happyShift action_94
action_111 (48) = happyShift action_95
action_111 (56) = happyShift action_137
action_111 _ = happyFail
action_112 (53) = happyShift action_136
action_112 _ = happyFail
action_113 _ = happyReduce_48
action_114 (22) = happyShift action_6
action_114 (23) = happyShift action_72
action_114 (30) = happyShift action_73
action_114 (35) = happyShift action_74
action_114 (36) = happyShift action_75
action_114 (37) = happyShift action_76
action_114 (53) = happyShift action_77
action_114 (59) = happyShift action_7
action_114 (60) = happyShift action_78
action_114 (14) = happyGoto action_135
action_114 (15) = happyGoto action_70
action_114 (18) = happyGoto action_71
action_114 _ = happyFail
action_115 (54) = happyShift action_134
action_115 _ = happyFail
action_116 _ = happyReduce_42
action_117 (22) = happyShift action_6
action_117 (23) = happyShift action_72
action_117 (30) = happyShift action_73
action_117 (35) = happyShift action_74
action_117 (36) = happyShift action_75
action_117 (37) = happyShift action_76
action_117 (53) = happyShift action_77
action_117 (59) = happyShift action_7
action_117 (60) = happyShift action_78
action_117 (14) = happyGoto action_133
action_117 (15) = happyGoto action_70
action_117 (18) = happyGoto action_71
action_117 _ = happyFail
action_118 (26) = happyShift action_132
action_118 _ = happyReduce_19
action_119 _ = happyReduce_20
action_120 _ = happyReduce_21
action_121 _ = happyReduce_30
action_122 (48) = happyShift action_95
action_122 _ = happyReduce_29
action_123 (48) = happyShift action_95
action_123 _ = happyReduce_28
action_124 (46) = happyShift action_93
action_124 (47) = happyShift action_94
action_124 (48) = happyShift action_95
action_124 _ = happyReduce_34
action_125 (45) = happyShift action_92
action_125 (46) = happyShift action_93
action_125 (47) = happyShift action_94
action_125 (48) = happyShift action_95
action_125 _ = happyReduce_33
action_126 (43) = happyFail
action_126 (44) = happyShift action_91
action_126 (45) = happyShift action_92
action_126 (46) = happyShift action_93
action_126 (47) = happyShift action_94
action_126 (48) = happyShift action_95
action_126 _ = happyReduce_36
action_127 (42) = happyFail
action_127 (43) = happyShift action_90
action_127 (44) = happyShift action_91
action_127 (45) = happyShift action_92
action_127 (46) = happyShift action_93
action_127 (47) = happyShift action_94
action_127 (48) = happyShift action_95
action_127 _ = happyReduce_35
action_128 (41) = happyFail
action_128 (42) = happyShift action_89
action_128 (43) = happyShift action_90
action_128 (44) = happyShift action_91
action_128 (45) = happyShift action_92
action_128 (46) = happyShift action_93
action_128 (47) = happyShift action_94
action_128 (48) = happyShift action_95
action_128 _ = happyReduce_32
action_129 (40) = happyFail
action_129 (41) = happyShift action_88
action_129 (42) = happyShift action_89
action_129 (43) = happyShift action_90
action_129 (44) = happyShift action_91
action_129 (45) = happyShift action_92
action_129 (46) = happyShift action_93
action_129 (47) = happyShift action_94
action_129 (48) = happyShift action_95
action_129 _ = happyReduce_31
action_130 (38) = happyShift action_85
action_130 (40) = happyShift action_87
action_130 (41) = happyShift action_88
action_130 (42) = happyShift action_89
action_130 (43) = happyShift action_90
action_130 (44) = happyShift action_91
action_130 (45) = happyShift action_92
action_130 (46) = happyShift action_93
action_130 (47) = happyShift action_94
action_130 (48) = happyShift action_95
action_130 _ = happyReduce_27
action_131 (40) = happyShift action_87
action_131 (41) = happyShift action_88
action_131 (42) = happyShift action_89
action_131 (43) = happyShift action_90
action_131 (44) = happyShift action_91
action_131 (45) = happyShift action_92
action_131 (46) = happyShift action_93
action_131 (47) = happyShift action_94
action_131 (48) = happyShift action_95
action_131 _ = happyReduce_26
action_132 (22) = happyShift action_6
action_132 (25) = happyShift action_52
action_132 (27) = happyShift action_53
action_132 (28) = happyShift action_54
action_132 (57) = happyShift action_55
action_132 (59) = happyShift action_7
action_132 (12) = happyGoto action_143
action_132 (18) = happyGoto action_59
action_132 _ = happyFail
action_133 (38) = happyShift action_85
action_133 (39) = happyShift action_86
action_133 (40) = happyShift action_87
action_133 (41) = happyShift action_88
action_133 (42) = happyShift action_89
action_133 (43) = happyShift action_90
action_133 (44) = happyShift action_91
action_133 (45) = happyShift action_92
action_133 (46) = happyShift action_93
action_133 (47) = happyShift action_94
action_133 (48) = happyShift action_95
action_133 (52) = happyShift action_142
action_133 _ = happyFail
action_134 _ = happyReduce_46
action_135 (38) = happyShift action_85
action_135 (39) = happyShift action_86
action_135 (40) = happyShift action_87
action_135 (41) = happyShift action_88
action_135 (42) = happyShift action_89
action_135 (43) = happyShift action_90
action_135 (44) = happyShift action_91
action_135 (45) = happyShift action_92
action_135 (46) = happyShift action_93
action_135 (47) = happyShift action_94
action_135 (48) = happyShift action_95
action_135 (56) = happyShift action_141
action_135 _ = happyFail
action_136 (22) = happyShift action_6
action_136 (23) = happyShift action_72
action_136 (30) = happyShift action_73
action_136 (35) = happyShift action_74
action_136 (36) = happyShift action_75
action_136 (37) = happyShift action_76
action_136 (53) = happyShift action_77
action_136 (59) = happyShift action_7
action_136 (60) = happyShift action_78
action_136 (14) = happyGoto action_139
action_136 (15) = happyGoto action_70
action_136 (16) = happyGoto action_140
action_136 (18) = happyGoto action_71
action_136 _ = happyReduce_51
action_137 _ = happyReduce_47
action_138 _ = happyReduce_7
action_139 (38) = happyShift action_85
action_139 (39) = happyShift action_86
action_139 (40) = happyShift action_87
action_139 (41) = happyShift action_88
action_139 (42) = happyShift action_89
action_139 (43) = happyShift action_90
action_139 (44) = happyShift action_91
action_139 (45) = happyShift action_92
action_139 (46) = happyShift action_93
action_139 (47) = happyShift action_94
action_139 (48) = happyShift action_95
action_139 (50) = happyShift action_146
action_139 (17) = happyGoto action_145
action_139 _ = happyReduce_53
action_140 (54) = happyShift action_144
action_140 _ = happyFail
action_141 _ = happyReduce_45
action_142 _ = happyReduce_23
action_143 _ = happyReduce_18
action_144 _ = happyReduce_49
action_145 _ = happyReduce_50
action_146 (22) = happyShift action_6
action_146 (23) = happyShift action_72
action_146 (30) = happyShift action_73
action_146 (35) = happyShift action_74
action_146 (36) = happyShift action_75
action_146 (37) = happyShift action_76
action_146 (53) = happyShift action_77
action_146 (59) = happyShift action_7
action_146 (60) = happyShift action_78
action_146 (14) = happyGoto action_147
action_146 (15) = happyGoto action_70
action_146 (18) = happyGoto action_71
action_146 _ = happyFail
action_147 (38) = happyShift action_85
action_147 (39) = happyShift action_86
action_147 (40) = happyShift action_87
action_147 (41) = happyShift action_88
action_147 (42) = happyShift action_89
action_147 (43) = happyShift action_90
action_147 (44) = happyShift action_91
action_147 (45) = happyShift action_92
action_147 (46) = happyShift action_93
action_147 (47) = happyShift action_94
action_147 (48) = happyShift action_95
action_147 (50) = happyShift action_146
action_147 (17) = happyGoto action_148
action_147 _ = happyReduce_53
action_148 _ = happyReduce_52
happyReduce_1 = happySpecReduce_2 4 happyReduction_1
happyReduction_1 (HappyAbsSyn6 happy_var_2)
(HappyAbsSyn5 happy_var_1)
= HappyAbsSyn4
(Fix . AProgram $ fixMap (happy_var_1 : happy_var_2)
)
happyReduction_1 _ _ = notHappyAtAll
happyReduce_2 = happyReduce 18 5 happyReduction_2
happyReduction_2 (_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn13 happy_var_16) `HappyStk`
(HappyAbsSyn7 happy_var_15) `HappyStk`
_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn18 happy_var_12) `HappyStk`
_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn18 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn5
(AClass happy_var_2 [] [Fix $ mainMethod happy_var_12 happy_var_15 happy_var_16]
) `HappyStk` happyRest
happyReduce_3 = happyReduce 7 6 happyReduction_3
happyReduction_3 ((HappyAbsSyn6 happy_var_7) `HappyStk`
_ `HappyStk`
(HappyAbsSyn8 happy_var_5) `HappyStk`
(HappyAbsSyn7 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn18 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn6
(AClass happy_var_2 (fixMap happy_var_4) (fixMap happy_var_5) : happy_var_7
) `HappyStk` happyRest
happyReduce_4 = happySpecReduce_0 6 happyReduction_4
happyReduction_4 = HappyAbsSyn6
([]
)
happyReduce_5 = happySpecReduce_0 7 happyReduction_5
happyReduction_5 = HappyAbsSyn7
([]
)
happyReduce_6 = happyReduce 4 7 happyReduction_6
happyReduction_6 (_ `HappyStk`
(HappyAbsSyn18 happy_var_3) `HappyStk`
(HappyAbsSyn11 happy_var_2) `HappyStk`
(HappyAbsSyn7 happy_var_1) `HappyStk`
happyRest)
= HappyAbsSyn7
(happy_var_1 ++ [AVar happy_var_2 happy_var_3]
) `HappyStk` happyRest
happyReduce_7 = happyReduce 14 8 happyReduction_7
happyReduction_7 ((HappyAbsSyn8 happy_var_14) `HappyStk`
_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn14 happy_var_11) `HappyStk`
_ `HappyStk`
(HappyAbsSyn13 happy_var_9) `HappyStk`
(HappyAbsSyn7 happy_var_8) `HappyStk`
_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn9 happy_var_5) `HappyStk`
_ `HappyStk`
(HappyAbsSyn18 happy_var_3) `HappyStk`
(HappyAbsSyn11 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn8
(AMethod happy_var_2 happy_var_3 (fixMap happy_var_5) (fixMap happy_var_8) (fixMap happy_var_9) (Fix happy_var_11) : happy_var_14
) `HappyStk` happyRest
happyReduce_8 = happySpecReduce_0 8 happyReduction_8
happyReduction_8 = HappyAbsSyn8
([]
)
happyReduce_9 = happySpecReduce_3 9 happyReduction_9
happyReduction_9 (HappyAbsSyn10 happy_var_3)
(HappyAbsSyn18 happy_var_2)
(HappyAbsSyn11 happy_var_1)
= HappyAbsSyn9
(AVar happy_var_1 happy_var_2 : happy_var_3
)
happyReduction_9 _ _ _ = notHappyAtAll
happyReduce_10 = happySpecReduce_0 9 happyReduction_10
happyReduction_10 = HappyAbsSyn9
([]
)
happyReduce_11 = happyReduce 4 10 happyReduction_11
happyReduction_11 ((HappyAbsSyn10 happy_var_4) `HappyStk`
(HappyAbsSyn18 happy_var_3) `HappyStk`
(HappyAbsSyn11 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn10
(AVar happy_var_2 happy_var_3 : happy_var_4
) `HappyStk` happyRest
happyReduce_12 = happySpecReduce_0 10 happyReduction_12
happyReduction_12 = HappyAbsSyn10
([]
)
happyReduce_13 = happySpecReduce_3 11 happyReduction_13
happyReduction_13 _
_
_
= HappyAbsSyn11
(TypeIntegerArray
)
happyReduce_14 = happySpecReduce_1 11 happyReduction_14
happyReduction_14 _
= HappyAbsSyn11
(TypeBoolean
)
happyReduce_15 = happySpecReduce_1 11 happyReduction_15
happyReduction_15 _
= HappyAbsSyn11
(TypeInteger
)
happyReduce_16 = happySpecReduce_1 11 happyReduction_16
happyReduction_16 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn11
(TypeAppDefined happy_var_1
)
happyReduction_16 _ = notHappyAtAll
happyReduce_17 = happySpecReduce_3 12 happyReduction_17
happyReduction_17 _
(HappyAbsSyn13 happy_var_2)
_
= HappyAbsSyn12
(AStatScope (fixMap happy_var_2)
)
happyReduction_17 _ _ _ = notHappyAtAll
happyReduce_18 = happyReduce 7 12 happyReduction_18
happyReduction_18 ((HappyAbsSyn12 happy_var_7) `HappyStk`
_ `HappyStk`
(HappyAbsSyn12 happy_var_5) `HappyStk`
_ `HappyStk`
(HappyAbsSyn14 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn12
(AIf (Fix happy_var_3) (Fix happy_var_5) (Fix happy_var_7)
) `HappyStk` happyRest
happyReduce_19 = happyReduce 5 12 happyReduction_19
happyReduction_19 ((HappyAbsSyn12 happy_var_5) `HappyStk`
_ `HappyStk`
(HappyAbsSyn14 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn12
(AIf (Fix happy_var_3) (Fix happy_var_5) (Fix $ AStatScope [])
) `HappyStk` happyRest
happyReduce_20 = happyReduce 5 12 happyReduction_20
happyReduction_20 ((HappyAbsSyn12 happy_var_5) `HappyStk`
_ `HappyStk`
(HappyAbsSyn14 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn12
(AWhile (Fix happy_var_3) (Fix happy_var_5)
) `HappyStk` happyRest
happyReduce_21 = happyReduce 5 12 happyReduction_21
happyReduction_21 (_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn14 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn12
(APrint (Fix happy_var_3)
) `HappyStk` happyRest
happyReduce_22 = happyReduce 4 12 happyReduction_22
happyReduction_22 (_ `HappyStk`
(HappyAbsSyn14 happy_var_3) `HappyStk`
_ `HappyStk`
(HappyAbsSyn18 happy_var_1) `HappyStk`
happyRest)
= HappyAbsSyn12
(AAssignment (Fix $ AExprIdentifier happy_var_1) (Fix happy_var_3)
) `HappyStk` happyRest
happyReduce_23 = happyReduce 7 12 happyReduction_23
happyReduction_23 (_ `HappyStk`
(HappyAbsSyn14 happy_var_6) `HappyStk`
_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn14 happy_var_3) `HappyStk`
_ `HappyStk`
(HappyAbsSyn18 happy_var_1) `HappyStk`
happyRest)
= HappyAbsSyn12
(AIndexedAssignment (Fix $ AExprIdentifier happy_var_1) (Fix happy_var_3) (Fix happy_var_6)
) `HappyStk` happyRest
happyReduce_24 = happySpecReduce_2 13 happyReduction_24
happyReduction_24 (HappyAbsSyn13 happy_var_2)
(HappyAbsSyn12 happy_var_1)
= HappyAbsSyn13
(happy_var_1 : happy_var_2
)
happyReduction_24 _ _ = notHappyAtAll
happyReduce_25 = happySpecReduce_0 13 happyReduction_25
happyReduction_25 = HappyAbsSyn13
([]
)
happyReduce_26 = happySpecReduce_3 14 happyReduction_26
happyReduction_26 (HappyAbsSyn14 happy_var_3)
_
(HappyAbsSyn14 happy_var_1)
= HappyAbsSyn14
(AExprOp OperandLogicalAnd (Fix happy_var_1) (Fix happy_var_3)
)
happyReduction_26 _ _ _ = notHappyAtAll
happyReduce_27 = happySpecReduce_3 14 happyReduction_27
happyReduction_27 (HappyAbsSyn14 happy_var_3)
_
(HappyAbsSyn14 happy_var_1)
= HappyAbsSyn14
(AExprOp OperandLogicalOr (Fix happy_var_1) (Fix happy_var_3)
)
happyReduction_27 _ _ _ = notHappyAtAll
happyReduce_28 = happySpecReduce_3 14 happyReduction_28
happyReduction_28 (HappyAbsSyn14 happy_var_3)
_
(HappyAbsSyn14 happy_var_1)
= HappyAbsSyn14
(AExprOp OperandPlus (Fix happy_var_1) (Fix happy_var_3)
)
happyReduction_28 _ _ _ = notHappyAtAll
happyReduce_29 = happySpecReduce_3 14 happyReduction_29
happyReduction_29 (HappyAbsSyn14 happy_var_3)
_
(HappyAbsSyn14 happy_var_1)
= HappyAbsSyn14
(AExprOp OperandMinus (Fix happy_var_1) (Fix happy_var_3)
)
happyReduction_29 _ _ _ = notHappyAtAll
happyReduce_30 = happySpecReduce_3 14 happyReduction_30
happyReduction_30 (HappyAbsSyn14 happy_var_3)
_
(HappyAbsSyn14 happy_var_1)
= HappyAbsSyn14
(AExprOp OperandMult (Fix happy_var_1) (Fix happy_var_3)
)
happyReduction_30 _ _ _ = notHappyAtAll
happyReduce_31 = happySpecReduce_3 14 happyReduction_31
happyReduction_31 (HappyAbsSyn14 happy_var_3)
_
(HappyAbsSyn14 happy_var_1)
= HappyAbsSyn14
(AExprOp OperandLess (Fix happy_var_1) (Fix happy_var_3)
)
happyReduction_31 _ _ _ = notHappyAtAll
happyReduce_32 = happySpecReduce_3 14 happyReduction_32
happyReduction_32 (HappyAbsSyn14 happy_var_3)
_
(HappyAbsSyn14 happy_var_1)
= HappyAbsSyn14
(AExprOp OperandLess (Fix happy_var_3) (Fix happy_var_1)
)
happyReduction_32 _ _ _ = notHappyAtAll
happyReduce_33 = happySpecReduce_3 14 happyReduction_33
happyReduction_33 (HappyAbsSyn14 happy_var_3)
_
(HappyAbsSyn14 happy_var_1)
= HappyAbsSyn14
(AExprOp OperandEqual (Fix happy_var_1) (Fix happy_var_3)
)
happyReduction_33 _ _ _ = notHappyAtAll
happyReduce_34 = happySpecReduce_3 14 happyReduction_34
happyReduction_34 (HappyAbsSyn14 happy_var_3)
_
(HappyAbsSyn14 happy_var_1)
= HappyAbsSyn14
(AExprNegation (Fix $ AExprOp OperandEqual (Fix happy_var_1) (Fix happy_var_3))
)
happyReduction_34 _ _ _ = notHappyAtAll
happyReduce_35 = happySpecReduce_3 14 happyReduction_35
happyReduction_35 (HappyAbsSyn14 happy_var_3)
_
(HappyAbsSyn14 happy_var_1)
= HappyAbsSyn14
(AExprOp OperandLessEqual (Fix happy_var_1) (Fix happy_var_3)
)
happyReduction_35 _ _ _ = notHappyAtAll
happyReduce_36 = happySpecReduce_3 14 happyReduction_36
happyReduction_36 (HappyAbsSyn14 happy_var_3)
_
(HappyAbsSyn14 happy_var_1)
= HappyAbsSyn14
(AExprOp OperandLessEqual (Fix happy_var_3) (Fix happy_var_1)
)
happyReduction_36 _ _ _ = notHappyAtAll
happyReduce_37 = happySpecReduce_1 14 happyReduction_37
happyReduction_37 (HappyTerminal (TIntLiteral happy_var_1))
= HappyAbsSyn14
(AExprInt happy_var_1
)
happyReduction_37 _ = notHappyAtAll
happyReduce_38 = happySpecReduce_1 14 happyReduction_38
happyReduction_38 _
= HappyAbsSyn14
(AExprTrue
)
happyReduce_39 = happySpecReduce_1 14 happyReduction_39
happyReduction_39 _
= HappyAbsSyn14
(AExprFalse
)
happyReduce_40 = happySpecReduce_2 14 happyReduction_40
happyReduction_40 (HappyAbsSyn14 happy_var_2)
_
= HappyAbsSyn14
(AExprNegation (Fix happy_var_2)
)
happyReduction_40 _ _ = notHappyAtAll
happyReduce_41 = happySpecReduce_1 14 happyReduction_41
happyReduction_41 (HappyAbsSyn15 happy_var_1)
= HappyAbsSyn14
(happy_var_1
)
happyReduction_41 _ = notHappyAtAll
happyReduce_42 = happySpecReduce_3 15 happyReduction_42
happyReduction_42 _
(HappyAbsSyn14 happy_var_2)
_
= HappyAbsSyn15
(happy_var_2
)
happyReduction_42 _ _ _ = notHappyAtAll
happyReduce_43 = happySpecReduce_1 15 happyReduction_43
happyReduction_43 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn15
(AExprIdentifier happy_var_1
)
happyReduction_43 _ = notHappyAtAll
happyReduce_44 = happySpecReduce_1 15 happyReduction_44
happyReduction_44 _
= HappyAbsSyn15
(AExprThis
)
happyReduce_45 = happyReduce 5 15 happyReduction_45
happyReduction_45 (_ `HappyStk`
(HappyAbsSyn14 happy_var_4) `HappyStk`
_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn15
(AExprIntArray (Fix happy_var_4)
) `HappyStk` happyRest
happyReduce_46 = happyReduce 4 15 happyReduction_46
happyReduction_46 (_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn18 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn15
(AExprNewObject happy_var_2
) `HappyStk` happyRest
happyReduce_47 = happyReduce 4 15 happyReduction_47
happyReduction_47 (_ `HappyStk`
(HappyAbsSyn14 happy_var_3) `HappyStk`
_ `HappyStk`
(HappyAbsSyn15 happy_var_1) `HappyStk`
happyRest)
= HappyAbsSyn15
(AExprList (Fix happy_var_1) (Fix happy_var_3)
) `HappyStk` happyRest
happyReduce_48 = happySpecReduce_3 15 happyReduction_48
happyReduction_48 _
_
(HappyAbsSyn15 happy_var_1)
= HappyAbsSyn15
(AExprLength (Fix happy_var_1)
)
happyReduction_48 _ _ _ = notHappyAtAll
happyReduce_49 = happyReduce 6 15 happyReduction_49
happyReduction_49 (_ `HappyStk`
(HappyAbsSyn16 happy_var_5) `HappyStk`
_ `HappyStk`
(HappyAbsSyn18 happy_var_3) `HappyStk`
_ `HappyStk`
(HappyAbsSyn15 happy_var_1) `HappyStk`
happyRest)
= HappyAbsSyn15
(AExprInvocation (Fix happy_var_1) happy_var_3 (fixMap happy_var_5)
) `HappyStk` happyRest
happyReduce_50 = happySpecReduce_2 16 happyReduction_50
happyReduction_50 (HappyAbsSyn17 happy_var_2)
(HappyAbsSyn14 happy_var_1)
= HappyAbsSyn16
(happy_var_1 : happy_var_2
)
happyReduction_50 _ _ = notHappyAtAll
happyReduce_51 = happySpecReduce_0 16 happyReduction_51
happyReduction_51 = HappyAbsSyn16
([]
)
happyReduce_52 = happySpecReduce_3 17 happyReduction_52
happyReduction_52 (HappyAbsSyn17 happy_var_3)
(HappyAbsSyn14 happy_var_2)
_
= HappyAbsSyn17
(happy_var_2 : happy_var_3
)
happyReduction_52 _ _ _ = notHappyAtAll
happyReduce_53 = happySpecReduce_0 17 happyReduction_53
happyReduction_53 = HappyAbsSyn17
([]
)
happyReduce_54 = happySpecReduce_1 18 happyReduction_54
happyReduction_54 (HappyTerminal (TIdLiteral happy_var_1))
= HappyAbsSyn18
(happy_var_1
)
happyReduction_54 _ = notHappyAtAll
happyReduce_55 = happySpecReduce_1 18 happyReduction_55
happyReduction_55 _
= HappyAbsSyn18
("main"
)
-- | Fetch the next token and dispatch it to @action@ under its terminal
-- number. Terminal numbers 19..60 are assigned by Happy from the grammar's
-- %token declarations; 61 is the end-of-input pseudo-token. Literal tokens
-- ('TIdLiteral', 'TIntLiteral') carry their payload to the semantic actions
-- via 'HappyTerminal'; any other token is reported through 'happyError''.
happyNewToken action sts stk [] =
        action 61 61 notHappyAtAll (HappyState action) sts stk []

happyNewToken action sts stk (tk:tks) =
        let cont i = action i i tk (HappyState action) sts stk tks in
        case tk of {
        TClass -> cont 19;
        TPublic -> cont 20;
        TStatic -> cont 21;
        TMain -> cont 22;
        TNew -> cont 23;
        TReturn -> cont 24;
        TIf -> cont 25;
        TElse -> cont 26;
        TWhile -> cont 27;
        TPrint -> cont 28;
        TLength -> cont 29;
        TThis -> cont 30;
        TVoid -> cont 31;
        TString -> cont 32;
        TInt -> cont 33;
        TBoolean -> cont 34;
        TTrue -> cont 35;
        TFalse -> cont 36;
        TNegation -> cont 37;
        TLogicAnd -> cont 38;
        TLogicOr -> cont 39;
        TCompareLess -> cont 40;
        TCompareGreater -> cont 41;
        TCompareLessEqual -> cont 42;
        TCompareGreaterEqual -> cont 43;
        TCompareEqual -> cont 44;
        TCompareNotEqual -> cont 45;
        TAdd -> cont 46;
        TSub -> cont 47;
        TMul -> cont 48;
        TAssignment -> cont 49;
        TComma -> cont 50;
        TDot -> cont 51;
        TSemiColon -> cont 52;
        TLeftParen -> cont 53;
        TRightParen -> cont 54;
        TLeftBracket -> cont 55;
        TRightBracket -> cont 56;
        TLeftBrace -> cont 57;
        TRightBrace -> cont 58;
        TIdLiteral happy_dollar_dollar -> cont 59;
        TIntLiteral happy_dollar_dollar -> cont 60;
        _ -> happyError' (tk:tks)
        }
-- | Report a parse error. Token number 61 is end-of-input (see
-- 'happyNewToken'), so there is no real lookahead token to put back;
-- otherwise prepend the offending token before reporting.
happyError_ 61 tk tks = happyError' tks
happyError_ _ tk tks = happyError' (tk:tks)
-- | Identity wrapper Happy uses so a non-monadic parser can flow through
-- the engine's monadic plumbing ('happyThen' / 'happyReturn').
newtype HappyIdentity a = HappyIdentity a

-- | Wrap a value (kept for compatibility with the Happy template API).
happyIdentity = HappyIdentity

-- | Unwrap the parse result.
happyRunIdentity (HappyIdentity a) = a

-- Since GHC 7.10 (the Applicative-Monad Proposal) every Monad instance
-- requires Functor and Applicative superclass instances; without the two
-- instances below the 'Monad HappyIdentity' instance no longer compiles.
instance Functor HappyIdentity where
    fmap f (HappyIdentity a) = HappyIdentity (f a)

instance Applicative HappyIdentity where
    pure = HappyIdentity
    HappyIdentity f <*> HappyIdentity a = HappyIdentity (f a)

instance Monad HappyIdentity where
    return = pure
    (HappyIdentity p) >>= q = q p
-- | Bind for the parser's result carrier; just '>>=' on 'HappyIdentity'.
happyThen :: () => HappyIdentity a -> (a -> HappyIdentity b) -> HappyIdentity b
happyThen = (>>=)

-- | Inject a value into the parser's result carrier.
happyReturn :: () => a -> HappyIdentity a
happyReturn = (return)
-- | Token-stream-threading bind used by monadic reductions: the
-- continuation also receives the remaining token list.
happyThen1 m k tks = (>>=) m (\a -> k a tks)

-- | Token-stream-threading return: the remaining tokens are ignored.
happyReturn1 :: () => a -> b -> HappyIdentity a
happyReturn1 = \a tks -> (return) a
-- | Funnel a parse failure into 'parserError' (which calls 'error'),
-- wrapped in the identity carrier so the types line up with the engine.
happyError' :: () => [(Token)] -> HappyIdentity a
happyError' tks = HappyIdentity (parserError tks)
-- | Top-level entry point: run the generated LALR automaton from state 0
-- over the token list and project out the 'HappyAbsSyn4' (whole-program)
-- result. Any other wrapper constructor reaching the end indicates an
-- internal inconsistency in the generated tables ('notHappyAtAll').
parseMiniJava tks = happyRunIdentity happySomeParser where
  happySomeParser = happyThen (happyParse action_0 tks) (\x -> case x of {HappyAbsSyn4 z -> happyReturn z; _other -> notHappyAtAll })

-- Lazy sequencing of semantic values (Happy was run without --strict).
happySeq = happyDontSeq
-- | Build the synthetic AST method for the program's @main@: return type
-- 'TypeVoid', a single parameter (also recorded with 'TypeVoid' — its real
-- type is not modelled here), the declared variables and statements, and a
-- body expression of 'AExprVoid' since main returns nothing.
mainMethod arg vars code = AMethod TypeVoid "main" [Fix $ AVar TypeVoid arg] (fixMap vars) (fixMap code) (Fix AExprVoid)
-- | Wrap each element of a list in the 'Fix' fixpoint constructor,
-- turning a list of raw AST layers into the fixed AST representation.
fixMap = map Fix
-- | Abort with a parse error. When at least five tokens remain, show the
-- next five as context; otherwise report the failure at end of input with
-- whatever tokens are left.
parserError :: [Token] -> a
parserError tokens
  | length upcoming == 5 = error ("Parse error, next tokens:" ++ renderTokens upcoming)
  | otherwise            = error ("Parse error @eof:" ++ renderTokens tokens)
  where
    upcoming = take 5 tokens
-- | Render a token list for error messages: each token's 'show' form is
-- preceded by a single space (so the result for a non-empty list starts
-- with a space).
renderTokens toks = foldr (\t rest -> ' ' : show t ++ rest) "" toks
{-# LINE 1 "templates/GenericTemplate.hs" #-}
{-# LINE 1 "templates/GenericTemplate.hs" #-}
{-# LINE 1 "<command-line>" #-}
{-# LINE 1 "templates/GenericTemplate.hs" #-}
-- Id: GenericTemplate.hs,v 1.26 2005/01/14 14:47:22 simonmar Exp
{-# LINE 30 "templates/GenericTemplate.hs" #-}
{-# LINE 51 "templates/GenericTemplate.hs" #-}
{-# LINE 61 "templates/GenericTemplate.hs" #-}
{-# LINE 70 "templates/GenericTemplate.hs" #-}
infixr 9 `HappyStk`
-- | The parser's semantic-value stack: a right-nested chain of values.
-- There is deliberately no nil constructor — the bottom of the stack is
-- never inspected (it is seeded with 'notHappyAtAll' in 'happyParse').
data HappyStk a = HappyStk a (HappyStk a)
-----------------------------------------------------------------------------
-- starting the parse
-- | Kick off parsing: request the first token in @start_state@, seeding
-- the state stack and value stack with placeholders that are never
-- examined before real entries are pushed.
happyParse start_state = happyNewToken start_state notHappyAtAll notHappyAtAll
-----------------------------------------------------------------------------
-- Accepting the parse
-- If the current token is (1), it means we've just accepted a partial
-- parse (a %partial parser). We must ignore the saved token on the top of
-- the stack in this case.
-- | Accept: the final answer sits on the semantic stack. When the current
-- token is the error token (1) we have just accepted a %partial parse, so
-- the saved token sitting above the answer must be skipped first.
happyAccept (1) tk st sts (_ `HappyStk` ans `HappyStk` _) =
        happyReturn1 ans
happyAccept j tk st sts (HappyStk ans _) =
        (happyReturn1 ans)
-----------------------------------------------------------------------------
-- Arrays only: do the next action
{-# LINE 148 "templates/GenericTemplate.hs" #-}
-----------------------------------------------------------------------------
-- HappyState data type (not arrays)
-- | A parser state is represented by its action function; the newtype
-- makes the recursive type (actions receive states and the state stack)
-- expressible.
newtype HappyState b c = HappyState
        (Int ->                      -- token number
         Int ->                      -- token number (yes, again)
         b ->                        -- token semantic value
         HappyState b c ->           -- current state
         [HappyState b c] ->         -- state stack
         c)
-----------------------------------------------------------------------------
-- Shifting a token
-- | Shift: push the current state onto the state stack and the token's
-- semantic value onto the value stack, then fetch the next token in
-- @new_state@. When shifting the error token (1), the real token number is
-- recovered from the 'HappyErrorToken' on top of the value stack and
-- re-dispatched directly instead of consuming input.
happyShift new_state (1) tk st sts stk@(x `HappyStk` _) =
     let (i) = (case x of { HappyErrorToken (i) -> i }) in
--     trace "shifting the error token" $
     new_state i i tk (HappyState (new_state)) ((st):(sts)) (stk)

happyShift new_state i tk st sts stk =
     happyNewToken new_state ((st):(sts)) ((HappyTerminal (tk))`HappyStk`stk)
-- happyReduce is specialised for the common cases.
-- | Reduce an empty (0-symbol) production: push the constant semantic
-- value @fn@ and re-dispatch the nonterminal on the current state. During
-- error recovery (token 1) reductions always fail instead.
happySpecReduce_0 i fn (1) tk st sts stk
     = happyFail (1) tk st sts stk
happySpecReduce_0 nt fn j tk st@((HappyState (action))) sts stk
     = action nt j tk st ((st):(sts)) (fn `HappyStk` stk)
-- | Reduce a 1-symbol production: pop one semantic value, apply @fn@, and
-- goto via the state uncovered on the state stack. @happySeq@ controls
-- whether the result is forced.
happySpecReduce_1 i fn (1) tk st sts stk
     = happyFail (1) tk st sts stk
happySpecReduce_1 nt fn j tk _ sts@(((st@(HappyState (action))):(_))) (v1`HappyStk`stk')
     = let r = fn v1 in
       happySeq r (action nt j tk st sts (r `HappyStk` stk'))
-- | Reduce a 2-symbol production: pop two semantic values (top of stack is
-- the rightmost symbol), apply @fn@, and goto via the uncovered state.
happySpecReduce_2 i fn (1) tk st sts stk
     = happyFail (1) tk st sts stk
happySpecReduce_2 nt fn j tk _ ((_):(sts@(((st@(HappyState (action))):(_))))) (v1`HappyStk`v2`HappyStk`stk')
     = let r = fn v1 v2 in
       happySeq r (action nt j tk st sts (r `HappyStk` stk'))
-- | Reduce a 3-symbol production: pop three semantic values, apply @fn@,
-- and goto via the state uncovered after dropping two states.
happySpecReduce_3 i fn (1) tk st sts stk
     = happyFail (1) tk st sts stk
happySpecReduce_3 nt fn j tk _ ((_):(((_):(sts@(((st@(HappyState (action))):(_))))))) (v1`HappyStk`v2`HappyStk`v3`HappyStk`stk')
     = let r = fn v1 v2 v3 in
       happySeq r (action nt j tk st sts (r `HappyStk` stk'))
-- | General k-symbol reduction: drop @k-1@ states from the state stack,
-- hand the whole value stack to @fn@ (which pops what it needs and pushes
-- its result), and continue in the uncovered state.
happyReduce k i fn (1) tk st sts stk
     = happyFail (1) tk st sts stk
happyReduce k nt fn j tk st sts stk
     = case happyDrop (k - ((1) :: Int)) sts of
         sts1@(((st1@(HappyState (action))):(_))) ->
                let r = fn stk in  -- it doesn't hurt to always seq here...
                happyDoSeq r (action nt j tk st1 sts1 r)
-- | Monadic reduction: run the semantic action @fn@ in the parser monad
-- (it also sees the lookahead token), then push its result after dropping
-- @k@ states and @k@ value-stack slots, re-dispatching on the same token.
happyMonadReduce k nt fn (1) tk st sts stk
     = happyFail (1) tk st sts stk
happyMonadReduce k nt fn j tk st sts stk =
        happyThen1 (fn stk tk) (\r -> action nt j tk st1 sts1 (r `HappyStk` drop_stk))
       where (sts1@(((st1@(HappyState (action))):(_)))) = happyDrop k ((st):(sts))
             drop_stk = happyDropStk k stk
-- | Like 'happyMonadReduce', but after the monadic action it fetches a
-- fresh token via 'happyNewToken' instead of re-dispatching the current
-- lookahead. (@st1@ is bound only to uncover @action@ from the stack.)
happyMonad2Reduce k nt fn (1) tk st sts stk
     = happyFail (1) tk st sts stk
happyMonad2Reduce k nt fn j tk st sts stk =
       happyThen1 (fn stk tk) (\r -> happyNewToken new_state sts1 (r `HappyStk` drop_stk))
       where (sts1@(((st1@(HappyState (action))):(_)))) = happyDrop k ((st):(sts))
             drop_stk = happyDropStk k stk
             new_state = action
-- | Drop @n@ entries from the state stack. Partial by design: the tables
-- guarantee @n@ never exceeds the stack depth.
happyDrop (0) l = l
happyDrop n ((_):(t)) = happyDrop (n - ((1) :: Int)) t
-- | Drop @n@ entries from the semantic-value stack (see 'happyDrop').
happyDropStk (0) l = l
happyDropStk n (x `HappyStk` xs) = happyDropStk (n - ((1)::Int)) xs
-----------------------------------------------------------------------------
-- Moving to a new state after a reduction
{-# LINE 246 "templates/GenericTemplate.hs" #-}
-- | Goto after a reduction: dispatch the nonterminal number @j@ on the
-- target state's action function (the non-array state representation).
happyGoto action j tk st = action j j tk (HappyState action)
-----------------------------------------------------------------------------
-- Error recovery ((1) is the error token)
-- parse error if we are in recovery and we fail again
-- | Error handling. If we fail while already holding the error token (1),
-- recovery has failed too: report via 'happyError_' using the token number
-- saved in the 'HappyErrorToken' on the value stack. Otherwise enter
-- recovery by pushing an error token that records the failing token number
-- and retrying the current state with token 1.
happyFail (1) tk old_st _ stk@(x `HappyStk` _) =
     let (i) = (case x of { HappyErrorToken (i) -> i }) in
--      trace "failing" $
        happyError_ i tk

{- We don't need state discarding for our restricted implementation of
   "error".  In fact, it can cause some bogus parses, so I've disabled it
   for now --SDM

-- discard a state
happyFail  (1) tk old_st (((HappyState (action))):(sts))
                                                (saved_tok `HappyStk` _ `HappyStk` stk) =
--      trace ("discarding state, depth " ++ show (length stk))  $
        action (1) (1) tk (HappyState (action)) sts ((saved_tok`HappyStk`stk))
-}

-- Enter error recovery: generate an error token,
--      save the old token and carry on.
happyFail  i tk (HappyState (action)) sts stk =
--      trace "entering error recovery" $
        action (1) (1) tk (HappyState (action)) sts ( (HappyErrorToken (i)) `HappyStk` stk)
-- Internal happy errors:
-- | Placeholder for stack slots and "impossible" cases; evaluating it
-- means the generated tables are inconsistent with the engine.
notHappyAtAll :: a
notHappyAtAll = error "Internal Happy error\n"
-----------------------------------------------------------------------------
-- Hack to get the typechecker to accept our action functions
-----------------------------------------------------------------------------
-- Seq-ing. If the --strict flag is given, then Happy emits
-- happySeq = happyDoSeq
-- otherwise it emits
-- happySeq = happyDontSeq
-- | Strict and lazy sequencing of reduction results. The engine selects
-- one via @happySeq@: 'happyDoSeq' forces the first argument to WHNF
-- before yielding the second; 'happyDontSeq' ignores it entirely.
happyDoSeq, happyDontSeq :: a -> b -> b
happyDoSeq x y = x `seq` y
happyDontSeq _ y = y
-----------------------------------------------------------------------------
-- Don't inline any functions from the template. GHC has a nasty habit
-- of deciding to inline happyGoto everywhere, which increases the size of
-- the generated parser quite a bit.
{-# LINE 312 "templates/GenericTemplate.hs" #-}
{-# NOINLINE happyShift #-}
{-# NOINLINE happySpecReduce_0 #-}
{-# NOINLINE happySpecReduce_1 #-}
{-# NOINLINE happySpecReduce_2 #-}
{-# NOINLINE happySpecReduce_3 #-}
{-# NOINLINE happyReduce #-}
{-# NOINLINE happyMonadReduce #-}
{-# NOINLINE happyGoto #-}
{-# NOINLINE happyFail #-}
-- end of Happy Template.
| davnils/minijava-compiler | src/Parser.hs | bsd-3-clause | 52,493 | 699 | 25 | 8,121 | 17,811 | 9,162 | 8,649 | 1,391 | 43 |
{-# LANGUAGE OverloadedStrings #-}
module MateVMRuntime.Utilities where
import Data.Word
import qualified Data.Map as M
import qualified Data.ByteString.Lazy as B
import Data.List
import JVM.ClassFile
import Data.IORef
import System.IO.Unsafe
import MateVMRuntime.Types
import MateVMRuntime.NativeSizes
import System.CPUTime
import Text.Printf
import MateVMRuntime.Debug
buildMethodID :: Class Direct -> Word16 -> MethodInfo
buildMethodID cls idx = MethodInfo (ntName nt) rc (ntSignature nt)
where
(rc, nt) = case constsPool cls M.! idx of
(CMethod rc' nt') -> (rc', nt')
(CIfaceMethod rc' nt') -> (rc', nt')
_ -> error "buildMethodID: something wrong. abort."
buildFieldOffset :: Class Direct -> Word16 -> (B.ByteString, B.ByteString)
buildFieldOffset cls idx = (rc, ntName fnt)
where (CField rc fnt) = constsPool cls M.! idx
buildClassID :: Class Direct -> Word16 -> B.ByteString
buildClassID cls idx = cl
where (CClass cl) = constsPool cls M.! idx
methodNameTypeByIdx :: Class Direct -> Word16 -> NameType (Method Direct)
methodNameTypeByIdx cls idx = case constsPool cls M.! idx of
(CMethod _ nt') -> nt'
(CIfaceMethod _ nt') -> nt'
_ -> error "methodGetArgsCount: something wrong. abort."
methodGetArgsCount :: NameType (Method Direct) -> NativeWord
methodGetArgsCount nt = genericLength args
where (MethodSignature args _) = ntSignature nt
lookupMethodWithSig :: B.ByteString -> MethodSignature -> Class Direct -> Maybe (Method Direct)
lookupMethodWithSig name sig cls =
find (\x -> methodName x == name && methodSignature x == sig) $ classMethods cls
checkNothing :: String -> Maybe a -> a
checkNothing m Nothing = error m
checkNothing _ (Just v) = v
compileTime :: IORef Integer
{-# NOINLINE compileTime #-}
compileTime = unsafePerformIO $ newIORef 0
-- measure time, from http://www.haskell.org/haskellwiki/Timing_computations
time :: String -> IO t -> IO t
time desc a = do
start <- getCPUTime
v <- a
end <- getCPUTime
let diff = end - start
if (isPrefixOf "compile" desc)
then do
ct <- readIORef compileTime
writeIORef compileTime $ ct + diff
else do
printfTime $ printf "%s: %0.6f\n" desc (((fromIntegral diff) / (10^12)) :: Double)
return v
printCompileTime :: IO ()
printCompileTime = do
ct <- readIORef compileTime
printfTime $ printf "compiletime: %0.6f\n" ((fromIntegral ct) / (10^12) :: Double)
| LouisJenkinsCS/Minimal-JVM | MateVMRuntime/Utilities.hs | bsd-3-clause | 2,414 | 0 | 16 | 441 | 808 | 410 | 398 | 60 | 3 |
{-# LANGUAGE LambdaCase #-}
module ShaderRick where
import Graphics.GL.Pal
import Data.IORef
import Control.Monad.Trans
-- | Compile a vertex/fragment shader pair, build a result from the program,
-- and return an action that hot-reloads the shaders whenever either file
-- changes. The returned action yields the current @(result, error text)@;
-- on a recompile failure the previous good result is kept alongside the
-- new error string.
shaderRecompiler :: MonadIO m => FilePath -> FilePath -> (Program -> IO r) -> m (IO (r, String))
shaderRecompiler vertShaderPath fragShaderPath makeResult = liftIO $ do
  (shader, anyError) <- createShaderProgram' vertShaderPath fragShaderPath
  result <- makeResult shader
  resultRef <- newIORef (result, anyError)
  -- NOTE(review): presumably 'watchFiles' returns a non-blocking poll
  -- action (Maybe change) — confirm against its definition.
  lookForChange <- watchFiles [vertShaderPath, fragShaderPath]
  return $ do
    lookForChange >>= \case
      Nothing -> return ()
      Just _ -> do
        (newShader, newError) <- createShaderProgram' vertShaderPath fragShaderPath
        -- Only adopt the new program when it compiled cleanly; otherwise
        -- keep the last good result so rendering keeps working.
        goodResult <- if null newError
            then makeResult newShader
            else fst <$> readIORef resultRef
        writeIORef resultRef (goodResult, newError)
    readIORef resultRef
| lukexi/tinyrick | src/ShaderRick.hs | bsd-3-clause | 888 | 0 | 20 | 194 | 249 | 124 | 125 | 21 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Control.DeepSeq
import Criterion.Main
import Data.Binary
import Data.ByteString.Lazy (ByteString)
import Data.Group
import Data.Maybe
import MCL.Curves.Fp254BNb
-- | Criterion benchmark suite for the Fp254BNb curve bindings: field
-- arithmetic (Fp, Fp2, Fp12, Fr), group operations (G1, G2, GT) and the
-- pairing itself.
main :: IO ()
main = defaultMain
  [ bgroup "Fp"
    [ benchFpArith fp_a fp_b
    , bench "hash_to" $ nf hashToFp "54o6vyua2984v357b35n63"
    , bgroup "from_integer"
      [ bench "small" $ nf mkFp small_integer
      , bench "large" $ nf mkFp large_integer
      ]
    , bench "eq" $ nf (uncurry (==)) (fp_a, fp_b)
    , bench "to_integer" $ nf fromFp fp_a
    , bench "is_zero" $ nf fp_isZero fp_a
    , bench "sqrt" $ nf fp_squareRoot fp_a
    , bench "show" $ nf show fp_a
    , benchBinary fp_a
    ]
  , bgroup "Fp2"
    [ benchFpArith fp2_a fp2_b
    , bench "from_base" $ nf (uncurry mkFp2) (fp_a, fp_b)
    , bench "eq" $ nf (uncurry (==)) (fp2_a, fp2_b)
    -- NOTE(review): this measures fp_isZero on an Fp value inside the Fp2
    -- group — looks like a copy-paste slip; confirm whether
    -- @fp2_isZero fp2_a@ was intended.
    , bench "is_zero" $ nf fp_isZero fp_a
    , bench "sqrt" $ nf fp2_squareRoot fp2_a
    , bench "show" $ nf show fp2_a
    , benchBinary fp2_a
    ]
  , bgroup "Fp12"
    [ benchFpArith fp12_a fp12_b
    , bench "eq" $ nf (uncurry (==)) (fp12_a, fp12_b)
    , bench "is_zero" $ nf fp12_isZero fp12_a
    , bench "show" $ nf show fp12_a
    , benchBinary fp12_a
    ]
  , bgroup "Fr"
    [ benchFpArith fr_a fr_b
    , bench "hash_to" $ nf hashToFr "6mn8o8rmn634wcxq354x31"
    , bgroup "from_integer"
      [ bench "small" $ nf mkFr small_integer
      , bench "large" $ nf mkFr large_integer
      ]
    , bench "eq" $ nf (uncurry (==)) (fr_a, fr_b)
    , bench "to_integer" $ nf fromFr fr_a
    , bench "is_zero" $ nf fr_isZero fr_a
    , bench "show" $ nf show fr_a
    , benchBinary fr_a
    ]
  , bgroup "G1"
    [ benchGroupArith g1_powFr g1_p g1_q
    , bench "construct" $ nf (uncurry mkG1) (g1_p_x, g1_p_y)
    , bench "map_to" $ nf mapToG1 fp_a
    , bench "eq" $ nf (uncurry (==)) (g1_p, g1_q)
    , bench "is_zero" $ nf g1_isZero g1_p
    , bench "affine" $ nf g1_affineCoords g1_p
    , bench "show" $ nf show g1_p
    , benchBinary g1_p
    ]
  , bgroup "G2"
    [ benchGroupArith g2_powFr g2_p g2_q
    , bench "construct" $ nf (uncurry mkG2) (g2_p_x, g2_p_y)
    , bench "map_to" $ nf mapToG2 fp2_a
    , bench "eq" $ nf (uncurry (==)) (g2_p, g2_q)
    , bench "is_zero" $ nf g2_isZero g2_p
    , bench "affine" $ nf g2_affineCoords g2_p
    , bench "show" $ nf show g2_p
    , benchBinary g2_p
    ]
  , bgroup "GT"
    [ bench "pow" $ nf (uncurry pow) (gt_a, large_integer)
    , bench "pow_native" $ nf (uncurry gt_powFr) (gt_a, large_integer_fr)
    ]
  , bgroup "pairing"
    [ bench "compute1" $ nf (uncurry pairing) (g1_p, g2_q)
    , bench "compute2" $ nf (uncurry pairing) (g1_q, g2_p)
    ]
  ]
----------------------------------------
-- | Standard arithmetic micro-benchmarks shared by all field types.
benchFpArith :: (Fractional a, NFData a) => a -> a -> Benchmark
benchFpArith x y = bgroup "arith"
  [ bench "add" $ nf (uncurry (+)) (x, y)
  , bench "subtract" $ nf (uncurry (-)) (x, y)
  , bench "multiply" $ nf (uncurry (*)) (x, y)
  , bench "negate" $ nf negate x
  , bench "invert" $ nf recip x
  ]
-- | Group-operation benchmarks shared by G1 and G2; @nativePow@ is the
-- curve's specialised scalar multiplication by an 'Fr' exponent.
benchGroupArith :: (Group g, NFData g) => (g -> Fr -> g) -> g -> g -> Benchmark
benchGroupArith nativePow x y = bgroup "arith"
  [ bench "add" $ nf (uncurry mappend) (x, y)
  , bench "invert" $ nf invert x
  , bgroup "mul"
    [ bench "small" $ nf (uncurry pow) (x, small_integer)
    , bench "large" $ nf (uncurry pow) (x, large_integer)
    , bench "native" $ nf (uncurry nativePow) (x, large_integer_fr)
    ]
  ]
-- | Round-trip serialisation benchmarks via 'Data.Binary'.
benchBinary :: forall a. (Binary a, NFData a) => a -> Benchmark
benchBinary value = bgroup "binary"
  [ bench "put" $ nf encode value
  , bench "get" $ nf (decode :: ByteString -> a) (encode value)
  ]
----------------------------------------
-- Deterministic sample field elements, derived by hashing fixed strings
-- so every benchmark run exercises the same values.
fr_a, fr_b :: Fr
fr_a = hashToFr "a"
fr_b = hashToFr "b"
fp_a, fp_b :: Fp
fp_a = hashToFp "a"
fp_b = hashToFp "b"
fp2_a, fp2_b :: Fp2
fp2_a = mkFp2 (hashToFp "a") (hashToFp "b")
fp2_b = mkFp2 (hashToFp "c") (hashToFp "d")
-- Fp12 elements are built from six Fp2 coefficients.
fp12_a, fp12_b :: Fp12
fp12_a = mkFp12 (mkFp2 (hashToFp "a") (hashToFp "b"))
                (mkFp2 (hashToFp "c") (hashToFp "d"))
                (mkFp2 (hashToFp "e") (hashToFp "f"))
                (mkFp2 (hashToFp "g") (hashToFp "h"))
                (mkFp2 (hashToFp "i") (hashToFp "j"))
                (mkFp2 (hashToFp "k") (hashToFp "l"))
fp12_b = mkFp12 (mkFp2 (hashToFp "m") (hashToFp "n"))
                (mkFp2 (hashToFp "o") (hashToFp "p"))
                (mkFp2 (hashToFp "q") (hashToFp "r"))
                (mkFp2 (hashToFp "s") (hashToFp "t"))
                (mkFp2 (hashToFp "u") (hashToFp "v"))
                (mkFp2 (hashToFp "w") (hashToFp "x"))
----------------------------------------
-- Sample curve points obtained by mapping the field samples onto G1/G2.
g1_p, g1_q :: G1
g1_p = mapToG1 fp_a
g1_q = mapToG1 fp_b
g1_p_x, g1_p_y :: Fp
-- fromJust: assumed safe because mapToG1 never yields the point at
-- infinity — TODO confirm against the MCL binding's documentation.
(g1_p_x, g1_p_y) = fromJust $ g1_affineCoords g1_p
----------------------------------------
g2_p, g2_q :: G2
g2_p = mapToG2 fp2_a
g2_q = mapToG2 fp2_b
g2_p_x, g2_p_y :: Fp2
-- Same fromJust assumption as for G1 above.
(g2_p_x, g2_p_y) = fromJust $ g2_affineCoords g2_p
gt_a :: GT
gt_a = pairing g1_p g2_q
----------------------------------------
-- Exponents used by the scalar-multiplication benchmarks.
small_integer, large_integer :: Integer
small_integer = 42
large_integer = fr_modulus `quot` 2
large_integer_fr :: Fr
large_integer_fr = mkFr large_integer
| arybczak/haskell-mcl | benchmark/Main.hs | bsd-3-clause | 5,452 | 0 | 12 | 1,515 | 1,960 | 1,014 | 946 | 131 | 1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
-------------------------------------------------------------------------------------
-- |
-- Copyright : (c) Hans Hoglund 2012
--
-- License : BSD-style
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Provides overloaded pitch literals.
--
-------------------------------------------------------------------------------------
module Music.Pitch.Literal.Pitch (
-- * IsPitch class
IsPitch(..),
-- * Literal values
-- ** Four octaves up
cs'''', ds'''', es'''', fs'''', gs'''', as'''', bs'''',
c'''' , d'''' , e'''' , f'''' , g'''' , a'''' , b'''' ,
cb'''', db'''', eb'''', fb'''', gb'''', ab'''', bb'''',
-- ** Three octaves up
cs''', ds''', es''', fs''', gs''', as''', bs''',
c''' , d''' , e''' , f''' , g''' , a''' , b''' ,
cb''', db''', eb''', fb''', gb''', ab''', bb''',
-- ** Two octaves up
cs'', ds'', es'', fs'', gs'', as'', bs'',
c'' , d'' , e'' , f'' , g'' , a'' , b'' ,
cb'', db'', eb'', fb'', gb'', ab'', bb'',
-- ** One octave up
cs' , ds' , es' , fs' , gs' , as' , bs' ,
c' , d' , e' , f' , g' , a' , b' ,
cb' , db' , eb' , fb' , gb' , ab' , bb' ,
-- ** Standard octave
cs , ds , es , fs , gs , as , bs ,
c , d , e , f , g , a , b ,
cb , db , eb , fb , gb , ab , bb ,
-- ** One octave down
cs_ , ds_ , es_ , fs_ , gs_ , as_ , bs_ ,
c_ , d_ , e_ , f_ , g_ , a_ , b_ ,
cb_ , db_ , eb_ , fb_ , gb_ , ab_ , bb_ ,
-- ** Two octaves down
cs__, ds__, es__, fs__, gs__, as__, bs__,
c__ , d__ , e__ , f__ , g__ , a__ , b__ ,
cb__, db__, eb__, fb__, gb__, ab__, bb__,
-- ** Three octaves down
cs___, ds___, es___, fs___, gs___, as___, bs___,
c___ , d___ , e___ , f___ , g___ , a___ , b___ ,
cb___, db___, eb___, fb___, gb___, ab___, bb___,
-- ** Four octaves down
cs____, ds____, es____, fs____, gs____, as____, bs____,
c____ , d____ , e____ , f____ , g____ , a____ , b____ ,
cb____, db____, eb____, fb____, gb____, ab____, bb____,
) where
import Control.Applicative
import Data.Fixed
import Data.Int
import Data.Ratio
import Data.Semigroup
import Data.Word
import Data.AffineSpace ((.-.))
import Music.Pitch.Common.Types
-- Pitch literal, defined as @(class, alteration, octave)@, where
--
-- * @class@ is a pitch class number in @[0..6]@, starting from C.
--
-- * @alteration@ is the number of semitones, i.e. 0 is natural, 1 for sharp 2 for double sharp, -1 for flat and -2 for double flat.
-- Alteration is in 'Maybe' because some pitch representations differ between explicit and explicit accidentals, i.e. a diatonic
-- pitch type may assume @(0,Nothing,...)@ to mean C sharp rather than C.
--
-- * @octave@ is octave number in scientific pitch notation - 4.
--
-- Middle C is represented by the pitch literal @(0, Nothing, 0)@.
--
-- newtype PitchL = PitchL { getPitchL :: (Int, Maybe Double, Int) }
-- deriving (Eq, Show, Ord)
-- | Types that can be constructed from a concrete 'Pitch'; this is what
-- lets the pitch literals below ('c', 'fs', 'bb'', …) be overloaded.
class IsPitch a where
  fromPitch :: Pitch -> a
-- Lifted instances: a pitch literal can stand for a wrapped/contained
-- pitch by mapping 'fromPitch' through 'pure'.
instance IsPitch a => IsPitch (Maybe a) where
  fromPitch = pure . fromPitch
instance IsPitch a => IsPitch (First a) where
  fromPitch = pure . fromPitch
instance IsPitch a => IsPitch (Last a) where
  fromPitch = pure . fromPitch
instance IsPitch a => IsPitch [a] where
  fromPitch = pure . fromPitch
instance (Monoid b, IsPitch a) => IsPitch (b, a) where
  fromPitch = pure . fromPitch
-- TODO clean by inlining this whole thing or similar
-- | Build a 'Pitch' from @(pitch class, alteration, octave)@: pitch class
-- 0..6 (C..B), alteration in semitones (0 natural, 1 sharp, -1 flat),
-- octave 0 being the octave of middle C.
viaPitchL :: (Int, Int, Int) -> Pitch
viaPitchL (pc, sem, oct) = Pitch $ mkInterval' sem (oct * 7 + pc)
  where
    -- An Interval is (chromatic steps, diatonic steps) up from middle C;
    -- the alteration is applied on the chromatic axis only.
    mkInterval' diff diatonic = Interval (diatonicToChromatic (fromIntegral diatonic) + fromIntegral diff, fromIntegral diatonic)
    -- Convert diatonic steps to semitones using the major-scale pattern.
    diatonicToChromatic :: DiatonicSteps -> ChromaticSteps
    diatonicToChromatic d = fromIntegral $ (octaves*12) + go restDia
      where
        -- restDia is always in [0..6]
        -- …so the (!!) lookup in 'go' cannot go out of bounds.
        (octaves, restDia) = fromIntegral d `divMod` 7
        go = ([0,2,4,5,7,9,11] !!)
-- Four octaves above middle C (sharps, naturals, flats).
cs'''' = fromPitch $ viaPitchL (0, 1, 4)
ds'''' = fromPitch $ viaPitchL (1, 1, 4)
es'''' = fromPitch $ viaPitchL (2, 1, 4)
fs'''' = fromPitch $ viaPitchL (3, 1, 4)
gs'''' = fromPitch $ viaPitchL (4, 1, 4)
as'''' = fromPitch $ viaPitchL (5, 1, 4)
bs'''' = fromPitch $ viaPitchL (6, 1, 4)
c'''' = fromPitch $ viaPitchL (0, 0, 4)
d'''' = fromPitch $ viaPitchL (1, 0, 4)
e'''' = fromPitch $ viaPitchL (2, 0, 4)
f'''' = fromPitch $ viaPitchL (3, 0, 4)
g'''' = fromPitch $ viaPitchL (4, 0, 4)
a'''' = fromPitch $ viaPitchL (5, 0, 4)
b'''' = fromPitch $ viaPitchL (6, 0, 4)
cb'''' = fromPitch $ viaPitchL (0, (-1), 4)
db'''' = fromPitch $ viaPitchL (1, (-1), 4)
eb'''' = fromPitch $ viaPitchL (2, (-1), 4)
fb'''' = fromPitch $ viaPitchL (3, (-1), 4)
gb'''' = fromPitch $ viaPitchL (4, (-1), 4)
ab'''' = fromPitch $ viaPitchL (5, (-1), 4)
bb'''' = fromPitch $ viaPitchL (6, (-1), 4)
-- Three octaves above middle C.
cs''' = fromPitch $ viaPitchL (0, 1, 3)
ds''' = fromPitch $ viaPitchL (1, 1, 3)
es''' = fromPitch $ viaPitchL (2, 1, 3)
fs''' = fromPitch $ viaPitchL (3, 1, 3)
gs''' = fromPitch $ viaPitchL (4, 1, 3)
as''' = fromPitch $ viaPitchL (5, 1, 3)
bs''' = fromPitch $ viaPitchL (6, 1, 3)
c''' = fromPitch $ viaPitchL (0, 0, 3)
d''' = fromPitch $ viaPitchL (1, 0, 3)
e''' = fromPitch $ viaPitchL (2, 0, 3)
f''' = fromPitch $ viaPitchL (3, 0, 3)
g''' = fromPitch $ viaPitchL (4, 0, 3)
a''' = fromPitch $ viaPitchL (5, 0, 3)
b''' = fromPitch $ viaPitchL (6, 0, 3)
cb''' = fromPitch $ viaPitchL (0, (-1), 3)
db''' = fromPitch $ viaPitchL (1, (-1), 3)
eb''' = fromPitch $ viaPitchL (2, (-1), 3)
fb''' = fromPitch $ viaPitchL (3, (-1), 3)
gb''' = fromPitch $ viaPitchL (4, (-1), 3)
ab''' = fromPitch $ viaPitchL (5, (-1), 3)
bb''' = fromPitch $ viaPitchL (6, (-1), 3)
-- Two octaves above middle C.
cs'' = fromPitch $ viaPitchL (0, 1, 2)
ds'' = fromPitch $ viaPitchL (1, 1, 2)
es'' = fromPitch $ viaPitchL (2, 1, 2)
fs'' = fromPitch $ viaPitchL (3, 1, 2)
gs'' = fromPitch $ viaPitchL (4, 1, 2)
as'' = fromPitch $ viaPitchL (5, 1, 2)
bs'' = fromPitch $ viaPitchL (6, 1, 2)
c'' = fromPitch $ viaPitchL (0, 0, 2)
d'' = fromPitch $ viaPitchL (1, 0, 2)
e'' = fromPitch $ viaPitchL (2, 0, 2)
f'' = fromPitch $ viaPitchL (3, 0, 2)
g'' = fromPitch $ viaPitchL (4, 0, 2)
a'' = fromPitch $ viaPitchL (5, 0, 2)
b'' = fromPitch $ viaPitchL (6, 0, 2)
cb'' = fromPitch $ viaPitchL (0, (-1), 2)
db'' = fromPitch $ viaPitchL (1, (-1), 2)
eb'' = fromPitch $ viaPitchL (2, (-1), 2)
fb'' = fromPitch $ viaPitchL (3, (-1), 2)
gb'' = fromPitch $ viaPitchL (4, (-1), 2)
ab'' = fromPitch $ viaPitchL (5, (-1), 2)
bb'' = fromPitch $ viaPitchL (6, (-1), 2)
-- One octave above middle C.
cs' = fromPitch $ viaPitchL (0, 1, 1)
ds' = fromPitch $ viaPitchL (1, 1, 1)
es' = fromPitch $ viaPitchL (2, 1, 1)
fs' = fromPitch $ viaPitchL (3, 1, 1)
gs' = fromPitch $ viaPitchL (4, 1, 1)
as' = fromPitch $ viaPitchL (5, 1, 1)
bs' = fromPitch $ viaPitchL (6, 1, 1)
c' = fromPitch $ viaPitchL (0, 0, 1)
d' = fromPitch $ viaPitchL (1, 0, 1)
e' = fromPitch $ viaPitchL (2, 0, 1)
f' = fromPitch $ viaPitchL (3, 0, 1)
g' = fromPitch $ viaPitchL (4, 0, 1)
a' = fromPitch $ viaPitchL (5, 0, 1)
b' = fromPitch $ viaPitchL (6, 0, 1)
cb' = fromPitch $ viaPitchL (0, (-1), 1)
db' = fromPitch $ viaPitchL (1, (-1), 1)
eb' = fromPitch $ viaPitchL (2, (-1), 1)
fb' = fromPitch $ viaPitchL (3, (-1), 1)
gb' = fromPitch $ viaPitchL (4, (-1), 1)
ab' = fromPitch $ viaPitchL (5, (-1), 1)
bb' = fromPitch $ viaPitchL (6, (-1), 1)
-- The octave of middle C (scientific octave 4).
cs = fromPitch $ viaPitchL (0, 1, 0)
ds = fromPitch $ viaPitchL (1, 1, 0)
es = fromPitch $ viaPitchL (2, 1, 0)
fs = fromPitch $ viaPitchL (3, 1, 0)
gs = fromPitch $ viaPitchL (4, 1, 0)
as = fromPitch $ viaPitchL (5, 1, 0)
bs = fromPitch $ viaPitchL (6, 1, 0)
c = fromPitch $ viaPitchL (0, 0, 0)
d = fromPitch $ viaPitchL (1, 0, 0)
e = fromPitch $ viaPitchL (2, 0, 0)
f = fromPitch $ viaPitchL (3, 0, 0)
g = fromPitch $ viaPitchL (4, 0, 0)
a = fromPitch $ viaPitchL (5, 0, 0)
b = fromPitch $ viaPitchL (6, 0, 0)
cb = fromPitch $ viaPitchL (0, (-1), 0)
db = fromPitch $ viaPitchL (1, (-1), 0)
eb = fromPitch $ viaPitchL (2, (-1), 0)
fb = fromPitch $ viaPitchL (3, (-1), 0)
gb = fromPitch $ viaPitchL (4, (-1), 0)
ab = fromPitch $ viaPitchL (5, (-1), 0)
bb = fromPitch $ viaPitchL (6, (-1), 0)
-- One octave below middle C.
cs_ = fromPitch $ viaPitchL (0, 1, -1)
ds_ = fromPitch $ viaPitchL (1, 1, -1)
es_ = fromPitch $ viaPitchL (2, 1, -1)
fs_ = fromPitch $ viaPitchL (3, 1, -1)
gs_ = fromPitch $ viaPitchL (4, 1, -1)
as_ = fromPitch $ viaPitchL (5, 1, -1)
bs_ = fromPitch $ viaPitchL (6, 1, -1)
c_ = fromPitch $ viaPitchL (0, 0, -1)
d_ = fromPitch $ viaPitchL (1, 0, -1)
e_ = fromPitch $ viaPitchL (2, 0, -1)
f_ = fromPitch $ viaPitchL (3, 0, -1)
g_ = fromPitch $ viaPitchL (4, 0, -1)
a_ = fromPitch $ viaPitchL (5, 0, -1)
b_ = fromPitch $ viaPitchL (6, 0, -1)
cb_ = fromPitch $ viaPitchL (0, (-1), -1)
db_ = fromPitch $ viaPitchL (1, (-1), -1)
eb_ = fromPitch $ viaPitchL (2, (-1), -1)
fb_ = fromPitch $ viaPitchL (3, (-1), -1)
gb_ = fromPitch $ viaPitchL (4, (-1), -1)
ab_ = fromPitch $ viaPitchL (5, (-1), -1)
bb_ = fromPitch $ viaPitchL (6, (-1), -1)
-- Two octaves below middle C.
cs__ = fromPitch $ viaPitchL (0, 1, -2)
ds__ = fromPitch $ viaPitchL (1, 1, -2)
es__ = fromPitch $ viaPitchL (2, 1, -2)
fs__ = fromPitch $ viaPitchL (3, 1, -2)
gs__ = fromPitch $ viaPitchL (4, 1, -2)
as__ = fromPitch $ viaPitchL (5, 1, -2)
bs__ = fromPitch $ viaPitchL (6, 1, -2)
c__ = fromPitch $ viaPitchL (0, 0, -2)
d__ = fromPitch $ viaPitchL (1, 0, -2)
e__ = fromPitch $ viaPitchL (2, 0, -2)
f__ = fromPitch $ viaPitchL (3, 0, -2)
g__ = fromPitch $ viaPitchL (4, 0, -2)
a__ = fromPitch $ viaPitchL (5, 0, -2)
b__ = fromPitch $ viaPitchL (6, 0, -2)
cb__ = fromPitch $ viaPitchL (0, (-1), -2)
db__ = fromPitch $ viaPitchL (1, (-1), -2)
eb__ = fromPitch $ viaPitchL (2, (-1), -2)
fb__ = fromPitch $ viaPitchL (3, (-1), -2)
gb__ = fromPitch $ viaPitchL (4, (-1), -2)
ab__ = fromPitch $ viaPitchL (5, (-1), -2)
bb__ = fromPitch $ viaPitchL (6, (-1), -2)
-- Three octaves below middle C.
cs___ = fromPitch $ viaPitchL (0, 1, -3)
ds___ = fromPitch $ viaPitchL (1, 1, -3)
es___ = fromPitch $ viaPitchL (2, 1, -3)
fs___ = fromPitch $ viaPitchL (3, 1, -3)
gs___ = fromPitch $ viaPitchL (4, 1, -3)
as___ = fromPitch $ viaPitchL (5, 1, -3)
bs___ = fromPitch $ viaPitchL (6, 1, -3)
c___ = fromPitch $ viaPitchL (0, 0, -3)
d___ = fromPitch $ viaPitchL (1, 0, -3)
e___ = fromPitch $ viaPitchL (2, 0, -3)
f___ = fromPitch $ viaPitchL (3, 0, -3)
g___ = fromPitch $ viaPitchL (4, 0, -3)
a___ = fromPitch $ viaPitchL (5, 0, -3)
b___ = fromPitch $ viaPitchL (6, 0, -3)
cb___ = fromPitch $ viaPitchL (0, (-1), -3)
db___ = fromPitch $ viaPitchL (1, (-1), -3)
eb___ = fromPitch $ viaPitchL (2, (-1), -3)
fb___ = fromPitch $ viaPitchL (3, (-1), -3)
gb___ = fromPitch $ viaPitchL (4, (-1), -3)
ab___ = fromPitch $ viaPitchL (5, (-1), -3)
bb___ = fromPitch $ viaPitchL (6, (-1), -3)
-- Four octaves below middle C.
cs____ = fromPitch $ viaPitchL (0, 1, -4)
ds____ = fromPitch $ viaPitchL (1, 1, -4)
es____ = fromPitch $ viaPitchL (2, 1, -4)
fs____ = fromPitch $ viaPitchL (3, 1, -4)
gs____ = fromPitch $ viaPitchL (4, 1, -4)
as____ = fromPitch $ viaPitchL (5, 1, -4)
bs____ = fromPitch $ viaPitchL (6, 1, -4)
c____ = fromPitch $ viaPitchL (0, 0, -4)
d____ = fromPitch $ viaPitchL (1, 0, -4)
e____ = fromPitch $ viaPitchL (2, 0, -4)
f____ = fromPitch $ viaPitchL (3, 0, -4)
g____ = fromPitch $ viaPitchL (4, 0, -4)
a____ = fromPitch $ viaPitchL (5, 0, -4)
b____ = fromPitch $ viaPitchL (6, 0, -4)
cb____ = fromPitch $ viaPitchL (0, (-1), -4)
db____ = fromPitch $ viaPitchL (1, (-1), -4)
eb____ = fromPitch $ viaPitchL (2, (-1), -4)
fb____ = fromPitch $ viaPitchL (3, (-1), -4)
gb____ = fromPitch $ viaPitchL (4, (-1), -4)
ab____ = fromPitch $ viaPitchL (5, (-1), -4)
bb____ = fromPitch $ viaPitchL (6, (-1), -4)
| music-suite/music-pitch | src/Music/Pitch/Literal/Pitch.hs | bsd-3-clause | 12,962 | 0 | 12 | 3,764 | 5,483 | 3,194 | 2,289 | 246 | 1 |
module Main where
import Control.Monad (when)
import Data.Maybe (fromMaybe)
import System.Environment (getArgs)
import Text.Printf (printf)
import qualified System.Console.GetOpt as GetOpt
import Parser
import Tokenizer
-- Options parsing
-- | Command-line options accepted by the compiler driver.
data Options = Options {
  optInput :: Maybe FilePath, -- ^ input file, or 'Nothing' to read stdin
  optVerbose :: Bool }        -- ^ print intermediate results while running
  deriving Show
-- | Options in effect when no flags are given: read stdin, stay quiet.
defaultOptions :: Options
defaultOptions = Options
  { optVerbose = False
  , optInput = Nothing
  }
-- | GetOpt descriptors; each flag is a function updating the 'Options'
-- record, folded over 'defaultOptions' by 'compilerOpts'.
options :: [GetOpt.OptDescr (Options -> Options)]
options = [
  GetOpt.Option ['v'] ["verbose"]
    (GetOpt.NoArg (\opts -> opts { optVerbose = True }))
    "chatty output on stderr",
  GetOpt.Option ['i'] ["input-file"]
    (GetOpt.OptArg ((\f opts -> opts { optInput = Just f }) . fromMaybe "input") "FILE")
    "input FILE" ]
-- | Parse argv into an 'Options' record plus leftover positional
-- arguments, raising an IOError with a usage message on bad flags.
compilerOpts :: [String] -> IO (Options, [String])
compilerOpts argv =
  case GetOpt.getOpt GetOpt.Permute options argv of
    (flags, rest, []) -> return (foldl (flip id) defaultOptions flags, rest)
    (_, _, errors) -> ioError . userError $ concat errors ++ GetOpt.usageInfo usage options
  where usage = "Usage: ic [OPTION...] files..."
-- Actual program
-- | Entry point: read the program from stdin, tokenize and parse it.
-- With --verbose, intermediate representations are printed along the way.
main :: IO ()
main = do
  args <- getArgs
  (opts, leftover) <- compilerOpts args
  -- Positional arguments are not supported yet; fail loudly rather than
  -- silently ignoring them.
  when (not $ null leftover) $ error $ "Unknown args " ++ show leftover
  content <- case optInput opts of
    Nothing -> getContents
    Just _ -> error "Unimplemented. Use stdin."
  when (optVerbose opts) $ putStrLn "Tokenizing..."
  -- 'either' replaces the explicit case on the Either result.
  tokenized <- either (error . show) return $ tokenize "(unknown)" content
  when (optVerbose opts) $ do
    putStrLn "Tokenized:"
    mapM_ (\toks -> printf "\t%s\n" (show toks)) tokenized
    putStrLn ""
  when (optVerbose opts) $ putStrLn "Parsing..."
  parsed <- either (error . show) return $ parseOCM tokenized
  when (optVerbose opts) $ do
    putStrLn "Parsed:"
    print parsed
| christianlavoie/origami-computational-model | src/Main.hs | bsd-3-clause | 2,056 | 0 | 15 | 513 | 697 | 355 | 342 | 55 | 5 |
-- |
module VK.App (app
, module Exp) where
import React.Flux
import System.IO.Unsafe (unsafePerformIO)
import VK.App.Actions
import VK.App.Store
import VK.App.Types as Exp
import VK.App.Views
import VK.DOM.Router as Exp
-- | Top-level react-flux application definition for VK.
app :: App ParentRouter
app =
  App {appName = "VK"
      , appState = store
      , appView = appView_
      , appInitAction = AppInit
      , appRouter = Just $ storeRouter appDispatcher
      }
  where
    -- NOTE(review): unsafePerformIO reads the mutable store at whatever
    -- moment GHC evaluates this thunk; presumably acceptable for routing
    -- dispatch in react-flux, but worth confirming there is no caching
    -- of a stale store snapshot.
    appDispatcher action = unsafePerformIO $ do
      st <- getStoreData store
      return $ dispatch st action
| eryx67/vk-api-example | src/VK/App.hs | bsd-3-clause | 635 | 0 | 11 | 220 | 154 | 92 | 62 | 19 | 1 |
{-# LANGUAGE TypeFamilies, BangPatterns, TypeOperators, FlexibleContexts, FlexibleInstances, ScopedTypeVariables #-}
module Main where
import Data.NeuralNetwork hiding (cost')
import Data.NeuralNetwork.Backend.BLASHS
import qualified Data.Vector as V
import qualified Data.Vector.Storable as SV
import Data.List(foldl',partition,maximumBy)
import Data.IORef
import Text.Printf (printf)
import Control.Monad
import Control.Monad.Except
import System.Environment
import Text.PrettyPrint.Free hiding (flatten)
import System.IO (hFlush, stdout)
import System.IO.Unsafe
import Parser
-- | Build a small convolutional network for 28x28 MNIST digits, then
-- repeatedly train ('dotrain') and evaluate ('dotest'), asking the user
-- after each round how many more iterations to run (non-numeric input
-- ends the loop).
main = do x <- runExceptT $ compile byBLASHSf (In2D 28 28,
                                               Convolution 2 7 3 :&: MaxPooling 2 :&:
                                               Convolution 4 5 2 :&: MaxPooling 2 :&:
                                               Reshape2DAs1D :&:
                                               FullConnect 512 :&: FullConnect 32 :&:
                                               FullConnect 10 :&: HNil,
                                               MeanSquaredError)
          case x of
            Left _ -> putStrLn "Error."
            Right cnn -> do
              loop cnn 5
              -- debug cnn
  where
    loop cnn cnt = do
      cnn <- dotrain cnn cnt
      dotest cnn
      putStr "Continue? (number):"
      hFlush stdout
      str <- getLine
      -- 'reads' yields [] for non-numeric input, which stops the loop.
      let next = (reads :: ReadS Int) str
      when (not $ null next) (loop cnn (fst $ head next))
-- | Debugging driver: trains for a cycle count and learning rate taken
-- from the command line, printing a small prediction-vs-expectation
-- sample every 'checkpoint' iterations.
debug :: (ModelCst n e, Inp n ~ PImage, Out n ~ PLabel, Run n ~ IO)
      => (n,e) -> IO ()
debug nn = do
  a0:a1:_ <- getArgs
  let cycle = read a0 :: Int
      rate = read a1 :: Float
  putStrLn "Load training data."
  dataset <- trainingData >>= mapM preprocess . uncurry zip
  testset <- testData >>= mapM preprocess . take 10 . uncurry zip
  cnt <- newIORef 0 :: IO (IORef Int)
  let dispAndInc = do
        i <- readIORef cnt
        writeIORef cnt (i+1)
        putStrLn ("Iteration " ++ show i)
  -- Run full checkpoint-sized batches, testing after each…
  nn <- iterateM (cycle `div` checkpoint) nn $ \nn1 -> do
          nn1 <- iterateM checkpoint nn1 $ (dispAndInc >>) . online rate dataset
          putStrLn "[test]..."
          smalltest testset nn1
          return nn1
  -- …then the remaining iterations that do not fill a whole checkpoint.
  nn <- iterateM (cycle `mod` checkpoint) nn $ (dispAndInc >>) . online rate dataset
  putStrLn "[final test]..."
  smalltest testset nn
  where
    checkpoint = 2
    -- Print predicted (+) and expected (*) label vectors side by side.
    smalltest it (nn,_) = do
      flip mapM_ it $ \(ds,ev) -> do
        pv <- forward nn ds
        prettyResult pv >>= putStrLn . ("+" ++ )
        prettyResult ev >>= putStrLn . ("*" ++ )
-- | Train the model for @mcnt@ passes of online learning over the full
-- MNIST training set at a fixed learning rate of 0.001.
dotrain :: (ModelCst n e, Inp n ~ PImage, Out n ~ PLabel, Run n ~ IO)
        => (n,e)-> Int -> IO (n,e)
dotrain nn mcnt = do
  putStrLn "Load training data."
  dataset <- trainingData >>= mapM preprocess . uncurry zip
  -- NOTE(review): this log line is misleading — no test data is loaded
  -- here (dotest loads it itself); looks like a leftover.
  putStrLn "Load test data."
  putStrLn "Learning."
  cnt <- newIORef 0 :: IO (IORef Int)
  let dispAndInc = do
        i <- readIORef cnt
        writeIORef cnt (i+1)
        putStrLn ("Iteration " ++ show i)
  iterateM mcnt nn ((dispAndInc >>) . online 0.001 dataset)
-- | Evaluate the trained model on the MNIST test set, printing the
-- correct/wrong counts and a detailed dump of the first ten samples.
dotest :: (ModelCst n e, Inp n ~ PImage, Out n ~ PLabel, Run n ~ IO)
       => (n,e) -> IO ()
dotest !(nn,_) = do
    testset <- testData >>= mapM preprocess . uncurry zip
    putStrLn "Start test"
    -- Predicted and expected digits, compared pairwise.
    result <- mapM ((>>= postprocess) . forward nn . fst) testset
    expect <- mapM (postprocess . snd) testset
    let (co,wr) = partition (uncurry (==)) $ zip result expect
    putStrLn $ printf "correct: %d, wrong: %d" (length co) (length wr)
    putStrLn $ "First 10 tests:"
    flip mapM_ (take 10 testset) $ \(ds,ev) -> do
      pv <- forward nn ds
      prettyResult pv >>= putStrLn . ("+" ++ )
      prettyResult ev >>= putStrLn . ("*" ++ )
-- | One epoch of online training: feed every labelled sample through
-- 'learn' at the given rate, threading the model strictly to avoid
-- building a chain of unevaluated updates.
online :: (ModelCst n e, Inp n ~ PImage, Out n ~ PLabel, Run n ~ IO)
       => Float -> [(Inp n, Out n)] -> (n,e) -> IO (n,e)
online rate samples !model = step samples model
  where
    step [] !m = return m
    step (s:ss) !m = do
      !m' <- learn m s rate
      step ss m'
-- | Apply a monadic step function @f@ to @x@, @n@ times in sequence.
iterateM :: (MonadIO m) => Int -> a -> (a -> m a) -> m a
iterateM n x f = loop 0 x
  where
    loop i y
      | i == n    = return y
      | otherwise = f y >>= loop (i + 1)
-- Internal sample representations: an image is a one-element vector of
-- 28x28 matrices (single channel), a label is a dense one-hot vector.
type PImage = V.Vector (DenseMatrix Float)
type PLabel = DenseVector Float
-- | Wrap a raw (image, label) pair into the mutable dense containers the
-- network consumes. Uses 'unsafeThaw': the original immutable vectors
-- must not be reused by the caller afterwards.
preprocess :: (Image, Label) -> IO (PImage, PLabel)
preprocess (img,lbl) = do
  i <- SV.unsafeThaw img
  l <- SV.unsafeThaw lbl
  return (V.singleton $ DenseMatrix 28 28 i, DenseVector l)
-- | Collapse an output/label vector to its predicted digit (argmax).
postprocess :: PLabel -> IO Int
postprocess v = do
  a <- denseVectorToVector v
  return $ V.maxIndex a
-- | Render a label vector as "NN: <v0,v1,…>" where NN is the argmax digit.
prettyResult a = do
    v <- postprocess a
    return $ showPretty $ text (printf "%02d:" v) <+> pretty a
  where
    showPretty x = displayS (renderPretty 0.4 500 x) ""
-- Pretty-print a dense vector as <a,b,…> with 4 decimal places.
-- NOTE(review): unsafePerformIO is used to read the (mutable) vector in
-- a pure instance; assumed safe because the vector is not mutated while
-- being shown — confirm.
instance Pretty (DenseVector Float) where
  pretty vec = let a = unsafePerformIO (denseVectorToVector vec)
               in encloseSep langle rangle comma $ map (text . printf "%.04f") (V.toList a)
| pierric/neural-network | Backend-blashs/Example/MNIST/Main.hs | bsd-3-clause | 5,095 | 0 | 19 | 1,681 | 1,929 | 939 | 990 | 121 | 2 |
module Opaleye.Internal.Tag where
-- | Tag is for use as a source of unique IDs in QueryArr
newtype Tag = UnsafeTag Int deriving (Read, Show)

-- | The first tag in the sequence.
start :: Tag
start = UnsafeTag 1

-- | Advance to the next unique tag.
next :: Tag -> Tag
next (UnsafeTag n) = UnsafeTag (n + 1)

-- | Extract the underlying counter value.
unsafeUnTag :: Tag -> Int
unsafeUnTag (UnsafeTag n) = n

-- | Suffix a name with the tag's counter, e.g. @tagWith (UnsafeTag 3) "x" == "x_3"@.
tagWith :: Tag -> String -> String
tagWith t name = name ++ "_" ++ show (unsafeUnTag t)
| WraithM/haskell-opaleye | src/Opaleye/Internal/Tag.hs | bsd-3-clause | 375 | 0 | 8 | 77 | 129 | 71 | 58 | 10 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
module Mars.Command.Load (Load (..)) where
import Data.Aeson as Aeson
import Data.String.Conv
import Data.Text (Text)
import Data.Typeable
import GHC.Generics
import Mars.Command
import System.IO (hPutStrLn, stderr)
import Test.QuickCheck
import Mars.Renderable
-- | The @load@ command: restore interpreter state from a JSON file.
newtype Load = Load Text
  deriving (Generic, Show, Eq, Typeable)
-- | Evaluated form of 'Load': the filename to read state from.
newtype LoadResult = LoadFile Text
instance Command Load LoadResult where
  evalCommand _ (Load filename) = LoadFile filename
instance Action LoadResult where
  -- Read the file, decode it as JSON, and convert it to the saved state;
  -- on any failure, report to stderr and keep the current state @s@.
  execCommand s (LoadFile filename) = do
    c <- readFile . toS $ filename
    (loadResult . decode . toS) c
    where
      loadResult Nothing = printErr "Could not parse"
      loadResult (Just j) = reportResult . fromJSON $ j
      reportResult (Aeson.Error err) = printErr err
      reportResult (Aeson.Success state) = pure state
      printErr err = s <$ hPutStrLn stderr ("Invalid saved state: " <> err)
instance Renderable Load where
  render (Load f) = "load \"" <> f <> "\""
instance Arbitrary Load where
  arbitrary = Load <$> arbString
-- | Generator for non-empty strings of ASCII letters.
arbString :: Gen Text
arbString =
  toS <$> (listOf (elements alphabet) `suchThat` (not . null))
  where
    alphabet = ['A' .. 'Z'] <> ['a' .. 'z']
| lorcanmcdonald/mars | src/Mars/Command/Load.hs | bsd-3-clause | 1,281 | 0 | 12 | 253 | 402 | 217 | 185 | 37 | 1 |
{-# LANGUAGE NoMonomorphismRestriction, ExtendedDefaultRules#-}
module DocTest.Flat.Endian where
import qualified DocTest
import Test.Tasty(TestTree,testGroup)
import Flat.Endian
import Numeric (showHex)
-- | Auto-generated doctest assertions for the 'Flat.Endian' byte-swapping
-- helpers (toBE64/toBE32/toBE16 are identity on big-endian hosts).
tests :: IO TestTree
tests = testGroup "Flat.Endian" <$> sequence [ DocTest.test "src/Data/Flat/Endian.hs:36" ["True"] (DocTest.asPrint( toBE64 0xF0F1F2F3F4F5F6F7 == if isBigEndian then 0xF0F1F2F3F4F5F6F7 else 0xF7F6F5F4F3F2F1F0 )), DocTest.test "src/Data/Flat/Endian.hs:49" ["True"] (DocTest.asPrint( toBE32 0xF0F1F2F3 == if isBigEndian then 0xF0F1F2F3 else 0xF3F2F1F0 )), DocTest.test "src/Data/Flat/Endian.hs:62" ["True"] (DocTest.asPrint( toBE16 0xF0F1 == if isBigEndian then 0xF0F1 else 0xF1F0 ))]
| tittoassini/flat | test/DocTest/Data/Flat/Endian.hs | bsd-3-clause | 712 | 0 | 13 | 75 | 184 | 102 | 82 | 8 | 4 |
module Air.Data.Default where
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy.Char8 as L
-- BEGIN
-- copy from data.default
import Data.Ratio
import qualified Data.Set as S
import qualified Data.Map as M
import Data.Int (Int8, Int16, Int32, Int64)
import Data.Word (Word8, Word16, Word32, Word64)
import Data.Time (Day(..), TimeOfDay, midnight, UTCTime(..), DiffTime, secondsToDiffTime)
import qualified Data.Text as ST
import qualified Data.Text.Lazy as LT
-- | A class for types with a default value.
class Default a where
  -- | The default value for this type.
  def :: a
-- Collections default to empty, numbers to zero, Maybe to Nothing.
instance Default () where def = ()
instance Default (S.Set v) where def = S.empty
instance Default (M.Map k v) where def = M.empty
instance Default Int where def = 0
instance Default Integer where def = 0
instance Default Float where def = 0
instance Default Double where def = 0
instance (Integral a) => Default (Ratio a) where def = 0
instance Default (Maybe a) where def = Nothing
instance Default [a] where def = []
-- Functions ignore their argument and return the result's default.
instance (Default r) => Default (e -> r) where def _ = def
instance (Default a) => Default (IO a) where def = return def
instance (Default a, Default b) => Default (a, b) where
  def = (def, def)
-- END
-- String-like types default to empty.
instance Default B.ByteString where
  def = B.empty
instance Default L.ByteString where
  def = L.empty
instance Default ST.Text where
  def = ST.empty
instance Default LT.Text where
  def = LT.empty
-- Fixed-width integers default to zero.
instance Default Int8 where def = 0
instance Default Int16 where def = 0
instance Default Int32 where def = 0
instance Default Int64 where def = 0
instance Default Word8 where def = 0
instance Default Word16 where def = 0
instance Default Word32 where def = 0
instance Default Word64 where def = 0
instance Default Bool where def = False
-- Larger tuples default componentwise.
instance (Default a, Default b, Default c) => Default (a, b, c) where
  def = (def, def, def)
instance (Default a, Default b, Default c, Default d) => Default (a, b, c, d) where
  def = (def, def, def, def)
instance (Default a, Default b, Default c, Default d, Default e) => Default (a, b, c, d, e) where
  def = (def, def, def, def, def)
-- Time types default to the epoch of their representation (day 0 /
-- midnight / zero offset).
instance Default Day where
  def = ModifiedJulianDay def
instance Default DiffTime where
  def = secondsToDiffTime def
instance Default UTCTime where
  def = UTCTime def def
instance Default TimeOfDay where
  def = midnight
def = midnight | nfjinjing/air | src/Air/Data/Default.hs | bsd-3-clause | 2,370 | 0 | 8 | 458 | 879 | 501 | 378 | 58 | 0 |
{-# LANGUAGE ForeignFunctionInterface, OverloadedStrings, CPP #-}
module IOSMain where
import Graphics.UI.SDL as SDL
import HXSDL
-- Exported to the iOS launcher, which calls haskell_main() from C.
foreign export ccall "haskell_main" main :: IO ()
-- | Initialise SDL video, open a 640x480 window, create a vsynced
-- accelerated renderer and hand control to 'mainLoop'.
main =
  withInit [InitVideo] $
  withWindow "Hello World!" (Position 100 100) (Size 640 480) [WindowShown] $ \win ->
  withRenderer win (Device (-1)) [Accelerated, PresentVSync] $ \ren -> do mainLoop ren
| EDeijl/HXSDL | src/iOSMain.hs | mit | 387 | 0 | 12 | 61 | 124 | 68 | 56 | 9 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-deprecations #-}
module Network.Wai.Handler.Warp.Request (
recvRequest
, headerLines
) where
import Control.Applicative
import qualified Control.Concurrent as Conc (yield)
import Control.Exception (throwIO)
import Data.Array ((!))
import Data.ByteString (ByteString)
import qualified Data.ByteString as S
import qualified Data.ByteString.Unsafe as SU
import qualified Data.CaseInsensitive as CI
import qualified Data.IORef as I
import Data.Monoid (mempty)
import qualified Network.HTTP.Types as H
import Network.Socket (SockAddr)
import Network.Wai
import Network.Wai.Handler.Warp.Conduit
import Network.Wai.Handler.Warp.Header
import Network.Wai.Handler.Warp.ReadInt
import Network.Wai.Handler.Warp.RequestHeader
import Network.Wai.Handler.Warp.Settings (Settings, settingsNoParsePath)
import qualified Network.Wai.Handler.Warp.Timeout as Timeout
import Network.Wai.Handler.Warp.Types
import Network.Wai.Internal
import Prelude hiding (lines)
import Control.Monad (when)
----------------------------------------------------------------
-- FIXME come up with good values here
-- | Upper bound (in bytes) on the total size of a request's header
-- section, bounding memory used per connection while parsing headers.
maxTotalHeaderLength :: Int
maxTotalHeaderLength = 50 * 1024
----------------------------------------------------------------
-- | Receiving a HTTP request from 'Connection' and parsing its header
-- to create 'Request'.
recvRequest :: Settings
            -> Connection
            -> InternalInfo
            -> SockAddr -- ^ Peer's address.
            -> Source -- ^ Where HTTP request comes from.
            -> IO (Request
                  ,IndexedHeader) -- ^
                  -- 'Request' passed to 'Application',
                  -- 'IndexedHeader' of HTTP request for internal use,
recvRequest settings conn ii addr src = do
    -- Read and parse the request line plus all header lines.
    hdrlines <- headerLines src
    (method, unparsedPath, path, query, httpversion, hdr) <- parseHeaderLines hdrlines
    -- Index the headers once so hot headers are O(1) lookups below.
    let idxhdr = indexRequestHeader hdr
        expect = idxhdr ! idxExpect
        cl = idxhdr ! idxContentLength
        te = idxhdr ! idxTransferEncoding
    -- Send "100 Continue" now if the client asked for it.
    handleExpect conn httpversion expect
    -- Choose chunked vs. content-length body reading.
    (rbody, bodyLength) <- bodyAndSource src cl te
    -- Tickle the connection's timeout handle on every body read.
    rbody' <- timeoutBody th rbody
    let req = Request {
            requestMethod = method
          , httpVersion = httpversion
          , pathInfo = H.decodePathSegments path
          , rawPathInfo = if settingsNoParsePath settings then unparsedPath else path
          , rawQueryString = query
          , queryString = H.parseQuery query
          , requestHeaders = hdr
          , isSecure = False
          , remoteHost = addr
          , requestBody = rbody'
          , vault = mempty
          , requestBodyLength = bodyLength
          , requestHeaderHost = idxhdr ! idxHost
          , requestHeaderRange = idxhdr ! idxRange
          }
    return (req, idxhdr)
  where
    th = threadHandle ii
----------------------------------------------------------------
-- | Read the raw HTTP header section from the source, yielding one
--   'ByteString' per line (request line included).  Throws
--   'NotEnoughLines' when the connection delivers no data at all.
headerLines :: Source -> IO [ByteString]
headerLines src = readSource src >>= start
  where
    start chunk
      | S.null chunk = throwIO $ NotEnoughLines []
      | otherwise    = push src (THStatus 0 id id) chunk
----------------------------------------------------------------
-- | If the client sent @Expect: 100-continue@, emit the interim
--   100 response (matching the request's HTTP version) so it starts
--   sending the body; any other Expect value is ignored.
handleExpect :: Connection
             -> H.HttpVersion
             -> Maybe HeaderValue
             -> IO ()
handleExpect conn ver (Just "100-continue") =
    connSendAll conn msg >> Conc.yield
  where
    msg = if ver == H.http11
              then "HTTP/1.1 100 Continue\r\n\r\n"
              else "HTTP/1.0 100 Continue\r\n\r\n"
handleExpect _ _ _ = return ()
----------------------------------------------------------------
-- | Build the request-body reader.  A chunked transfer-encoding wins
--   over any Content-Length; otherwise the body is read up to the
--   (possibly zero) declared length.
bodyAndSource :: Source
              -> Maybe HeaderValue -- ^ content length
              -> Maybe HeaderValue -- ^ transfer-encoding
              -> IO (IO ByteString
                    ,RequestBodyLength
                    )
bodyAndSource src cl te
  | chunked = do
      csrc <- mkCSource src
      return (readCSource csrc, ChunkedBody)
  | otherwise = do
      isrc <- mkISource src len
      return (readISource isrc, bodyLen)
  where
    len = toLength cl
    bodyLen = KnownLength $ fromIntegral len
    chunked = isChunked te
-- | Decode a Content-Length header value; a missing header counts as 0.
toLength :: Maybe HeaderValue -> Int
toLength = maybe 0 readInt
-- | True iff the Transfer-Encoding header is (case-insensitively)
--   \"chunked\".
isChunked :: Maybe HeaderValue -> Bool
isChunked = maybe False ((== "chunked") . CI.foldCase)
----------------------------------------------------------------
-- | Wrap a request-body reader so that the connection's slowloris
--   timeout is resumed when the application first reads the body and
--   paused again once the body is exhausted.
timeoutBody :: Timeout.Handle -> IO ByteString -> IO (IO ByteString)
timeoutBody timeoutHandle rbody = do
    isFirstRef <- I.newIORef True
    return $ do
        isFirst <- I.readIORef isFirstRef
        when isFirst $ do
            -- Timeout handling was paused after receiving the full request
            -- headers. Now we need to resume it to avoid a slowloris
            -- attack during request body sending.
            Timeout.resume timeoutHandle
            -- Clear the flag: previously it was never reset, so the
            -- timeout was resumed on *every* read, re-arming it even
            -- after it had been paused at end-of-body below.
            I.writeIORef isFirstRef False
        bs <- rbody
        -- As soon as we finish receiving the request body, whether
        -- because the application is not interested in more bytes, or
        -- because there is no more data available, pause the timeout
        -- handler again.
        when (S.null bs) (Timeout.pause timeoutHandle)
        return bs
----------------------------------------------------------------
-- Difference-list style accumulators used by the header parser below.
type BSEndo = ByteString -> ByteString
type BSEndoList = [ByteString] -> [ByteString]

-- | Running state of the incremental header-line parser.
data THStatus = THStatus
    {-# UNPACK #-} !Int -- running total byte count
    BSEndoList -- previously parsed lines
    BSEndo -- bytestrings to be prepended
----------------------------------------------------------------
{- FIXME
close :: Sink ByteString IO a
close = throwIO IncompleteHeaders
-}
-- | Incrementally split the header section into lines.  The 'THStatus'
--   carries the byte count consumed so far (checked against
--   'maxTotalHeaderLength'), the difference-list of completed lines,
--   and a partial line carried over from the previous chunk.
push :: Source -> THStatus -> ByteString -> IO [ByteString]
push src (THStatus len lines prepend) bs'
        -- Too many bytes
      | len > maxTotalHeaderLength = throwIO OverLargeHeader
      | otherwise = push' mnl
  where
    -- Carry-over plus the new chunk.  NOTE: the 'bs' bound in the do
    -- blocks of push' below shadows this only locally; the where
    -- bindings of push' still refer to this accumulated 'bs'.
    bs = prepend bs'
    bsLen = S.length bs
    -- Just (index of the LF, next line is a continuation line)?
    mnl = do
        nl <- S.elemIndex 10 bs
        -- check if there are two more bytes in the bs
        -- if so, see if the second of those is a horizontal space
        if bsLen > nl + 1 then
            let c = S.index bs (nl + 1)
                b = case nl of
                      0 -> True
                      1 -> S.index bs 0 == 13
                      _ -> False
            in Just (nl, not b && (c == 32 || c == 9))
            else
            Just (nl, False)
    {-# INLINE push' #-}
    push' :: Maybe (Int, Bool) -> IO [ByteString]
    -- No newline find in this chunk. Add it to the prepend,
    -- update the length, and continue processing.
    push' Nothing = do
        bs <- readSource' src
        when (S.null bs) $ throwIO IncompleteHeaders
        push src status bs
      where
        len' = len + bsLen
        prepend' = S.append bs
        status = THStatus len' lines prepend'
    -- Found a newline, but next line continues as a multiline header
    push' (Just (end, True)) = push src status rest
      where
        rest = S.drop (end + 1) bs
        prepend' = S.append (SU.unsafeTake (checkCR bs end) bs)
        len' = len + end
        status = THStatus len' lines prepend'
    -- Found a newline at position end.
    push' (Just (end, False))
      -- leftover
      | S.null line = do
            when (start < bsLen) $ leftoverSource src (SU.unsafeDrop start bs)
            return (lines [])
      -- more headers
      | otherwise = let len' = len + start
                        lines' = lines . (line:)
                        status = THStatus len' lines' id
                    in if start < bsLen then
                           -- more bytes in this chunk, push again
                           let bs'' = SU.unsafeDrop start bs
                            in push src status bs''
                         else do
                           -- no more bytes in this chunk, ask for more
                           bs <- readSource' src
                           when (S.null bs) $ throwIO IncompleteHeaders
                           push src status bs
      where
        start = end + 1 -- start of next chunk
        line = SU.unsafeTake (checkCR bs end) bs
{-# INLINE checkCR #-}
-- | Given the index of a LF byte, return the index at which the line's
--   content ends: one byte earlier when the LF is preceded by a CR,
--   otherwise the LF position itself.
--
-- The @pos > 0@ guard is essential: without it a line terminated by a
-- bare LF at the very start of a chunk made @S.index bs (-1)@ throw.
-- (This matches the guard present in upstream warp.)
checkCR :: ByteString -> Int -> Int
checkCR bs pos
  | pos > 0 && S.index bs p == 13 = p   -- 13 is CR
  | otherwise                     = pos
  where
    p = pos - 1
| beni55/wai | warp/Network/Wai/Handler/Warp/Request.hs | mit | 8,515 | 0 | 19 | 2,578 | 1,890 | 1,006 | 884 | 170 | 7 |
module Network.Haskoin.Node.Units (tests) where
-- import Test.HUnit (Assertion, assertBool)
import Test.Framework (Test, testGroup)
-- import Test.Framework.Providers.HUnit (testCase)
-- TODO: Make sure that evalNewChain for a partially overlapping best chain
-- properly evaluates to BestChain.
-- | Placeholder test tree: the group is intentionally empty until the
--   node tests (see the FIXME above about evalNewChain) are written.
tests :: [Test]
tests =
    [ testGroup "Test Group"
        [
        ]
    ]
| plaprade/haskoin-node | tests/Network/Haskoin/Node/Units.hs | unlicense | 381 | 0 | 7 | 75 | 53 | 34 | 19 | 6 | 1 |
{-# OPTIONS_GHC -fglasgow-exts #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Morphism.Futu
-- Copyright : (C) 2008 Edward Kmett
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability : non-portable (rank-2 polymorphism)
--
-- Traditional operators, shown here to show how to roll your own
----------------------------------------------------------------------------
module Control.Morphism.Futu
( futu, g_futu
, postpro_futu, g_postpro_futu
, distFutu
) where
import Control.Functor.Algebra
import Control.Functor.Extras
import Control.Functor.Fix
import Control.Monad.Free
import Control.Morphism.Ana
import Control.Morphism.Postpro
-- | Generalized from @futu :: Functor f => GCoalgebra f (Free f) a -> a -> FixF f@
-- A futumorphism: an anamorphism whose coalgebra may emit several
-- layers at once via the free monad.
futu :: (RunMonadFree f m) => GCoalgebra f m a -> a -> FixF f
futu = g_ana (distFutu id)

-- | 'futu' generalized over an arbitrary distributive law @k@.
g_futu :: (Functor f, RunMonadFree h m) => Dist h f -> GCoalgebra f m a -> a -> FixF f
g_futu k = g_ana (distFutu k)

-- | A futumorphic postpromorphism
postpro_futu :: (RunMonadFree f m) => GCoalgebra f m a -> (f :~> f) -> a -> FixF f
postpro_futu = g_postpro (distFutu id)

-- | A generalized-futumorphic postpromorphism
g_postpro_futu :: (Functor f, RunMonadFree h m) => Dist h f -> GCoalgebra f m a -> (f :~> f) -> a -> FixF f
g_postpro_futu k = g_postpro (distFutu k)

-- | Turn a distributive law for a functor into a distributive law for the free monad of that functor.
-- This has been generalized to support generating distributive laws for a number of related free-monad-like
-- constructions such as the Codensity monad of the free monad of a functor.
distFutu :: (Functor f, RunMonadFree h m) => Dist h f -> Dist m f
distFutu k = cataFree (fmap return) (fmap inFree . k)
| urska19/MFP---Samodejno-racunanje-dvosmernih-preslikav | Control/Morphism/Futu.hs | apache-2.0 | 1,865 | 4 | 10 | 319 | 398 | 217 | 181 | -1 | -1 |
{-# LANGUAGE ForeignFunctionInterface #-}
module Main (module Main, module Arc4) where
import Control.Concurrent
import Control.Monad.Reader
-- import Data.Char
-- import Data.List
import Data.Word
import Network.Socket
import System.Console.GetOpt
import System.Environment
import System.Exit
import System.IO
import Text.Printf
import Arc4
import NetSim
import TM
import Target
import Data.IterIO
-- | One tester scenario: a stream check, how to obtain each endpoint,
--   a network simulation for each direction, and a description line.
data Test = Test {
      testStream :: [ThreadId] -> Targ -> Targ -> TM Bool
    , testTargA :: TM Targ
    , testTargB :: TM Targ
    , testAtoB :: NetSim ()
    , testBtoA :: NetSim ()
    , testDescrip :: String
    }
-- | The scenarios run in order by 'runTests'; test numbers shown to the
--   user are 1-based positions in this list.
tests :: [Test]
tests =
    [ Test twoWay spawnTarget spawnOrConnect inumNop inumNop
      "Bi-directionally transfer data"
    , Test pingPong spawnTarget spawnOrConnect inumNop inumNop
      "Ping-pong short messages back and forth"
    , Test pingPong spawnTarget spawnOrConnect excessive inumNop
      "Ping-pong with test for excessive retransmissions"
    , Test oneWay internalTarget spawnOrConnect inumNop inumNop
      "Receiving data from reference implementation"
    , Test flowControl spawnTarget spawnTarget inumNop inumNop
      "Flow control when application doesn't read output"
    , Test oneWay spawnTarget internalTarget inumNop inumNop
      "Sending data to reference implementation"
    , Test twoWay spawnTarget internalTarget inumNop inumNop
      "Bi-directionally interoperating with reference"
    , Test eofTest spawnTarget spawnOrConnect inumNop inumNop
      "Test for proper end-of-file handling"
    , Test twoWay spawnOrConnect spawnTarget (garbage 0.05) (garbage 0.05)
      "Two-way transfer injecting 5% garbage packets"
    , Test oneWay internalTarget spawnOrConnect (reorderer 0.02) inumNop
      "Receiving from reference with 2% reordering"
    , Test twoWay spawnTarget spawnOrConnect (duplicater 0.05) (duplicater 0.05)
      "Two-way transfer with 5% packet duplication"
    , Test twoWay spawnTarget spawnOrConnect
      (badlength 0.02) (truncater 0.02)
      "Two-way transfer with 2% of packets having bad length"
    , Test oneWay spawnTarget spawnOrConnect (dropper 0.02) (dropper 0.02)
      "One-way transfer with 2% packet loss"
    , Test twoWay spawnTarget spawnOrConnect (corrupter 0.02) (corrupter 0.02)
      "Two-way transfer with 2% packet corruption"
    ]
-- | Run one scenario: wire the two endpoints together through the
--   per-direction simulators, run the stream check, and report
--   passed/FAILED on stdout.  Returns the check's verdict.
runTest :: Int -> Test -> TM Bool
runTest n test = do
  liftIO $ putStr $ printf "TEST %2d: %-60s" n (testDescrip test ++ "...")
  a <- testTargA test
  b <- testTargB test
  -- One forwarding thread per direction, each with its own simulator.
  threads <- mapM forkTM [
               tUSource a |$ testAtoB test .| tUDrain b
             , tUSource b |$ testBtoA test .| tUDrain a ]
  result <- testStream test threads a b
  liftIO $ putStrLn $ if result then "passed" else "FAILED"
  return result
-- | Parsed command-line options of the test driver.
data Options = Options{ optSeed :: String
                      , optDebug :: Bool
                      , optWin :: Word32
                      , optTimeout :: Int
                      , optQuiet :: Bool
                      , optList :: Bool
                      , optTest :: Maybe Int
                      , optGdb :: Bool
                      , optServer :: Bool
                      }

-- | Defaults: random seed, 1-packet window, 1000 ms timeout, quiet.
defaultOptions :: Options
defaultOptions = Options { optSeed = ""
                         , optDebug = False
                         , optWin = 1
                         , optTimeout = 1000
                         , optQuiet = True
                         , optList = False
                         , optTest = Nothing
                         , optGdb = False
                         , optServer = False
                         }

-- | GetOpt descriptors; each option edits an 'Options' record.
-- NOTE(review): the numeric options use 'read', which crashes on
-- non-numeric arguments instead of printing usage — confirm intended.
options :: [OptDescr (Options -> Options)]
options =
 [ Option "" ["seed"]
    (ReqArg (\s o -> o { optSeed = s }) "SEED")
    "set random seed to a specific string"
 , Option "d" ["debug"]
    (NoArg (\o -> o { optDebug = True }))
    "enable debugging support"
 , Option "" ["gdb"]
    (NoArg (\o -> o { optGdb = True }))
    "print PID forked processes so you can attach with gdb"
 , Option "v" ["verbose"]
    (NoArg (\o -> o { optQuiet = False }))
    "show reliable program stderr"
 , Option "L" ["list"]
    (NoArg (\o -> o { optList = True }))
    "list available tests"
 , Option "s" ["server"]
    (NoArg (\o -> o { optServer = True }))
    "test server mode"
 , Option "w" ["window"]
    (ReqArg (\s o -> o { optWin = read s}) "SIZE")
    "specify window size"
 , Option "T" ["test"]
    (ReqArg (\t o -> o { optTest = Just $ read t}) "#")
    "run just one test"
 , Option "t" ["timeout"]
    (ReqArg (\s o -> o { optTimeout = read s}) "msec")
    "retransmission timeout"
 ]
-- | Parse the program's arguments; on any parse error print the errors
--   followed by the usage text (which exits).
doOpt :: IO (Options, [String])
doOpt = do
  argv <- getArgs
  case getOpt RequireOrder options argv of
    (o,n,[]) -> return $ (foldl (flip ($)) defaultOptions o, n)
    (_,_,errs) -> do
              hPutStrLn stderr $ concat errs
              usage

-- | Print usage on stderr and exit with failure; never returns.
usage :: IO a
usage = do
  prog <- getProgName
  let header = "usage: " ++ prog ++ " [OPTIONS] reliable [reliable OPTIONS]\n"
  hPutStrLn stderr $ usageInfo header options
  exitFailure
-- | Debug helper: print @n@ samples of the biased random-bool source.
rt :: Int -> TM ()
rt n | n <= 0 = return ()
     | otherwise = do bool <- asks tcRnd >>= flip a4RandomBool 0.1
                      liftIO $ putStrLn $ show bool
                      rt (n-1)

-- | Print the numbered list of available tests (for --list / bad -T).
showTests :: IO ()
showTests = do putStrLn "\nAvailable tests:\n"
               st (1::Int) tests
               putStrLn ""
    where
      st _ [] = return ()
      st n (t:ts) = do _ <- hPrintf stdout "  %2d. %s\n" n (testDescrip t)
                       st (n+1) ts

-- | Run all tests starting at number @n@, returning (passed, completed).
runTests :: Int -> [Test] -> TM (Int,Int)
runTests _ [] = return (0, 0)
runTests n (t:ts) = do
  result <- runTest n t
  (passed, completed) <- runTests (n + 1) ts
  return (if result then passed + 1 else passed, completed + 1)
-- | Entry point: parse options, build the test configuration (starting
--   the internal server when requested), then run one test (-T) or all.
main :: IO ()
main = withSocketsDo $ do
  (o, argv) <- doOpt
  when (optList o) $ showTests >> exitSuccess
  when (null argv) usage
  r <- a4RandomNew $ optSeed o
  let config' = TestConfig { tcTarget = argv
                           , tcDebug = optDebug o
                           , tcRnd = r
                           , tcWin = optWin o
                           , tcTimeout = optTimeout o
                           , tcQuiet = optQuiet o
                           , tcGdb = optGdb o
                           , tcServer = Nothing
                           }
  config <- if optServer o
            then do server <- runReaderT startServer config'
                    return config' { tcServer = Just server }
            else return config'
  hSetBuffering stdout NoBuffering
  case optTest o of
    -- NOTE(review): this guard is unreachable — --list already exited
    -- above via exitSuccess; kept for byte-identical behavior.
    _ | optList o -> showTests
    Just n | n <= 0 || n > length tests -> showTests
    Just n -> do
      _ <- runReaderT (runTest n $ tests !! (n - 1)) config
      return ()
    Nothing -> do
      (passed, completed) <- runReaderT (runTests 1 tests) config
      putStrLn $ printf "SUMMARY: passed %d/%d" passed completed
| TC1211/TCP | src/3a/tester-src/Examples/reliable/tester.hs | apache-2.0 | 7,264 | 0 | 18 | 2,579 | 2,015 | 1,048 | 967 | 173 | 5 |
{-# LANGUAGE NoMonomorphismRestriction, FlexibleContexts #-}
-- | Filter for compressing the 'Response' body.
module Happstack.Server.Compression
( compressedResponseFilter
, compressedResponseFilter'
, compressWithFilter
, gzipFilter
, deflateFilter
, identityFilter
, starFilter
, standardEncodingHandlers
) where
import Happstack.Server.Internal.Compression ( compressedResponseFilter
, compressedResponseFilter'
, compressWithFilter
, gzipFilter
, deflateFilter
, identityFilter
, starFilter
, standardEncodingHandlers
)
| arybczak/happstack-server | src/Happstack/Server/Compression.hs | bsd-3-clause | 923 | 0 | 5 | 450 | 68 | 46 | 22 | 18 | 0 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.FetchUtils
-- Copyright : (c) David Himmelstrup 2005
-- Duncan Coutts 2011
-- License : BSD-like
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- Functions for fetching packages
-----------------------------------------------------------------------------
{-# LANGUAGE RecordWildCards #-}
module Distribution.Client.FetchUtils (
-- * fetching packages
fetchPackage,
isFetched,
checkFetched,
-- ** specifically for repo packages
fetchRepoTarball,
-- * fetching other things
downloadIndex,
) where
import Distribution.Client.Types
import Distribution.Client.HttpUtils
( downloadURI, isOldHackageURI, DownloadResult(..)
, HttpTransport(..), transportCheckHttps, remoteRepoCheckHttps )
import Distribution.Package
( PackageId, packageName, packageVersion )
import Distribution.Simple.Utils
( notice, info, setupMessage )
import Distribution.Text
( display )
import Distribution.Verbosity
( Verbosity )
import Data.Maybe
import System.Directory
( doesFileExist, createDirectoryIfMissing, getTemporaryDirectory )
import System.IO
( openTempFile, hClose )
import System.FilePath
( (</>), (<.>) )
import qualified System.FilePath.Posix as FilePath.Posix
( combine, joinPath )
import Network.URI
( URI(uriPath) )
-- ------------------------------------------------------------
-- * Actually fetch things
-- ------------------------------------------------------------
-- | Returns @True@ if the package has already been fetched
-- or does not need fetching.
--
isFetched :: PackageLocation (Maybe FilePath) -> IO Bool
isFetched loc = case loc of
    LocalUnpackedPackage _dir       -> return True
    LocalTarballPackage  _file      -> return True
    RemoteTarballPackage _uri local -> return (isJust local)
    RepoTarballPackage repo pkgid _ -> doesFileExist (packageFile repo pkgid)

-- | Like 'isFetched' but, when the package is available, also returns
-- its location with the local file path filled in.  For repo packages
-- the cache file may exist even when the location says 'Nothing'.
checkFetched :: PackageLocation (Maybe FilePath)
             -> IO (Maybe (PackageLocation FilePath))
checkFetched loc = case loc of
    LocalUnpackedPackage dir ->
      return (Just $ LocalUnpackedPackage dir)
    LocalTarballPackage  file ->
      return (Just $ LocalTarballPackage  file)
    RemoteTarballPackage uri (Just file) ->
      return (Just $ RemoteTarballPackage uri file)
    RepoTarballPackage repo pkgid (Just file) ->
      return (Just $ RepoTarballPackage repo pkgid file)

    RemoteTarballPackage _uri Nothing -> return Nothing
    RepoTarballPackage repo pkgid Nothing -> do
      let file = packageFile repo pkgid
      exists <- doesFileExist file
      if exists
        then return (Just $ RepoTarballPackage repo pkgid file)
        else return Nothing
-- | Fetch a package if we don't have it already.
--
fetchPackage :: HttpTransport
             -> Verbosity
             -> PackageLocation (Maybe FilePath)
             -> IO (PackageLocation FilePath)
fetchPackage transport verbosity loc = case loc of
    LocalUnpackedPackage dir ->
      return (LocalUnpackedPackage dir)
    LocalTarballPackage  file ->
      return (LocalTarballPackage  file)
    RemoteTarballPackage uri (Just file) ->
      return (RemoteTarballPackage uri file)
    RepoTarballPackage repo pkgid (Just file) ->
      return (RepoTarballPackage repo pkgid file)

    RemoteTarballPackage uri Nothing -> do
      path <- downloadTarballPackage uri
      return (RemoteTarballPackage uri path)
    RepoTarballPackage repo pkgid Nothing -> do
      local <- fetchRepoTarball transport verbosity repo pkgid
      return (RepoTarballPackage repo pkgid local)
  where
    -- Non-repo remote tarballs go to a fresh temp file (the empty file
    -- is created first so the name is reserved before download).
    downloadTarballPackage uri = do
      transportCheckHttps transport uri
      notice verbosity ("Downloading " ++ show uri)
      tmpdir <- getTemporaryDirectory
      (path, hnd) <- openTempFile tmpdir "cabal-.tar.gz"
      hClose hnd
      _ <- downloadURI transport verbosity uri path
      return path
-- | Fetch a repo package if we don't have it already.
--
fetchRepoTarball :: HttpTransport -> Verbosity -> Repo -> PackageId -> IO FilePath
fetchRepoTarball transport verbosity repo pkgid = do
  fetched <- doesFileExist (packageFile repo pkgid)
  if fetched
    then do info verbosity $ display pkgid ++ " has already been downloaded."
            return (packageFile repo pkgid)
    else do setupMessage verbosity "Downloading" pkgid
            downloadRepoPackage
  where
    downloadRepoPackage = case repo of
      -- Local repos already contain the tarball in place.
      RepoLocal{..} -> return (packageFile repo pkgid)

      RepoRemote{..} -> do
        remoteRepoCheckHttps transport repoRemote
        let uri  = packageURI repoRemote pkgid
            dir  = packageDir repo pkgid
            path = packageFile repo pkgid
        createDirectoryIfMissing True dir
        _ <- downloadURI transport verbosity uri path
        return path

-- | Downloads an index file to [config-dir/packages/serv-id].
--
downloadIndex :: HttpTransport -> Verbosity -> RemoteRepo -> FilePath -> IO DownloadResult
downloadIndex transport verbosity remoteRepo cacheDir = do
  remoteRepoCheckHttps transport remoteRepo
  let uri = (remoteRepoURI remoteRepo) {
              uriPath = uriPath (remoteRepoURI remoteRepo)
                          `FilePath.Posix.combine` "00-index.tar.gz"
            }
      path = cacheDir </> "00-index" <.> "tar.gz"
  createDirectoryIfMissing True cacheDir
  downloadURI transport verbosity uri path
-- ------------------------------------------------------------
-- * Path utilities
-- ------------------------------------------------------------
-- | Generate the full path to the locally cached copy of
-- the tarball for a given @PackageIdentifer@.
--
packageFile :: Repo -> PackageId -> FilePath
packageFile repo pkgid = packageDir repo pkgid
                     </> display pkgid
                     <.> "tar.gz"

-- | Generate the full path to the directory where the local cached copy of
-- the tarball for a given @PackageIdentifer@ is stored.
--
packageDir :: Repo -> PackageId -> FilePath
packageDir repo pkgid = repoLocalDir repo
                    </> display (packageName pkgid)
                    </> display (packageVersion pkgid)

-- | Generate the URI of the tarball for a given package.
-- Old-style Hackage URIs use @/name/version/name-version.tar.gz@,
-- newer servers use @/package/name-version.tar.gz@.
packageURI :: RemoteRepo -> PackageId -> URI
packageURI repo pkgid | isOldHackageURI (remoteRepoURI repo) =
  (remoteRepoURI repo) {
    uriPath = FilePath.Posix.joinPath
      [uriPath (remoteRepoURI repo)
      ,display (packageName pkgid)
      ,display (packageVersion pkgid)
      ,display pkgid <.> "tar.gz"]
  }
packageURI repo pkgid =
  (remoteRepoURI repo) {
    uriPath = FilePath.Posix.joinPath
      [uriPath (remoteRepoURI repo)
      ,"package"
      ,display pkgid <.> "tar.gz"]
  }
| martinvlk/cabal | cabal-install/Distribution/Client/FetchUtils.hs | bsd-3-clause | 6,921 | 0 | 15 | 1,591 | 1,497 | 761 | 736 | 130 | 7 |
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "src/Data/BitUtil.hs" #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE Trustworthy #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.BitUtil
-- Copyright : (c) Clark Gaebel 2012
-- (c) Johan Tibel 2012
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
-----------------------------------------------------------------------------
module Data.BitUtil
( highestBitMask
) where
-- On GHC, include MachDeps.h to get WORD_SIZE_IN_BITS macro.
import Data.Bits ((.|.), xor)
import GHC.Exts (Word(..), Int(..))
import GHC.Prim (uncheckedShiftRL#)
-- The highestBitMask implementation is based on
-- http://graphics.stanford.edu/~seander/bithacks.html#RoundUpPowerOf2
-- which has been put in the public domain.
-- | Return a word where only the highest bit is set.
-- Works by smearing the highest set bit into every lower position, then
-- xoring out everything below it.
-- NOTE(review): the unconditional @`shiftRL` 32@ step assumes a 64-bit
-- 'Word'; 'uncheckedShiftRL#' by the full word width is undefined on a
-- 32-bit platform.  The module comment mentions MachDeps.h /
-- WORD_SIZE_IN_BITS but no CPP guard is visible here — confirm the
-- guard was not lost when this file was extracted.
highestBitMask :: Word -> Word
highestBitMask x1 = let x2 = x1 .|. x1 `shiftRL` 1
                        x3 = x2 .|. x2 `shiftRL` 2
                        x4 = x3 .|. x3 `shiftRL` 4
                        x5 = x4 .|. x4 `shiftRL` 8
                        x6 = x5 .|. x5 `shiftRL` 16
                        x7 = x6 .|. x6 `shiftRL` 32
                     in x7 `xor` (x7 `shiftRL` 1)
{-# INLINE highestBitMask #-}
-- Right and left logical shifts.
-- | Unchecked logical (zero-filling) right shift; the shift amount must
-- be less than the word size or the result is undefined.
shiftRL :: Word -> Int -> Word
{--------------------------------------------------------------------
  GHC: use unboxing to get @shiftRL@ inlined.
--------------------------------------------------------------------}
shiftRL (W# x) (I# i) = W# (uncheckedShiftRL# x i)
{-# INLINE shiftRL #-}
| phischu/fragnix | tests/packages/scotty/Data.BitUtil.hs | bsd-3-clause | 2,131 | 0 | 10 | 811 | 255 | 159 | 96 | 22 | 1 |
{-# LANGUAGE ForeignFunctionInterface, JavaScriptFFI #-}
{- | Haskell-specific web worker API. The URL is expected to point to a script
that is the same as the caller, or at least a script that has been
produced by GHCJS and contains the same static values.
-}
module JavaScript.Web.Worker.Haskell ( HaskellWorker
, terminate
, call
) where
import qualified JavaScript.Web.Worker as W
-- | A web worker running a GHCJS-produced script sharing this program's
--   static values (see the module header).
data HaskellWorker = HaskellWorker W.Worker

-- NOTE(review): 'create' is not in the export list above; confirm
-- whether it should be exported alongside 'terminate' and 'call'.
create :: JSString -> IO HaskellWorker
create uri = fmap HaskellWorker (W.create uri)
{-# INLINE create #-}

-- fixme stop all waiters?
-- | Terminate the underlying worker immediately.
terminate :: HaskellWorker -> IO ()
terminate (HaskellWorker w) = W.terminate w
{-# INLINE terminate #-}

-- call :: SomethingSomething -> HaskellWorker -> IO a
-- NOTE(review): unimplemented stub — forcing the result of 'call'
-- throws; the intended signature is still undecided (commented above).
call hw = undefined
{-# INLINE call #-}
| tavisrudd/ghcjs-base | JavaScript/Web/Worker/Haskell.hs | mit | 895 | 0 | 8 | 251 | 120 | 69 | 51 | 14 | 1 |
-- | test module: call some server methods
module Main where
import Network.XmlRpc.Client
import Modular.Documented
import Modular.Signed
import Modular.Task
import Modular.Config
import Modular.Seed
import Modular.Instance
import Modular.Solution
import Modular.Pair
type URL = String

-- | Address of the public autotool XML-RPC endpoint exercised below.
server :: URL
server = "http://dfa.imn.htwk-leipzig.de/cgi-bin/modular-server.cgi"

-- | List the task types the server offers.
list_types :: URL -> IO [ Task ]
list_types url = remote url "autotool.list_types"

-- | Fetch the documented example configuration for a task.
get_config :: URL -> Task -> IO ( Documented Config )
get_config url = remote url "autotool.get_config"

-- | Have the server check a configuration and sign it.
verify_config :: URL -> Task -> Config -> IO ( Signed Config )
verify_config url = remote url "autotool.verify_config"

-- | Generate a problem instance (plus a sample solution) from a signed
--   configuration and a random seed.
get_instance :: URL
             -> Task
             -> Signed Config
             -> Seed
             -> IO ( Pair ( Documented ( Signed Instance ) )
                          ( Documented Solution )
                   )
get_instance url = remote url "autotool.get_instance"

-- | Grade a candidate solution against a signed instance.
grade :: URL
      -> Task
      -> Signed Instance
      -> Solution
      -> IO ( Documented ( Pair Bool Double ) )
grade url = remote url "autotool.grade"
-- | Exercise the full round trip against the autotool server: list the
--   task types, fetch and verify the configuration of the
--   \"Faktor-Direct\" task, generate an instance, and grade a
--   hard-coded candidate solution.
main :: IO ()
main = do
    putStrLn $ "using server: " ++ server
    ts <- list_types server
    print $ zip [0.. ] ts
    -- Previously: @let [ task ] = filter ...@ — a partial pattern that
    -- died with an opaque irrefutable-pattern error unless the server
    -- offered exactly one matching task.  Select the first match
    -- explicitly and fail with a readable message instead.
    task <- case filter ( \ t -> Modular.Task.contents t == "Faktor-Direct" ) ts of
        ( t : _ ) -> return t
        []        -> fail "task \"Faktor-Direct\" is not offered by the server"
    dconf <- get_config server task
    print $ dconf
    let conf = Modular.Documented.contents dconf
    sconf <- verify_config server task conf
    print sconf
    p <- get_instance server task sconf ( Seed 271828 )
    print p
    let sint = Modular.Documented.contents $ first p
        -- 'sol' is the server's sample solution; it is only printed as
        -- part of @p@ above, the graded submission is 'sol''.
        sol = Modular.Documented.contents $ second p
        sol' = Solution "[ 23,34,45 ]"
    w <- grade server task sint sol'
    print w
| Erdwolf/autotool-bonn | src/Modular/Server_Test.hs | gpl-2.0 | 1,752 | 6 | 16 | 454 | 523 | 259 | 264 | 50 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Response.Graph
(graphResponse) where
import Text.Blaze ((!))
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import Happstack.Server
import MasterTemplate
import Scripts
-- | Serve the prerequisite-graph page: master template plus the graph
--   container, the sidebar, the disclaimer and the graph scripts.
graphResponse :: ServerPart Response
graphResponse =
   ok $ toResponse $
    masterTemplate "Courseography - Graph"
                   []
                   (do
                       header "graph"
                       H.div ! A.id "container" $ do
                           H.div ! A.id "graph" ! A.class_ "graph" $ ""
                           sideBar
                       disclaimer
                   )
                   graphScripts
-- | Sidebar markup: FCE counter, reset button, Graphs/Focuses tabs,
--   one collapsible details section per focus, and the toggle button.
--   The details divs are filled in client-side by the graph scripts.
sideBar :: H.Html
sideBar = do
    H.div ! A.id "sidebar" $ do
        H.div ! A.id "fce" $ do
            H.div ! A.id "fcecount" $ ""
            H.button ! A.id "reset" $ "Reset Graph"
        H.nav ! A.id "sidebar-nav" $ H.ul $ do
            H.li ! A.id "graphs-nav" $ do
                H.a ! A.href "" $ "Graphs"
            H.li ! A.id "focuses-nav" $ do
                H.a ! A.href "" $ "Focuses"
        H.div ! A.id "focuses" $ do
            H.p ! A.id "sci" ! A.class_ "focus" $ "Scientific Computing"
            H.div ! A.id "sci-details" ! A.class_ "details" $ ""
            H.p ! A.id "AI" ! A.class_ "focus" $ "Artificial Intelligence"
            H.div ! A.id "AI-details" ! A.class_ "details" $ ""
            H.p ! A.id "NLP" ! A.class_ "focus" $ "Natural Language Processing"
            H.div ! A.id "NLP-details" ! A.class_ "details" $ ""
            H.p ! A.id "vision" ! A.class_ "focus" $ "Computer Vision"
            H.div ! A.id "vision-details" ! A.class_ "details" $ ""
            H.p ! A.id "systems" ! A.class_ "focus" $ "Computer Systems"
            H.div ! A.id "systems-details" ! A.class_ "details" $ ""
            H.p ! A.id "game" ! A.class_ "focus" $ "Video Games"
            H.div ! A.id "game-details" ! A.class_ "details" $ ""
            H.p ! A.id "HCI" ! A.class_ "focus" $ "Human Computer Interaction"
            H.div ! A.id "HCI-details" ! A.class_ "details" $ ""
            H.p ! A.id "theory" ! A.class_ "focus" $ "Theory of Computation"
            H.div ! A.id "theory-details" ! A.class_ "details" $ ""
            H.p ! A.id "web" ! A.class_ "focus" $ "Web Technologies"
            H.div ! A.id "web-details" ! A.class_ "details" $ ""
        H.div ! A.id "graphs" $ ""
    H.div ! A.id "sidebar-button" $
        H.img ! A.id "sidebar-icon" ! A.src "static/res/ico/sidebar.png"
| chocoluffy/courseography | hs/Response/Graph.hs | gpl-3.0 | 2,535 | 0 | 23 | 866 | 887 | 411 | 476 | 54 | 1 |
-----------------------------------------------------------------------------
-- |
-- Copyright : (c) 2006-2014 Duncan Coutts
-- License : BSD-style
--
-- Maintainer : [email protected]
--
-- Compression and decompression of data streams in the gzip format.
--
-- The format is described in detail in RFC #1952:
-- <http://www.ietf.org/rfc/rfc1952.txt>
--
-- See also the zlib home page: <http://zlib.net/>
--
-----------------------------------------------------------------------------
module Codec.Compression.GZip (
-- | This module provides pure functions for compressing and decompressing
-- streams of data in the gzip format and represented by lazy 'ByteString's.
-- This makes it easy to use either in memory or with disk or network IO.
--
-- For example a simple gzip compression program is just:
--
-- > import qualified Data.ByteString.Lazy as ByteString
-- > import qualified Codec.Compression.GZip as GZip
-- >
-- > main = ByteString.interact GZip.compress
--
-- Or you could lazily read in and decompress a @.gz@ file using:
--
-- > content <- fmap GZip.decompress (readFile file)
--
-- * Simple compression and decompression
compress,
decompress,
-- * Extended api with control over compression parameters
compressWith,
decompressWith,
CompressParams(..), defaultCompressParams,
DecompressParams(..), defaultDecompressParams,
-- ** The compression parameter types
CompressionLevel(..),
defaultCompression,
noCompression,
bestSpeed,
bestCompression,
compressionLevel,
Method(..),
deflateMethod,
WindowBits(..),
defaultWindowBits,
windowBits,
MemoryLevel(..),
defaultMemoryLevel,
minMemoryLevel,
maxMemoryLevel,
memoryLevel,
CompressionStrategy(..),
defaultStrategy,
filteredStrategy,
huffmanOnlyStrategy,
) where
import Data.ByteString.Lazy (ByteString)
import qualified Codec.Compression.Zlib.Internal as Internal
import Codec.Compression.Zlib.Internal hiding (compress, decompress)
-- | Decompress a stream of data in the gzip format.
--
-- There are a number of errors that can occur. In each case an exception will
-- be thrown. The possible error conditions are:
--
-- * if the stream does not start with a valid gzip header
--
-- * if the compressed stream is corrupted
--
-- * if the compressed stream ends prematurely
--
-- Note that the decompression is performed /lazily/. Errors in the data stream
-- may not be detected until the end of the stream is demanded (since it is
-- only at the end that the final checksum can be checked). If this is
-- important to you, you must make sure to consume the whole decompressed
-- stream before doing any IO action that depends on it.
--
decompress :: ByteString -> ByteString
decompress = decompressWith defaultDecompressParams


-- | Like 'decompress' but with the ability to specify various decompression
-- parameters. Typical usage:
--
-- > decompressWith defaultDecompressParams { ... }
--
decompressWith :: DecompressParams -> ByteString -> ByteString
decompressWith = Internal.decompress gzipFormat


-- | Compress a stream of data into the gzip format.
--
-- This uses the default compression parameters. In particular it uses the
-- default compression level which favours a higher compression ratio over
-- compression speed, though it does not use the maximum compression level.
--
-- Use 'compressWith' to adjust the compression level or other compression
-- parameters.
--
compress :: ByteString -> ByteString
compress = compressWith defaultCompressParams


-- | Like 'compress' but with the ability to specify various compression
-- parameters. Typical usage:
--
-- > compressWith defaultCompressParams { ... }
--
-- In particular you can set the compression level:
--
-- > compressWith defaultCompressParams { compressLevel = BestCompression }
--
compressWith :: CompressParams -> ByteString -> ByteString
compressWith = Internal.compress gzipFormat
| CloudI/CloudI | src/api/haskell/external/zlib-0.6.2.1/Codec/Compression/GZip.hs | mit | 3,981 | 0 | 6 | 676 | 312 | 225 | 87 | 38 | 1 |
{-# LANGUAGE BangPatterns, CPP #-}
-- | File descriptor cache to avoid locks in kernel.
module Network.Wai.Handler.Warp.FdCache (
withFdCache
, Fd
, Refresh
#ifndef WINDOWS
, openFile
, closeFile
, setFileCloseOnExec
#endif
) where
#ifndef WINDOWS
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative ((<$>), (<*>))
#endif
import Control.Exception (bracket)
import Network.Wai.Handler.Warp.IORef
import Network.Wai.Handler.Warp.MultiMap
import Control.Reaper
import System.Posix.IO (openFd, OpenFileFlags(..), defaultFileFlags, OpenMode(ReadOnly), closeFd, FdOption(CloseOnExec), setFdOption)
#endif
import System.Posix.Types (Fd)
----------------------------------------------------------------
type Hash = Int

-- | An action to activate a Fd cache entry.
type Refresh = IO ()

-- | Fallback lookup used when caching is disabled (duration 0 or on
--   Windows): never yields a cached descriptor; refreshing is a no-op.
getFdNothing :: Hash -> FilePath -> IO (Maybe Fd, Refresh)
getFdNothing _ _ = return (Nothing, return ())
----------------------------------------------------------------
-- | Creating 'MutableFdCache' and executing the action in the second
-- argument. The first argument is a cache duration in second.
withFdCache :: Int -> ((Hash -> FilePath -> IO (Maybe Fd, Refresh)) -> IO a) -> IO a
#ifdef WINDOWS
-- Windows build: no fd cache at all; every lookup misses.
withFdCache _ action = action getFdNothing
#else
-- A duration of 0 disables caching entirely.
withFdCache 0 action = action getFdNothing
-- Otherwise run the action against a reaper-backed cache; 'bracket'
-- guarantees 'terminate' closes all cached descriptors even if the
-- action throws.
withFdCache duration action = bracket (initialize duration)
                                      terminate
                                      (\mfc -> action (getFd mfc))
----------------------------------------------------------------
-- | Liveness of a cache entry across reaper sweeps: entries start
-- 'Active', are demoted to 'Inactive' by 'clean', and are closed if
-- still 'Inactive' on the following sweep.
data Status = Active | Inactive
-- | Mutable cell holding an entry's 'Status'.
newtype MutableStatus = MutableStatus (IORef Status)
-- | Read the entry's current 'Status'.
status :: MutableStatus -> IO Status
status ms = case ms of
    MutableStatus ref -> readIORef ref
-- | Allocate a fresh status cell, starting in the 'Active' state.
newActiveStatus :: IO MutableStatus
newActiveStatus = do
    ref <- newIORef Active
    return (MutableStatus ref)
-- | Mark the entry as recently used so the next sweep keeps it alive.
refresh :: MutableStatus -> Refresh
refresh ms = case ms of
    MutableStatus ref -> writeIORef ref Active
-- | Demote the entry; unless refreshed, the next sweep will close it.
inactive :: MutableStatus -> IO ()
inactive ms = case ms of
    MutableStatus ref -> writeIORef ref Inactive
----------------------------------------------------------------
data FdEntry = FdEntry !FilePath !Fd !MutableStatus
-- | Open @path@ read-only in blocking mode and mark the resulting
-- descriptor close-on-exec before returning it.
openFile :: FilePath -> IO Fd
openFile path =
    openFd path ReadOnly Nothing blockingFlags >>= \fd ->
        setFileCloseOnExec fd >> return fd
  where
    blockingFlags = defaultFileFlags{nonBlock=False}
-- | Close a descriptor previously opened with 'openFile'.
closeFile :: Fd -> IO ()
closeFile fd = closeFd fd
-- | Build a fresh cache entry for @path@: open the file, then attach a
-- new 'Active' status cell (same effect order as the applicative form).
newFdEntry :: FilePath -> IO FdEntry
newFdEntry path = do
    fd <- openFile path
    st <- newActiveStatus
    return (FdEntry path fd st)
-- | Set the close-on-exec option on the descriptor so it is not
-- inherited across @exec@.
setFileCloseOnExec :: Fd -> IO ()
setFileCloseOnExec descriptor = setFdOption descriptor CloseOnExec True
----------------------------------------------------------------
-- | Immutable snapshot of the cache: a multi-map of entries keyed by
-- 'Hash'.
type FdCache = MMap FdEntry
-- | Mutable Fd cacher.
newtype MutableFdCache = MutableFdCache (Reaper FdCache (Hash, FdEntry))
-- | Take a snapshot of the cache's current contents.
fdCache :: MutableFdCache -> IO FdCache
fdCache mfc = case mfc of
    MutableFdCache reaper -> reaperRead reaper
-- | Find the cache entry for @path@ under hash @key@, if any.
look :: MutableFdCache -> FilePath -> Hash -> IO (Maybe FdEntry)
look mfc path key = do
    cache <- fdCache mfc
    return (searchWith key samePath cache)
  where
    samePath (FdEntry path' _ _) = path' == path
----------------------------------------------------------------
-- | Start the cache's reaper thread machinery. The argument is the
-- cache duration in second (fed to 'reaperDelay').
initialize :: Int -> IO MutableFdCache
initialize duration = fmap MutableFdCache (mkReaper reaperSettings)
  where
    reaperSettings = defaultReaperSettings
        { reaperAction = clean
        , reaperDelay  = duration
        , reaperCons   = uncurry insert
        , reaperNull   = isEmpty
        , reaperEmpty  = empty
        }
-- | Reaper action: sweep the cache, closing descriptors that have not
-- been refreshed since the previous sweep, and return a merge function
-- for the surviving entries.
clean :: FdCache -> IO (FdCache -> FdCache)
clean old = do
    survivors <- pruneWith old keepOrClose
    return (merge survivors)
  where
    -- Two-phase aging: an Active entry is demoted to Inactive and kept;
    -- one still Inactive from the last sweep is closed and dropped.
    keepOrClose (FdEntry _ fd mst) = do
        st <- status mst
        case st of
            Active   -> inactive mst >> return True
            Inactive -> closeFd fd >> return False
----------------------------------------------------------------
-- | Stop the reaper and close every descriptor still held in the cache.
terminate :: MutableFdCache -> IO ()
terminate (MutableFdCache reaper) = do
    !remaining <- reaperStop reaper
    mapM_ release (toList remaining)
  where
    release (FdEntry _ fd _) = closeFd fd
----------------------------------------------------------------
-- | Getting 'Fd' and 'Refresh' from the mutable Fd cacher. A hit
-- refreshes the existing entry; a miss opens the file, registers the
-- new entry with the reaper, and returns its descriptor.
getFd :: MutableFdCache -> Hash -> FilePath -> IO (Maybe Fd, Refresh)
getFd mfc@(MutableFdCache reaper) h path = do
    cached <- look mfc path h
    case cached of
        Just (FdEntry _ fd mst) -> do
            refresh mst
            return (Just fd, refresh mst)
        Nothing -> do
            ent@(FdEntry _ fd mst) <- newFdEntry path
            reaperAdd reaper (h, ent)
            return (Just fd, refresh mst)
#endif
| utdemir/wai | warp/Network/Wai/Handler/Warp/FdCache.hs | mit | 4,467 | 0 | 14 | 874 | 273 | 170 | 103 | 88 | 2 |
-- Test fixture: links against the C function @foo@ defined in a.c.
{-# CFILES a.c #-}
foreign import ccall unsafe "foo" foo :: Int -> Int
-- Print the result of calling into C with the argument 6.
main = print $ foo 6
| dcreager/cabal | tests/systemTests/exeWithC/test.hs | bsd-3-clause | 93 | 0 | 6 | 21 | 32 | 17 | 15 | 2 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.