code (string, length 5 to 1.03M) | repo_name (string, length 5 to 90) | path (string, length 4 to 158) | license (string, 15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---
{-# LANGUAGE DataKinds, MultiParamTypeClasses, FunctionalDependencies, TypeOperators, PolyKinds, TypeFamilies, FlexibleInstances, ScopedTypeVariables, UndecidableInstances, DefaultSignatures, FlexibleContexts #-}
{-# OPTIONS_HADDOCK prune #-}
module Data.MZip where
import Data.MGeneric
import Data.Unapply
import Data.HList
import Data.Proxy
import Data.Nat
import Unsafe.Coerce
import Control.Applicative
type family Dom (f :: *) where
Dom (a -> b) = a
type family Codom (f :: *) where
Codom (a -> b) = b
type family Doms (fs :: [*]) :: [*] where
Doms '[] = '[]
Doms ((a -> b) ': as) = a ': Doms as
type family Codoms (fs :: [*]) :: [*] where
Codoms '[] = '[]
Codoms ((a -> b) ': as) = b ': Codoms as
type family LCodoms (n :: Nat) (fs :: [*]) where
LCodoms NZ fs = fs
LCodoms (NS n) fs = LCodoms n (Codoms fs)
type family ZipInput n f where
ZipInput NZ a = Maybe a
ZipInput (NS n) (a -> b) = a -> ZipInput n b
type family ZipInputs n fs where
ZipInputs n '[] = '[]
ZipInputs n (f ': fs) = ZipInput n f ': ZipInputs n fs
type family ZipWithType' (n :: Nat) (f :: k) (fs :: [*]) :: * where
ZipWithType' NZ f fs = (f :$: fs)
ZipWithType' (NS n) f fs = f :$: (Doms fs) -> ZipWithType' n f (Codoms fs)
type family ZipWithType (n :: Nat) (f :: k) (fs :: [*]) :: * where
ZipWithType NZ f fs = Maybe (f :$: fs)
ZipWithType (NS n) f fs = f :$: (Doms fs) -> ZipWithType n f (Codoms fs)
type family ZipWithTypeUn (n :: Nat) (f :: Un *) (fs :: [*]) :: * where
ZipWithTypeUn NZ f fs = Maybe (In f fs)
ZipWithTypeUn (NS n) f fs = In f (Doms fs) -> ZipWithTypeUn n f (Codoms fs)
type family ZipWithTypeField (n :: Nat) (f :: Field *) (fs :: [*]) :: * where
ZipWithTypeField NZ f fs = Maybe (InField f fs)
ZipWithTypeField (NS n) f fs = InField f (Doms fs) -> ZipWithTypeField n f (Codoms fs)
class MZipWithG n f rf fs where
mzipWithPG :: Proxy n -> Proxy f -> Proxy rf -> Proxy fs -> ZipWithTypeUn n rf fs -> ZipWithType n f fs
instance ( fs ~ Pars (f :$: fs)
, rf ~ Rep (f :$: fs)
, MGeneric (f :$: fs)
) => MZipWithG NZ f rf fs where
mzipWithPG _ _ _ _ a = fmap to a
instance ( MZipWithG n f rf (Codoms fs)
, rf ~ Rep (f :$: Doms fs)
, Doms fs ~ Pars (f :$: Doms fs)
, MGeneric (f :$: Doms fs)
) => MZipWithG (NS n) f rf fs where
mzipWithPG _ pf prf _ a b = mzipWithPG (Proxy :: Proxy n) pf prf (Proxy :: Proxy (Codoms fs)) (a (from b))
-- |
class MZipWith (n :: Nat) (f :: k) (fs :: [*]) where
-- |
mzipWithP :: Proxy n -> Proxy f -> Proxy fs -> HList (ZipInputs n fs) -> ZipWithType n f fs
default mzipWithP :: ( rf ~ Rep (f :$: LCodoms n fs)
, MZipWithG n f rf fs
, GMZipWith n rf fs
) => Proxy n -> Proxy f -> Proxy fs -> HList (ZipInputs n fs) -> ZipWithType n f fs
mzipWithP pn pf pfs fs = mzipWithPG pn pf prf (Proxy :: Proxy fs) (mzipWithG pn prf pfs fs)
where prf = Proxy :: Proxy (Rep (f :$: LCodoms n fs))
class (ZipInput n f ~ a) => ZipInputC n f a | n f -> a, a -> f
instance ZipInputC NZ a (Maybe a)
instance ZipInputC n b c => ZipInputC (NS n) (a -> b) (a -> c)
class (ZipInputs n fs ~ a) => ZipInputsC n fs a | n fs -> a, a -> fs
instance ZipInputsC n '[] '[]
instance (ZipInputC n f c, ZipInputsC n fs b) => ZipInputsC n (f ': fs) (c ': b)
class (ZipWithType n f fs ~ a) => ZipWithTypeC n f fs a | n f fs -> a, a -> n f fs
instance Unapply a f fs => ZipWithTypeC NZ f fs (Maybe a)
instance (Unapply a f (Doms fs), ZipWithTypeC n f (Codoms fs) b) => ZipWithTypeC (NS n) f fs (a -> b)
-- | `mzipWith` zips n structures together if they have the same shape, or fails (with `Nothing`) if the shapes do not match.
--
-- > mzipWith :: HList '[a11 -> ... -> an1 -> b1, ...] -> f a11 ... a1m -> f an1 ... anm -> f b1 ... bm
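--
-- A hypothetical usage sketch (not taken from the package; it assumes an
-- 'MGeneric' instance for the container and that @n@ and @f@ are pinned down
-- by type annotations at the call site):
--
-- > mzipWith (HCons (+) HNil) (Just 1) (Just 2)  -- ~ Just (Just 3)
-- > mzipWith (HCons (+) HNil) (Just 1) Nothing   -- ~ Nothing (shape mismatch)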
mzipWith :: forall n f fs b.
( MZipWith n f fs
, MakeZipInputs n fs
, ZipWithTypeC n f fs b
, ZipInputsC n fs (ZipInputs n fs)
) => HList fs -> b
mzipWith fs = mzipWithP (Proxy :: Proxy n) (Proxy :: Proxy f) (Proxy :: Proxy fs) (makeZipInputs (Proxy :: Proxy n) fs)
class MakeZipInput n f where
makeZipInput :: Proxy n -> f -> ZipInput n f
instance MakeZipInput NZ a where
makeZipInput _ = Just
instance MakeZipInput n b => MakeZipInput (NS n) (a -> b) where
makeZipInput _ f a = makeZipInput (Proxy :: Proxy n) (f a)
class MakeZipInputs n fs where
makeZipInputs :: ZipInputsC n fs a => Proxy n -> HList fs -> HList a
instance MakeZipInputs n '[] where
makeZipInputs _ _ = HNil
instance ( MakeZipInput n f
, MakeZipInputs n fs
, ZipInputsC n fs (ZipInputs n fs)
) => MakeZipInputs n (f ': fs) where
makeZipInputs pn (HCons f fs) = HCons (makeZipInput pn f) (makeZipInputs pn fs)
class GMZipWith (n :: Nat) (f :: Un *) (fs :: [*]) where
mzipWithG :: Proxy n -> Proxy f -> Proxy fs -> HList (ZipInputs n fs) -> ZipWithTypeUn n f fs
instance GMZipWith n UV fs where
mzipWithG _ _ _ = undefined
class GMTZipWith n fs where
mzipWithGT :: Proxy n -> Proxy fs -> ZipWithTypeUn n UT fs
instance GMTZipWith NZ fs where
mzipWithGT _ _ = Just InT
instance GMTZipWith n (Codoms fs) => GMTZipWith (NS n) fs where
mzipWithGT _ pf _ = mzipWithGT (Proxy :: Proxy n) (Proxy :: Proxy (Codoms fs))
instance GMTZipWith n fs => GMZipWith n UT fs where
mzipWithG _ _ _ _ = mzipWithGT (Proxy :: Proxy n) (Proxy :: Proxy fs)
class GMZipWithFail n u fs where
mzipWithFail :: Proxy n -> Proxy u -> Proxy fs -> ZipWithTypeUn n u fs
instance GMZipWithFail NZ u fs where
mzipWithFail _ _ _ = Nothing
class GMLZipWith n u v fs where
mzipWithGL :: Proxy n -> Proxy u -> Proxy v -> Proxy fs -> ZipWithTypeUn n u fs -> ZipWithTypeUn n (u :++: v) fs
instance GMLZipWith NZ u v fs where
mzipWithGL _ _ _ _ u = fmap InL u
instance ( GMLZipWith n u v (Codoms fs)
, GMZipWithFail n (u :++: v) (Codoms fs)
) => GMLZipWith (NS n) u v fs where
mzipWithGL _ pu pv _ f (InL u) = mzipWithGL (Proxy :: Proxy n) pu pv (Proxy :: Proxy (Codoms fs)) (f u)
mzipWithGL _ pu pv _ f (InR _) = mzipWithFail (Proxy :: Proxy n) (Proxy :: Proxy (u :++: v)) (Proxy :: Proxy (Codoms fs))
class GMRZipWith n u v fs where
mzipWithGR :: Proxy n -> Proxy u -> Proxy v -> Proxy fs -> ZipWithTypeUn n v fs -> ZipWithTypeUn n (u :++: v) fs
instance GMRZipWith NZ u v fs where
mzipWithGR _ _ _ _ v = fmap InR v
instance ( GMRZipWith n u v (Codoms fs)
, GMZipWithFail n (u :++: v) (Codoms fs)
) => GMRZipWith (NS n) u v fs where
mzipWithGR _ pu pv _ f (InR v) = mzipWithGR (Proxy :: Proxy n) pu pv (Proxy :: Proxy (Codoms fs)) (f v)
mzipWithGR _ pu pv _ f (InL _) = mzipWithFail (Proxy :: Proxy n) (Proxy :: Proxy (u :++: v)) (Proxy :: Proxy (Codoms fs))
instance ( GMZipWith (NS n) u fs, GMZipWith (NS n) v fs
, GMLZipWith n u v (Codoms fs), GMRZipWith n u v (Codoms fs)
) => GMZipWith (NS n) (u :++: v) fs where
mzipWithG _ _ pf fs (InL u) = mzipWithGL (Proxy :: Proxy n) (Proxy :: Proxy u) (Proxy :: Proxy v) (Proxy :: Proxy (Codoms fs)) (mzipWithG (Proxy :: Proxy (NS n)) (Proxy :: Proxy u) pf fs u)
mzipWithG _ _ pf fs (InR v) = mzipWithGR (Proxy :: Proxy n) (Proxy :: Proxy u) (Proxy :: Proxy v) (Proxy :: Proxy (Codoms fs)) (mzipWithG (Proxy :: Proxy (NS n)) (Proxy :: Proxy v) pf fs v)
class GPiMZipWith n u v fs where
mzipWithGPi :: Proxy n -> Proxy u -> Proxy v -> Proxy fs -> ZipWithTypeUn n u fs -> ZipWithTypeUn n v fs -> ZipWithTypeUn n (u :**: v) fs
instance GPiMZipWith NZ u v fs where
mzipWithGPi _ _ _ _ f g = (:*:) <$> f <*> g
instance GPiMZipWith n u v (Codoms fs) => GPiMZipWith (NS n) u v fs where
mzipWithGPi _ _ _ _ f g (u :*: v) = mzipWithGPi (Proxy :: Proxy n) (Proxy :: Proxy u) (Proxy :: Proxy v) (Proxy :: Proxy (Codoms fs)) (f u) (g v)
instance (GMZipWith n u fs, GMZipWith n v fs, GPiMZipWith n u v fs) => GMZipWith n (u :**: v) fs where
mzipWithG pn _ pf fs = mzipWithGPi pn (Proxy :: Proxy u) (Proxy :: Proxy v) pf (mzipWithG pn (Proxy :: Proxy u) pf fs) (mzipWithG pn (Proxy :: Proxy v) pf fs)
class GMZipWithF n f fs where
mzipWithGFF :: Proxy n -> Proxy f -> Proxy fs -> ZipWithTypeField n f fs -> ZipWithTypeUn n (UF f) fs
instance GMZipWithF NZ f fs where
mzipWithGFF _ _ _ f = fmap InF f
instance GMZipWithF n f (Codoms fs) => GMZipWithF (NS n) f fs where
mzipWithGFF _ pf _ f (InF b) = mzipWithGFF (Proxy:: Proxy n) pf (Proxy :: Proxy (Codoms fs)) (f b)
instance (GFMZipWith n f fs, GMZipWithF n f fs) => GMZipWith n (UF f) fs where
mzipWithG pn _ pf fs = mzipWithGFF pn (Proxy :: Proxy f) pf (mzipWithGF pn (Proxy :: Proxy f) pf fs)
class GFMZipWith (n :: Nat) (f :: Field *) (fs :: [*]) where
mzipWithGF :: Proxy n -> Proxy f -> Proxy fs -> HList (ZipInputs n fs) -> ZipWithTypeField n f fs
-- instance GFMZipWith n (FK a) fs where
-- mzipWithGF _ fs (InK a) = InK a
class GFPMZipWith n m fs where
mzipWithGFP :: Proxy n -> Proxy m -> Proxy fs -> (ZipInputs n fs :!: m) -> ZipWithTypeField n (FP m) fs
instance (Maybe (fs :!: m) ~ (ZipInputs NZ fs :!: m)) => GFPMZipWith NZ m fs where
mzipWithGFP _ _ _ a = fmap InP a
instance ( (ZipInputs (NS n) fs :!: m) ~ (Doms fs :!: m -> ZipInputs n (Codoms fs) :!: m)
, GFPMZipWith n m (Codoms fs)
) => GFPMZipWith (NS n) m fs where
mzipWithGFP _ _ _ f (InP a) = mzipWithGFP (Proxy :: Proxy n) (Proxy :: Proxy m) (Proxy :: Proxy (Codoms fs)) (f a)
instance (GFPMZipWith n m fs, HLookup m (ZipInputs n fs)) => GFMZipWith n (FP m) fs where
mzipWithGF pn _ pf fs = mzipWithGFP (Proxy :: Proxy n) (Proxy :: Proxy m) pf (hlookup (Proxy :: Proxy m) (Proxy :: Proxy (ZipInputs n fs)) fs)
-- type family NId n a where
-- NId NZ a = a
-- NId (NS n) a = a -> NId n a
type family ExpandFieldFunction (n :: Nat) (f :: [Field *]) (ps :: [*]) :: [*] where
ExpandFieldFunction n '[] ps = '[]
--ExpandFieldFunction n (FK a ': fs) ps = NId n a ': ExpandFieldFunction fs vfs ps vs
ExpandFieldFunction n (FP m ': fs) ps = (ps :!: m) ': ExpandFieldFunction n fs ps
ExpandFieldFunction n ((f :@: as) ': fs) ps = ZipWithType' n f (ExpandFieldFunction n as ps) ': ExpandFieldFunction n fs ps
class AdaptFieldFunction (n :: Nat) (f :: [Field *]) (ps :: [*]) where
adaptFieldFunction :: Proxy n -> Proxy f -> Proxy ps -> HList (ZipInputs n ps) -> HList (ZipInputs n (ExpandFieldFunction n f ps))
instance AdaptFieldFunction n '[] ps where
adaptFieldFunction _ _ _ fs = HNil
instance ( HLookup m (ZipInputs n ps)
, ZipInput n (ps :!: m) ~ (ZipInputs n ps :!: m)
, AdaptFieldFunction n as ps
) => AdaptFieldFunction n (FP m ': as) ps where
adaptFieldFunction _ _ pf fs =
HCons
(hlookup (Proxy :: Proxy m) (Proxy :: Proxy (ZipInputs n ps)) fs)
(adaptFieldFunction (Proxy :: Proxy n) (Proxy :: Proxy as) pf fs)
instance ( MZipWith n f (ExpandFieldFunction n bs ps)
, ZipInput n (ZipWithType' n f (ExpandFieldFunction n bs ps)) ~ ZipWithType n f (ExpandFieldFunction n bs ps)
, AdaptFieldFunction n bs ps
, AdaptFieldFunction n as ps
) => AdaptFieldFunction n ((f :@: bs) ': as) ps where
adaptFieldFunction pn _ pfs fs =
HCons
(mzipWithP pn pf (Proxy :: Proxy (ExpandFieldFunction n bs ps)) (adaptFieldFunction pn pb pfs fs))
(adaptFieldFunction pn (Proxy :: Proxy as) pfs fs)
where pf = Proxy :: Proxy f
pb = Proxy :: Proxy bs
class GFAMZipWith n f as fs where
mzipWithGFA :: Proxy n -> Proxy f -> Proxy as -> Proxy fs -> ZipWithType n f (ExpandFieldFunction n as fs) -> ZipWithTypeField n (f :@: as) fs
instance (ExpandFields as fs ~ ExpandFieldFunction NZ as fs)
=> GFAMZipWith NZ f as fs where
mzipWithGFA _ _ _ _ (Just a) = Just (InA a)
mzipWithGFA _ _ _ _ Nothing = Nothing
instance ( ExpandFieldFunction n as (Codoms fs) ~ Codoms (ExpandFieldFunction (NS n) as fs)
, Doms (ExpandFieldFunction (NS n) as fs) ~ ExpandFields as (Doms fs)
, GFAMZipWith n f as (Codoms fs)
) => GFAMZipWith (NS n) f as fs where
mzipWithGFA _ pf pa _ a (InA b) = mzipWithGFA (Proxy :: Proxy n) pf pa (Proxy :: Proxy (Codoms fs)) (a (unsafeCoerce b))
instance ( GFAMZipWith n f as fs
, MZipWith n f (ExpandFieldFunction n as fs)
, AdaptFieldFunction n as fs
) => GFMZipWith n (f :@: as) fs where
mzipWithGF pn _ pfs fs = mzipWithGFA pn pf pa pfs (mzipWithP pn pf (Proxy :: Proxy (ExpandFieldFunction n as fs)) (adaptFieldFunction pn pa pfs fs))
where pf = Proxy :: Proxy f
pa = Proxy :: Proxy as
| RafaelBocquet/haskell-mgeneric | src/Data/MZip.hs | mit | 12,658 | 0 | 15 | 3,219 | 5,867 | 3,008 | 2,859 | -1 | -1 |
{-# LANGUAGE TypeFamilies, QuasiQuotes, TemplateHaskell, MultiParamTypeClasses, OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module YesodCoreTest.ErrorHandling
( errorHandlingTest
, Widget
) where
import Yesod.Core
import Test.Hspec
import Network.Wai
import Network.Wai.Test
import Text.Hamlet (hamlet)
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString.Char8 as S8
import Control.Exception (SomeException, try)
data App = App
mkYesod "App" [parseRoutes|
/ HomeR GET
/not_found NotFoundR POST
/first_thing FirstThingR POST
/after_runRequestBody AfterRunRequestBodyR POST
/error-in-body ErrorInBodyR GET
/error-in-body-noeval ErrorInBodyNoEvalR GET
|]
instance Yesod App
getHomeR :: Handler RepHtml
getHomeR = do
$logDebug "Testing logging"
defaultLayout $ toWidget [hamlet|
$doctype 5
<html>
<body>
<form method=post action=@{NotFoundR}>
<input type=submit value="Not found">
<form method=post action=@{FirstThingR}>
<input type=submit value="Error is thrown first thing in handler">
<form method=post action=@{AfterRunRequestBodyR}>
<input type=submit value="BUGGY: Error thrown after runRequestBody">
|]
postNotFoundR, postFirstThingR, postAfterRunRequestBodyR :: Handler RepHtml
postNotFoundR = do
(_, _files) <- runRequestBody
_ <- notFound
getHomeR
postFirstThingR = do
_ <- error "There was an error 3.14159"
getHomeR
postAfterRunRequestBodyR = do
x <- runRequestBody
_ <- error $ show $ fst x
getHomeR
getErrorInBodyR :: Handler RepHtml
getErrorInBodyR = do
let foo = error "error in body 19328" :: String
defaultLayout [whamlet|#{foo}|]
getErrorInBodyNoEvalR :: Handler (DontFullyEvaluate RepHtml)
getErrorInBodyNoEvalR = fmap DontFullyEvaluate getErrorInBodyR
errorHandlingTest :: Spec
errorHandlingTest = describe "Test.ErrorHandling" $ do
it "says not found" caseNotFound
it "says 'There was an error' before runRequestBody" caseBefore
it "says 'There was an error' after runRequestBody" caseAfter
it "error in body == 500" caseErrorInBody
it "error in body, no eval == 200" caseErrorInBodyNoEval
runner :: Session () -> IO ()
runner f = toWaiApp App >>= runSession f
caseNotFound :: IO ()
caseNotFound = runner $ do
res <- request defaultRequest
{ pathInfo = ["not_found"]
, requestMethod = "POST"
}
assertStatus 404 res
assertBodyContains "Not Found" res
caseBefore :: IO ()
caseBefore = runner $ do
res <- request defaultRequest
{ pathInfo = ["first_thing"]
, requestMethod = "POST"
}
assertStatus 500 res
assertBodyContains "There was an error 3.14159" res
caseAfter :: IO ()
caseAfter = runner $ do
let content = "foo=bar&baz=bin12345"
res <- srequest SRequest
{ simpleRequest = defaultRequest
{ pathInfo = ["after_runRequestBody"]
, requestMethod = "POST"
, requestHeaders =
[ ("content-type", "application/x-www-form-urlencoded")
, ("content-length", S8.pack $ show $ L.length content)
]
}
, simpleRequestBody = content
}
assertStatus 500 res
assertBodyContains "bin12345" res
caseErrorInBody :: IO ()
caseErrorInBody = runner $ do
res <- request defaultRequest { pathInfo = ["error-in-body"] }
assertStatus 500 res
assertBodyContains "error in body 19328" res
caseErrorInBodyNoEval :: IO ()
caseErrorInBodyNoEval = do
eres <- try $ runner $ do
_ <- request defaultRequest { pathInfo = ["error-in-body-noeval"] }
return ()
case eres of
Left (_ :: SomeException) -> return ()
Right _ -> error "Expected an exception"
| piyush-kurur/yesod | yesod-core/test/YesodCoreTest/ErrorHandling.hs | mit | 3,811 | 0 | 18 | 881 | 788 | 404 | 384 | 87 | 2 |
-- |
-- Module: Math.NumberTheory.Primes.Testing.Probabilistic
-- Copyright: (c) 2011 Daniel Fischer, 2017 Andrew Lelechenko
-- Licence: MIT
-- Maintainer: Daniel Fischer <[email protected]>
--
-- Probabilistic primality tests, Miller-Rabin and Baillie-PSW.
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Math.NumberTheory.Primes.Testing.Probabilistic
( isPrime
, millerRabinV
, bailliePSW
, isStrongFermatPP
, isFermatPP
, lucasTest
) where
import Data.Bits
import Data.Mod
import Data.Proxy
import GHC.Base
import GHC.Integer.GMP.Internals
import GHC.TypeNats (KnownNat, SomeNat(..), someNatVal)
import Math.NumberTheory.Moduli.JacobiSymbol
import Math.NumberTheory.Utils
import Math.NumberTheory.Roots
-- | @isPrime n@ tests whether @n@ is a prime (negative or positive).
-- It is a combination of trial division and Baillie-PSW test.
--
-- If @isPrime n@ returns @False@ then @n@ is definitely composite.
-- There is a theoretical possibility that @isPrime n@ is @True@,
-- but in fact @n@ is not prime. However, no such numbers are known
-- and none exist below @2^64@. If you have found one, please report it,
-- because it is a major discovery.
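--
-- Illustrative examples (561 is a Carmichael number and is correctly rejected):
--
-- >>> isPrime 104729
-- True
-- >>> isPrime (-13)
-- True
-- >>> isPrime 561
-- False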
isPrime :: Integer -> Bool
isPrime n
| n < 0 = isPrime (-n)
| n < 2 = False
| n < 4 = True
| otherwise = millerRabinV 0 n -- trial division test
&& bailliePSW n
-- | Miller-Rabin probabilistic primality test. It consists of the trial
-- division test and several rounds of the strong Fermat test with different
-- bases. The choice of trial divisors and bases is an
-- implementation detail and may change silently in the future.
--
-- First argument stands for the number of rounds of strong Fermat test.
-- If it is 0, only trial division test is performed.
--
-- If @millerRabinV k n@ returns @False@ then @n@ is definitely composite.
-- Otherwise @n@ may appear composite with probability @1/4^k@.
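--
-- Illustrative examples:
--
-- >>> millerRabinV 10 97
-- True
-- >>> millerRabinV 10 91
-- False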
millerRabinV :: Int -> Integer -> Bool
millerRabinV (I# k) n = case testPrimeInteger n k of
0# -> False
_ -> True
-- | @'isStrongFermatPP' n b@ tests whether non-negative @n@ is
-- a strong Fermat probable prime for base @b@.
--
-- Apart from primes, also some composite numbers have the tested
-- property, but those are rare. Very rare are composite numbers
-- having the property for many bases, so testing a large prime
-- candidate with several bases can identify composite numbers
-- with high probability. An odd number @n > 3@ is prime if and
-- only if @'isStrongFermatPP' n b@ holds for all @b@ with
-- @2 <= b <= (n-1)/2@, but of course checking all those bases
-- would be less efficient than trial division, so one normally
-- checks only a relatively small number of bases, depending on
-- the desired degree of certainty. The probability that a randomly
-- chosen base doesn't identify a composite number @n@ is less than
-- @1/4@, so five to ten tests give a reasonable level of certainty
-- in general.
--
-- Please consult <https://miller-rabin.appspot.com Deterministic variants of the Miller-Rabin primality test>
-- for the best choice of bases.
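--
-- Illustrative examples (2047 = 23 * 89 is the smallest strong pseudoprime to base 2):
--
-- >>> isStrongFermatPP 17 2
-- True
-- >>> isStrongFermatPP 15 2
-- False
-- >>> isStrongFermatPP 2047 2
-- True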
isStrongFermatPP :: Integer -> Integer -> Bool
isStrongFermatPP n b
| n < 0 = error "isStrongFermatPP: negative argument"
| n <= 1 = False
| n == 2 = True
| otherwise = case someNatVal (fromInteger n) of
SomeNat (_ :: Proxy t) -> isStrongFermatPPMod (fromInteger b :: Mod t)
isStrongFermatPPMod :: KnownNat n => Mod n -> Bool
isStrongFermatPPMod b = b == 0 || a == 1 || go t a
where
m = -1
(t, u) = shiftToOddCount $ unMod m
a = b ^% u
go 0 _ = False
go k x = x == m || go (k - 1) (x * x)
-- | @'isFermatPP' n b@ tests whether @n@ is a Fermat probable prime
-- for the base @b@, that is, whether @b^(n-1) `mod` n == 1@.
-- This is a weaker but simpler condition. However, more is lost
-- in strength than is gained in simplicity, so for primality testing,
-- the strong check should be used. The remarks about
-- the choice of bases to test from @'isStrongFermatPP'@ apply
-- with the modification that if @a@ and @b@ are Fermat bases
-- for @n@, then @a*b@ /always/ is a Fermat base for @n@ too.
-- A /Carmichael number/ is a composite number @n@ which is a
-- Fermat probable prime for all bases @b@ coprime to @n@. By the
-- above, only primes @p <= n/2@ not dividing @n@ need to be tested
-- to identify Carmichael numbers (however, testing all those
-- primes would be less efficient than determining Carmichaelness
-- from the prime factorisation; but testing an appropriate number
-- of prime bases is reasonable to find out whether it's worth the
-- effort to undertake the prime factorisation).
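--
-- Illustrative examples (561 = 3 * 11 * 17 is the smallest Carmichael number):
--
-- >>> isFermatPP 561 2
-- True
-- >>> isFermatPP 15 2
-- False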
isFermatPP :: Integer -> Integer -> Bool
isFermatPP n b = case someNatVal (fromInteger n) of
SomeNat (_ :: Proxy t) -> (fromInteger b :: Mod t) ^% (n-1) == 1
-- | Primality test after Baillie, Pomerance, Selfridge and Wagstaff.
-- The Baillie-PSW test consists of a strong Fermat probable primality
-- test followed by a (strong) Lucas primality test. This implementation
-- assumes that the number @n@ to test is odd and larger than @3@.
-- Even and small numbers have to be handled before. Also, before
-- applying this test, trial division by small primes should be performed
-- to identify many composites cheaply (although the Baillie-PSW test is
-- rather fast, about the same speed as a strong Fermat test for four or
-- five bases usually, it is, for large numbers, much more costly than
-- trial division by small primes, the primes less than @1000@, say, so
-- eliminating numbers with small prime factors beforehand is more efficient).
--
-- The Baillie-PSW test is very reliable, so far no composite numbers
-- passing it are known, and it is known (Gilchrist 2010) that no
-- Baillie-PSW pseudoprimes exist below @2^64@. However, a heuristic argument
-- by Pomerance indicates that there are likely infinitely many Baillie-PSW
-- pseudoprimes. On the other hand, according to
-- <http://mathworld.wolfram.com/Baillie-PSWPrimalityTest.html> there is
-- reason to believe that there are none with less than several
-- thousand digits, so that for most use cases the test can be
-- considered definitive.
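--
-- Illustrative examples (both arguments are odd and greater than 3, as required):
--
-- >>> bailliePSW 104729
-- True
-- >>> bailliePSW 2047
-- False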
bailliePSW :: Integer -> Bool
bailliePSW n = isStrongFermatPP n 2 && lucasTest n
-- precondition: n odd, > 3 (no small prime factors, typically large)
-- | The Lucas-Selfridge test, including square-check, but without
-- the Fermat test. For package-internal use only.
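--
-- Illustrative examples: 2047 passes the strong Fermat test for base 2 but is
-- caught here, and 25 is rejected by the square check.
--
-- >>> lucasTest 104729
-- True
-- >>> lucasTest 2047
-- False
-- >>> lucasTest 25
-- False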
lucasTest :: Integer -> Bool
lucasTest n
| isSquare n || d == 0 = False
| d == 1 = True
| otherwise = uo == 0 || go t vo qo
where
d = find True 5
find !pos cd = case jacobi (n `rem` cd) cd of
MinusOne -> if pos then cd else (-cd)
Zero -> if cd == n then 1 else 0
One -> find (not pos) (cd+2)
q = (1-d) `quot` 4
(t,o) = shiftToOddCount (n+1)
(uo, vo, qo) = testLucas n q o
go 0 _ _ = False
go s vn qn = vn == 0 || go (s-1) ((vn*vn-2*qn) `rem` n) ((qn*qn) `rem` n)
-- n odd positive, n > abs q, index odd
testLucas :: Integer -> Integer -> Integer -> (Integer, Integer, Integer)
testLucas n q (S# i#) = look (finiteBitSize (0 :: Word) - 2)
where
j = I# i#
look k
| testBit j k = go (k-1) 1 1 1 q
| otherwise = look (k-1)
go k un un1 vn qn
| k < 0 = (un, vn, qn)
| testBit j k = go (k-1) u2n1 u2n2 v2n1 q2n1
| otherwise = go (k-1) u2n u2n1 v2n q2n
where
u2n = (un*vn) `rem` n
u2n1 = (un1*vn-qn) `rem` n
u2n2 = ((un1-q*un)*vn-qn) `rem` n
v2n = (vn*vn-2*qn) `rem` n
v2n1 = ((un1 - (2*q)*un)*vn-qn) `rem` n
q2n = (qn*qn) `rem` n
q2n1 = (qn*qn*q) `rem` n
testLucas n q (Jp# bn#) = test (s# -# 1#)
where
s# = sizeofBigNat# bn#
test j# = case indexBigNat# bn# j# of
0## -> test (j# -# 1#)
w# -> look (j# -# 1#) (W# w#) (finiteBitSize (0 :: Word) - 1)
look j# w i
| testBit w i = go j# w (i - 1) 1 1 1 q
| otherwise = look j# w (i-1)
go k# w i un un1 vn qn
| i < 0 = if isTrue# (k# <# 0#)
then (un,vn,qn)
else go (k# -# 1#) (W# (indexBigNat# bn# k#)) (finiteBitSize (0 :: Word) - 1) un un1 vn qn
| testBit w i = go k# w (i-1) u2n1 u2n2 v2n1 q2n1
| otherwise = go k# w (i-1) u2n u2n1 v2n q2n
where
u2n = (un*vn) `rem` n
u2n1 = (un1*vn-qn) `rem` n
u2n2 = ((un1-q*un)*vn-qn) `rem` n
v2n = (vn*vn-2*qn) `rem` n
v2n1 = ((un1 - (2*q)*un)*vn-qn) `rem` n
q2n = (qn*qn) `rem` n
q2n1 = (qn*qn*q) `rem` n
-- Listed as a precondition of lucasTest
testLucas _ _ _ = error "lucasTest: negative argument"
| cartazio/arithmoi | Math/NumberTheory/Primes/Testing/Probabilistic.hs | mit | 9,084 | 0 | 17 | 2,339 | 1,956 | 1,070 | 886 | 103 | 6 |
import Data.List
import Data.List.Split
import Data.Maybe
import System.IO
import Data.Char
import qualified AStar
main = do
contents <- readFile "day11input.txt"
let result = compute $ lines contents
print result
type Pair = (Int, Int) -- (Floor of microchip, Floor of generator)
type State = (Int, [Pair]) -- (Floor of elevator, List of Pairs)
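-- For example, the state (0, [(0,1),(2,2)]) reads: the elevator is on floor 0,
-- the first pair's microchip is on floor 0 with its generator on floor 1, and
-- the second pair sits together on floor 2 (floors are numbered 0 to 3; the
-- goal is to bring everything to floor 3).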
compute :: [String] -> Int
compute input = cost
where Just (cost, path) = AStar.search initState isGoalState nextStates heuristic
initState = generateInitialState input
isGoalState :: State -> Bool
isGoalState (_, pairs) = and $ map (\(i,j) -> i == 3 && j == 3) pairs
nextStates :: State -> [(State, Int)]
nextStates (elev, pairs) = map (\s -> (sortState s, 1)) validStates
where validStates = filter validState newStates
newStates = concat $ map (makeMove (elev, pairs)) ranges
ranges = filter (\x -> length x < 3) seq
seq = tail $ subsequences $ elemIndices elev $ unPairList pairs
unPairList :: [(a,a)] -> [a]
unPairList [] = []
unPairList ((x, y):xs) = x : y : unPairList xs
makeMove :: State -> [Int] -> [State]
makeMove (elev, pairs) moves = [moveUp] ++ [moveDown]
where moveUp = (elev + 1, foldl (move elev True) pairs moves)
moveDown = (elev - 1, foldl (move elev False) pairs moves)
move :: Int -> Bool -> [Pair] -> Int -> [Pair]
move floor up ((m,g):pairs) i
| i == 0 = (newElem m, g) : pairs
| i == 1 = (m, newElem g) : pairs
| otherwise = (m, g) : move floor up pairs (i - 2)
where newElem n = if up then n + 1 else n - 1
validState :: State -> Bool
validState state@(elev, pairs) = elev > -1 && elev < 4 && unfriedState pairs pairs
unfriedState :: [Pair] -> [Pair] -> Bool
unfriedState _ [] = True
unfriedState originalPairs ((m,g):pairs)
| m == g = rest
| otherwise = isNothing (find (\(a, b) -> b == m) (delete (m,g) originalPairs)) && rest
where rest = unfriedState originalPairs pairs
heuristic :: State -> Int
heuristic (_, pairs) = sum $ map (\(i, j) -> 3 - i + 3 - j) pairs
sortState :: State -> State
sortState (elev, pairs) = (elev, sort pairs)
generateInitialState :: [String] -> State
generateInitialState input = (0, sort $ state ++ newElems)
where newElems = [(0,0),(0,0)]
state = collate . concat $ map (\i -> generateFloor i $ parsedInput !! i) [0 .. length parsedInput - 1]
generateFloor i floor = map (\word -> (takeWhile (/= '-') word, "compatible" `isInfixOf` word, i)) floor
parsedInput = map (concat . tail . splitWhen (== "a") . drop 4 . filter validWord . words) input
validWord x = and $ map (\w -> not $ isPrefixOf w x) ["and", "generator", "microchip"]
collate :: [(String, Bool, Int)] -> [Pair]
collate [] = []
collate ((x, b, i):xs) = pair : collate (delete other xs)
where pair = if b then (i,i') else (i',i)
Just other@(_,_,i') = find (\(x',_,_) -> x == x') xs | aBhallo/AoC2016 | Day 11/day11part2.hs | mit | 3,004 | 3 | 12 | 754 | 1,385 | 745 | 640 | 61 | 2 |
{-# LANGUAGE TupleSections, OverloadedStrings #-}
module Handler.Delete where
import Data.Maybe
import qualified Data.Map as M
import qualified Data.Text as T
import Text.Parsec
import Text.Parsec.Error
import qualified Text.BibTeX.Parse as BibP
import qualified Text.BibTeX.Format as BibF
import Yesod.Markdown
import Import
import Barch.Adaptors
import Barch.UploadUtils
import Barch.Widgets (fullReferenceView)
import Handler.Edit
getDeleteR :: ReferenceId->Handler Html
getDeleteR refid = do
dbRef <- runDB $ get refid
(formWidget, formEnctype) <- generateFormPost $ editReferenceForm dbRef
let submission = Nothing :: Maybe Text
handlerName = "getDeleteR" :: Text
fieldText = "" :: Text
parsed = Nothing
reference = Nothing
parseErrors = []
haveErrors = False
defaultLayout $ do
aDomId <- newIdent
setTitle "Barch: Delete"
$(widgetFile "delete")
postDeleteR :: ReferenceId->Handler Html
postDeleteR refid = do
dbRef <- runDB $ get refid
let handlerName = "postDeleteR" :: Text
reference = Nothing
parseErrors = []
haveErrors = False
(editFormWidget, editFormEnctype) <- generateFormPost $ editReferenceForm dbRef
files <- referenceFiles refid
_ <- mapM deleteFile (entityKey <$> files)
_ <- runDB $ delete refid
defaultLayout $ do
aDomId <- newIdent
setTitle "Barch: Delete"
$(widgetFile "edit")
| klarh/barch | Handler/Delete.hs | mit | 1,469 | 0 | 12 | 340 | 383 | 202 | 181 | 45 | 1 |
module Language.Binal.Verifier where
import Control.Applicative
import Control.Monad
import Control.Monad.State
import Control.Lens
import qualified Data.Maybe as Maybe
import qualified Data.List as List
import qualified Data.HashSet as HashSet
import qualified Data.HashMap.Strict as HashMap
import Language.Binal.Types
import qualified Language.Binal.Util as Util
examineFormOfParams :: AST -> [SyntaxError]
examineFormOfParams lit@(Lit _ _) = examineForms lit
examineFormOfParams (List [] _) = []
examineFormOfParams (List [_] pos) = [UnexpectedArity 2 1 pos]
examineFormOfParams (List xs _) = concatMap examineFormOfParams xs
-- Check whether the special forms are well-formed
examineForms :: AST -> [SyntaxError]
examineForms (Lit lit pos) = do
case Util.extractSym lit of
Just s -> do
if HashSet.member s Util.keywords
then [KeywordUsedAsVariable s pos]
else []
Nothing -> []
examineForms (List [] pos) = [UnexpectedArity 1 0 pos]
examineForms (List xs pos) = do
let instr = xs !! 0
case instr of
Lit (SymLit "^") _ -> do
if length xs /= 3
then [UnexpectedArity 3 (length xs) pos]
else do
let params = xs !! 1
let body = xs !! 2
examineFormOfParams params ++ examineForms body
Lit (SymLit "seq") _ -> do
if length xs == 1
then [UnexpectedArity 2 (length xs) pos]
else concatMap examineForms (tail xs)
Lit (SymLit "let") _ -> do
if length xs /= 3
then [UnexpectedArity 3 (length xs) pos]
else do
let pattern = xs !! 1
let body = xs !! 2
examineFormOfParams pattern ++ examineForms body
Lit (SymLit "letrec") _ -> do
if length xs /= 3
then [UnexpectedArity 3 (length xs) pos]
else do
let pattern = xs !! 1
let body = xs !! 2
examineFormOfParams pattern ++ examineForms body
Lit (SymLit "match") _ -> do
if length xs < 3
then [UnexpectedArity 3 (length xs) pos]
else concatMap examineForms (tail xs)
Lit (SymLit "cond") _ -> do
if odd (length xs)
then [UnexpectedArity (length xs + 1) (length xs) pos]
else concatMap examineForms (tail xs)
Lit (SymLit "object") _ -> do
if odd (length (tail xs))
then [UnexpectedArity (length (tail xs) + 1) (length (tail xs)) pos]
else do
let symbols = Maybe.catMaybes (map (\(x,i) -> if even i then Just x else Nothing) (zip (tail xs) ([0..] :: [Int])))
let exprs = Maybe.catMaybes (map (\(x,i) -> if odd i then Just x else Nothing) (zip (tail xs) ([0..] :: [Int])))
let r1 = concatMap
(\x -> case x of
Lit (SymLit _) _ -> []
_ -> [Malformed (Util.whereIsAST x)]) symbols
r1 ++ concatMap examineForms exprs
Lit (SymLit ".") _ -> do
if length xs /= 3
then [UnexpectedArity 3 (length xs) pos]
else
case xs !! 2 of
Lit (SymLit _) _ -> examineForms (xs !! 1)
_ -> Malformed (Util.whereIsAST (xs !! 2)) : examineForms (xs !! 1)
Lit (SymLit ":=") _ -> do
let ex1 it@(Lit (SymLit _) _) = examineForms it
ex1 (Lit _ pos1) = [Malformed pos1]
ex1 it@(List [] _) = examineForms it
ex1 (List ys pos1) = do
let instr1 = ys !! 0
case instr1 of
Lit (SymLit ".") _ -> do
if length ys /= 3
then [UnexpectedArity 3 (length ys) pos1]
else
case ys !! 2 of
Lit (SymLit _) _ -> ex1 (ys !! 1)
_ -> Malformed (Util.whereIsAST (ys !! 2)) : ex1 (ys !! 1)
_ -> [Malformed pos1]
if length xs /= 3
then [UnexpectedArity 3 (length xs) pos]
else
ex1 (xs !! 1) ++ examineForms (xs !! 2)
Lit (SymLit "assume") _ -> do
if length xs /= 2
then [UnexpectedArity 2 (length xs) pos]
else
case xs !! 1 of
Lit (SymLit _) _ -> []
_ -> [Malformed (Util.whereIsAST (xs !! 1))]
_ -> concatMap examineForms xs
examineNames' :: AST -> State (HashSet.HashSet String) [NotInScope]
examineNames' (Lit (SymLit s) pos) = do
env <- get
if HashSet.member s env
then return []
else return [NotInScope s pos]
examineNames' (Lit (StrLit _) _) = return []
examineNames' (Lit (NumLit _) _) = return []
examineNames' (Lit (BoolLit _) _) = return []
examineNames' (List [] _) = return []
examineNames' (List xs _) = do
env <- get
let instr = xs !! 0
case instr of
Lit (SymLit "^") _ -> do
let params = xs !! 1
let body = xs !! 2
let env' = foldr HashSet.insert env (Util.flatSymbols params)
put env'
r <- examineNames' body
put env
return r
Lit (SymLit "seq") _ -> do
rs <- mapM examineNames' (tail xs)
return (concat rs)
Lit (SymLit "let") _ -> do
let pattern = xs !! 1
let body = xs !! 2
r <- examineNames' body
let env' = foldr HashSet.insert env (Util.flatSymbols pattern)
put env'
return r
Lit (SymLit "letrec") _ -> do
let pattern = xs !! 1
let body = xs !! 2
let env' = foldr HashSet.insert env (Util.flatSymbols pattern)
put env'
examineNames' body
Lit (SymLit "match") _ -> do
rs <- mapM examineNames' (tail xs)
return (concat rs)
Lit (SymLit "cond") _ -> do
rs <- mapM examineNames' (tail xs)
return (concat rs)
Lit (SymLit "object") _ -> do
let exprs = Maybe.catMaybes (map (\(x,i) -> if odd i then Just x else Nothing) (zip (tail xs) ([0..] :: [Int])))
rs <- mapM examineNames' exprs
return (concat rs)
Lit (SymLit ".") _ -> do
examineNames' (xs !! 1)
Lit (SymLit ":=") _ -> do
rs <- mapM examineNames' (tail xs)
return (concat rs)
Lit (SymLit "assume") _ -> do
let Lit (SymLit s) _ = xs !! 1
let env' = HashSet.insert s env
put env'
return []
_ -> do
rs <- mapM examineNames' xs
return (concat rs)
examineNames :: AST -> [NotInScope]
examineNames ast = evalState (examineNames' ast) Util.primitives
gensym :: TypeInferer Variable
gensym = do
var <- uses _2 head
_2 %= tail
return var
makePoly :: TypedAST -> TypeInferer ()
makePoly (TyLit _ ty1 _)
= Util.traverseVarTyM
(\ty -> case ty of
VarTy i -> _4 %= HashSet.insert i
_ -> return ())
ty1
makePoly (TyList (TyLit (SymLit "^") _ _:_) ty1 _)
= Util.traverseVarTyM
(\ty -> case ty of
VarTy i -> _4 %= HashSet.insert i
_ -> return ())
ty1
makePoly _ = return ()
freshPoly' :: TyKind -> StateT (HashMap.HashMap Variable Variable) (State (TypeEnv, [Variable], [Constraint], PolyEnv)) TyKind
freshPoly' (VarTy i) = do
isPoly <- lift (uses _4 (HashSet.member i))
if isPoly
then do
polys <- get
case HashMap.lookup i polys of
Just poly -> return (VarTy poly)
Nothing -> do
var <- lift gensym
modify (HashMap.insert i var)
return (VarTy var)
else return (VarTy i)
freshPoly' (RecTy i ty) = do
VarTy i' <- freshPoly' (VarTy i)
ty' <- freshPoly' ty
return (RecTy i' ty')
freshPoly' SymTy = return SymTy
freshPoly' StrTy = return StrTy
freshPoly' NumTy = return NumTy
freshPoly' BoolTy = return BoolTy
freshPoly' (ArrTy x y) = ArrTy <$> freshPoly' x <*> freshPoly' y
freshPoly' (ListTy tys) = ListTy <$> mapM freshPoly' tys
freshPoly' (EitherTy tys) = EitherTy <$> mapM freshPoly' tys
freshPoly' (ObjectTy i xs) = do
xs' <- mapM (\(key, val) -> do { x <- freshPoly' val ; return (key, x) }) (HashMap.toList xs)
return (ObjectTy i (HashMap.fromList xs'))
freshPoly' (MutableTy ty) = MutableTy <$> freshPoly' ty
freshPoly :: TyKind -> TypeInferer TyKind
freshPoly ty = evalStateT (freshPoly' ty) HashMap.empty
unifyEnv :: TypeInferer ()
unifyEnv = do
env <- use _1
constraints <- use _3
_1 .= HashMap.map (unify (reverse constraints)) env
inferTypeOfParams :: AST -> TypeInferer TypedAST
inferTypeOfParams x@(Lit _ _) = inferType' x
inferTypeOfParams (List xs pos) = do
xs' <- mapM inferTypeOfParams xs
let ty = ListTy (map Util.typeof xs')
return (TyList xs' ty pos)
inferType' :: AST -> TypeInferer TypedAST
inferType' (Lit lit@(SymLit "true") pos) = return (TyLit lit BoolTy pos)
inferType' (Lit lit@(SymLit "false") pos) = return (TyLit lit BoolTy pos)
inferType' (Lit lit@(SymLit s) pos) = do
env <- use _1
ty <- freshPoly (Maybe.fromJust (HashMap.lookup s env))
return (TyLit lit ty pos)
inferType' (Lit lit@(StrLit _) pos) = return (TyLit lit StrTy pos)
inferType' (Lit lit@(NumLit _) pos) = return (TyLit lit NumTy pos)
inferType' (Lit lit@(BoolLit _) pos) = return (TyLit lit BoolTy pos)
inferType' (List xs pos) = do
let instr = xs !! 0
case instr of
Lit (SymLit "^") pos1 -> do
let params = xs !! 1
let body = xs !! 2
let syms = Util.flatSymbols params
env <- use _1
forM_ syms $ \sym -> do
var <- gensym
_1 %= HashMap.insert sym (VarTy var)
typedBody <- inferType' body
typedParams <- inferTypeOfParams params
_1 .= env
unifyEnv
constraints <- use _3
let unifiedBody = Util.mapTyKind (unify (reverse constraints)) typedBody
let unifiedParams = Util.mapTyKind (unify (reverse constraints)) typedParams
return (TyList
[TyLit (SymLit "^") SymTy pos1, unifiedParams, unifiedBody]
(ArrTy (Util.typeof unifiedParams) (Util.typeof unifiedBody))
pos)
Lit (SymLit "seq") pos1 -> do
xs' <- mapM inferType' (tail xs)
return (TyList
(TyLit (SymLit "seq") SymTy pos1:xs')
(Util.typeof (last xs'))
pos)
Lit (SymLit "let") pos1 -> do
let pattern = xs !! 1
let body = xs !! 2
let syms = Util.flatSymbols pattern
typedBody <- inferType' body
makePoly typedBody
forM_ syms $ \sym -> do
var <- gensym
_1 %= HashMap.insert sym (VarTy var)
typedPattern <- inferTypeOfParams pattern
let bodyTy = Util.typeof typedBody
let patTy = Util.typeof typedPattern
let absurd = UnexpectedType bodyTy patTy (Util.whereIs typedPattern)
_3 %= (Subtype bodyTy patTy absurd :)
unifyEnv
constraints <- use _3
let unifiedPattern = Util.mapTyKind (unify (reverse constraints)) typedPattern
return (TyList
[TyLit (SymLit "let") SymTy pos1, unifiedPattern, typedBody]
(ListTy [])
pos)
Lit (SymLit "letrec") pos1 -> do
let pattern = xs !! 1
let body = xs !! 2
let syms = Util.flatSymbols pattern
forM_ syms $ \sym -> do
var <- gensym
_1 %= HashMap.insert sym (VarTy var)
typedBody <- inferType' body
typedPattern <- inferTypeOfParams pattern
let bodyTy = Util.typeof typedBody
let patTy = Util.typeof typedPattern
let absurd = UnexpectedType bodyTy patTy (Util.whereIs typedPattern)
_3 %= (Subtype bodyTy patTy absurd :)
unifyEnv
constraints <- use _3
let unifiedBody = Util.mapTyKind (unify (reverse constraints)) typedBody
let unifiedPattern = Util.mapTyKind (unify (reverse constraints)) typedPattern
makePoly unifiedBody
return (TyList
[TyLit (SymLit "letrec") SymTy pos1, unifiedPattern, unifiedBody]
(ListTy [])
pos)
Lit (SymLit "match") pos1 -> do
expr <- inferType' (xs !! 1)
patterns <- mapM inferType' (drop 2 xs)
syms <- mapM (\_ -> (,) <$> gensym <*> gensym) patterns
forM_ (zip patterns syms) $ \(pat, (x, y)) -> do
let patTy = Util.typeof pat
let expected = ArrTy (VarTy x) (VarTy y)
_3 %= (Subtype patTy expected (UnexpectedType expected patTy (Util.whereIs pat)) :)
let exprTy = Util.typeof expr
let srcTys = map (\(x, _) -> VarTy x) syms
let expected = EitherTy srcTys
let retTy = EitherTy (map (\(_, y) -> VarTy y) syms)
_3 %= (Subtype exprTy expected (UnexpectedType expected exprTy (Util.whereIs expr)) :)
unifyEnv
constraints <- use _3
env <- use _1
_1 .= HashMap.map (Util.flatEitherTy (negate 1)) env
let unifiedExpr = Util.mapTyKind (Util.flatEitherTy (negate 1) . unify (reverse constraints)) expr
let unifiedPatterns = map (Util.mapTyKind (Util.flatEitherTy (negate 1) . unify (reverse constraints))) patterns
return (TyList
(TyLit (SymLit "match") SymTy pos1:unifiedExpr:unifiedPatterns)
(Util.flatEitherTy (negate 1) (unify (reverse constraints) retTy))
pos)
Lit (SymLit "cond") pos1 -> do
exprs <- mapM inferType' (tail xs)
let conds = Maybe.catMaybes (map (\(x,i) -> if even i then Just x else Nothing) (zip (init exprs) ([0..] :: [Int])))
let thenClauses = Maybe.catMaybes (map (\(x,i) -> if odd i then Just x else Nothing) (zip exprs ([0..] :: [Int])))
let elseClause = last exprs
forM_ conds $ \cond -> do
let ty = Util.typeof cond
_3 %= (Subtype ty BoolTy (UnexpectedType BoolTy ty (Util.whereIs cond)) :)
unifyEnv
constraints <- use _3
let retTy = Util.flatEitherTy (negate 1) (EitherTy (map Util.typeof thenClauses ++ [Util.typeof elseClause]))
return (Util.mapTyKind
(unify (reverse constraints))
(TyList
(TyLit (SymLit "cond") SymTy pos1:exprs)
retTy
pos))
Lit (SymLit "object") pos1 -> do
let symbols = Maybe.catMaybes (map (\(x,i) -> if even i then Just x else Nothing) (zip (tail xs) ([0..] :: [Int])))
let exprs = Maybe.catMaybes (map (\(x,i) -> if odd i then Just x else Nothing) (zip (tail xs) ([0..] :: [Int])))
let propertyNames = Maybe.catMaybes (map (\x -> case x of
Lit (SymLit s) _ -> Just s
_ -> Nothing) symbols)
typedExprs <- mapM inferType' exprs
let tys = map Util.typeof typedExprs
i <- gensym
let ty = ObjectTy (HashSet.singleton i) (HashMap.fromList (zip propertyNames tys))
return
(TyList
(TyLit (SymLit "object") SymTy pos1:concatMap (\(k1, (k,v)) -> [TyLit (SymLit k) SymTy (Util.whereIsAST k1),v]) (zip symbols (zip propertyNames typedExprs)))
ty
pos)
Lit (SymLit ".") pos1 -> do
let Lit (SymLit propertyName) pos2 = xs !! 2
let expr = xs !! 1
typedExpr <- inferType' expr
let exprTy = Util.typeof typedExpr
i <- gensym
x <- gensym
let typedProp = TyLit (SymLit propertyName) (VarTy x) pos2
let expected = ObjectTy (HashSet.singleton i) (HashMap.singleton propertyName (VarTy x))
_3 %= (Equal expected exprTy (UnexpectedType expected exprTy (Util.whereIs typedExpr)) :)
unifyEnv
constraints <- use _3
let unifiedExpr = Util.mapTyKind (unify (reverse constraints)) typedExpr
let unifiedProp = Util.mapTyKind (unify (reverse constraints)) typedProp
return
(Util.mapTyKind
(unify (reverse constraints))
(TyList
[TyLit (SymLit ".") SymTy pos1, unifiedExpr, unifiedProp]
(VarTy x)
pos))
Lit (SymLit ":=") pos1 -> do
left <- inferType' (xs !! 1)
right <- inferType' (xs !! 2)
let leftTy = Util.typeof left
let expected = MutableTy (Util.typeof right)
_3 %= (Subtype expected leftTy (UnexpectedType expected leftTy (Util.whereIs left)) :)
unifyEnv
constraints <- use _3
return
(Util.mapTyKind
(unify (reverse constraints))
(TyList
[TyLit (SymLit ":=") SymTy pos1, left, right]
(ListTy [])
pos))
Lit (SymLit "assume") pos1 -> do
let Lit (SymLit sym) pos2 = xs !! 1
var <- gensym
_1 %= HashMap.insert sym (VarTy var)
return (TyList
[ TyLit (SymLit "assume") SymTy pos1,
TyLit (SymLit sym) (VarTy var) pos2
] (ListTy []) pos)
_ -> do
let func = head xs
let args = tail xs
typedFunc <- inferType' func
typedArgs <- mapM inferType' args
let funcTy = Util.typeof typedFunc
let argsTy = Util.flatListTy (ListTy (map Util.typeof typedArgs))
x <- gensym
let expected = ArrTy argsTy (VarTy x)
_3 %= (Subtype funcTy expected (UnexpectedType expected funcTy (Util.whereIs typedFunc)) :)
unifyEnv
constraints <- use _3
let unifiedFunc = Util.mapTyKind (unify (reverse constraints)) typedFunc
let unifiedArgs = map (Util.mapTyKind (unify (reverse constraints))) typedArgs
return (Util.mapTyKind (unify (reverse constraints)) (TyList (unifiedFunc:unifiedArgs) (VarTy x) pos))
inferType :: AST -> ([Absurd], TypedAST)
inferType ast = do
let (typedAST, (_, _, constraints, _)) = runState (inferType' ast) (Util.initialTypeEnv, Util.initialVarList, [], Util.initialPolyEnv)
let absurds = cantUnify (reverse constraints)
(List.nub absurds, Util.mapTyKind (unify (reverse constraints) . Util.flatListTy) typedAST)
subst :: Variable -> TyKind -> TyKind -> TyKind
subst i x y@(VarTy j)
| i == j = x
| otherwise = y
subst i x (RecTy j ty)
| i == j = RecTy j ty
| otherwise = RecTy j (subst i x ty)
subst _ _ SymTy = SymTy
subst _ _ StrTy = StrTy
subst _ _ NumTy = NumTy
subst _ _ BoolTy = BoolTy
subst i x (ArrTy y z) = ArrTy (subst i x y) (subst i x z)
subst i x (ListTy xs) = ListTy (map (subst i x) xs)
subst i x (EitherTy xs) = EitherTy (map (subst i x) xs)
subst i x@(ObjectTy j ys) (ObjectTy k xs)
| HashSet.member i k = ObjectTy (HashSet.union j k) (HashMap.union xs ys)
| otherwise = ObjectTy k (HashMap.map (subst i x) xs)
subst i x (ObjectTy j xs) = ObjectTy j (HashMap.map (subst i x) xs)
subst i x (MutableTy ty) = MutableTy (subst i x ty)
substConstraint :: Variable -> TyKind -> Constraint -> Constraint
substConstraint i y (Equal ty1 ty2 absurd) = Equal (subst i y ty1) (subst i y ty2) (substAbsurd i y absurd)
substConstraint i y (Subtype ty1 ty2 absurd) = Subtype (subst i y ty1) (subst i y ty2) (substAbsurd i y absurd)
substAbsurd :: Variable -> TyKind -> Absurd -> Absurd
substAbsurd i y (UnexpectedType ty1 ty2 pos) = UnexpectedType (subst i y ty1) (subst i y ty2) pos
unify :: [Constraint] -> TyKind -> TyKind
unify = snd . unify'
cantUnify :: [Constraint] -> [Absurd]
cantUnify = fst . unify'
unify' :: [Constraint] -> ([Absurd], TyKind -> TyKind)
unify' [] = ([], id)
unify' (Subtype s t absurd:c)
| s == t = unify' c
| otherwise = do
let tmp1 = Util.extractVarTy s
let tmp2 = Util.extractVarTy t
let i = Maybe.fromJust tmp1
let j = Maybe.fromJust tmp2
if Maybe.isJust tmp1
then do
let t' = Util.flatEitherTy i t
if not (elem i (Util.freeVariables t'))
then do
let (absurds, substitution) = unify' (map (substConstraint i t') c)
(absurds, substitution . subst i t')
else do
let (absurds, substitution) = unify' (map (substConstraint i (RecTy i t')) c)
(absurds, substitution . subst i (RecTy i t'))
else
if Maybe.isJust tmp2
then do
let s' = Util.flatEitherTy j s
if not (elem j (Util.freeVariables s'))
then do
let (absurds, substitution) = unify' (map (substConstraint j s') c)
(absurds, substitution . subst j s')
else do
let (absurds, substitution) = unify' (map (substConstraint j (RecTy j s')) c)
(absurds, substitution . subst j (RecTy j s'))
else
case (s, t) of
(ArrTy s1 s2, ArrTy t1 t2) ->
unify' (Subtype t1 s1 absurd:Subtype s2 t2 absurd:c)
(RecTy _ s1, RecTy _ t1) -> do
unify' (Subtype s1 t1 absurd:c)
(MutableTy s1, MutableTy t1) ->
unify' (Subtype s1 t1 absurd:c)
(EitherTy ss, _) -> do
let results = map (\s1 -> unify' [Subtype s1 t absurd]) ss
if all (\(absurds, _) -> null absurds) results
then do
let substitution = foldr (\(_, substitution1) substitution2 -> substitution1 . substitution2) id results
let (absurds, substitution1) = unify' c
(absurds, substitution1 . substitution)
else do
let (absurds, substitution) = unify' c
let (absurds1, substitution1) = head results
(absurds ++ absurds1, substitution . substitution1)
(_, EitherTy ts) -> do
let results = map (\t1 -> unify' (Subtype s t1 absurd:c)) ts
let r = foldl1 (\(absurds1, substitution1) (absurds2, substitution2) ->
case absurds1 of
[] -> (absurds1, substitution1)
_ -> (absurds2, substitution2)) results
case fst r of
[] -> r
_ -> head results
(RecTy k s1, _) ->
unify' (Subtype (subst k s s1) t absurd:c)
(_, RecTy k t1) ->
unify' (Subtype s (subst k t t1) absurd:c)
(ListTy [], _) -> do
let (absurds, substitution) = unify' c
(absurd:absurds, substitution)
(_, ListTy []) -> do
let (absurds, substitution) = unify' c
(absurd:absurds, substitution)
(ListTy xs, ListTy ys)
| length xs == length ys ->
unify' (map (\(a,b) -> Subtype a b absurd) (zip xs ys) ++ c)
| length xs < length ys -> do
let len = length xs - 1
let xs1 = take len xs
let ys1 = take len ys
let xs2 = last xs
let ys2 = ListTy (drop len ys)
unify' (map (\(a,b) -> Subtype a b absurd) (zip xs1 ys1)
++ [Subtype xs2 ys2 absurd]
++ c)
| length xs > length ys -> do
let len = length ys - 1
let xs1 = take len xs
let ys1 = take len ys
let xs2 = ListTy (drop len xs)
let ys2 = last ys
unify' (map (\(a,b) -> Subtype a b absurd) (zip xs1 ys1)
++ [Subtype xs2 ys2 absurd]
++ c)
(ObjectTy _ xs, ObjectTy _ ys) -> do
let xKeys = HashMap.keys xs
let yKeys = HashMap.keys ys
if all (\x -> elem x xKeys) yKeys
then do
let c' = map (\key -> Subtype
(Maybe.fromJust (HashMap.lookup key xs))
(Maybe.fromJust (HashMap.lookup key ys))
absurd) yKeys
unify' (c'++c)
else do
let (absurds, substitution) = unify' c
(absurd:absurds, substitution)
_ -> do
unify' (Equal s t absurd:c)
unify' (Equal s t absurd:c)
| s == t = unify' c
| otherwise = do
let tmp1 = Util.extractVarTy s
let tmp2 = Util.extractVarTy t
let i = Maybe.fromJust tmp1
let j = Maybe.fromJust tmp2
if Maybe.isJust tmp1
then do
let t' = Util.flatEitherTy i t
if not (elem i (Util.freeVariables t'))
then do
let (absurds, substitution) = unify' (map (substConstraint i t') c)
(absurds, substitution . subst i t')
else do
let (absurds, substitution) = unify' (map (substConstraint i (RecTy i t')) c)
(absurds, substitution . subst i (RecTy i t'))
else
if Maybe.isJust tmp2
then do
let s' = Util.flatEitherTy j s
if not (elem j (Util.freeVariables s'))
then do
let (absurds, substitution) = unify' (map (substConstraint j s') c)
(absurds, substitution . subst j s')
else do
let (absurds, substitution) = unify' (map (substConstraint j (RecTy j s')) c)
(absurds, substitution . subst j (RecTy j s'))
else
case (s, t) of
(ArrTy s1 s2, ArrTy t1 t2) ->
unify' (Equal t1 s1 absurd:Equal s2 t2 absurd:c)
(ListTy [], _) -> do
let (absurds, substitution) = unify' c
(absurd:absurds, substitution)
(_, ListTy []) -> do
let (absurds, substitution) = unify' c
(absurd:absurds, substitution)
(ListTy xs, ListTy ys)
| length xs == length ys ->
unify' (map (\(a,b) -> Equal a b absurd) (zip xs ys) ++ c)
| length xs < length ys -> do
let len = length xs - 1
let xs1 = take len xs
let ys1 = take len ys
let xs2 = last xs
let ys2 = ListTy (drop len ys)
unify' (map (\(a,b) -> Equal a b absurd) (zip xs1 ys1)
++ [Equal xs2 ys2 absurd]
++ c)
| length xs > length ys -> do
let len = length ys - 1
let xs1 = take len xs
let ys1 = take len ys
let xs2 = ListTy (drop len xs)
let ys2 = last ys
unify' (map (\(a,b) -> Equal a b absurd) (zip xs1 ys1)
++ [Equal xs2 ys2 absurd]
++ c)
(ObjectTy k xs, ObjectTy l ys) -> do
let xKeys = HashMap.keys xs
let yKeys = HashMap.keys ys
if any (\x -> elem x yKeys) xKeys
then do
unify' (Subtype s t absurd:c)
else do
let (absurds, substitution) = unify' c
(absurds, substitution . foldl (\f v -> subst v t . f) id (HashSet.toList k) . foldl (\f v -> subst v s . f) id (HashSet.toList l))
(RecTy _ s1, RecTy _ t1) -> do
unify' (Equal s1 t1 absurd:c)
_ -> do
let (absurds, substitution) = unify' c
(absurd:absurds, substitution)
| pasberth/binal1 | Language/Binal/Verifier.hs | mit | 26,883 | 12 | 30 | 9,165 | 10,946 | 5,302 | 5,644 | 626 | 34 |
--------------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
import Data.Monoid (mappend)
import Hakyll
--------------------------------------------------------------------------------
main :: IO ()
main = hakyll $ do
match "images/*" $ do
route idRoute
compile copyFileCompiler
match "css/*" $ do
route idRoute
compile compressCssCompiler
match (fromList ["about.rst", "contact.markdown"]) $ do
route $ setExtension "html"
compile $ pandocCompiler
>>= loadAndApplyTemplate "templates/default.html" defaultContext
>>= relativizeUrls
match "posts/*" $ do
route $ setExtension "html"
compile $ pandocCompiler
>>= loadAndApplyTemplate "templates/post.html" postCtx
>>= loadAndApplyTemplate "templates/default.html" postCtx
>>= relativizeUrls
create ["archive.html"] $ do
route idRoute
compile $ do
posts <- recentFirst =<< loadAll "posts/*"
let archiveCtx =
listField "posts" postCtx (return posts) `mappend`
constField "title" "Archives" `mappend`
defaultContext
makeItem ""
>>= loadAndApplyTemplate "templates/archive.html" archiveCtx
>>= loadAndApplyTemplate "templates/default.html" archiveCtx
>>= relativizeUrls
match "index.html" $ do
route idRoute
compile $ do
posts <- fmap (take 10).recentFirst =<< loadAll "posts/*"
let indexCtx =
listField "posts" postCtx (return posts) `mappend`
constField "title" "Home" `mappend`
defaultContext
getResourceBody
>>= applyAsTemplate indexCtx
>>= loadAndApplyTemplate "templates/default.html" indexCtx
>>= relativizeUrls
match "templates/*" $ compile templateCompiler
--------------------------------------------------------------------------------
postCtx :: Context String
postCtx =
dateField "date" "%B %e, %Y" `mappend`
defaultContext
| prannayk/Hakyll-Blog | site.hs | mit | 2,284 | 3 | 20 | 725 | 446 | 201 | 245 | 51 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module LambdaCmsOrg.Tutorial.Foundation where
import Control.Arrow ((&&&))
import Data.Text (Text)
import Network.Wai (requestMethod)
import Yesod
import LambdaCms.Core
import LambdaCmsOrg.Tutorial.Message (TutorialMessage, defaultMessage,
englishMessage)
import qualified LambdaCmsOrg.Tutorial.Message as Msg
import LambdaCmsOrg.Tutorial.Models
data TutorialAdmin = TutorialAdmin
mkYesodSubData "TutorialAdmin" $(parseRoutesFile "config/routes")
instance LambdaCmsOrgTutorial master => RenderMessage master TutorialMessage where
renderMessage = renderTutorialMessage
type TutorialHandler a = forall master. LambdaCmsOrgTutorial master => HandlerT TutorialAdmin (HandlerT master IO) a
type TutorialForm x = forall master. LambdaCmsOrgTutorial master => Html -> MForm (HandlerT master IO) (FormResult x, WidgetT master IO ())
class LambdaCmsAdmin master => LambdaCmsOrgTutorial master where
tutorialR :: Route TutorialAdmin -> Route master
renderTutorialMessage :: master
-> [Text]
-> TutorialMessage
-> Text
renderTutorialMessage m (lang:langs) = do
case (lang `elem` (renderLanguages m), lang) of
(True, "en") -> englishMessage
_ -> renderTutorialMessage m langs
renderTutorialMessage _ _ = defaultMessage
defaultTutorialAdminMenu :: LambdaCmsOrgTutorial master => (Route TutorialAdmin -> Route master) -> [AdminMenuItem master]
defaultTutorialAdminMenu tp = [ MenuItem (SomeMessage Msg.MenuTutorial) (tp TutorialAdminIndexR) "book" ]
instance LambdaCmsOrgTutorial master => LambdaCmsLoggable master Tutorial where
logMessage y "POST" = translateTutorialLogs y Msg.LogCreatedTutorial
logMessage y "PATCH" = translateTutorialLogs y Msg.LogUpdatedTutorial
logMessage y "DELETE" = translateTutorialLogs y Msg.LogDeletedTutorial
logMessage _ _ = const []
translateTutorialLogs :: forall b master.
( LambdaCmsOrgTutorial master
, RenderMessage master b
) => master -> (Text -> b) -> Tutorial -> [(Text, Text)]
translateTutorialLogs y msg e = map (id &&& messageFor) $ renderLanguages y
where messageFor lang = renderMessage y [lang] . msg $ tutorialTitle e
logTutorial :: LambdaCmsOrgTutorial master => Tutorial -> HandlerT master IO [(Text, Text)]
logTutorial tutorial = do
y <- getYesod
method <- waiRequest >>= return . requestMethod
return $ logMessage y method tutorial
| lambdacms/lambdacms.org | lambdacmsorg-tutorial/LambdaCmsOrg/Tutorial/Foundation.hs | mit | 3,035 | 0 | 13 | 774 | 686 | 362 | 324 | 54 | 1 |
module Main where
-- import Control.Monad ( (<<) )
import System( getArgs )
import System.IO( stderr, hPutStrLn )
import System.Process( runCommand, waitForProcess)
import Data.List( nub, sort, isPrefixOf )
main = do args <- getArgs
if (length args >= 2) then
do let file = head args
let current_word = (head . tail) args
procHandle <- runCommand $ command file
exitcode <- waitForProcess procHandle
text <- readFile "/tmp/textmatetags"
mapM_ (putStrLn) $ sort . nub . filter (isPrefixOf current_word)
$ map (head . words) (lines text)
else
hPutStrLn stderr "Provide a haskell file and a current word!"
where command file = "echo \":ctags /tmp/textmatetags\" | ghci " ++ file ++" &> /tmp/runtags"
| spockz/Haskell-Code-Completion-for-TextMate | Main.hs | mit | 904 | 0 | 15 | 317 | 228 | 116 | 112 | 16 | 2 |
<?xml version='1.0' encoding='ISO-8859-1' ?>
<helpset version="2.0">
<!-- title -->
<title>JavaScore - Auswertungsprogramm für Segelregatten</title>
<!-- maps -->
<maps>
<homeID>top</homeID>
<mapref location="de/Map.jhm"/>
</maps>
<!-- views -->
<view>
<name>TOC</name>
<label>Inhaltsangabe</label>
<type>javax.help.TOCView</type>
<data>de/JavaScoreTOC.xml</data>
</view>
</helpset>
| sgrosven/gromurph | Javascore/src/main/resources/help/JavaScore_de.hs | gpl-2.0 | 453 | 58 | 7 | 102 | 177 | 102 | 75 | -1 | -1 |
module MachineLearning.SupervisedLearning.SupportVectorMachines.Terms where
import Notes
makeDefs [
"support vector machines"
, "linear support vector machines"
, "confidence"
, "support vector"
]
| NorfairKing/the-notes | src/MachineLearning/SupervisedLearning/SupportVectorMachines/Terms.hs | gpl-2.0 | 232 | 0 | 6 | 56 | 29 | 18 | 11 | -1 | -1 |
{-# OPTIONS -w -O0 #-}
{- |
Module : HolLight/ATC_HolLight.der.hs
Description : generated Typeable, ShATermConvertible instances
Copyright : (c) DFKI Bremen 2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable(overlapping Typeable instances)
Automatic derivation of instances via DrIFT-rule Typeable, ShATermConvertible
for the type(s):
'HolLight.Sentence.Sentence'
'HolLight.Sign.Sign'
'HolLight.Sublogic.HolLightSL'
'HolLight.Term.HolType'
'HolLight.Term.HolProof'
'HolLight.Term.HolParseType'
'HolLight.Term.HolTermInfo'
'HolLight.Term.Term'
-}
{-
Generated by 'genRules' (automatic rule generation for DrIFT). Don't touch!!
dependency files:
HolLight/Sentence.hs
HolLight/Sign.hs
HolLight/Sublogic.hs
HolLight/Term.hs
-}
module HolLight.ATC_HolLight () where
import ATC.AS_Annotation
import ATerm.Lib
import Common.Doc
import Common.DocUtils
import Common.Result
import Data.Maybe (fromJust, catMaybes, isNothing)
import Data.Typeable
import HolLight.Helper
import HolLight.Sentence
import HolLight.Sign
import HolLight.Sublogic
import HolLight.Term
import qualified Data.Char as Char
import qualified Data.Map as Map
{-! for HolLight.Sentence.Sentence derive : Typeable !-}
{-! for HolLight.Sign.Sign derive : Typeable !-}
{-! for HolLight.Sublogic.HolLightSL derive : Typeable !-}
{-! for HolLight.Term.HolType derive : Typeable !-}
{-! for HolLight.Term.HolProof derive : Typeable !-}
{-! for HolLight.Term.HolParseType derive : Typeable !-}
{-! for HolLight.Term.HolTermInfo derive : Typeable !-}
{-! for HolLight.Term.Term derive : Typeable !-}
{-! for HolLight.Sentence.Sentence derive : ShATermConvertible !-}
{-! for HolLight.Sign.Sign derive : ShATermConvertible !-}
{-! for HolLight.Sublogic.HolLightSL derive : ShATermConvertible !-}
{-! for HolLight.Term.HolType derive : ShATermConvertible !-}
{-! for HolLight.Term.HolProof derive : ShATermConvertible !-}
{-! for HolLight.Term.HolParseType derive : ShATermConvertible !-}
{-! for HolLight.Term.HolTermInfo derive : ShATermConvertible !-}
{-! for HolLight.Term.Term derive : ShATermConvertible !-}
| nevrenato/Hets_Fork | HolLight/ATC_HolLight.der.hs | gpl-2.0 | 2,184 | 0 | 5 | 265 | 115 | 80 | 35 | 16 | 0 |
{- |
Module : $Header$
Description : Additional (manual) ATerm-Conversions for OMDoc
Copyright : (c) Hendrik Iben, Uni Bremen 2005-2007
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
Additional ATerm-Conversions for OMDoc.
-}
module OMDoc.ATerm where
import qualified Network.URI as URI
import ATerm.Lib
instance ShATermConvertible URI.URI where
toShATermAux att0 u = do
(att1, us) <- toShATerm' att0 ((URI.uriToString id u) "")
return $ addATerm (ShAAppl "URI.URI" [us] []) att1
fromShATermAux ix att0 =
case getShATerm ix att0 of
x@(ShAAppl "URI.URI" [us] _) ->
case fromShATerm' us att0 of
(att1, us') ->
case URI.parseURIReference us' of
Nothing ->
fromShATermError "URI.URI" x
Just uri ->
(att1, uri)
u -> fromShATermError "URI.URI" u
| nevrenato/Hets_Fork | OMDoc/ATerm.hs | gpl-2.0 | 971 | 0 | 15 | 262 | 209 | 108 | 101 | 18 | 0 |
{- -----------------------------------------------------------------------------
ZDCPU16 is a DCPU-16 emulator.
Copyright (C) 2012 Luis Cabellos
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
----------------------------------------------------------------------------- -}
module Main( main ) where
-- -----------------------------------------------------------------------------
import Control.Monad( unless )
import qualified Data.ByteString as BS( readFile )
import Data.Word( Word32 )
import qualified Graphics.UI.SDL as SDL(
Event(..), SDLKey(..), Keysym(..), pollEvent, getTicks )
import System.Environment( getArgs, getProgName )
import System.Exit( exitSuccess, exitFailure )
import ZDCpu16.DebugRender(
RenderState, runRender, mkRenderState, clearScreen, renderEmuState )
import ZDCpu16.EmuState( EmuState(..), mkEmuState )
import ZDCpu16.Hardware( loads )
import ZDCpu16.ConRPC( startConsole, clQuit, clWriteVRAM )
import ZDCpu16.Util( byteStringToWord16 )
import ZDCpu16.ZDCpu16( runEmulator, stepEmulator, stepNCycles )
-- -----------------------------------------------------------------------------
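-- Poll SDL events and update the emulator state accordingly. Key bindings, as
-- implemented below: ESC or Q quits, S executes a single emulator step (only
-- while halted), R switches to run mode, H halts back to single-step mode, and
-- + / - raise or lower the emulation speed in steps of 10 while it stays
-- within the 10..1000 bounds. The returned Bool tells the main loop to exit.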
getInput :: EmuState -> IO (EmuState, Bool)
getInput est = do
e <- SDL.pollEvent
case e of
SDL.Quit -> return $! (est, True)
SDL.KeyUp key -> do
case SDL.symKey key of
SDL.SDLK_ESCAPE -> return $! (est, True)
SDL.SDLK_q -> return $! (est, True)
SDL.SDLK_s -> do
if runMode est
then getInput est
else do
(_,newEst) <- runEmulator stepEmulator est
getInput newEst
SDL.SDLK_r -> do
if runMode est
then getInput est
else getInput est{ runMode = True }
SDL.SDLK_h -> do
if runMode est
then getInput est{ runMode = False }
else getInput est
SDL.SDLK_PLUS -> if (speed est < 1000)
then getInput est{ speed = speed est + 10 }
else getInput est
SDL.SDLK_MINUS -> if (speed est > 10)
then getInput est{ speed = speed est - 10 }
else getInput est
_ -> getInput est
SDL.NoEvent -> return $! (est, False)
_ -> getInput est
-- -----------------------------------------------------------------------------
mainLoop :: RenderState -> EmuState -> Int -> Word32 -> IO ()
mainLoop rst est lastd lastt = do
_ <- runRender (clearScreen >> renderEmuState est) rst
(newEst, quit) <- getInput est
unless quit $ do
newt <- SDL.getTicks
if runMode newEst
then do
let dt = newt - lastt
cycles = fromIntegral $ dt * (fromIntegral $ speed newEst)
(newd,newEst2) <- runEmulator (stepNCycles lastd cycles) newEst
mainLoop rst newEst2 newd newt
else mainLoop rst newEst lastd newt
-- -----------------------------------------------------------------------------
main :: IO ()
main = do
args <- getArgs
case args of
[filename] -> do
program <- fmap byteStringToWord16 . BS.readFile $ filename
conn <- startConsole
rst <- mkRenderState
emptyState <- mkEmuState (clWriteVRAM conn)
loads 0 program $ emuCpu emptyState
lastTicks <- SDL.getTicks
mainLoop rst emptyState 0 lastTicks
clQuit conn
exitSuccess
_ -> do
progName <- getProgName
putStrLn $ "Usage: " ++ progName ++ "BIN_FILE"
exitFailure
-- -----------------------------------------------------------------------------
| zhensydow/zdcpu16 | src/zddcpu16_emu_main.hs | gpl-3.0 | 4,098 | 0 | 21 | 973 | 931 | 485 | 446 | 78 | 16 |
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE PatternSynonyms #-}
module Logic where
import Data.Char (toLower)
import Data.List
type Var = String
data Exp = Op Op [Exp]
| Var Var
| Con String
| Lam (Exp -> Exp)
| Quant Amount Pol Var Exp Exp
check :: Bool -> Maybe ()
check c = if c then Just () else Nothing
-- | solveThe metaVariable t1 t2. metaVariable occurs in t1, but not in t2.
-- Attempt to unify t1 and t2; return the necessary assignments of metaVariable if they exist.
-- ATTN: currently unused. But this could be a more principled
-- solution to solving for definites (or times). Rather than having a
-- special environment, solve for a variable that makes the thing that you look up true.
solveThe :: Int -> Var -> [(Exp,Exp)] -> Maybe [Exp]
solveThe _ _ [] = Just []
solveThe n meta ((Op op1 e1,Op op2 e2):cs)
= check (op1 == op2 && length e1 == length e2) >> solveThe n meta (zip e1 e2 ++ cs)
solveThe n meta ((Var x,t):cs) | x == meta = (t:) <$> solveThe n meta cs
solveThe n meta ((Var x,Var y):cs) | x == y = solveThe n meta cs
solveThe n meta ((Lam f,Lam g):cs) = solveThe (n+1) meta ((f v, g v):cs)
where v = Var $ "_V_" ++ show n
solveThe n meta ((Quant a1 p1 v1 d1 b1,Quant a2 p2 v2 d2 b2):cs) =
check (a1 == a2 && p1 == p2 && v1 == v2) >>
solveThe n meta ((d1,d2):(b1,b2):cs)
solveThe _ _ _ = Nothing
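-- Small usage sketch (added illustration, not part of the original module):
-- solving the constraint  ?x y ~ f y  for the metavariable "?x" yields the
-- single assignment [Con "f"], while an unsolvable constraint yields Nothing.
exampleSolveThe :: Maybe [Exp]
exampleSolveThe =
  solveThe 0 "?x" [(APP (Var "?x") (Var "y"), APP (Con "f") (Var "y"))]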
eqExp' :: Exp -> Exp -> Bool
eqExp' = eqExp 0 []
eqNat' :: Nat -> Nat -> Bool
eqNat' = eqNat 0 []
instance Eq Nat where
(==) = eqNat'
eqExp :: Int -> [(Var,Var)] -> Exp -> Exp -> Bool
eqExp n equs e1 e2 = case (e1,e2) of
(Op op1 exps1,Op op2 exps2) -> op1 == op2 && length exps1 == length exps2 && and (zipWith (eqExp n equs) (exps1) (exps2))
(Quant a1 p1 v1 t1 b1,Quant a2 p2 v2 t2 b2) -> eqAmount n equs a1 a2 && p1 == p2 && eqExp n eq' t1 t2 && eqExp n eq' b1 b2
where eq' = (v1,v2):equs
(Lam f1,Lam f2) -> eqExp (n+1) equs (f1 x) (f2 x)
where x = Var $ "_V" ++ show n
(Var x,Var x') -> x == x' || (x,x') `elem` equs
(Con x,Con x') -> x == x'
_ -> False
eqAmount :: Int -> [(Var, Var)] -> Amount -> Amount -> Bool
eqAmount n eqs (Exact x) (Exact x') = eqNat n eqs x x'
eqAmount n eqs (AtLeast x) (AtLeast x') = eqNat n eqs x x'
eqAmount _ _ One One = True
eqAmount _ _ Few Few = True
eqAmount _ _ Several Several = True
eqAmount _ _ Many Many = True
eqAmount _ _ Most Most = True
eqAmount _ _ Lots Lots = True
eqAmount _ _ _ _ = False
eqNat :: Int -> [(Var, Var)] -> Nat -> Nat -> Bool
eqNat n es (Nat x) (Nat x') = eqExp n es x x'
type Type = Exp
newtype Nat = Nat {fromNat :: Exp}
instance Num Nat where
fromInteger n = Nat (Con (show n))
(Nat x) + (Nat y) = Nat (BinOp Plus x y)
data Amount' n = One | Few | Several | Many | Most | Lots | Exact n | AtLeast n -- amount for the *positive* polarity
deriving (Show,Eq,Functor,Foldable,Traversable)
type Amount = Amount' Nat
data Op = Fld String -- ^ field lookup
| Custom String
| App
| Not
| And
| Or
| Plus
| Implies
| ImpliesOften
| LAST_OPERATOR
deriving (Eq,Show)
pattern TRUE :: Exp
pattern TRUE = Con "True"
pattern FALSE :: Exp
pattern FALSE = Con "False"
pattern APP :: Exp -> Exp -> Exp
pattern APP f x = BinOp App f x
pattern NOT :: Exp -> Exp
pattern NOT x = UnOp Not x
not' :: Exp -> Exp
not' = NOT
lam :: (Exp -> Exp) -> Exp
lam f = case f (Var eta) of
APP b (Var "__ETA__") | not (eta `elem` freeVars b) -> b
_ -> Lam f
where eta = "__ETA__"
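-- Eta-reduction sketch (added illustration): because 'lam' looks for the
-- pattern  APP b (Var "__ETA__"), wrapping a plain application does not build
-- a new binder; the expression below evaluates to  Con "f"  (up to 'eqExp'').
exampleEta :: Exp
exampleEta = lam (\x -> app (Con "f") x)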
app :: Exp -> Exp -> Exp
app (Lam f) x = f x
app f x = APP f x
apps :: Exp -> [Exp] -> Exp
apps f args = foldl app f args
true :: Exp
true = TRUE
pattern BinOp :: Op -> Exp -> Exp -> Exp
pattern BinOp op x y = Op op [(x),(y)]
pattern UnOp :: Op -> Exp -> Exp
pattern UnOp op x = Op op [(x)]
(<->) :: Exp -> Exp -> Exp
a <-> b = (a --> b) ∧ (b --> a)
(-->),(~~>) :: Exp -> Exp -> Exp
x --> y = BinOp Implies x y
x ~~> y = BinOp ImpliesOften x y
pattern (:∧) :: Exp -> Exp -> Exp
pattern x :∧ y = BinOp And x y
(∧) :: Exp -> Exp -> Exp
TRUE ∧ y = y
y ∧ TRUE = y
(x :∧ y) ∧ z = x :∧ (y ∧ z)
x ∧ y = x :∧ y
(∨) :: Exp -> Exp -> Exp
x ∨ y = BinOp Or x y
data Pol = Pos | Neg | Both deriving (Eq,Ord,Show)
quoteTex :: String -> String
quoteTex = concatMap q
where q x | x `elem` "_#\\%" = "\\" ++ [x]
| otherwise = [x]
-- showTex :: Exp -> [Char]
-- showTex = texExp LAST_OPERATOR
pattern Forall :: Var -> Type -> Exp -> Exp
pattern Forall x dom body = Quant One Neg x dom body
pattern Exists :: Var -> Type -> Exp -> Exp
pattern Exists x dom body = Quant One Pos x dom body
-- pattern Sigma x dom body = Quant Pi Pos x dom body
pattern MOST :: Var -> Type -> Exp -> Exp
pattern MOST x dom body = Quant Most Neg x dom body
pattern FEW :: Var -> Type -> Exp -> Exp
pattern FEW x dom body = Quant Few Pos x dom body
pattern SEVERAL :: Var -> Type -> Exp -> Exp
pattern SEVERAL x dom body = Quant Several Pos x dom body
normalize :: [Char] -> [Char]
normalize = map toLower
freeVars :: Exp -> [Var]
freeVars (Con _x) = []
freeVars (Lam f) = freeVars (f (Con "__FREE__"))
freeVars (Var x) = [x]
freeVars (Quant _ _ x dom body) = (freeVars dom ++ nub (freeVars body)) \\ [x]
freeVars (Op _ xs) = (concatMap (freeVars) xs)
parens :: [Char] -> [Char]
parens x = "(" ++ x ++ ")"
| GU-CLASP/FraCoq | Logic.hs | gpl-3.0 | 5,498 | 0 | 14 | 1,393 | 2,583 | 1,346 | 1,237 | 141 | 6 |
{-# LANGUAGE ViewPatterns, TupleSections, TypeFamilies, FlexibleInstances, MultiParamTypeClasses, RecordWildCards, TemplateHaskell, ScopedTypeVariables #-}
{-# OPTIONS -Wall #-}
module Triangulation.AbstractNeighborhood(
EdgeNeighborhoodTet,
IEdgeNeighborhoodTet,
ent_top,ent_bot,ent_left,ent_right,ent_leftTri,ent_rightTri,ent_upperTri,ent_lowerTri,
ent_mirrorLR,ent_centralEdge,
ient_top,ient_bot,ient_left,ient_right,ient_leftTri,ient_rightTri,ient_upperTri,ient_lowerTri,
ient_mirrorLR,ient_centralEdge,
InnerEdgeNeighborhood,
ien_toList, ien_toNonEmpty, ien_length,
BoundaryEdgeNeighborhood,
ben_toList, ben_toNonEmpty, ben_length,
innerEdgeNeighborhood',
innerEdgeNeighborhood,
edgeNeighborhood,
someEdgeNeighborhood,
-- * Testing
unsafeEdgeNeighborhoodTetExportedOnlyForTesting,
edgeNeighborhoodTetStream
) where
import TriangulationCxtObject
import PrettyUtil
import Data.List(unfoldr)
import HomogenousTuples
import Language.Haskell.TH
import Util
import qualified Data.List.NonEmpty as NE
import Data.List.NonEmpty(NonEmpty(..))
import Data.SumType
import Data.Bits
import Control.Applicative
newtype EdgeNeighborhoodTet =
ENTet Word8
-- msb to lsb: top,bot,left,right
deriving(Eq,Ord)
enTet
:: Vertex -> Vertex -> Vertex -> Vertex -> EdgeNeighborhoodTet
enTet t b l r = ENTet ( shiftL (vertexToWord8 t) 6 .|. shiftL (vertexToWord8 b) 4
.|. shiftL (vertexToWord8 l) 2 .|. (vertexToWord8 r))
ent_top :: EdgeNeighborhoodTet -> Vertex
ent_top (ENTet w) = vertexFromWord8 (shiftR w 6)
ent_bot :: EdgeNeighborhoodTet -> Vertex
ent_bot (ENTet w) = vertexFromWord8 (shiftR w 4 .&. 3)
ent_left :: EdgeNeighborhoodTet -> Vertex
ent_left (ENTet w) = vertexFromWord8 (shiftR w 2 .&. 3)
ent_right :: EdgeNeighborhoodTet -> Vertex
ent_right (ENTet w) = vertexFromWord8 ( w .&. 3)
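-- Round-trip sketch (added illustration, not exported): the accessors above
-- undo 'enTet', given that 'vertexToWord8' produces two-bit values (0..3) and
-- assuming 'Vertex' has an Eq instance.
prop_enTetRoundTrip :: Vertex -> Vertex -> Vertex -> Vertex -> Bool
prop_enTetRoundTrip t b l r =
    let ent = enTet t b l r
    in  (ent_top ent, ent_bot ent, ent_left ent, ent_right ent) == (t, b, l, r)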
data EdgeNeighborhoodTetView = ENTetView {
entv_top, entv_bot, entv_left, entv_right :: Vertex
}
deriving Show
viewENTet :: EdgeNeighborhoodTet -> EdgeNeighborhoodTetView
viewENTet = ENTetView <$> ent_top <*> ent_bot <*> ent_left <*> ent_right
unviewENTet :: EdgeNeighborhoodTetView -> EdgeNeighborhoodTet
unviewENTet (ENTetView t b l r) = enTet t b l r
instance Show EdgeNeighborhoodTet where
showsPrec _ (viewENTet -> ENTetView a b c d) =
shows (a,b,c,d)
trivialHasTIndexInstance [t| EdgeNeighborhoodTet |]
type IEdgeNeighborhoodTet = I EdgeNeighborhoodTet
-- instance Eq EdgeNeighborhoodTet where
-- (==) = (==) `on` (ent_top &&& ent_bot &&& ent_left)
instance Pretty EdgeNeighborhoodTet where
prettyPrec prec (viewENTet -> ENTetView a b c d) = prettyPrecApp prec (text "ENTet") [a,b,c,d]
instance Vertices EdgeNeighborhoodTet where
type Verts EdgeNeighborhoodTet = Quadruple Vertex
vertices x = (ent_bot x, ent_top x, ent_left x, ent_right x)
-- | Order: top vertex, bottom vertex, left vertex
ent_leftTri :: EdgeNeighborhoodTet -> OTriangle
ent_leftTri ent = otriangle (ent_top ent, ent_bot ent, ent_left ent)
-- | Order: top vertex, bottom vertex, right vertex
ent_rightTri :: EdgeNeighborhoodTet -> OTriangle
ent_rightTri ent = otriangle (ent_top ent, ent_bot ent, ent_right ent)
-- | Order: top vertex, left vertex, right vertex
ent_upperTri :: EdgeNeighborhoodTet -> OTriangle
ent_upperTri ent = otriangle (ent_top ent, ent_left ent, ent_right ent)
-- | Order: bottom vertex, left vertex, right vertex
ent_lowerTri :: EdgeNeighborhoodTet -> OTriangle
ent_lowerTri ent = otriangle (ent_bot ent, ent_left ent, ent_right ent)
ent_mirrorLR :: EdgeNeighborhoodTet -> EdgeNeighborhoodTet
ent_mirrorLR (viewENTet -> ent) =
unviewENTet (ent { entv_left = entv_right ent, entv_right = entv_left ent })
ent_centralEdge :: EdgeNeighborhoodTet -> OEdge
ent_centralEdge ent = oedge (ent_bot ent, ent_top ent)
-- generate i-variants of accessors
$(concatMapM
(\(t,f) ->
let
i_f = mkName ("i" ++ nameBase f)
theSigD = sigD i_f [t| IEdgeNeighborhoodTet -> $(t) |]
theValD =
valD
(varP i_f)
(normalB [| mapI $(varE f) |])
[]
in
sequence [ theSigD, theValD ]
)
(
map ([t| IVertex |],) ['ent_top,'ent_bot,'ent_left,'ent_right] ++
map ([t| OITriangle |],) ['ent_leftTri,'ent_rightTri,'ent_lowerTri,'ent_upperTri] ++
map ([t| IEdgeNeighborhoodTet |],) ['ent_mirrorLR] ++
map ([t| OIEdge |],) ['ent_centralEdge]
))
-- | Returns a stream of tetrahedra containing a preimage of the given edge, with each tetrahedron's
-- 'ient_rightTri' glued to the next tetrahedron's 'ient_leftTri'.
--
-- The result will be infinite iff the edge is an inner edge.
--
-- The 'ent_bot' of each result tet will be equivalent to the first vertex of the given 'OIEdge'; the 'ent_top' of each result tet will be equivalent to the second vertex of the given 'OIEdge'.
edgeNeighborhoodTetStream :: Triangulation -> S2 -> OIEdge -> NonEmpty IEdgeNeighborhoodTet
edgeNeighborhoodTetStream tr dir ie =
let
I i0 e = viewI ie
ient0 = i0 ./ enTet _top _bot _left _right
where
(_bot,_top) = vertices e
(_left,_right) = (vertices . oppositeEdge . forgetVertexOrder) e
*. dir
in
ient0 :|
unfoldr (\prev -> do
_S <- lookupGluingOfOITriangle tr (ient_rightTri prev)
let I i _S' = viewI _S
(v0,v1,v2) = vertices _S'
this = i ./ enTet v0 v1 v2 (oTriangleDualVertex _S')
Just (this,this))
ient0
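-- Consumption sketch (added illustration): since the stream is infinite for an
-- inner edge, callers are expected to truncate it themselves, e.g.
--
-- > NE.take 4 (edgeNeighborhoodTetStream tr NoFlip oiEdge)
--
-- where 'tr' and 'oiEdge' stand for a triangulation and an oriented edge of it.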
-- |
-- INVARIANT: The 'ent_top's are all glued together, so are the 'ent_bot's.
--
-- INVARIANT: Each tetrahedron's
-- 'ient_rightTri' is glued to the next tetrahedron's 'ient_leftTri', cyclically.
newtype InnerEdgeNeighborhood = UnsafeInnerEdgeNeighborhood {
ien_toNonEmpty :: NonEmpty IEdgeNeighborhoodTet
}
deriving Show
ien_toList :: InnerEdgeNeighborhood -> [IEdgeNeighborhoodTet]
ien_toList = NE.toList . ien_toNonEmpty
-- |
-- INVARIANT: The 'ent_top's are all glued together, so are the 'ent_bot's.
--
-- INVARIANT: Each tetrahedron's
-- 'ient_rightTri' is glued to the next tetrahedron's 'ient_leftTri'.
--
-- INVARIANT: The first tetrahedron's 'ient_leftTri' and the last tetrahedron's 'ient_rightTri' are boundary tris.
newtype BoundaryEdgeNeighborhood = UnsafeBoundaryEdgeNeighborhood {
ben_toNonEmpty :: NonEmpty IEdgeNeighborhoodTet
}
deriving Show
ben_toList :: BoundaryEdgeNeighborhood -> [IEdgeNeighborhoodTet]
ben_toList = NE.toList . ben_toNonEmpty
-- The 'ent_bot' of each result tet will be equivalent to the first vertex of the given 'OIEdge'; the 'ent_top' of each result tet will be equivalent to the second vertex of the given 'OIEdge'.
innerEdgeNeighborhood' :: Triangulation -> OIEdge -> Maybe [IEdgeNeighborhoodTet]
innerEdgeNeighborhood' tr e = fmap ien_toList $ sumTypeToMaybe (edgeNeighborhood tr e)
innerEdgeNeighborhood :: TEdge -> Maybe [IEdgeNeighborhoodTet]
innerEdgeNeighborhood x = innerEdgeNeighborhood' (getTriangulation x) (packOrderedFace (unT x) Flip)
-- The 'ent_bot' of each result tet will be equivalent to the first vertex of the given 'OIEdge'; the 'ent_top' of each result tet will be equivalent to the second vertex of the given 'OIEdge'.
edgeNeighborhood
:: Triangulation
-> OIEdge -> Either BoundaryEdgeNeighborhood InnerEdgeNeighborhood
edgeNeighborhood tr e =
let
x0xs@(x0 :| xs) = edgeNeighborhoodTetStream tr NoFlip e
in
case break (== x0) xs of
(xs',_:_) -> Right (UnsafeInnerEdgeNeighborhood (x0 :| xs'))
(_,[]) ->
Left
(UnsafeBoundaryEdgeNeighborhood
(NE.fromList (
(reverse . map ient_mirrorLR . tail . NE.toList
. edgeNeighborhoodTetStream tr Flip) e
++
NE.toList x0xs)))
-- | Uses arbitrary orders
someEdgeNeighborhood
::
TEdge
-> Either BoundaryEdgeNeighborhood InnerEdgeNeighborhood
someEdgeNeighborhood e = edgeNeighborhood (getTriangulation e) . toOrderedFace . unT $ e
ben_length :: BoundaryEdgeNeighborhood -> Int
ben_length = length . ben_toList
ien_length :: InnerEdgeNeighborhood -> Int
ien_length = length . ien_toList
unsafeEdgeNeighborhoodTetExportedOnlyForTesting
:: Vertex -> Vertex -> Vertex -> Vertex -> EdgeNeighborhoodTet
unsafeEdgeNeighborhoodTetExportedOnlyForTesting = enTet
| DanielSchuessler/hstri | Triangulation/AbstractNeighborhood.hs | gpl-3.0 | 8,967 | 0 | 23 | 2,183 | 1,868 | 1,026 | 842 | 148 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Network.OAuth2.Util (
fromFileL,
fromFile,
fromUrl,
fromAuthorizedUrl,
fromRequest,
fromUrl',
fromAuthUrl,
checkDirectory,
downloadFile,
authRequest
)
where
import Data.Aeson
import Data.String
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as B8
import System.IO
import System.Directory
import Control.Monad.IO.Class (liftIO)
import Control.Exception
import qualified Control.Monad.State as ST
import Control.Monad.Except
import Control.Arrow (second)
import Network.HTTP.Conduit
import Data.Conduit
import Data.Conduit.Binary (sinkFile)
import Control.Monad.Trans.Resource (runResourceT)
import Network.HTTP.Types (HeaderName, hAuthorization)
import Network.HTTP.Types.Status (Status(..))
import qualified Data.ByteString.Char8 as C8
-- Maybe can remove this and make tokenUrl more general?
import Network.OAuth2.Token
import Network.OAuth2.Types
getL :: (FromJSON a) => BL.ByteString -> IO (Maybe a)
getL "" = return Nothing
getL string = do
let result = (Data.Aeson.decode string :: (FromJSON a) => Maybe a)
case result of
Nothing -> do
hPutStrLn stderr $ "Could not parse JSON!"
BL.writeFile "/tmp/log.json" string
return Nothing
Just a -> return a
get :: (FromJSON a) => BS.ByteString -> IO (Maybe a)
get "" = return Nothing
get string = do
let result = (Data.Aeson.decodeStrict string :: (FromJSON a) => Maybe a)
case result of
Nothing -> do
hPutStrLn stderr $ "Could not parse JSON!"
BS.writeFile "/tmp/log.json" string
return Nothing
Just a -> return a
-- Reads and decodes a JSON object from a file lazily.
fromFileL :: (FromJSON a) => FilePath -> IO (Maybe a)
fromFileL fName = BL.readFile fName `catch` exceptHandler >>= getL
-- Reads and decodes a JSON object from a file.
fromFile :: (FromJSON a) => FilePath -> IO (Maybe a)
fromFile fName = BS.readFile fName `catch` exceptHandler >>= get
-- Reads and decodes a JSON object from a web url.
fromUrl :: (FromJSON a) => Manager -> String -> [(C8.ByteString, String)] -> IO (Maybe a, Status)
fromUrl manager url [] = do
request <- parseUrl url
fromRequest manager $ request
fromUrl manager url params = do
request <- parseUrl url
fromRequest manager $ urlEncodedBody (map (second C8.pack) params) request
fromAuthUrl :: (FromJSON a) => String -> Flow a
fromAuthUrl url = do
webFlow <- ST.get
let flowManager = manager webFlow
let token = accessToken webFlow
case token of
Nothing -> throwError "There is no valid token for some reason!"
Just tok -> do
request <- parseUrl url
(result, status) <- liftIO $ fromRequest flowManager $ authorize (authToken tok) request
case result of
Nothing -> throwError "There was an error getting the item(s)!"
Just r -> return r
where
    authorize token request = request { requestHeaders = headers token }
headers token = [(hAuthorization, B8.pack $ "Bearer " ++ token)]
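-- Usage sketch (added illustration, not part of the original API): any type
-- with a FromJSON instance can be fetched through 'fromAuthUrl', so Aeson's
-- generic 'Value' suffices to inspect an authorized endpoint from inside the
-- Flow monad. The URL below is only a placeholder.
fetchJsonValue :: Flow Value
fetchJsonValue = fromAuthUrl "https://example.com/api/profile"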
fromAuthorizedUrl :: (FromJSON a) => Manager -> String -> [(HeaderName, C8.ByteString)] -> IO (Maybe a, Status)
fromAuthorizedUrl manager url headers = do
request <- parseUrl url
fromRequest manager $ request { requestHeaders = headers }
fromRequest :: (FromJSON a) => Manager -> Request -> IO (Maybe a, Status)
fromRequest manager request = do
(fmap (\x -> (responseBody x, responseStatus x)) $ httpLbs request manager)
`catch` urlExceptionHandler
>>= (\(json, status) -> do
object <- getL json
return (object, status))
urlExceptionHandler :: HttpException -> IO (BL.ByteString, Status)
urlExceptionHandler (StatusCodeException status _ _) = do
hPutStrLn stderr $ "Error when "++show (statusCode status)++" fetching JSON from url"
hPutStrLn stderr $ show $ statusMessage status
return ("", status)
urlExceptionHandler someException = do
error $ show someException
exceptHandler :: (Data.String.IsString a) => SomeException -> IO a
exceptHandler err = do
hPutStrLn stderr "Error when reading JSON file"
hPutStrLn stderr $ show err
return ""
-- Reads and decodes a JSON object from a web url.
fromUrl' :: Manager -> String -> [(C8.ByteString, String)] -> IO (Maybe Token, Status)
fromUrl' manager url params = do
request <- parseUrl url
(response, status) <- getResponse manager $ urlEncodedBody (map (second C8.pack) params) request
tok <- decodeToken (Data.Aeson.decode $ response)
return (tok, status)
getResponse :: Manager -> Request -> IO (BL.ByteString, Status)
getResponse manager request = do
putStrLn "Request: "
putStrLn $ show request
(fmap (\x -> (responseBody x, responseStatus x)) $ httpLbs request manager)
`catch` urlExceptionHandler
tokenUrl :: BL.ByteString -> IO (Maybe Token)
tokenUrl body = decodeToken (Data.Aeson.decode body)
authRequest :: String -> Flow (Status)
authRequest url = do
webFlow <- ST.get
let flowManager = manager webFlow
let tok = accessToken webFlow
case tok of
Nothing -> do throwError "Cannot perform an authorized request without a valid access token!"
Just token -> do
let authHeaders = [(hAuthorization, C8.pack $ "Bearer " ++ authToken token)]
request <- parseUrl url
response <- httpLbs (request { requestHeaders = authHeaders, method = "POST" }) flowManager
return $ responseStatus response
-- Checks to see if the directory specified in path exists and creates
-- it if it does not already exist.
checkDirectory :: FilePath -> IO()
checkDirectory path = do
exists <- doesDirectoryExist path
  if not exists
then createDirectory path
else return ()
downloadFile :: Manager -> Maybe String -> FilePath -> Token -> IO ()
downloadFile _ Nothing _ _ = return ()
downloadFile manager (Just url) localPath token = do
putStrLn $ "Downloading file " ++ (show url) ++ " to " ++ localPath
runResourceT $ do
request <- liftIO $ parseUrl url
result <- http (authorize request token) manager
responseBody result $$+- sinkFile localPath
putStrLn "Downloading file now."
where
authorize request token = request { requestHeaders = [(hAuthorization, C8.pack $ "Bearer " ++ authToken token)] } | limaner2002/OAuth2 | src/Network/OAuth2/Util.hs | gpl-3.0 | 6,474 | 14 | 21 | 1,440 | 1,967 | 1,011 | 956 | 145 | 3 |
module Model.Geometry where
import Graphics.Rendering.OpenGL
data Geometry =
Geometry { gVertices :: BufferObject
, gUVCoords :: BufferObject
, gNormals :: BufferObject
, gTriElems :: BufferObject
, gTriAdjElems :: BufferObject
, gNOFTris :: GLint
, gNOFAdjs :: GLint
, gVAO :: VertexArrayObject
}
| halvorgb/AO2D | src/Model/Geometry.hs | gpl-3.0 | 447 | 0 | 8 | 191 | 69 | 45 | 24 | 11 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -Wall #-}
module BlazeDateTime (
inputDate
) where
import Data.Text (Text)
import Text.Blaze ((!))
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import Text.Digestive
-- Old but could be used as a basis for a digestive-functors form using proper datetime widget in HTML5
inputDate :: Text -> View v -> H.Html
inputDate ref view =
H.input
! A.type_ "datetime-local"
! A.id (H.toValue ref')
! A.name (H.toValue ref')
! A.value (H.toValue $ fieldInputText ref view)
where
ref' = absoluteRef ref view
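-- Usage sketch (added illustration): within a digestive-functors-blaze style
-- form template this renders a single HTML5 datetime-local input, e.g.
--
-- > inputDate "startsAt" view
--
-- where "startsAt" is assumed to name the corresponding field of the form.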
| nmbooker/recremind | BlazeDateTime.hs | gpl-3.0 | 680 | 0 | 11 | 177 | 166 | 93 | 73 | 17 | 1 |
{-# language FlexibleContexts #-}
{-# language ScopedTypeVariables #-}
-----------------------------------------------------------------------------
-- |
-- Module : Numeric.LinearAlgebra.Accelerate
-- Copyright : (c) Marco Zocca 2017
-- License : BSD3 (see the file LICENSE)
--
-- Maintainer : zocca marco gmail
-- Stability : experimental
-- Portability : portable
--
-- `accelerate` instances for linear algebra
--
-----------------------------------------------------------------------------
module Numeric.LinearAlgebra.Accelerate where
import qualified Data.Array.Accelerate as A
import Data.Array.Accelerate
(Acc, Array, Vector, Segments, DIM1, DIM2, Exp, Any(Any), All(All), Z(Z), (:.)((:.)))
-- import Data.Array.Accelerate.IO (fromVectors, toVectors)
-- import Data.Vector
-- import Data.Vector.Algorithms.Merge
import Data.Array.Accelerate.Interpreter (run)
-- | Vector as newtype
-- newtype Vector e = Vector (Array DIM1 e) deriving (Eq, Show)
-- | segments : vector of segment lengths
-- newtype Segments i = Segments (Vector i) deriving (Eq, Show)
| ocramz/sparse-linear-algebra | accelerate/src/Numeric/LinearAlgebra/Accelerate.hs | gpl-3.0 | 1,105 | 0 | 6 | 168 | 111 | 85 | 26 | 7 | 0 |
module Descr
where
import Helpers
import Ticket
descript :: Ticket -> [String] -> IO ()
descript tick tgs = do
putStrLn $ "change description of: " ++ title tick ++ " - " ++ (head tgs) ++ "."
saveTicket (setDescr (head tgs) tick)
handleDescr args = paramList descript args "descr" "Usage: descr ticket-name new-description"
| anttisalonen/nix | src/Descr.hs | gpl-3.0 | 332 | 0 | 11 | 62 | 110 | 55 | 55 | 8 | 1 |
module DL3040 (tests) where
import Helpers
import Test.Hspec
tests :: SpecWith ()
tests = do
let ?rulesConfig = mempty
describe "DL3040 - `dnf clean all` missing after dnf command." $ do
it "no ok without dnf clean all" $ do
ruleCatches "DL3040" "RUN dnf install -y mariadb-10.4"
onBuildRuleCatches "DL3040" "RUN dnf install -y mariadb-10.4"
it "ok with dnf clean all" $ do
ruleCatchesNot "DL3040" "RUN dnf install -y mariadb-10.4 && dnf clean all"
ruleCatchesNot "DL3040" "RUN notdnf install mariadb"
onBuildRuleCatchesNot "DL3040" "RUN dnf install -y mariadb-10.4 && dnf clean all"
onBuildRuleCatchesNot "DL3040" "RUN notdnf install mariadb"
| lukasmartinelli/hadolint | test/DL3040.hs | gpl-3.0 | 695 | 0 | 13 | 147 | 119 | 53 | 66 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.IAM.Organizations.Roles.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists every predefined Role that IAM supports, or every custom role that
-- is defined for an organization or project.
--
-- /See:/ <https://cloud.google.com/iam/ Identity and Access Management (IAM) API Reference> for @iam.organizations.roles.list@.
module Network.Google.Resource.IAM.Organizations.Roles.List
(
-- * REST Resource
OrganizationsRolesListResource
-- * Creating a Request
, organizationsRolesList
, OrganizationsRolesList
-- * Request Lenses
, orlParent
, orlXgafv
, orlUploadProtocol
, orlAccessToken
, orlUploadType
, orlShowDeleted
, orlView
, orlPageToken
, orlPageSize
, orlCallback
) where
import Network.Google.IAM.Types
import Network.Google.Prelude
-- | A resource alias for @iam.organizations.roles.list@ method which the
-- 'OrganizationsRolesList' request conforms to.
type OrganizationsRolesListResource =
"v1" :>
Capture "parent" Text :>
"roles" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "showDeleted" Bool :>
QueryParam "view" OrganizationsRolesListView :>
QueryParam "pageToken" Text :>
QueryParam "pageSize" (Textual Int32) :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListRolesResponse
-- | Lists every predefined Role that IAM supports, or every custom role that
-- is defined for an organization or project.
--
-- /See:/ 'organizationsRolesList' smart constructor.
data OrganizationsRolesList =
OrganizationsRolesList'
{ _orlParent :: !Text
, _orlXgafv :: !(Maybe Xgafv)
, _orlUploadProtocol :: !(Maybe Text)
, _orlAccessToken :: !(Maybe Text)
, _orlUploadType :: !(Maybe Text)
, _orlShowDeleted :: !(Maybe Bool)
, _orlView :: !(Maybe OrganizationsRolesListView)
, _orlPageToken :: !(Maybe Text)
, _orlPageSize :: !(Maybe (Textual Int32))
, _orlCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'OrganizationsRolesList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'orlParent'
--
-- * 'orlXgafv'
--
-- * 'orlUploadProtocol'
--
-- * 'orlAccessToken'
--
-- * 'orlUploadType'
--
-- * 'orlShowDeleted'
--
-- * 'orlView'
--
-- * 'orlPageToken'
--
-- * 'orlPageSize'
--
-- * 'orlCallback'
organizationsRolesList
:: Text -- ^ 'orlParent'
-> OrganizationsRolesList
organizationsRolesList pOrlParent_ =
OrganizationsRolesList'
{ _orlParent = pOrlParent_
, _orlXgafv = Nothing
, _orlUploadProtocol = Nothing
, _orlAccessToken = Nothing
, _orlUploadType = Nothing
, _orlShowDeleted = Nothing
, _orlView = Nothing
, _orlPageToken = Nothing
, _orlPageSize = Nothing
, _orlCallback = Nothing
}
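-- Usage sketch (added illustration, not part of the generated module): a
-- request is built from the smart constructor and adjusted through its lenses
-- before being handed to the usual gogol 'send' machinery, e.g. starting from
-- organizationsRolesList "organizations/ORGANIZATION_ID" and then setting
-- 'orlPageSize' or 'orlView' as needed.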
-- | The \`parent\` parameter\'s value depends on the target resource for the
-- request, namely [\`roles\`](\/iam\/reference\/rest\/v1\/roles),
-- [\`projects\`](\/iam\/reference\/rest\/v1\/projects.roles), or
-- [\`organizations\`](\/iam\/reference\/rest\/v1\/organizations.roles).
-- Each resource type\'s \`parent\` value format is described below: *
-- [\`roles.list()\`](\/iam\/reference\/rest\/v1\/roles\/list): An empty
-- string. This method doesn\'t require a resource; it simply returns all
-- [predefined roles](\/iam\/docs\/understanding-roles#predefined_roles) in
-- Cloud IAM. Example request URL:
-- \`https:\/\/iam.googleapis.com\/v1\/roles\` *
-- [\`projects.roles.list()\`](\/iam\/reference\/rest\/v1\/projects.roles\/list):
-- \`projects\/{PROJECT_ID}\`. This method lists all project-level [custom
-- roles](\/iam\/docs\/understanding-custom-roles). Example request URL:
-- \`https:\/\/iam.googleapis.com\/v1\/projects\/{PROJECT_ID}\/roles\` *
-- [\`organizations.roles.list()\`](\/iam\/reference\/rest\/v1\/organizations.roles\/list):
-- \`organizations\/{ORGANIZATION_ID}\`. This method lists all
-- organization-level [custom
-- roles](\/iam\/docs\/understanding-custom-roles). Example request URL:
-- \`https:\/\/iam.googleapis.com\/v1\/organizations\/{ORGANIZATION_ID}\/roles\`
-- Note: Wildcard (*) values are invalid; you must specify a complete
-- project ID or organization ID.
orlParent :: Lens' OrganizationsRolesList Text
orlParent
= lens _orlParent (\ s a -> s{_orlParent = a})
-- | V1 error format.
orlXgafv :: Lens' OrganizationsRolesList (Maybe Xgafv)
orlXgafv = lens _orlXgafv (\ s a -> s{_orlXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
orlUploadProtocol :: Lens' OrganizationsRolesList (Maybe Text)
orlUploadProtocol
= lens _orlUploadProtocol
(\ s a -> s{_orlUploadProtocol = a})
-- | OAuth access token.
orlAccessToken :: Lens' OrganizationsRolesList (Maybe Text)
orlAccessToken
= lens _orlAccessToken
(\ s a -> s{_orlAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
orlUploadType :: Lens' OrganizationsRolesList (Maybe Text)
orlUploadType
= lens _orlUploadType
(\ s a -> s{_orlUploadType = a})
-- | Include Roles that have been deleted.
orlShowDeleted :: Lens' OrganizationsRolesList (Maybe Bool)
orlShowDeleted
= lens _orlShowDeleted
(\ s a -> s{_orlShowDeleted = a})
-- | Optional view for the returned Role objects. When \`FULL\` is specified,
-- the \`includedPermissions\` field is returned, which includes a list of
-- all permissions in the role. The default value is \`BASIC\`, which does
-- not return the \`includedPermissions\` field.
orlView :: Lens' OrganizationsRolesList (Maybe OrganizationsRolesListView)
orlView = lens _orlView (\ s a -> s{_orlView = a})
-- | Optional pagination token returned in an earlier ListRolesResponse.
orlPageToken :: Lens' OrganizationsRolesList (Maybe Text)
orlPageToken
= lens _orlPageToken (\ s a -> s{_orlPageToken = a})
-- | Optional limit on the number of roles to include in the response. The
-- default is 300, and the maximum is 1,000.
orlPageSize :: Lens' OrganizationsRolesList (Maybe Int32)
orlPageSize
= lens _orlPageSize (\ s a -> s{_orlPageSize = a}) .
mapping _Coerce
-- | JSONP
orlCallback :: Lens' OrganizationsRolesList (Maybe Text)
orlCallback
= lens _orlCallback (\ s a -> s{_orlCallback = a})
instance GoogleRequest OrganizationsRolesList where
type Rs OrganizationsRolesList = ListRolesResponse
type Scopes OrganizationsRolesList =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient OrganizationsRolesList'{..}
= go _orlParent _orlXgafv _orlUploadProtocol
_orlAccessToken
_orlUploadType
_orlShowDeleted
_orlView
_orlPageToken
_orlPageSize
_orlCallback
(Just AltJSON)
iAMService
where go
= buildClient
(Proxy :: Proxy OrganizationsRolesListResource)
mempty
| brendanhay/gogol | gogol-iam/gen/Network/Google/Resource/IAM/Organizations/Roles/List.hs | mpl-2.0 | 8,047 | 0 | 20 | 1,687 | 1,064 | 623 | 441 | 144 | 1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
-- Module : Network.PagerDuty.REST
-- Copyright : (c) 2013-2015 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
module Network.PagerDuty.REST
(
-- * Sending requests
send
, sendWith
, paginate
, paginateWith
, module Network.PagerDuty.Types
) where
import Control.Applicative
import Control.Lens
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Trans
import Data.Aeson (FromJSON)
import Data.Conduit
import Data.Monoid
import Network.HTTP.Client (Manager)
import qualified Network.HTTP.Client as Client
import Network.HTTP.Types
import Network.PagerDuty.Internal.IO
import Network.PagerDuty.Internal.Types
import Network.PagerDuty.Types
-- FIXME: verify correct actions are all paginated
-- FIXME: Ensure RequesterId parameter is always most significant param
-- FIXME: add smart constructors for all types, for testing purposes
-- | /See:/ 'sendWith'
send :: (MonadIO m, FromJSON b)
=> SubDomain
-> Auth s
-> Manager
-> Request a s b
-> m (Either Error b)
send d a m = sendWith (prod d a m)
sendWith :: (MonadIO m, FromJSON b)
=> Env s
-> Request a s b
-> m (Either Error b)
sendWith e = liftM (fmap _pgItem) . http e
-- | /See:/ 'paginateWith'
paginate :: (MonadIO m, Paginate a, FromJSON b)
=> SubDomain
-> Auth s
-> Manager
-> Request a s b
-> Source m (Either Error b)
paginate d a m = paginateWith (prod d a m)
paginateWith :: (MonadIO m, Paginate a, FromJSON b)
=> Env s
-> Request a s b
-> Source m (Either Error b)
paginateWith e = go
where
go rq = do
rs <- lift (http e rq)
yield (_pgItem <$> rs)
either (const (return ()))
(maybe (return ()) go . next rq . _pgPager)
rs
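-- Usage sketch (added illustration; 'listIncidents' stands for an index-style
-- request constructor and 'CL' for a qualified import of Data.Conduit.List,
-- neither of which exists in this module): one-shot calls go through 'send',
-- while index endpoints are consumed as a conduit 'Source', e.g.
--
-- > paginate d auth mgr listIncidents $$ CL.mapM_ print
--
-- which yields one 'Either' 'Error' value per page.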
http :: (MonadIO m, FromJSON b)
=> Env s
-> Request a s b
-> m (Either Error (Page b))
http e rq = request (e ^. envManager) (e ^. envLogger) rq $ raw
{ Client.host = domain (e ^. envDomain)
, Client.path = renderPath (rq ^. path)
, Client.queryString = renderQuery False (rq ^. query)
}
where
raw = case e ^. envAuth of
AuthBasic u p -> Client.applyBasicAuth u p Client.defaultRequest
AuthToken t -> Client.defaultRequest
{ Client.requestHeaders = [("Authorization", "Token token=" <> t)]
}
| brendanhay/pagerduty | src/Network/PagerDuty/REST.hs | mpl-2.0 | 3,063 | 0 | 16 | 983 | 760 | 406 | 354 | 65 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.AdSense.Types.Product
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.AdSense.Types.Product where
import Network.Google.AdSense.Types.Sum
import Network.Google.Prelude
-- | Response definition for the alerts list rpc.
--
-- /See:/ 'listAlertsResponse' smart constructor.
newtype ListAlertsResponse =
ListAlertsResponse'
{ _larAlerts :: Maybe [Alert]
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ListAlertsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'larAlerts'
listAlertsResponse
:: ListAlertsResponse
listAlertsResponse = ListAlertsResponse' {_larAlerts = Nothing}
-- | The alerts returned in this list response.
larAlerts :: Lens' ListAlertsResponse [Alert]
larAlerts
= lens _larAlerts (\ s a -> s{_larAlerts = a}) .
_Default
. _Coerce
instance FromJSON ListAlertsResponse where
parseJSON
= withObject "ListAlertsResponse"
(\ o ->
ListAlertsResponse' <$> (o .:? "alerts" .!= mempty))
instance ToJSON ListAlertsResponse where
toJSON ListAlertsResponse'{..}
= object (catMaybes [("alerts" .=) <$> _larAlerts])
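-- Usage sketch (added illustration, not part of the generated module): values
-- are built from the smart constructor and filled in through the lenses, e.g.
-- listAlertsResponse with 'larAlerts' set to [alert] via the usual lens
-- setters (assuming the Control.Lens operators are in scope at the call site);
-- responses decoded from the API arrive through the 'FromJSON' instance above.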
-- | Representation of the AdSense code for a given ad client. For more
-- information, see [About the AdSense
-- code](https:\/\/support.google.com\/adsense\/answer\/9274634).
--
-- /See:/ 'adClientAdCode' smart constructor.
data AdClientAdCode =
AdClientAdCode'
{ _acacAmpHead :: !(Maybe Text)
, _acacAmpBody :: !(Maybe Text)
, _acacAdCode :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AdClientAdCode' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'acacAmpHead'
--
-- * 'acacAmpBody'
--
-- * 'acacAdCode'
adClientAdCode
:: AdClientAdCode
adClientAdCode =
AdClientAdCode'
{_acacAmpHead = Nothing, _acacAmpBody = Nothing, _acacAdCode = Nothing}
-- | Output only. The AdSense code snippet to add to the head of an AMP page.
acacAmpHead :: Lens' AdClientAdCode (Maybe Text)
acacAmpHead
= lens _acacAmpHead (\ s a -> s{_acacAmpHead = a})
-- | Output only. The AdSense code snippet to add to the body of an AMP page.
acacAmpBody :: Lens' AdClientAdCode (Maybe Text)
acacAmpBody
= lens _acacAmpBody (\ s a -> s{_acacAmpBody = a})
-- | Output only. The AdSense code snippet to add to the head of an HTML
-- page.
acacAdCode :: Lens' AdClientAdCode (Maybe Text)
acacAdCode
= lens _acacAdCode (\ s a -> s{_acacAdCode = a})
instance FromJSON AdClientAdCode where
parseJSON
= withObject "AdClientAdCode"
(\ o ->
AdClientAdCode' <$>
(o .:? "ampHead") <*> (o .:? "ampBody") <*>
(o .:? "adCode"))
instance ToJSON AdClientAdCode where
toJSON AdClientAdCode'{..}
= object
(catMaybes
[("ampHead" .=) <$> _acacAmpHead,
("ampBody" .=) <$> _acacAmpBody,
("adCode" .=) <$> _acacAdCode])
--
-- /See:/ 'hTTPBodyExtensionsItem' smart constructor.
newtype HTTPBodyExtensionsItem =
HTTPBodyExtensionsItem'
{ _httpbeiAddtional :: HashMap Text JSONValue
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'HTTPBodyExtensionsItem' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'httpbeiAddtional'
hTTPBodyExtensionsItem
:: HashMap Text JSONValue -- ^ 'httpbeiAddtional'
-> HTTPBodyExtensionsItem
hTTPBodyExtensionsItem pHttpbeiAddtional_ =
HTTPBodyExtensionsItem' {_httpbeiAddtional = _Coerce # pHttpbeiAddtional_}
-- | Properties of the object. Contains field \'type with type URL.
httpbeiAddtional :: Lens' HTTPBodyExtensionsItem (HashMap Text JSONValue)
httpbeiAddtional
= lens _httpbeiAddtional
(\ s a -> s{_httpbeiAddtional = a})
. _Coerce
instance FromJSON HTTPBodyExtensionsItem where
parseJSON
= withObject "HTTPBodyExtensionsItem"
(\ o ->
HTTPBodyExtensionsItem' <$> (parseJSONObject o))
instance ToJSON HTTPBodyExtensionsItem where
toJSON = toJSON . _httpbeiAddtional
-- | Response definition for the url channels list rpc.
--
-- /See:/ 'listURLChannelsResponse' smart constructor.
data ListURLChannelsResponse =
ListURLChannelsResponse'
{ _lucrNextPageToken :: !(Maybe Text)
, _lucrURLChannels :: !(Maybe [URLChannel])
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ListURLChannelsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lucrNextPageToken'
--
-- * 'lucrURLChannels'
listURLChannelsResponse
:: ListURLChannelsResponse
listURLChannelsResponse =
ListURLChannelsResponse'
{_lucrNextPageToken = Nothing, _lucrURLChannels = Nothing}
-- | Continuation token used to page through url channels. To retrieve the
-- next page of the results, set the next request\'s \"page_token\" value
-- to this.
lucrNextPageToken :: Lens' ListURLChannelsResponse (Maybe Text)
lucrNextPageToken
= lens _lucrNextPageToken
(\ s a -> s{_lucrNextPageToken = a})
-- | The url channels returned in this list response.
lucrURLChannels :: Lens' ListURLChannelsResponse [URLChannel]
lucrURLChannels
= lens _lucrURLChannels
(\ s a -> s{_lucrURLChannels = a})
. _Default
. _Coerce
instance FromJSON ListURLChannelsResponse where
parseJSON
= withObject "ListURLChannelsResponse"
(\ o ->
ListURLChannelsResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "urlChannels" .!= mempty))
instance ToJSON ListURLChannelsResponse where
toJSON ListURLChannelsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _lucrNextPageToken,
("urlChannels" .=) <$> _lucrURLChannels])
-- | Representation of an unpaid or paid payment. See [Payment timelines for
-- AdSense](https:\/\/support.google.com\/adsense\/answer\/7164703) for
-- more information about payments.
--
-- /See:/ 'payment' smart constructor.
data Payment =
Payment'
{ _pAmount :: !(Maybe Text)
, _pDate :: !(Maybe Date)
, _pName :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Payment' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pAmount'
--
-- * 'pDate'
--
-- * 'pName'
payment
:: Payment
payment = Payment' {_pAmount = Nothing, _pDate = Nothing, _pName = Nothing}
-- | Output only. The amount of unpaid or paid earnings, as a formatted
-- string, including the currency. E.g. \"¥1,235 JPY\", \"$1,234.57\",
-- \"£87.65\".
pAmount :: Lens' Payment (Maybe Text)
pAmount = lens _pAmount (\ s a -> s{_pAmount = a})
-- | Output only. For paid earnings, the date that the payment was credited.
-- For unpaid earnings, this field is empty. Payment dates are always
-- returned in the billing timezone (America\/Los_Angeles).
pDate :: Lens' Payment (Maybe Date)
pDate = lens _pDate (\ s a -> s{_pDate = a})
-- | Resource name of the payment. Format:
-- accounts\/{account}\/payments\/unpaid for unpaid (current) earnings.
-- accounts\/{account}\/payments\/yyyy-MM-dd for paid earnings.
pName :: Lens' Payment (Maybe Text)
pName = lens _pName (\ s a -> s{_pName = a})
instance FromJSON Payment where
parseJSON
= withObject "Payment"
(\ o ->
Payment' <$>
(o .:? "amount") <*> (o .:? "date") <*>
(o .:? "name"))
instance ToJSON Payment where
toJSON Payment'{..}
= object
(catMaybes
[("amount" .=) <$> _pAmount, ("date" .=) <$> _pDate,
("name" .=) <$> _pName])
-- | Response definition for the custom channel list rpc.
--
-- /See:/ 'listCustomChannelsResponse' smart constructor.
data ListCustomChannelsResponse =
ListCustomChannelsResponse'
{ _lccrNextPageToken :: !(Maybe Text)
, _lccrCustomChannels :: !(Maybe [CustomChannel])
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ListCustomChannelsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lccrNextPageToken'
--
-- * 'lccrCustomChannels'
listCustomChannelsResponse
:: ListCustomChannelsResponse
listCustomChannelsResponse =
ListCustomChannelsResponse'
{_lccrNextPageToken = Nothing, _lccrCustomChannels = Nothing}
-- | Continuation token used to page through alerts. To retrieve the next
-- page of the results, set the next request\'s \"page_token\" value to
-- this.
lccrNextPageToken :: Lens' ListCustomChannelsResponse (Maybe Text)
lccrNextPageToken
= lens _lccrNextPageToken
(\ s a -> s{_lccrNextPageToken = a})
-- | The custom channels returned in this list response.
lccrCustomChannels :: Lens' ListCustomChannelsResponse [CustomChannel]
lccrCustomChannels
= lens _lccrCustomChannels
(\ s a -> s{_lccrCustomChannels = a})
. _Default
. _Coerce
instance FromJSON ListCustomChannelsResponse where
parseJSON
= withObject "ListCustomChannelsResponse"
(\ o ->
ListCustomChannelsResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "customChannels" .!= mempty))
instance ToJSON ListCustomChannelsResponse where
toJSON ListCustomChannelsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _lccrNextPageToken,
("customChannels" .=) <$> _lccrCustomChannels])
-- | Response definition for the ad units linked to a custom channel list
-- rpc.
--
-- /See:/ 'listLinkedAdUnitsResponse' smart constructor.
data ListLinkedAdUnitsResponse =
ListLinkedAdUnitsResponse'
{ _llaurNextPageToken :: !(Maybe Text)
, _llaurAdUnits :: !(Maybe [AdUnit])
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ListLinkedAdUnitsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'llaurNextPageToken'
--
-- * 'llaurAdUnits'
listLinkedAdUnitsResponse
:: ListLinkedAdUnitsResponse
listLinkedAdUnitsResponse =
ListLinkedAdUnitsResponse'
{_llaurNextPageToken = Nothing, _llaurAdUnits = Nothing}
-- | Continuation token used to page through ad units. To retrieve the next
-- page of the results, set the next request\'s \"page_token\" value to
-- this.
llaurNextPageToken :: Lens' ListLinkedAdUnitsResponse (Maybe Text)
llaurNextPageToken
= lens _llaurNextPageToken
(\ s a -> s{_llaurNextPageToken = a})
-- | The ad units returned in the list response.
llaurAdUnits :: Lens' ListLinkedAdUnitsResponse [AdUnit]
llaurAdUnits
= lens _llaurAdUnits (\ s a -> s{_llaurAdUnits = a})
. _Default
. _Coerce
instance FromJSON ListLinkedAdUnitsResponse where
parseJSON
= withObject "ListLinkedAdUnitsResponse"
(\ o ->
ListLinkedAdUnitsResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "adUnits" .!= mempty))
instance ToJSON ListLinkedAdUnitsResponse where
toJSON ListLinkedAdUnitsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _llaurNextPageToken,
("adUnits" .=) <$> _llaurAdUnits])
-- | Response definition for the account list rpc.
--
-- /See:/ 'listAccountsResponse' smart constructor.
data ListAccountsResponse =
ListAccountsResponse'
{ _larNextPageToken :: !(Maybe Text)
, _larAccounts :: !(Maybe [Account])
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ListAccountsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'larNextPageToken'
--
-- * 'larAccounts'
listAccountsResponse
:: ListAccountsResponse
listAccountsResponse =
ListAccountsResponse' {_larNextPageToken = Nothing, _larAccounts = Nothing}
-- | Continuation token used to page through accounts. To retrieve the next
-- page of the results, set the next request\'s \"page_token\" value to
-- this.
larNextPageToken :: Lens' ListAccountsResponse (Maybe Text)
larNextPageToken
= lens _larNextPageToken
(\ s a -> s{_larNextPageToken = a})
-- | The accounts returned in this list response.
larAccounts :: Lens' ListAccountsResponse [Account]
larAccounts
= lens _larAccounts (\ s a -> s{_larAccounts = a}) .
_Default
. _Coerce
instance FromJSON ListAccountsResponse where
parseJSON
= withObject "ListAccountsResponse"
(\ o ->
ListAccountsResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "accounts" .!= mempty))
instance ToJSON ListAccountsResponse where
toJSON ListAccountsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _larNextPageToken,
("accounts" .=) <$> _larAccounts])
-- | Representation of a saved report.
--
-- /See:/ 'savedReport' smart constructor.
data SavedReport =
SavedReport'
{ _srName :: !(Maybe Text)
, _srTitle :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'SavedReport' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'srName'
--
-- * 'srTitle'
savedReport
:: SavedReport
savedReport = SavedReport' {_srName = Nothing, _srTitle = Nothing}
-- | Resource name of the report. Format:
-- accounts\/{account}\/reports\/{report}
srName :: Lens' SavedReport (Maybe Text)
srName = lens _srName (\ s a -> s{_srName = a})
-- | Report title as specified by publisher.
srTitle :: Lens' SavedReport (Maybe Text)
srTitle = lens _srTitle (\ s a -> s{_srTitle = a})
instance FromJSON SavedReport where
parseJSON
= withObject "SavedReport"
(\ o ->
SavedReport' <$> (o .:? "name") <*> (o .:? "title"))
instance ToJSON SavedReport where
toJSON SavedReport'{..}
= object
(catMaybes
[("name" .=) <$> _srName, ("title" .=) <$> _srTitle])
-- | Representation of an ad unit. An ad unit represents a saved ad unit with
-- a specific set of ad settings that have been customized within an
-- account.
--
-- /See:/ 'adUnit' smart constructor.
data AdUnit =
AdUnit'
{ _auState :: !(Maybe AdUnitState)
, _auReportingDimensionId :: !(Maybe Text)
, _auName :: !(Maybe Text)
, _auContentAdsSettings :: !(Maybe ContentAdsSettings)
, _auDisplayName :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AdUnit' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'auState'
--
-- * 'auReportingDimensionId'
--
-- * 'auName'
--
-- * 'auContentAdsSettings'
--
-- * 'auDisplayName'
adUnit
:: AdUnit
adUnit =
AdUnit'
{ _auState = Nothing
, _auReportingDimensionId = Nothing
, _auName = Nothing
, _auContentAdsSettings = Nothing
, _auDisplayName = Nothing
}
-- | State of the ad unit.
auState :: Lens' AdUnit (Maybe AdUnitState)
auState = lens _auState (\ s a -> s{_auState = a})
-- | Output only. Unique ID of the ad unit as used in the \`AD_UNIT_ID\`
-- reporting dimension.
auReportingDimensionId :: Lens' AdUnit (Maybe Text)
auReportingDimensionId
= lens _auReportingDimensionId
(\ s a -> s{_auReportingDimensionId = a})
-- | Resource name of the ad unit. Format:
-- accounts\/{account}\/adclient\/{adclient}\/adunits\/{adunit}
auName :: Lens' AdUnit (Maybe Text)
auName = lens _auName (\ s a -> s{_auName = a})
-- | Settings specific to content ads (AFC).
auContentAdsSettings :: Lens' AdUnit (Maybe ContentAdsSettings)
auContentAdsSettings
= lens _auContentAdsSettings
(\ s a -> s{_auContentAdsSettings = a})
-- | Display name of the ad unit, as provided when the ad unit was created.
auDisplayName :: Lens' AdUnit (Maybe Text)
auDisplayName
= lens _auDisplayName
(\ s a -> s{_auDisplayName = a})
instance FromJSON AdUnit where
parseJSON
= withObject "AdUnit"
(\ o ->
AdUnit' <$>
(o .:? "state") <*> (o .:? "reportingDimensionId")
<*> (o .:? "name")
<*> (o .:? "contentAdsSettings")
<*> (o .:? "displayName"))
instance ToJSON AdUnit where
toJSON AdUnit'{..}
= object
(catMaybes
[("state" .=) <$> _auState,
("reportingDimensionId" .=) <$>
_auReportingDimensionId,
("name" .=) <$> _auName,
("contentAdsSettings" .=) <$> _auContentAdsSettings,
("displayName" .=) <$> _auDisplayName])
-- | Cell representation.
--
-- /See:/ 'cell' smart constructor.
newtype Cell =
Cell'
{ _cValue :: Maybe Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Cell' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cValue'
cell
:: Cell
cell = Cell' {_cValue = Nothing}
-- | Value in the cell. The dimension cells contain strings, and the metric
-- cells contain numbers.
cValue :: Lens' Cell (Maybe Text)
cValue = lens _cValue (\ s a -> s{_cValue = a})
instance FromJSON Cell where
parseJSON
= withObject "Cell"
(\ o -> Cell' <$> (o .:? "value"))
instance ToJSON Cell where
toJSON Cell'{..}
= object (catMaybes [("value" .=) <$> _cValue])
-- | Representation of an alert.
--
-- /See:/ 'alert' smart constructor.
data Alert =
Alert'
{ _aSeverity :: !(Maybe AlertSeverity)
, _aName :: !(Maybe Text)
, _aType :: !(Maybe Text)
, _aMessage :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Alert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aSeverity'
--
-- * 'aName'
--
-- * 'aType'
--
-- * 'aMessage'
alert
:: Alert
alert =
Alert'
{ _aSeverity = Nothing
, _aName = Nothing
, _aType = Nothing
, _aMessage = Nothing
}
-- | Output only. Severity of this alert.
aSeverity :: Lens' Alert (Maybe AlertSeverity)
aSeverity
= lens _aSeverity (\ s a -> s{_aSeverity = a})
-- | Resource name of the alert. Format: accounts\/{account}\/alerts\/{alert}
aName :: Lens' Alert (Maybe Text)
aName = lens _aName (\ s a -> s{_aName = a})
-- | Output only. Type of alert. This identifies the broad type of this
-- alert, and provides a stable machine-readable identifier that will not
-- be translated. For example, \"payment-hold\".
aType :: Lens' Alert (Maybe Text)
aType = lens _aType (\ s a -> s{_aType = a})
-- | Output only. The localized alert message. This may contain HTML markup,
-- such as phrase elements or links.
aMessage :: Lens' Alert (Maybe Text)
aMessage = lens _aMessage (\ s a -> s{_aMessage = a})
instance FromJSON Alert where
parseJSON
= withObject "Alert"
(\ o ->
Alert' <$>
(o .:? "severity") <*> (o .:? "name") <*>
(o .:? "type")
<*> (o .:? "message"))
instance ToJSON Alert where
toJSON Alert'{..}
= object
(catMaybes
[("severity" .=) <$> _aSeverity,
("name" .=) <$> _aName, ("type" .=) <$> _aType,
("message" .=) <$> _aMessage])
-- | Response definition for the saved reports list rpc.
--
-- /See:/ 'listSavedReportsResponse' smart constructor.
data ListSavedReportsResponse =
ListSavedReportsResponse'
{ _lsrrNextPageToken :: !(Maybe Text)
, _lsrrSavedReports :: !(Maybe [SavedReport])
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ListSavedReportsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lsrrNextPageToken'
--
-- * 'lsrrSavedReports'
listSavedReportsResponse
:: ListSavedReportsResponse
listSavedReportsResponse =
ListSavedReportsResponse'
{_lsrrNextPageToken = Nothing, _lsrrSavedReports = Nothing}
-- | Continuation token used to page through reports. To retrieve the next
-- page of the results, set the next request\'s \"page_token\" value to
-- this.
lsrrNextPageToken :: Lens' ListSavedReportsResponse (Maybe Text)
lsrrNextPageToken
= lens _lsrrNextPageToken
(\ s a -> s{_lsrrNextPageToken = a})
-- | The reports returned in this list response.
lsrrSavedReports :: Lens' ListSavedReportsResponse [SavedReport]
lsrrSavedReports
= lens _lsrrSavedReports
(\ s a -> s{_lsrrSavedReports = a})
. _Default
. _Coerce
instance FromJSON ListSavedReportsResponse where
parseJSON
= withObject "ListSavedReportsResponse"
(\ o ->
ListSavedReportsResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "savedReports" .!= mempty))
instance ToJSON ListSavedReportsResponse where
toJSON ListSavedReportsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _lsrrNextPageToken,
("savedReports" .=) <$> _lsrrSavedReports])
-- | Response definition for the child account list rpc.
--
-- /See:/ 'listChildAccountsResponse' smart constructor.
data ListChildAccountsResponse =
ListChildAccountsResponse'
{ _lcarNextPageToken :: !(Maybe Text)
, _lcarAccounts :: !(Maybe [Account])
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ListChildAccountsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lcarNextPageToken'
--
-- * 'lcarAccounts'
listChildAccountsResponse
:: ListChildAccountsResponse
listChildAccountsResponse =
ListChildAccountsResponse'
{_lcarNextPageToken = Nothing, _lcarAccounts = Nothing}
-- | Continuation token used to page through accounts. To retrieve the next
-- page of the results, set the next request\'s \"page_token\" value to
-- this.
lcarNextPageToken :: Lens' ListChildAccountsResponse (Maybe Text)
lcarNextPageToken
= lens _lcarNextPageToken
(\ s a -> s{_lcarNextPageToken = a})
-- | The accounts returned in this list response.
lcarAccounts :: Lens' ListChildAccountsResponse [Account]
lcarAccounts
= lens _lcarAccounts (\ s a -> s{_lcarAccounts = a})
. _Default
. _Coerce
instance FromJSON ListChildAccountsResponse where
parseJSON
= withObject "ListChildAccountsResponse"
(\ o ->
ListChildAccountsResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "accounts" .!= mempty))
instance ToJSON ListChildAccountsResponse where
toJSON ListChildAccountsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _lcarNextPageToken,
("accounts" .=) <$> _lcarAccounts])
-- | Representation of an account.
--
-- /See:/ 'account' smart constructor.
data Account =
Account'
{ _accPremium :: !(Maybe Bool)
, _accPendingTasks :: !(Maybe [Text])
, _accName :: !(Maybe Text)
, _accDisplayName :: !(Maybe Text)
, _accTimeZone :: !(Maybe TimeZone)
, _accCreateTime :: !(Maybe DateTime')
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Account' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'accPremium'
--
-- * 'accPendingTasks'
--
-- * 'accName'
--
-- * 'accDisplayName'
--
-- * 'accTimeZone'
--
-- * 'accCreateTime'
account
:: Account
account =
Account'
{ _accPremium = Nothing
, _accPendingTasks = Nothing
, _accName = Nothing
, _accDisplayName = Nothing
, _accTimeZone = Nothing
, _accCreateTime = Nothing
}
-- | Output only. Whether this account is premium.
accPremium :: Lens' Account (Maybe Bool)
accPremium
= lens _accPremium (\ s a -> s{_accPremium = a})
-- | Output only. Outstanding tasks that need to be completed as part of the
-- sign-up process for a new account. e.g. \"billing-profile-creation\",
-- \"phone-pin-verification\".
accPendingTasks :: Lens' Account [Text]
accPendingTasks
= lens _accPendingTasks
(\ s a -> s{_accPendingTasks = a})
. _Default
. _Coerce
-- | Resource name of the account. Format: accounts\/pub-[0-9]+
accName :: Lens' Account (Maybe Text)
accName = lens _accName (\ s a -> s{_accName = a})
-- | Output only. Display name of this account.
accDisplayName :: Lens' Account (Maybe Text)
accDisplayName
= lens _accDisplayName
(\ s a -> s{_accDisplayName = a})
-- | The account time zone, as used by reporting. For more information, see
-- [changing the time zone of your
-- reports](https:\/\/support.google.com\/adsense\/answer\/9830725).
accTimeZone :: Lens' Account (Maybe TimeZone)
accTimeZone
= lens _accTimeZone (\ s a -> s{_accTimeZone = a})
-- | Output only. Creation time of the account.
accCreateTime :: Lens' Account (Maybe UTCTime)
accCreateTime
= lens _accCreateTime
(\ s a -> s{_accCreateTime = a})
. mapping _DateTime
instance FromJSON Account where
parseJSON
= withObject "Account"
(\ o ->
Account' <$>
(o .:? "premium") <*>
(o .:? "pendingTasks" .!= mempty)
<*> (o .:? "name")
<*> (o .:? "displayName")
<*> (o .:? "timeZone")
<*> (o .:? "createTime"))
instance ToJSON Account where
toJSON Account'{..}
= object
(catMaybes
[("premium" .=) <$> _accPremium,
("pendingTasks" .=) <$> _accPendingTasks,
("name" .=) <$> _accName,
("displayName" .=) <$> _accDisplayName,
("timeZone" .=) <$> _accTimeZone,
("createTime" .=) <$> _accCreateTime])
-- | Response definition for the adunit list rpc.
--
-- /See:/ 'listAdUnitsResponse' smart constructor.
data ListAdUnitsResponse =
ListAdUnitsResponse'
{ _laurNextPageToken :: !(Maybe Text)
, _laurAdUnits :: !(Maybe [AdUnit])
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ListAdUnitsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'laurNextPageToken'
--
-- * 'laurAdUnits'
listAdUnitsResponse
:: ListAdUnitsResponse
listAdUnitsResponse =
ListAdUnitsResponse' {_laurNextPageToken = Nothing, _laurAdUnits = Nothing}
-- | Continuation token used to page through ad units. To retrieve the next
-- page of the results, set the next request\'s \"page_token\" value to
-- this.
laurNextPageToken :: Lens' ListAdUnitsResponse (Maybe Text)
laurNextPageToken
= lens _laurNextPageToken
(\ s a -> s{_laurNextPageToken = a})
-- | The ad units returned in the list response.
laurAdUnits :: Lens' ListAdUnitsResponse [AdUnit]
laurAdUnits
= lens _laurAdUnits (\ s a -> s{_laurAdUnits = a}) .
_Default
. _Coerce
instance FromJSON ListAdUnitsResponse where
parseJSON
= withObject "ListAdUnitsResponse"
(\ o ->
ListAdUnitsResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "adUnits" .!= mempty))
instance ToJSON ListAdUnitsResponse where
toJSON ListAdUnitsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _laurNextPageToken,
("adUnits" .=) <$> _laurAdUnits])
-- | Represents a whole or partial calendar date, such as a birthday. The
-- time of day and time zone are either specified elsewhere or are
-- insignificant. The date is relative to the Gregorian Calendar. This can
-- represent one of the following: * A full date, with non-zero year,
-- month, and day values * A month and day value, with a zero year, such as
-- an anniversary * A year on its own, with zero month and day values * A
-- year and month value, with a zero day, such as a credit card expiration
-- date Related types are google.type.TimeOfDay and
-- \`google.protobuf.Timestamp\`.
--
-- /See:/ 'date' smart constructor.
data Date =
Date'
{ _dDay :: !(Maybe (Textual Int32))
, _dYear :: !(Maybe (Textual Int32))
, _dMonth :: !(Maybe (Textual Int32))
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Date' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dDay'
--
-- * 'dYear'
--
-- * 'dMonth'
date
:: Date
date = Date' {_dDay = Nothing, _dYear = Nothing, _dMonth = Nothing}
-- | Day of a month. Must be from 1 to 31 and valid for the year and month,
-- or 0 to specify a year by itself or a year and month where the day
-- isn\'t significant.
dDay :: Lens' Date (Maybe Int32)
dDay
= lens _dDay (\ s a -> s{_dDay = a}) .
mapping _Coerce
-- | Year of the date. Must be from 1 to 9999, or 0 to specify a date without
-- a year.
dYear :: Lens' Date (Maybe Int32)
dYear
= lens _dYear (\ s a -> s{_dYear = a}) .
mapping _Coerce
-- | Month of a year. Must be from 1 to 12, or 0 to specify a year without a
-- month and day.
dMonth :: Lens' Date (Maybe Int32)
dMonth
= lens _dMonth (\ s a -> s{_dMonth = a}) .
      mapping _Coerce
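-- Illustrative only (not part of the generated binding): assuming the
-- "lens" operators '&' and '?~' are in scope, a partial or full date can be
-- built by updating the empty smart constructor, e.g.
--
-- > birthday :: Date
-- > birthday = date & dYear ?~ 1990 & dMonth ?~ 4 & dDay ?~ 1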
instance FromJSON Date where
parseJSON
= withObject "Date"
(\ o ->
Date' <$>
(o .:? "day") <*> (o .:? "year") <*> (o .:? "month"))
instance ToJSON Date where
toJSON Date'{..}
= object
(catMaybes
[("day" .=) <$> _dDay, ("year" .=) <$> _dYear,
("month" .=) <$> _dMonth])
-- | Representation of an ad client. An ad client represents a user\'s
-- subscription with a specific AdSense product.
--
-- /See:/ 'adClient' smart constructor.
data AdClient =
AdClient'
{ _acReportingDimensionId :: !(Maybe Text)
, _acName :: !(Maybe Text)
, _acProductCode :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AdClient' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'acReportingDimensionId'
--
-- * 'acName'
--
-- * 'acProductCode'
adClient
:: AdClient
adClient =
AdClient'
{ _acReportingDimensionId = Nothing
, _acName = Nothing
, _acProductCode = Nothing
}
-- | Output only. Unique ID of the ad client as used in the \`AD_CLIENT_ID\`
-- reporting dimension. Present only if the ad client supports reporting.
acReportingDimensionId :: Lens' AdClient (Maybe Text)
acReportingDimensionId
= lens _acReportingDimensionId
(\ s a -> s{_acReportingDimensionId = a})
-- | Resource name of the ad client. Format:
-- accounts\/{account}\/adclient\/{adclient}
acName :: Lens' AdClient (Maybe Text)
acName = lens _acName (\ s a -> s{_acName = a})
-- | Output only. Product code of the ad client. For example, \"AFC\" for
-- AdSense for Content.
acProductCode :: Lens' AdClient (Maybe Text)
acProductCode
= lens _acProductCode
(\ s a -> s{_acProductCode = a})
instance FromJSON AdClient where
parseJSON
= withObject "AdClient"
(\ o ->
AdClient' <$>
(o .:? "reportingDimensionId") <*> (o .:? "name") <*>
(o .:? "productCode"))
instance ToJSON AdClient where
toJSON AdClient'{..}
= object
(catMaybes
[("reportingDimensionId" .=) <$>
_acReportingDimensionId,
("name" .=) <$> _acName,
("productCode" .=) <$> _acProductCode])
-- | Result of a generated report.
--
-- /See:/ 'reportResult' smart constructor.
data ReportResult =
ReportResult'
{ _rrAverages :: !(Maybe Row)
, _rrEndDate :: !(Maybe Date)
, _rrWarnings :: !(Maybe [Text])
, _rrRows :: !(Maybe [Row])
, _rrTotals :: !(Maybe Row)
, _rrStartDate :: !(Maybe Date)
, _rrHeaders :: !(Maybe [Header])
, _rrTotalMatchedRows :: !(Maybe (Textual Int64))
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ReportResult' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rrAverages'
--
-- * 'rrEndDate'
--
-- * 'rrWarnings'
--
-- * 'rrRows'
--
-- * 'rrTotals'
--
-- * 'rrStartDate'
--
-- * 'rrHeaders'
--
-- * 'rrTotalMatchedRows'
reportResult
:: ReportResult
reportResult =
ReportResult'
{ _rrAverages = Nothing
, _rrEndDate = Nothing
, _rrWarnings = Nothing
, _rrRows = Nothing
, _rrTotals = Nothing
, _rrStartDate = Nothing
, _rrHeaders = Nothing
, _rrTotalMatchedRows = Nothing
}
-- | The averages of the report. This is the same length as any other row in
-- the report; cells corresponding to dimension columns are empty.
rrAverages :: Lens' ReportResult (Maybe Row)
rrAverages
= lens _rrAverages (\ s a -> s{_rrAverages = a})
-- | Required. End date of the range (inclusive).
rrEndDate :: Lens' ReportResult (Maybe Date)
rrEndDate
= lens _rrEndDate (\ s a -> s{_rrEndDate = a})
-- | Any warnings associated with generation of the report. These warnings
-- are always returned in English.
rrWarnings :: Lens' ReportResult [Text]
rrWarnings
= lens _rrWarnings (\ s a -> s{_rrWarnings = a}) .
_Default
. _Coerce
-- | The output rows of the report. Each row is a list of cells; one for each
-- dimension in the request, followed by one for each metric in the
-- request.
rrRows :: Lens' ReportResult [Row]
rrRows
= lens _rrRows (\ s a -> s{_rrRows = a}) . _Default .
_Coerce
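-- Illustrative only (not part of the generated binding): assuming the
-- "lens" combinators '^.', '^..', 'traverse' and '_Just' are in scope, the
-- textual cell values of every row can be collected as
--
-- > rowValues :: ReportResult -> [[Text]]
-- > rowValues r = [ row ^.. rCells . traverse . cValue . _Just
-- >               | row <- r ^. rrRows ]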
-- | The totals of the report. This is the same length as any other row in
-- the report; cells corresponding to dimension columns are empty.
rrTotals :: Lens' ReportResult (Maybe Row)
rrTotals = lens _rrTotals (\ s a -> s{_rrTotals = a})
-- | Required. Start date of the range (inclusive).
rrStartDate :: Lens' ReportResult (Maybe Date)
rrStartDate
= lens _rrStartDate (\ s a -> s{_rrStartDate = a})
-- | The header information; one for each dimension in the request, followed
-- by one for each metric in the request.
rrHeaders :: Lens' ReportResult [Header]
rrHeaders
= lens _rrHeaders (\ s a -> s{_rrHeaders = a}) .
_Default
. _Coerce
-- | The total number of rows matched by the report request.
rrTotalMatchedRows :: Lens' ReportResult (Maybe Int64)
rrTotalMatchedRows
= lens _rrTotalMatchedRows
(\ s a -> s{_rrTotalMatchedRows = a})
. mapping _Coerce
instance FromJSON ReportResult where
parseJSON
= withObject "ReportResult"
(\ o ->
ReportResult' <$>
(o .:? "averages") <*> (o .:? "endDate") <*>
(o .:? "warnings" .!= mempty)
<*> (o .:? "rows" .!= mempty)
<*> (o .:? "totals")
<*> (o .:? "startDate")
<*> (o .:? "headers" .!= mempty)
<*> (o .:? "totalMatchedRows"))
instance ToJSON ReportResult where
toJSON ReportResult'{..}
= object
(catMaybes
[("averages" .=) <$> _rrAverages,
("endDate" .=) <$> _rrEndDate,
("warnings" .=) <$> _rrWarnings,
("rows" .=) <$> _rrRows, ("totals" .=) <$> _rrTotals,
("startDate" .=) <$> _rrStartDate,
("headers" .=) <$> _rrHeaders,
("totalMatchedRows" .=) <$> _rrTotalMatchedRows])
-- | The header information of the columns requested in the report.
--
-- /See:/ 'header' smart constructor.
data Header =
Header'
{ _hCurrencyCode :: !(Maybe Text)
, _hName :: !(Maybe Text)
, _hType :: !(Maybe HeaderType)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Header' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'hCurrencyCode'
--
-- * 'hName'
--
-- * 'hType'
header
:: Header
header = Header' {_hCurrencyCode = Nothing, _hName = Nothing, _hType = Nothing}
-- | The [ISO-4217 currency code](https:\/\/en.wikipedia.org\/wiki\/ISO_4217)
-- of this column. Only present if the header type is METRIC_CURRENCY.
hCurrencyCode :: Lens' Header (Maybe Text)
hCurrencyCode
= lens _hCurrencyCode
(\ s a -> s{_hCurrencyCode = a})
-- | Required. Name of the header.
hName :: Lens' Header (Maybe Text)
hName = lens _hName (\ s a -> s{_hName = a})
-- | Required. Type of the header.
hType :: Lens' Header (Maybe HeaderType)
hType = lens _hType (\ s a -> s{_hType = a})
instance FromJSON Header where
parseJSON
= withObject "Header"
(\ o ->
Header' <$>
(o .:? "currencyCode") <*> (o .:? "name") <*>
(o .:? "type"))
instance ToJSON Header where
toJSON Header'{..}
= object
(catMaybes
[("currencyCode" .=) <$> _hCurrencyCode,
("name" .=) <$> _hName, ("type" .=) <$> _hType])
-- | Settings specific to content ads (AFC).
--
-- /See:/ 'contentAdsSettings' smart constructor.
data ContentAdsSettings =
ContentAdsSettings'
{ _casSize :: !(Maybe Text)
, _casType :: !(Maybe ContentAdsSettingsType)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ContentAdsSettings' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'casSize'
--
-- * 'casType'
contentAdsSettings
:: ContentAdsSettings
contentAdsSettings =
ContentAdsSettings' {_casSize = Nothing, _casType = Nothing}
-- | Size of the ad unit. e.g. \"728x90\", \"1x3\" (for responsive ad units).
casSize :: Lens' ContentAdsSettings (Maybe Text)
casSize = lens _casSize (\ s a -> s{_casSize = a})
-- | Type of the ad unit.
casType :: Lens' ContentAdsSettings (Maybe ContentAdsSettingsType)
casType = lens _casType (\ s a -> s{_casType = a})
instance FromJSON ContentAdsSettings where
parseJSON
= withObject "ContentAdsSettings"
(\ o ->
ContentAdsSettings' <$>
(o .:? "size") <*> (o .:? "type"))
instance ToJSON ContentAdsSettings where
toJSON ContentAdsSettings'{..}
= object
(catMaybes
[("size" .=) <$> _casSize, ("type" .=) <$> _casType])
-- | Row representation.
--
-- /See:/ 'row' smart constructor.
newtype Row =
Row'
{ _rCells :: Maybe [Cell]
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Row' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rCells'
row
:: Row
row = Row' {_rCells = Nothing}
-- | Cells in the row.
rCells :: Lens' Row [Cell]
rCells
= lens _rCells (\ s a -> s{_rCells = a}) . _Default .
_Coerce
instance FromJSON Row where
parseJSON
= withObject "Row"
(\ o -> Row' <$> (o .:? "cells" .!= mempty))
instance ToJSON Row where
toJSON Row'{..}
= object (catMaybes [("cells" .=) <$> _rCells])
-- | Representation of a custom channel.
--
-- /See:/ 'customChannel' smart constructor.
data CustomChannel =
CustomChannel'
{ _ccReportingDimensionId :: !(Maybe Text)
, _ccName :: !(Maybe Text)
, _ccDisplayName :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'CustomChannel' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ccReportingDimensionId'
--
-- * 'ccName'
--
-- * 'ccDisplayName'
customChannel
:: CustomChannel
customChannel =
CustomChannel'
{ _ccReportingDimensionId = Nothing
, _ccName = Nothing
, _ccDisplayName = Nothing
}
-- | Output only. Unique ID of the custom channel as used in the
-- \`CUSTOM_CHANNEL_ID\` reporting dimension.
ccReportingDimensionId :: Lens' CustomChannel (Maybe Text)
ccReportingDimensionId
= lens _ccReportingDimensionId
(\ s a -> s{_ccReportingDimensionId = a})
-- | Resource name of the custom channel. Format:
-- accounts\/{account}\/adclients\/{adclient}\/customchannels\/{customchannel}
ccName :: Lens' CustomChannel (Maybe Text)
ccName = lens _ccName (\ s a -> s{_ccName = a})
-- | Display name of the custom channel.
ccDisplayName :: Lens' CustomChannel (Maybe Text)
ccDisplayName
= lens _ccDisplayName
(\ s a -> s{_ccDisplayName = a})
instance FromJSON CustomChannel where
parseJSON
= withObject "CustomChannel"
(\ o ->
CustomChannel' <$>
(o .:? "reportingDimensionId") <*> (o .:? "name") <*>
(o .:? "displayName"))
instance ToJSON CustomChannel where
toJSON CustomChannel'{..}
= object
(catMaybes
[("reportingDimensionId" .=) <$>
_ccReportingDimensionId,
("name" .=) <$> _ccName,
("displayName" .=) <$> _ccDisplayName])
-- | Representation of a URL channel. URL channels allow you to track the
-- performance of particular pages in your site; see [URL
-- channels](https:\/\/support.google.com\/adsense\/answer\/2923836) for
-- more information.
--
-- /See:/ 'urlChannel' smart constructor.
data URLChannel =
URLChannel'
{ _ucReportingDimensionId :: !(Maybe Text)
, _ucURIPattern :: !(Maybe Text)
, _ucName :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'URLChannel' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ucReportingDimensionId'
--
-- * 'ucURIPattern'
--
-- * 'ucName'
urlChannel
:: URLChannel
urlChannel =
URLChannel'
{ _ucReportingDimensionId = Nothing
, _ucURIPattern = Nothing
, _ucName = Nothing
}
-- | Output only. Unique ID of the URL channel as used in the
-- \`URL_CHANNEL_ID\` reporting dimension.
ucReportingDimensionId :: Lens' URLChannel (Maybe Text)
ucReportingDimensionId
= lens _ucReportingDimensionId
(\ s a -> s{_ucReportingDimensionId = a})
-- | URI pattern of the channel. Does not include \"http:\/\/\" or
-- \"https:\/\/\". Example: www.example.com\/home
ucURIPattern :: Lens' URLChannel (Maybe Text)
ucURIPattern
= lens _ucURIPattern (\ s a -> s{_ucURIPattern = a})
-- | Resource name of the URL channel. Format:
-- accounts\/{account}\/adclient\/{adclient}\/urlchannels\/{urlchannel}
ucName :: Lens' URLChannel (Maybe Text)
ucName = lens _ucName (\ s a -> s{_ucName = a})
instance FromJSON URLChannel where
parseJSON
= withObject "URLChannel"
(\ o ->
URLChannel' <$>
(o .:? "reportingDimensionId") <*>
(o .:? "uriPattern")
<*> (o .:? "name"))
instance ToJSON URLChannel where
toJSON URLChannel'{..}
= object
(catMaybes
[("reportingDimensionId" .=) <$>
_ucReportingDimensionId,
("uriPattern" .=) <$> _ucURIPattern,
("name" .=) <$> _ucName])
-- | Response definition for the payments list rpc.
--
-- /See:/ 'listPaymentsResponse' smart constructor.
newtype ListPaymentsResponse =
ListPaymentsResponse'
{ _lprPayments :: Maybe [Payment]
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ListPaymentsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lprPayments'
listPaymentsResponse
:: ListPaymentsResponse
listPaymentsResponse = ListPaymentsResponse' {_lprPayments = Nothing}
-- | The payments returned in this list response.
lprPayments :: Lens' ListPaymentsResponse [Payment]
lprPayments
= lens _lprPayments (\ s a -> s{_lprPayments = a}) .
_Default
. _Coerce
instance FromJSON ListPaymentsResponse where
parseJSON
= withObject "ListPaymentsResponse"
(\ o ->
ListPaymentsResponse' <$>
(o .:? "payments" .!= mempty))
instance ToJSON ListPaymentsResponse where
toJSON ListPaymentsResponse'{..}
= object
(catMaybes [("payments" .=) <$> _lprPayments])
-- | Message that represents an arbitrary HTTP body. It should only be used
-- for payload formats that can\'t be represented as JSON, such as raw
-- binary or an HTML page. This message can be used both in streaming and
-- non-streaming API methods in the request as well as the response. It can
-- be used as a top-level request field, which is convenient if one wants
-- to extract parameters from either the URL or HTTP template into the
-- request fields and also want access to the raw HTTP body. Example:
-- message GetResourceRequest { \/\/ A unique request id. string request_id
-- = 1; \/\/ The raw HTTP body is bound to this field. google.api.HttpBody
-- http_body = 2; } service ResourceService { rpc
-- GetResource(GetResourceRequest) returns (google.api.HttpBody); rpc
-- UpdateResource(google.api.HttpBody) returns (google.protobuf.Empty); }
-- Example with streaming methods: service CaldavService { rpc
-- GetCalendar(stream google.api.HttpBody) returns (stream
-- google.api.HttpBody); rpc UpdateCalendar(stream google.api.HttpBody)
-- returns (stream google.api.HttpBody); } Use of this type only changes
-- how the request and response bodies are handled, all other features will
-- continue to work unchanged.
--
-- /See:/ 'hTTPBody' smart constructor.
data HTTPBody =
HTTPBody'
{ _httpbExtensions :: !(Maybe [HTTPBodyExtensionsItem])
, _httpbData :: !(Maybe Bytes)
, _httpbContentType :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'HTTPBody' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'httpbExtensions'
--
-- * 'httpbData'
--
-- * 'httpbContentType'
hTTPBody
:: HTTPBody
hTTPBody =
HTTPBody'
{ _httpbExtensions = Nothing
, _httpbData = Nothing
, _httpbContentType = Nothing
}
-- | Application specific response metadata. Must be set in the first
-- response for streaming APIs.
httpbExtensions :: Lens' HTTPBody [HTTPBodyExtensionsItem]
httpbExtensions
= lens _httpbExtensions
(\ s a -> s{_httpbExtensions = a})
. _Default
. _Coerce
-- | The HTTP request\/response body as raw binary.
httpbData :: Lens' HTTPBody (Maybe ByteString)
httpbData
= lens _httpbData (\ s a -> s{_httpbData = a}) .
mapping _Bytes
-- | The HTTP Content-Type header value specifying the content type of the
-- body.
httpbContentType :: Lens' HTTPBody (Maybe Text)
httpbContentType
= lens _httpbContentType
(\ s a -> s{_httpbContentType = a})
instance FromJSON HTTPBody where
parseJSON
= withObject "HTTPBody"
(\ o ->
HTTPBody' <$>
(o .:? "extensions" .!= mempty) <*> (o .:? "data")
<*> (o .:? "contentType"))
instance ToJSON HTTPBody where
toJSON HTTPBody'{..}
= object
(catMaybes
[("extensions" .=) <$> _httpbExtensions,
("data" .=) <$> _httpbData,
("contentType" .=) <$> _httpbContentType])
-- | Response definition for the custom channels linked to an adunit list
-- rpc.
--
-- /See:/ 'listLinkedCustomChannelsResponse' smart constructor.
data ListLinkedCustomChannelsResponse =
ListLinkedCustomChannelsResponse'
{ _llccrNextPageToken :: !(Maybe Text)
, _llccrCustomChannels :: !(Maybe [CustomChannel])
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ListLinkedCustomChannelsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'llccrNextPageToken'
--
-- * 'llccrCustomChannels'
listLinkedCustomChannelsResponse
:: ListLinkedCustomChannelsResponse
listLinkedCustomChannelsResponse =
ListLinkedCustomChannelsResponse'
{_llccrNextPageToken = Nothing, _llccrCustomChannels = Nothing}
-- | Continuation token used to page through custom channels. To retrieve the next
-- page of the results, set the next request\'s \"page_token\" value to
-- this.
llccrNextPageToken :: Lens' ListLinkedCustomChannelsResponse (Maybe Text)
llccrNextPageToken
= lens _llccrNextPageToken
(\ s a -> s{_llccrNextPageToken = a})
-- | The custom channels returned in this list response.
llccrCustomChannels :: Lens' ListLinkedCustomChannelsResponse [CustomChannel]
llccrCustomChannels
= lens _llccrCustomChannels
(\ s a -> s{_llccrCustomChannels = a})
. _Default
. _Coerce
instance FromJSON ListLinkedCustomChannelsResponse
where
parseJSON
= withObject "ListLinkedCustomChannelsResponse"
(\ o ->
ListLinkedCustomChannelsResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "customChannels" .!= mempty))
instance ToJSON ListLinkedCustomChannelsResponse
where
toJSON ListLinkedCustomChannelsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _llccrNextPageToken,
("customChannels" .=) <$> _llccrCustomChannels])
-- | Representation of the AdSense code for a given ad unit.
--
-- /See:/ 'adUnitAdCode' smart constructor.
newtype AdUnitAdCode =
AdUnitAdCode'
{ _auacAdCode :: Maybe Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AdUnitAdCode' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'auacAdCode'
adUnitAdCode
:: AdUnitAdCode
adUnitAdCode = AdUnitAdCode' {_auacAdCode = Nothing}
-- | Output only. The AdSense code snippet to add to the body of an HTML
-- page.
auacAdCode :: Lens' AdUnitAdCode (Maybe Text)
auacAdCode
= lens _auacAdCode (\ s a -> s{_auacAdCode = a})
instance FromJSON AdUnitAdCode where
parseJSON
= withObject "AdUnitAdCode"
(\ o -> AdUnitAdCode' <$> (o .:? "adCode"))
instance ToJSON AdUnitAdCode where
toJSON AdUnitAdCode'{..}
= object (catMaybes [("adCode" .=) <$> _auacAdCode])
-- | Represents a time zone from the [IANA Time Zone
-- Database](https:\/\/www.iana.org\/time-zones).
--
-- /See:/ 'timeZone' smart constructor.
data TimeZone =
TimeZone'
{ _tzVersion :: !(Maybe Text)
, _tzId :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TimeZone' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tzVersion'
--
-- * 'tzId'
timeZone
:: TimeZone
timeZone = TimeZone' {_tzVersion = Nothing, _tzId = Nothing}
-- | Optional. IANA Time Zone Database version number, e.g. \"2019a\".
tzVersion :: Lens' TimeZone (Maybe Text)
tzVersion
= lens _tzVersion (\ s a -> s{_tzVersion = a})
-- | IANA Time Zone Database time zone, e.g. \"America\/New_York\".
tzId :: Lens' TimeZone (Maybe Text)
tzId = lens _tzId (\ s a -> s{_tzId = a})
instance FromJSON TimeZone where
parseJSON
= withObject "TimeZone"
(\ o ->
TimeZone' <$> (o .:? "version") <*> (o .:? "id"))
instance ToJSON TimeZone where
toJSON TimeZone'{..}
= object
(catMaybes
[("version" .=) <$> _tzVersion, ("id" .=) <$> _tzId])
-- | Representation of a Site.
--
-- /See:/ 'site' smart constructor.
data Site =
Site'
{ _sAutoAdsEnabled :: !(Maybe Bool)
, _sState :: !(Maybe SiteState)
, _sDomain :: !(Maybe Text)
, _sReportingDimensionId :: !(Maybe Text)
, _sName :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Site' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sAutoAdsEnabled'
--
-- * 'sState'
--
-- * 'sDomain'
--
-- * 'sReportingDimensionId'
--
-- * 'sName'
site
:: Site
site =
Site'
{ _sAutoAdsEnabled = Nothing
, _sState = Nothing
, _sDomain = Nothing
, _sReportingDimensionId = Nothing
, _sName = Nothing
}
-- | Whether auto ads is turned on for the site.
sAutoAdsEnabled :: Lens' Site (Maybe Bool)
sAutoAdsEnabled
= lens _sAutoAdsEnabled
(\ s a -> s{_sAutoAdsEnabled = a})
-- | Output only. State of a site.
sState :: Lens' Site (Maybe SiteState)
sState = lens _sState (\ s a -> s{_sState = a})
-- | Domain (or subdomain) of the site, e.g. \"example.com\" or
-- \"www.example.com\". This is used in the \`OWNED_SITE_DOMAIN_NAME\`
-- reporting dimension.
sDomain :: Lens' Site (Maybe Text)
sDomain = lens _sDomain (\ s a -> s{_sDomain = a})
-- | Output only. Unique ID of the site as used in the \`OWNED_SITE_ID\`
-- reporting dimension.
sReportingDimensionId :: Lens' Site (Maybe Text)
sReportingDimensionId
= lens _sReportingDimensionId
(\ s a -> s{_sReportingDimensionId = a})
-- | Resource name of a site. Format: accounts\/{account}\/sites\/{site}
sName :: Lens' Site (Maybe Text)
sName = lens _sName (\ s a -> s{_sName = a})
instance FromJSON Site where
parseJSON
= withObject "Site"
(\ o ->
Site' <$>
(o .:? "autoAdsEnabled") <*> (o .:? "state") <*>
(o .:? "domain")
<*> (o .:? "reportingDimensionId")
<*> (o .:? "name"))
instance ToJSON Site where
toJSON Site'{..}
= object
(catMaybes
[("autoAdsEnabled" .=) <$> _sAutoAdsEnabled,
("state" .=) <$> _sState, ("domain" .=) <$> _sDomain,
("reportingDimensionId" .=) <$>
_sReportingDimensionId,
("name" .=) <$> _sName])
-- | Response definition for the ad client list rpc.
--
-- /See:/ 'listAdClientsResponse' smart constructor.
data ListAdClientsResponse =
ListAdClientsResponse'
{ _lacrNextPageToken :: !(Maybe Text)
, _lacrAdClients :: !(Maybe [AdClient])
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ListAdClientsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lacrNextPageToken'
--
-- * 'lacrAdClients'
listAdClientsResponse
:: ListAdClientsResponse
listAdClientsResponse =
ListAdClientsResponse'
{_lacrNextPageToken = Nothing, _lacrAdClients = Nothing}
-- | Continuation token used to page through ad clients. To retrieve the next
-- page of the results, set the next request\'s \"page_token\" value to
-- this.
lacrNextPageToken :: Lens' ListAdClientsResponse (Maybe Text)
lacrNextPageToken
= lens _lacrNextPageToken
(\ s a -> s{_lacrNextPageToken = a})
-- | The ad clients returned in this list response.
lacrAdClients :: Lens' ListAdClientsResponse [AdClient]
lacrAdClients
= lens _lacrAdClients
(\ s a -> s{_lacrAdClients = a})
. _Default
. _Coerce
instance FromJSON ListAdClientsResponse where
parseJSON
= withObject "ListAdClientsResponse"
(\ o ->
ListAdClientsResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "adClients" .!= mempty))
instance ToJSON ListAdClientsResponse where
toJSON ListAdClientsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _lacrNextPageToken,
("adClients" .=) <$> _lacrAdClients])
-- | Response definition for the sites list rpc.
--
-- /See:/ 'listSitesResponse' smart constructor.
data ListSitesResponse =
ListSitesResponse'
{ _lsrNextPageToken :: !(Maybe Text)
, _lsrSites :: !(Maybe [Site])
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ListSitesResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lsrNextPageToken'
--
-- * 'lsrSites'
listSitesResponse
:: ListSitesResponse
listSitesResponse =
ListSitesResponse' {_lsrNextPageToken = Nothing, _lsrSites = Nothing}
-- | Continuation token used to page through sites. To retrieve the next page
-- of the results, set the next request\'s \"page_token\" value to this.
lsrNextPageToken :: Lens' ListSitesResponse (Maybe Text)
lsrNextPageToken
= lens _lsrNextPageToken
(\ s a -> s{_lsrNextPageToken = a})
-- | The sites returned in this list response.
lsrSites :: Lens' ListSitesResponse [Site]
lsrSites
= lens _lsrSites (\ s a -> s{_lsrSites = a}) .
_Default
. _Coerce
instance FromJSON ListSitesResponse where
parseJSON
= withObject "ListSitesResponse"
(\ o ->
ListSitesResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "sites" .!= mempty))
instance ToJSON ListSitesResponse where
toJSON ListSitesResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _lsrNextPageToken,
("sites" .=) <$> _lsrSites])
| brendanhay/gogol | gogol-adsense/gen/Network/Google/AdSense/Types/Product.hs | mpl-2.0 | 59,355 | 0 | 18 | 14,633 | 11,198 | 6,445 | 4,753 | 1,230 | 1 |
-----------------------------------------------------------------------------------------
{-| Module : Frame
Copyright : (c) Daan Leijen 2003
License : wxWindows
Maintainer : [email protected]
Stability : provisional
Portability : portable
Frame utility functions.
-}
-----------------------------------------------------------------------------------------
module Graphics.UI.WXCore.Frame
( -- * Frame
frameCreateTopFrame
, frameCreateDefault
, frameSetTopFrame
, frameDefaultStyle
, frameCenter
, frameCenterHorizontal
, frameCenterVertical
-- * Window
, windowGetRootParent
, windowGetFrameParent
, windowGetMousePosition
, windowGetScreenPosition
, windowChildren
-- * Dialog
, dialogDefaultStyle
-- * Status bar
, statusBarCreateFields
) where
import Data.Bits
import Foreign.Marshal.Array
import Graphics.UI.WXCore.WxcTypes
import Graphics.UI.WXCore.WxcDefs
import Graphics.UI.WXCore.WxcClassInfo
import Graphics.UI.WXCore.WxcClasses
import Graphics.UI.WXCore.WxcClassTypes
import Graphics.UI.WXCore.Types
-- | The default frame style for a normal top-level 'Frame'.
frameDefaultStyle :: Int
frameDefaultStyle
= wxDEFAULT_FRAME_STYLE .|. wxCLIP_CHILDREN -- .|. wxNO_FULL_REPAINT_ON_RESIZE
-- | The default frame style for a normal 'Dialog'.
dialogDefaultStyle :: Int
dialogDefaultStyle
= wxCAPTION .|. wxSYSTEM_MENU .|. wxTAB_TRAVERSAL .|. wxCLOSE_BOX .|. wxCLIP_CHILDREN
-- .|. wxNO_FULL_REPAINT_ON_RESIZE
------------------------------------------------------------------------------------------
--
------------------------------------------------------------------------------------------
-- | Create a default frame and make it the top-level window.
frameCreateTopFrame :: String -> IO (Frame ())
frameCreateTopFrame title
= do frame <- frameCreateDefault title
frameSetTopFrame frame
return frame
-- | Set the top-level frame (calls 'wxcAppSetTopWindow').
frameSetTopFrame :: Frame a -> IO ()
frameSetTopFrame frame
= wxcAppSetTopWindow frame
-- | Create a frame with default settings.
frameCreateDefault :: String -> IO (Frame ())
frameCreateDefault title
= frameCreate objectNull idAny title rectNull frameDefaultStyle
-- | Center the frame on the screen.
frameCenter :: Frame a -> IO ()
frameCenter f
= frameCentre f wxBOTH
-- | Center the frame horizontally on the screen.
frameCenterHorizontal :: Frame a -> IO ()
frameCenterHorizontal f
= frameCentre f wxHORIZONTAL
-- | Center the frame vertically on the screen.
frameCenterVertical :: Frame a -> IO ()
frameCenterVertical f
= frameCentre f wxVERTICAL
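-- A minimal usage sketch (illustrative; it assumes 'windowShow' from the
-- generated wxcore class bindings is in scope):
--
-- > demo :: IO ()
-- > demo = do
-- >     f <- frameCreateTopFrame "Demo"
-- >     frameCenter f
-- >     _ <- windowShow f
-- >     return ()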
------------------------------------------------------------------------------------------
-- Window
------------------------------------------------------------------------------------------
-- | The parent frame or dialog of a widget.
windowGetFrameParent :: Window a -> IO (Window ())
windowGetFrameParent w
= if (instanceOf w classFrame || instanceOf w classDialog)
then return (downcastWindow w)
else do p <- windowGetParent w
if (objectIsNull p)
then return (downcastWindow w)
else windowGetFrameParent p
-- | The ultimate root parent of the widget.
windowGetRootParent :: Window a -> IO (Window ())
windowGetRootParent w
= do p <- windowGetParent w
if (objectIsNull p)
then return (downcastWindow w)
else windowGetRootParent p
-- | Retrieve the current mouse position relative to the window position.
windowGetMousePosition :: Window a -> IO Point
windowGetMousePosition w
= do p <- wxcGetMousePosition
windowScreenToClient2 w p
-- | Get the window position relative to the origin of the display.
windowGetScreenPosition :: Window a -> IO Point
windowGetScreenPosition w
= windowClientToScreen w pointZero
-- | Get the children of a window.
windowChildren :: Window a -> IO [Window ()]
windowChildren w
= do count <- windowGetChildren w ptrNull 0
if count <= 0
then return []
else withArray (replicate count ptrNull) $ \ptrs ->
do windowGetChildren w ptrs count
ps <- peekArray count ptrs
return (map objectFromPtr ps)
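-- Illustrative example (not part of the original module): counting the
-- direct children of a window needs nothing beyond 'windowChildren':
--
-- > childCount :: Window a -> IO Int
-- > childCount w = fmap length (windowChildren w)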
------------------------------------------------------------------------------------------
-- Statusbar
------------------------------------------------------------------------------------------
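-- | Create a status bar for the frame with one field per given width. When
-- the frame has a resizable border, the status bar is created with a size
-- grip.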
statusBarCreateFields :: Frame a -> [Int] -> IO (StatusBar ())
statusBarCreateFields parent widths
= do pst <- windowGetWindowStyleFlag parent
let st = if (bitsSet wxRESIZE_BORDER pst) then wxST_SIZEGRIP else 0
sb <- frameCreateStatusBar parent (length widths) st
let len = length widths
if (len <= 1)
then return sb
else do withArray (map toCInt widths) (\pwidths -> statusBarSetStatusWidths sb (length widths) pwidths)
return sb | thielema/wxhaskell | wxcore/src/haskell/Graphics/UI/WXCore/Frame.hs | lgpl-2.1 | 5,065 | 0 | 15 | 1,023 | 915 | 472 | 443 | 90 | 3 |
{- |
Module : $Header$
Description : Various utility functions
License : PublicDomain
-}
module Utils where
import Data.Digits(digits)
import Data.List(maximumBy)
import Data.Ord(comparing)
digitSum :: Integral n => n -> n
digitSum = sum . digits 10
digitLength :: Integral n => n -> Int
digitLength = length . digits 10
tripletSum :: Integral int => (int,int,int) -> int
tripletSum (a,b,c) = a+b+c
tripletProduct :: Integral int => (int,int,int) -> int
tripletProduct (a,b,c) = a*b*c
isPalindrome :: Show a => a -> Bool
isPalindrome a = (show a) == reverse (show a)
maxIndex :: Ord a => [a] -> Int
maxIndex = fst . maximumBy (comparing snd) . zip [0..]
isPythagoran :: Integral int => (int,int,int) -> Bool
isPythagoran (a,b,c) = a^2 + b^2 == c^2
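-- Illustrative examples (doctest-style; not wired into any test suite):
--
-- >>> digitSum 1234
-- 10
-- >>> isPalindrome (1221 :: Int)
-- True
-- >>> isPythagoran (3, 4, 5 :: Int)
-- True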
| jhnesk/project-euler | src/Utils.hs | unlicense | 767 | 0 | 9 | 145 | 343 | 185 | 158 | 18 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE StaticPointers #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE BangPatterns #-}
module Driver (
tests
) where
import Test.Framework (Test, testGroup)
import Test.HUnit hiding (Test)
import Test.Framework.Providers.HUnit (testCase)
import CodeWorld
import CodeWorld.Driver
import CodeWorld.Event
import CodeWorld.CanvasM
import System.Mem.StableName
import Control.Concurrent
import GHC.Prim
tests :: Test
tests = testGroup "Driver"
[ testCase "toState preserves identity" $ do
let wrapped = wrappedInitial 42
let target = toState id wrapped
assertBool "" $ identical wrapped target
, testCase "wrappedStep preserves identity" $ do
-- Expected failure: See https://github.com/google/codeworld/issues/681
let wrapped = wrappedInitial 42
let target = wrappedStep (const id) 1 wrapped
assertBool "" $ not $ identical wrapped target
, testCase "wrapping of shared identity is shared (events)" $ do
-- Expected failure: See https://github.com/google/codeworld/issues/681
let wrapped = wrappedInitial 42
let target = wrappedEvent (const []) (const id) (const id) (TimePassing 0) wrapped
assertBool "" $ not $ identical wrapped target
]
| pranjaltale16/codeworld | codeworld-api/test/Driver.hs | apache-2.0 | 1,290 | 0 | 16 | 271 | 301 | 154 | 147 | 30 | 1 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- :script test/Spark/Core/Internal/PathsSpec.hs
module Spark.Core.Internal.PathsSpec where
import Test.Hspec
import qualified Data.Map.Strict as M
import qualified Data.Set as S
import qualified Data.ByteString.Char8 as C8
import qualified Data.Text as T
import Spark.Core.StructuresInternal
import Spark.Core.Functions
import Spark.Core.Dataset
import Spark.Core.Internal.Paths
import Spark.Core.Internal.DAGStructures
import Spark.Core.Internal.DAGFunctions
import Spark.Core.Internal.ComputeDag
import Spark.Core.Internal.PathsUntyped
import Spark.Core.Internal.Utilities
import Spark.Core.Internal.DatasetFunctions
import Spark.Core.Internal.DatasetStructures
data MyV = MyV {
mvId :: VertexId,
mvLogical :: [MyV],
mvParents :: [MyV]
} deriving (Eq)
instance Show MyV where
show v = "MyV(" ++ (C8.unpack . unVertexId . mvId $ v) ++ ")"
assignPaths :: UntypedNode -> [UntypedNode]
assignPaths n =
let cgt = buildCGraph n :: DagTry (ComputeDag UntypedNode NodeEdge)
cg = forceRight cgt
acgt = assignPathsUntyped cg
ncg = forceRight acgt
in graphDataLexico . tieNodes $ ncg
instance GraphVertexOperations MyV where
vertexToId = mvId
expandVertexAsVertices = mvParents
myv :: String -> [MyV] -> [MyV] -> MyV
myv s logical inner = MyV (VertexId (C8.pack s)) logical inner
myvToVertex :: MyV -> Vertex MyV
myvToVertex x = Vertex (mvId x) x
buildScopes :: [MyV] -> Scopes
buildScopes l = iGetScopes0 l' fun where
l' = myvToVertex <$> l
fun vx = ParentSplit {
psLogical = myvToVertex <$> (mvLogical . vertexData $ vx),
psInner = myvToVertex <$> (mvParents . vertexData $ vx) }
simple :: [(Maybe String, [String])] -> Scopes
simple [] = M.empty
simple ((ms, ss) : t) =
let
key = VertexId . C8.pack <$> ms
vals = VertexId . C8.pack <$> ss
new = M.singleton key (S.fromList vals)
in mergeScopes new (simple t)
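-- For intuition, this mirrors the "one inner parent" case below: with
-- @v0 = myv "v0" [] []@ and @v1 = myv "v1" [] [v0]@ we expect
--
-- > buildScopes [v1, v0] ==
-- >     simple [(Nothing, ["v1"]), (Just "v1", ["v0"]), (Just "v0", [])]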
gatherings :: [(String, [[String]])] -> M.Map VertexId [[VertexId]]
gatherings [] = M.empty
gatherings ((key, paths) : t) =
let
k = VertexId . C8.pack $ key
ps = (VertexId . C8.pack <$>) <$> paths
new = M.singleton k ps
in M.unionWith (++) new (gatherings t)
gatherPaths' :: [MyV] -> M.Map VertexId [[VertexId]]
gatherPaths' = gatherPaths . buildScopes
spec :: Spec
spec = do
describe "Tests on paths" $ do
it "nothing" $ do
buildScopes [] `shouldBe` simple []
it "no parent" $ do
let v0 = myv "v0" [] []
let res = [ (Nothing, ["v0"]), (Just "v0", []) ]
buildScopes [v0] `shouldBe` simple res
it "one logical parent" $ do
let v0 = myv "v0" [] []
let v1 = myv "v1" [v0] []
let res = [ (Nothing, ["v0", "v1"])
, (Just "v1", [])
, (Just "v0", []) ]
buildScopes [v1, v0] `shouldBe` simple res
it "one inner parent" $ do
let v0 = myv "v0" [] []
let v1 = myv "v1" [] [v0]
let res = [ (Nothing, ["v1"])
, (Just "v1", ["v0"])
, (Just "v0", []) ]
buildScopes [v1, v0] `shouldBe` simple res
it "logical scoping over a parent" $ do
let v0 = myv "v0" [] []
let v1 = myv "v1" [v0] []
let v2 = myv "v2" [v0] [v1]
let res = [ (Nothing, ["v0", "v2"])
, (Just "v0", [])
, (Just "v1", [])
, (Just "v2", ["v1"]) ]
buildScopes [v2] `shouldBe` simple res
it "common ancestor" $ do
let top = myv "top" [] []
let inner = myv "inner" [top] []
let v1 = myv "v1" [top] [inner]
let v2 = myv "v2" [top] [inner]
let res = [ (Nothing, ["top", "v1", "v2"])
, (Just "inner", [])
, (Just "top", [])
, (Just "v1", ["inner"])
, (Just "v2", ["inner"]) ]
buildScopes [v1, v2] `shouldBe` simple res
it "common ancestor, unbalanced" $ do
let top = myv "top" [] []
let inner = myv "inner" [top] []
let v1 = myv "v1" [top] [inner]
let v2 = myv "v2" [] [inner]
let res = [ (Nothing, ["top", "v1", "v2"])
, (Just "inner", [])
, (Just "top", [])
, (Just "v1", ["inner"])
, (Just "v2", ["inner", "top"]) ]
buildScopes [v1, v2] `shouldBe` simple res
describe "Path gatherings" $ do
it "nothing" $ do
gatherPaths' [] `shouldBe` gatherings []
it "no parent" $ do
let v0 = myv "v0" [] []
let res = [("v0", [[]])]
gatherPaths' [v0] `shouldBe` gatherings res
it "one logical parent" $ do
let v0 = myv "v0" [] []
let v1 = myv "v1" [v0] []
let res = [ ("v1", [[]])
, ("v0", [[]])]
gatherPaths' [v1] `shouldBe` gatherings res
it "one inner parent" $ do
let v0 = myv "v0" [] []
let v1 = myv "v1" [] [v0]
let res = [ ("v1", [[]])
, ("v0", [["v1"]])]
gatherPaths' [v1] `shouldBe` gatherings res
it "logical scoping over a parent" $ do
let v0 = myv "v0" [] []
let v1 = myv "v1" [v0] []
let v2 = myv "v2" [v0] [v1]
let res = [ ("v0", [[]])
, ("v1", [["v2"]])
, ("v2", [[]]) ]
gatherPaths' [v2] `shouldBe` gatherings res
it "common ancestor" $ do
let top = myv "top" [] []
let inner = myv "inner" [top] []
let v1 = myv "v1" [top] [inner]
let v2 = myv "v2" [top] [inner]
let res = [ ("inner", [["v1"], ["v2"]])
, ("top", [[]])
, ("v1", [[]])
, ("v2", [[]]) ]
gatherPaths' [v1, v2] `shouldBe` gatherings res
describe "Real paths" $ do
it "simple test" $ do
let c0 = constant (1 :: Int) @@ "c0"
let c1 = identity c0 @@ "c1"
let c2 = identity c1 `logicalParents` [untyped c0] @@ "c2"
nodeId <$> nodeParents c1 `shouldBe` [nodeId c0]
nodeId <$> nodeParents c2 `shouldBe` [nodeId c1]
let withParents = T.unpack . catNodePath . nodePath <$> assignPaths (untyped c2)
withParents `shouldBe` ["c0", "c2/c1", "c2"]
it "simple test 2" $ do
let ds = dataset ([1 ,2, 3, 4]::[Int]) @@ "ds"
let c = count ds @@ "c"
let c2 = (c + (identity c @@ "id")) `logicalParents` [untyped ds] @@ "c2"
let withParents = T.unpack . catNodePath . nodePath <$> assignPaths (untyped c2)
withParents `shouldBe` ["ds", "c2/c","c2/id","c2"]
| krapsh/kraps-haskell | test/Spark/Core/Internal/PathsSpec.hs | apache-2.0 | 6,446 | 0 | 22 | 1,856 | 2,708 | 1,433 | 1,275 | 171 | 1 |
module NLP.TAG.Vanilla.SubtreeSharing.Tests where
import Control.Applicative ((<$>))
import qualified Data.Set as S
import Test.Tasty (TestTree, testGroup) -- , localOptions)
import Test.HUnit (Assertion, (@?=))
import Test.Tasty.HUnit (testCase)
import NLP.TAG.Vanilla.Tree (Tree (..), AuxTree (..))
import NLP.TAG.Vanilla.Earley.Basic (recognize, recognizeFrom)
import NLP.TAG.Vanilla.Rule (Rule)
import qualified NLP.TAG.Vanilla.Rule as R
import NLP.TAG.Vanilla.SubtreeSharing (compile)
---------------------------------------------------------------------
-- Prerequisites
---------------------------------------------------------------------
type Tr = Tree String String
type AuxTr = AuxTree String String
type Rl = Rule String String
---------------------------------------------------------------------
-- Grammar 1
---------------------------------------------------------------------
tree1 :: Tr
tree1 = INode "S"
[ abc
, INode "D"
[ abc
, INode "E" [] ]
]
where
abc = INode "A"
[ INode "B" []
, INode "C" [] ]
tree2 :: Tr
tree2 = INode "S"
[ INode "D"
[ abc
, INode "E" [] ]
, abc
]
where
abc = INode "A"
[ INode "B" []
, INode "C" [] ]
tree3 :: Tr
tree3 = INode "D"
[ abc
, INode "E" [] ]
where
abc = INode "A"
[ INode "B" []
, INode "C" [] ]
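-- | Compile the three initial trees into a shared grammar. The common
-- @abc@ subtree occurs in all of them, so subtree sharing is expected to
-- collapse the rule set to 5 rules (checked by 'testShareInit' below).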
mkGram1 :: IO (S.Set Rl)
mkGram1 = compile (map Left [tree1, tree2, tree3])
---------------------------------------------------------------------
-- Grammar 2
---------------------------------------------------------------------
aux1 :: AuxTr
aux1 = AuxTree (INode "A"
[ INode "B" []
, INode "C"
[ INode "A" []
, INode "D" [] ]
]) [1, 0]
aux2 :: AuxTr
aux2 = AuxTree (INode "A"
[ INode "C"
[ INode "A" []
, INode "D" [] ]
, INode "B" []
]) [0, 0]
aux3 :: Tr
aux3 = INode "A"
[ INode "B" []
, INode "C"
[ INode "A" []
, INode "D" [] ]
]
-- | Note: tree identical to `aux3`!
aux4 :: Tr
aux4 = INode "A"
[ INode "B" []
, INode "C"
[ INode "A" []
, INode "D" [] ]
]
mkGram2 :: IO (S.Set Rl)
mkGram2 = compile $
(map Left [aux3, aux4]) ++
(map Right [aux1, aux2])
---------------------------------------------------------------------
-- Tests
---------------------------------------------------------------------
tests :: TestTree
tests = testGroup "NLP.TAG.Vanilla.SubtreeSharing"
[ testCase "Subtree Sharing (Initial)" testShareInit
, testCase "Subtree Sharing (Auxiliary)" testShareAux ]
testShareInit :: Assertion
testShareInit = do
gram <- mkGram1
S.size gram @?= 5
testShareAux :: Assertion
testShareAux = do
gram <- mkGram2
S.size gram @?= 5
localTest :: Assertion
localTest = do
gram <- mkGram1
mapM_ print $ S.toList gram
-- ---------------------------------------------------------------------
-- -- Utils
-- ---------------------------------------------------------------------
--
--
-- (@@?=) :: (Show a, Eq a) => IO a -> a -> Assertion
-- mx @@?= y = do
-- x <- mx
-- x @?= y
| kawu/tag-vanilla | src/NLP/TAG/Vanilla/SubtreeSharing/Tests.hs | bsd-2-clause | 3,252 | 0 | 12 | 829 | 858 | 479 | 379 | 87 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Radiation.Parsers.Internal.CStyle where
import Data.Attoparsec.ByteString.Char8 as BP
import Data.Attoparsec.ByteString.Lazy as Lazy
import qualified Data.Attoparsec.ByteString as BB
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BSL
import qualified Data.ByteString.Char8 as BSC
import qualified Data.Char as C
import Control.Applicative
import Control.Monad
import Data.Monoid (mappend)
import My.Utils
import Debug.Trace
spaced :: Parser BS.ByteString -> Parser BS.ByteString
spaced p = skipSpace *> p <* skipSpace
attribute :: Parser BS.ByteString
attribute = do
string "__attribute__"
skipSpace
balancedParens
removePattern :: Parser BS.ByteString -> Parser BS.ByteString
removePattern pattern = BS.concat <$> many ((pattern >> return BS.empty) <|>
(BS.singleton <$> BB.anyWord8))
subparse :: Parser a -> BS.ByteString -> Parser a
subparse myParser bs =
case parseOnly myParser bs of
Left err -> fail err
Right map -> return map
(+>) :: Parser BS.ByteString -> Parser BS.ByteString -> Parser BS.ByteString
(+>) p1 p2 = BS.append <$> p1 <*> p2
{- Take an identifier from the parser -}
identifier :: Parser BS.ByteString
identifier = skipSpace *> BP.takeWhile1 isIdentifierChar <* skipSpace
notIdentifier :: Parser BS.ByteString
notIdentifier = skipSpace *> BP.takeWhile1 (\c -> not (isIdentifierChar c || isSpace c)) <* skipSpace
isIdentifierChar :: Char -> Bool
isIdentifierChar ch = C.isDigit ch || C.isAlpha ch || ch == '_'
between :: Char -> Char -> Parser BS.ByteString
between open close = skipSpace *> char open *> (between open close <|> BP.takeWhile sat) <* char close
where sat ch = ch /= open && ch /= close
nextToken :: Parser BS.ByteString
nextToken = identifier <|> notIdentifier
token :: BS.ByteString -> Parser BS.ByteString
token str = do
tkn <- nextToken
if tkn == str then return str else fail "Could not match token"
balanced :: Char -> Char -> Parser BS.ByteString
balanced c1 c2 =
let
looseBalanced :: BS.ByteString -> Int -> Parser BS.ByteString
looseBalanced cur 0 = return cur
looseBalanced cur n = do
rest <- BP.takeWhile (\ch -> ch /= c1 && ch /= c2)
ch <- char c1 <|> char c2
let cur' = cur `mappend` rest `mappend` BSC.singleton ch
case () of
() | ch == c1 -> looseBalanced cur' (n + 1)
| ch == c2 -> looseBalanced cur' (n - 1)
| otherwise -> looseBalanced cur' n
in
BP.char c1 >> looseBalanced (BSC.singleton c1) 1
balancedParens :: Parser BS.ByteString
balancedParens = balanced '(' ')'
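-- Illustrative behaviour (using attoparsec's 'parseOnly', imported above):
-- the delimiters are kept and parsing stops once the nesting count returns
-- to zero, leaving the rest of the input unconsumed, e.g.
--
-- > parseOnly balancedParens "(a(b)c)d" == Right "(a(b)c)"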
body :: Parser BS.ByteString
body = balanced '{' '}'
parens :: Parser BS.ByteString
parens = between '(' ')'
nextWord :: BS.ByteString -> Parser BS.ByteString
nextWord str = BP.takeWhile C.isSpace +> string str
primitive :: Parser BS.ByteString
primitive = skipSpace *> choice [integral, floating] -- >>= (\bs -> if BS.null bs then fail "" else return bs)
where integral = option "" (option "" (string "un") +> nextWord "signed") +> choice (map nextWord ["int","char"])
floating = string "float"
data CTypeHeader = CTypeHeader (Maybe (BS.ByteString,BS.ByteString))
{- Parses a type in C. Attempts to cover all forms, including
 - anonymous structures, though full coverage is difficult. -}
ctype :: Parser CTypeHeader
ctype = (<|>) (primitive $> CTypeHeader Nothing) $ do
typeoftype <- optional (choice $ fmap string ["struct","enum","union"])
id1 <- (body $> Nothing) <|> (Just <$> (identifier <* body))
identifier `mplus` (BSC.pack <$> many (skipSpace *> char '*' <* skipSpace))
return $ CTypeHeader ((,) <$> typeoftype <*> id1)
{- Parses C++ types. These include the ampersand used for
 - references. -}
cpptype :: Parser BS.ByteString
cpptype = identifier `mplus` (BSC.pack <$> many (skipSpace *> (char '*' <|> char '&') <* skipSpace))
| jrahm/Radiation | src/Radiation/Parsers/Internal/CStyle.hs | bsd-2-clause | 4,032 | 0 | 17 | 874 | 1,299 | 667 | 632 | 81 | 2 |
-- | The Ox monad facilitates writing observation-extraction expressions
-- over an input sentence with an arbitrary token type.
module Control.Monad.Ox
(
-- * Types
Ox
, Id
-- * Functions
, save
, saves
, when
, whenJT
, group
-- * Ox monad execution
, execOx
-- * Utilities
, atWith
, atsWith
) where
import Control.Applicative ((<$>), (<*), (*>))
import Control.Arrow (first)
import Control.Monad.State hiding (when)
import Control.Monad.Writer hiding (when)
import Data.Maybe (maybeToList)
import qualified Data.Vector as V
-- | Observation type identifier. It consists of a list of
-- integers, each integer representing a state of the Ox
-- monad on the particular level.
type Id = [Int]
-- | Increment the integer component of the top-most level.
inc :: Id -> Id
inc [] = error "incId: null id"
inc (x:xs) = x+1 : xs
-- | Push new value to the Id stack.
grow :: Id -> Id
grow xs = 1 : xs
-- | Pop value from the stack.
shrink :: Id -> Id
shrink [] = error "shrink: null id"
shrink (_:xs) = xs
-- | Get the top-most component of the identifier.
getTop :: Id -> Int
getTop [] = error "getTop: null id"
getTop (x:_) = x
-- | Set the top-most component to the given value.
setTop :: Int -> Id -> Id
setTop _ [] = error "setTop: null id"
setTop x (_:xs) = x:xs
-- | The Ox is a monad stack with observation type identifier handled by
-- the state monad and the resulting observation values paired with identifiers
-- printed using the writer monad.
type Ox o a = WriterT [(Id, o)] (State Id) a
-- | Retrieve the current identifier value.
getId :: Ox o Id
getId = lift get
{-# INLINE getId #-}
-- | Set the new identifier value.
setId :: Id -> Ox o ()
setId = lift . put
{-# INLINE setId #-}
-- | Update the current identifier of the Ox monad.
updateId :: (Id -> Id) -> Ox o ()
updateId f = do
i <- getId
setId (f i)
-- | Increase the current identifier of the Ox monad.
incId :: Ox o ()
incId = updateId inc
-- | Perform the identifier-dependent action and increase the identifier.
withId :: (Id -> Ox o a) -> Ox o a
withId act = do
x <- act =<< getId
incId
return x
-- | Perform the Ox action on the lower level.
below :: Ox o a -> Ox o a
below act = updateId grow *> act <* updateId shrink
-- | Save observation values in the writer monad of the Ox stack.
saves :: [o] -> Ox o ()
saves xs = withId $ \i -> tell [(i, x) | x <- xs]
-- | Save the observation value.
save :: Maybe o -> Ox o ()
save = saves . maybeToList
-- | Perform the Ox action only when the 'cond' is True. It works like
-- the standard 'Control.Monad.when' function but also changes the current
-- identifier value.
when :: Bool -> Ox o a -> Ox o (Maybe a)
when cond act = do
x <- case cond of
False -> return Nothing
True -> Just <$> below act
incId
return x
-- | Perform the action only when the given condition is equal to Just True.
whenJT :: Maybe Bool -> Ox o a -> Ox o (Maybe a)
whenJT cond =
when (justTrue cond)
where
justTrue Nothing = False
justTrue (Just x) = x
-- | Make all embedded observations indistinguishable with respect
-- to their top-most identifier components.
-- TODO: Perhaps should set only the current level, not the deeper ones.
group :: Ox o a -> Ox o a
group act = do
i <- getId
let top = getTop i
x <- censor (map . first . setTop $ top) act
setId (inc i)
return x
-- | Execute the Ox monad and retrieve the saved (with the 'save' and
-- 'saves' functions) results.
execOx :: Ox o a -> [(Id, o)]
execOx ox =
(map (first reverse) . fst)
(runState (execWriterT ox) [1])
------------------------------
-- Utilities
------------------------------
-- | Value of the 'a -> b' function with respect to the given sentence
-- and sentence position. Return Nothing if the position is out of
-- bounds.
atWith :: V.Vector a -> (a -> b) -> Int -> Maybe b
atWith xs f k =
if k < 0 || k >= V.length xs
then Nothing
else Just $ f (xs V.! k)
-- | Value of the 'a -> [b]' function with respect to the given sentence
-- and sentence position. Return empty list if the position is out of
-- bounds.
atsWith :: V.Vector a -> (a -> [b]) -> Int -> [b]
atsWith xs f k =
if k < 0 || k >= V.length xs
then []
else f (xs V.! k)
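-- An illustrative usage sketch (not part of the original module; the helper
-- name 'obsAt' is hypothetical).  Observe the lowercased word at the current
-- and previous positions and collect the observations with 'execOx':
--
-- > import Data.Char (toLower)
-- >
-- > obsAt :: V.Vector String -> Int -> [(Id, String)]
-- > obsAt sent k = execOx $ do
-- >     save $ atWith sent (map toLower) (k - 1)
-- >     save $ atWith sent (map toLower) k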
| kawu/monad-ox | Control/Monad/Ox.hs | bsd-2-clause | 4,293 | 0 | 12 | 1,015 | 1,161 | 622 | 539 | 90 | 2 |
{-# LANGUAGE QuasiQuotes, TemplateHaskell, TypeFamilies #-}
{-# LANGUAGE OverloadedStrings #-}
module Tap
( Tap (..)
, TapRoute (..)
, resourcesTap
, Handler
, Widget
, module Yesod.Core
, module Settings
, StaticRoute (..)
, lift
, liftIO
) where
import Tap.Redis
import Yesod.Core
import Yesod.Helpers.Static
import qualified Settings
import System.Directory
import qualified Data.ByteString.Lazy as L
import Settings (hamletFile, cassiusFile, luciusFile, juliusFile, widgetFile)
import StaticFiles
import Control.Monad (unless)
import Control.Monad.Trans.Class (lift)
import Control.Monad.IO.Class (liftIO)
import qualified Data.Text as T
import qualified Data.Sequence as Seq
import qualified Data.ByteString as B
-- | The site argument for your application. This can be a good place to
-- keep settings and values requiring initialization before your application
-- starts running, such as database connections. Every handler will have
-- access to the data present here.
data Tap = Tap
{ getStatic :: Static -- ^ Settings for static file serving.
, getMessages :: AtomicMessageStore
}
-- | A useful synonym; most of the handler functions in your application
-- will need to be of this type.
type Handler = GHandler Tap Tap
-- | A useful synonym; most of the widget functions in your application
-- will need to be of this type.
type Widget = GWidget Tap Tap
-- This is where we define all of the routes in our application. For a full
-- explanation of the syntax, please see:
-- http://docs.yesodweb.com/book/web-routes-quasi/
--
-- This function does three things:
--
-- * Creates the route datatype TapRoute. Every valid URL in your
-- application can be represented as a value of this type.
-- * Creates the associated type:
-- type instance Route Tap = TapRoute
-- * Creates the value resourcesTap which contains information on the
-- resources declared below. This is used in Controller.hs by the call to
-- mkYesodDispatch
--
-- What this function does *not* do is create a YesodSite instance for
-- Tap. Creating that instance requires all of the handler functions
-- for our application to be in scope. However, the handler functions
-- usually require access to the TapRoute datatype. Therefore, we
-- split these actions into two functions and place them in separate files.
mkYesodData "Tap" $(parseRoutesFile "config/routes")
-- Please see the documentation for the Yesod typeclass. There are a number
-- of settings which can be configured by overriding methods here.
instance Yesod Tap where
approot _ = Settings.approot
defaultLayout widget = do
mmsg <- getMessage
pc <- widgetToPageContent $ do
widget
addCassius $(Settings.cassiusFile "default-layout")
hamletToRepHtml $(Settings.hamletFile "default-layout")
-- This is done to provide an optimization for serving static files from
-- a separate domain. Please see the staticroot setting in Settings.hs
urlRenderOverride a (StaticR s) =
Just $ uncurry (joinPath a Settings.staticroot) $ renderRoute s
urlRenderOverride _ _ = Nothing
-- This function creates static content files in the static folder
-- and names them based on a hash of their content. This allows
-- expiration dates to be set far in the future without worry of
-- users receiving stale content.
addStaticContent ext' _ content = do
let fn = base64md5 content ++ '.' : T.unpack ext'
let statictmp = Settings.staticdir ++ "/tmp/"
liftIO $ createDirectoryIfMissing True statictmp
let fn' = statictmp ++ fn
exists <- liftIO $ doesFileExist fn'
unless exists $ liftIO $ L.writeFile fn' content
return $ Just $ Right (StaticR $ StaticRoute ["tmp", T.pack fn] [], [])
| KirinDave/redis-conduit | Tap.hs | bsd-2-clause | 3,858 | 0 | 16 | 816 | 546 | 313 | 233 | 52 | 0 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QTextDocument_h.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:28
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QTextDocument_h (
QcreateObject_h(..)
) where
import Foreign.C.Types
import Qtc.Enums.Base
import Qtc.Classes.Base
import Qtc.Classes.Qccs_h
import Qtc.Classes.Core_h
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui_h
import Qtc.ClassTypes.Gui
import Foreign.Marshal.Array
instance QunSetUserMethod (QTextDocument ()) where
unSetUserMethod qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QTextDocument_unSetUserMethod cobj_qobj (toCInt 0) (toCInt evid)
foreign import ccall "qtc_QTextDocument_unSetUserMethod" qtc_QTextDocument_unSetUserMethod :: Ptr (TQTextDocument a) -> CInt -> CInt -> IO (CBool)
instance QunSetUserMethod (QTextDocumentSc a) where
unSetUserMethod qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QTextDocument_unSetUserMethod cobj_qobj (toCInt 0) (toCInt evid)
instance QunSetUserMethodVariant (QTextDocument ()) where
unSetUserMethodVariant qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QTextDocument_unSetUserMethod cobj_qobj (toCInt 1) (toCInt evid)
instance QunSetUserMethodVariant (QTextDocumentSc a) where
unSetUserMethodVariant qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QTextDocument_unSetUserMethod cobj_qobj (toCInt 1) (toCInt evid)
instance QunSetUserMethodVariantList (QTextDocument ()) where
unSetUserMethodVariantList qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QTextDocument_unSetUserMethod cobj_qobj (toCInt 2) (toCInt evid)
instance QunSetUserMethodVariantList (QTextDocumentSc a) where
unSetUserMethodVariantList qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QTextDocument_unSetUserMethod cobj_qobj (toCInt 2) (toCInt evid)
instance QsetUserMethod (QTextDocument ()) (QTextDocument x0 -> IO ()) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethod_QTextDocument setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethod_QTextDocument_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QTextDocument_setUserMethod cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQTextDocument x0) -> IO ()
setHandlerWrapper x0
= do
x0obj <- objectFromPtr_nf x0
if (objectIsNull x0obj)
then return ()
else _handler x0obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QTextDocument_setUserMethod" qtc_QTextDocument_setUserMethod :: Ptr (TQTextDocument a) -> CInt -> Ptr (Ptr (TQTextDocument x0) -> IO ()) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetUserMethod_QTextDocument :: (Ptr (TQTextDocument x0) -> IO ()) -> IO (FunPtr (Ptr (TQTextDocument x0) -> IO ()))
foreign import ccall "wrapper" wrapSetUserMethod_QTextDocument_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetUserMethod (QTextDocumentSc a) (QTextDocument x0 -> IO ()) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethod_QTextDocument setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethod_QTextDocument_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QTextDocument_setUserMethod cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQTextDocument x0) -> IO ()
setHandlerWrapper x0
= do
x0obj <- objectFromPtr_nf x0
if (objectIsNull x0obj)
then return ()
else _handler x0obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QsetUserMethod (QTextDocument ()) (QTextDocument x0 -> QVariant () -> IO (QVariant ())) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethodVariant_QTextDocument setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethodVariant_QTextDocument_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QTextDocument_setUserMethodVariant cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQTextDocument x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
setHandlerWrapper x0 x1
= do
x0obj <- objectFromPtr_nf x0
x1obj <- objectFromPtr_nf x1
rv <- if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1obj
withObjectPtr rv $ \cobj_rv -> return cobj_rv
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QTextDocument_setUserMethodVariant" qtc_QTextDocument_setUserMethodVariant :: Ptr (TQTextDocument a) -> CInt -> Ptr (Ptr (TQTextDocument x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetUserMethodVariant_QTextDocument :: (Ptr (TQTextDocument x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))) -> IO (FunPtr (Ptr (TQTextDocument x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))))
foreign import ccall "wrapper" wrapSetUserMethodVariant_QTextDocument_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetUserMethod (QTextDocumentSc a) (QTextDocument x0 -> QVariant () -> IO (QVariant ())) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethodVariant_QTextDocument setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethodVariant_QTextDocument_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QTextDocument_setUserMethodVariant cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQTextDocument x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
setHandlerWrapper x0 x1
= do
x0obj <- objectFromPtr_nf x0
x1obj <- objectFromPtr_nf x1
rv <- if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1obj
withObjectPtr rv $ \cobj_rv -> return cobj_rv
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QunSetHandler (QTextDocument ()) where
unSetHandler qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
withCWString evid $ \cstr_evid ->
qtc_QTextDocument_unSetHandler cobj_qobj cstr_evid
foreign import ccall "qtc_QTextDocument_unSetHandler" qtc_QTextDocument_unSetHandler :: Ptr (TQTextDocument a) -> CWString -> IO (CBool)
instance QunSetHandler (QTextDocumentSc a) where
unSetHandler qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
withCWString evid $ \cstr_evid ->
qtc_QTextDocument_unSetHandler cobj_qobj cstr_evid
instance QsetHandler (QTextDocument ()) (QTextDocument x0 -> IO ()) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QTextDocument1 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QTextDocument1_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QTextDocument_setHandler1 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQTextDocument x0) -> IO ()
setHandlerWrapper x0
= do x0obj <- qTextDocumentFromPtr x0
if (objectIsNull x0obj)
then return ()
else _handler x0obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QTextDocument_setHandler1" qtc_QTextDocument_setHandler1 :: Ptr (TQTextDocument a) -> CWString -> Ptr (Ptr (TQTextDocument x0) -> IO ()) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QTextDocument1 :: (Ptr (TQTextDocument x0) -> IO ()) -> IO (FunPtr (Ptr (TQTextDocument x0) -> IO ()))
foreign import ccall "wrapper" wrapSetHandler_QTextDocument1_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QTextDocumentSc a) (QTextDocument x0 -> IO ()) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QTextDocument1 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QTextDocument1_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QTextDocument_setHandler1 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQTextDocument x0) -> IO ()
setHandlerWrapper x0
= do x0obj <- qTextDocumentFromPtr x0
if (objectIsNull x0obj)
then return ()
else _handler x0obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance Qclear_h (QTextDocument ()) (()) where
clear_h x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QTextDocument_clear cobj_x0
foreign import ccall "qtc_QTextDocument_clear" qtc_QTextDocument_clear :: Ptr (TQTextDocument a) -> IO ()
instance Qclear_h (QTextDocumentSc a) (()) where
clear_h x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QTextDocument_clear cobj_x0
instance QsetHandler (QTextDocument ()) (QTextDocument x0 -> QTextFormat t1 -> IO (QObject t0)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QTextDocument2 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QTextDocument2_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QTextDocument_setHandler2 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQTextDocument x0) -> Ptr (TQTextFormat t1) -> IO (Ptr (TQObject t0))
setHandlerWrapper x0 x1
= do x0obj <- qTextDocumentFromPtr x0
x1obj <- objectFromPtr_nf x1
let rv =
if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1obj
rvf <- rv
withObjectPtr rvf $ \cobj_rvf -> return (cobj_rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QTextDocument_setHandler2" qtc_QTextDocument_setHandler2 :: Ptr (TQTextDocument a) -> CWString -> Ptr (Ptr (TQTextDocument x0) -> Ptr (TQTextFormat t1) -> IO (Ptr (TQObject t0))) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QTextDocument2 :: (Ptr (TQTextDocument x0) -> Ptr (TQTextFormat t1) -> IO (Ptr (TQObject t0))) -> IO (FunPtr (Ptr (TQTextDocument x0) -> Ptr (TQTextFormat t1) -> IO (Ptr (TQObject t0))))
foreign import ccall "wrapper" wrapSetHandler_QTextDocument2_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QTextDocumentSc a) (QTextDocument x0 -> QTextFormat t1 -> IO (QObject t0)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QTextDocument2 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QTextDocument2_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QTextDocument_setHandler2 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQTextDocument x0) -> Ptr (TQTextFormat t1) -> IO (Ptr (TQObject t0))
setHandlerWrapper x0 x1
= do x0obj <- qTextDocumentFromPtr x0
x1obj <- objectFromPtr_nf x1
let rv =
if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1obj
rvf <- rv
withObjectPtr rvf $ \cobj_rvf -> return (cobj_rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
class QcreateObject_h x0 x1 where
createObject_h :: x0 -> x1 -> IO (QTextObject ())
instance QcreateObject_h (QTextDocument ()) ((QTextFormat t1)) where
createObject_h x0 (x1)
= withQTextObjectResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTextDocument_createObject cobj_x0 cobj_x1
foreign import ccall "qtc_QTextDocument_createObject" qtc_QTextDocument_createObject :: Ptr (TQTextDocument a) -> Ptr (TQTextFormat t1) -> IO (Ptr (TQTextObject ()))
instance QcreateObject_h (QTextDocumentSc a) ((QTextFormat t1)) where
createObject_h x0 (x1)
= withQTextObjectResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTextDocument_createObject cobj_x0 cobj_x1
instance QsetHandler (QTextDocument ()) (QTextDocument x0 -> Int -> QUrl t2 -> IO (QVariant t0)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QTextDocument3 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QTextDocument3_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QTextDocument_setHandler3 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQTextDocument x0) -> CInt -> Ptr (TQUrl t2) -> IO (Ptr (TQVariant t0))
setHandlerWrapper x0 x1 x2
= do x0obj <- qTextDocumentFromPtr x0
let x1int = fromCInt x1
x2obj <- objectFromPtr_nf x2
let rv =
if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1int x2obj
rvf <- rv
withObjectPtr rvf $ \cobj_rvf -> return (cobj_rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QTextDocument_setHandler3" qtc_QTextDocument_setHandler3 :: Ptr (TQTextDocument a) -> CWString -> Ptr (Ptr (TQTextDocument x0) -> CInt -> Ptr (TQUrl t2) -> IO (Ptr (TQVariant t0))) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QTextDocument3 :: (Ptr (TQTextDocument x0) -> CInt -> Ptr (TQUrl t2) -> IO (Ptr (TQVariant t0))) -> IO (FunPtr (Ptr (TQTextDocument x0) -> CInt -> Ptr (TQUrl t2) -> IO (Ptr (TQVariant t0))))
foreign import ccall "wrapper" wrapSetHandler_QTextDocument3_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QTextDocumentSc a) (QTextDocument x0 -> Int -> QUrl t2 -> IO (QVariant t0)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QTextDocument3 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QTextDocument3_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QTextDocument_setHandler3 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQTextDocument x0) -> CInt -> Ptr (TQUrl t2) -> IO (Ptr (TQVariant t0))
setHandlerWrapper x0 x1 x2
= do x0obj <- qTextDocumentFromPtr x0
let x1int = fromCInt x1
x2obj <- objectFromPtr_nf x2
let rv =
if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1int x2obj
rvf <- rv
withObjectPtr rvf $ \cobj_rvf -> return (cobj_rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QloadResource_h (QTextDocument ()) ((Int, QUrl t2)) where
loadResource_h x0 (x1, x2)
= withQVariantResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QTextDocument_loadResource cobj_x0 (toCInt x1) cobj_x2
foreign import ccall "qtc_QTextDocument_loadResource" qtc_QTextDocument_loadResource :: Ptr (TQTextDocument a) -> CInt -> Ptr (TQUrl t2) -> IO (Ptr (TQVariant ()))
instance QloadResource_h (QTextDocumentSc a) ((Int, QUrl t2)) where
loadResource_h x0 (x1, x2)
= withQVariantResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QTextDocument_loadResource cobj_x0 (toCInt x1) cobj_x2
instance QsetHandler (QTextDocument ()) (QTextDocument x0 -> QEvent t1 -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QTextDocument4 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QTextDocument4_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QTextDocument_setHandler4 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQTextDocument x0) -> Ptr (TQEvent t1) -> IO (CBool)
setHandlerWrapper x0 x1
= do x0obj <- qTextDocumentFromPtr x0
x1obj <- objectFromPtr_nf x1
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1obj
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QTextDocument_setHandler4" qtc_QTextDocument_setHandler4 :: Ptr (TQTextDocument a) -> CWString -> Ptr (Ptr (TQTextDocument x0) -> Ptr (TQEvent t1) -> IO (CBool)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QTextDocument4 :: (Ptr (TQTextDocument x0) -> Ptr (TQEvent t1) -> IO (CBool)) -> IO (FunPtr (Ptr (TQTextDocument x0) -> Ptr (TQEvent t1) -> IO (CBool)))
foreign import ccall "wrapper" wrapSetHandler_QTextDocument4_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QTextDocumentSc a) (QTextDocument x0 -> QEvent t1 -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QTextDocument4 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QTextDocument4_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QTextDocument_setHandler4 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQTextDocument x0) -> Ptr (TQEvent t1) -> IO (CBool)
setHandlerWrapper x0 x1
= do x0obj <- qTextDocumentFromPtr x0
x1obj <- objectFromPtr_nf x1
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1obj
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance Qevent_h (QTextDocument ()) ((QEvent t1)) where
event_h x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTextDocument_event cobj_x0 cobj_x1
foreign import ccall "qtc_QTextDocument_event" qtc_QTextDocument_event :: Ptr (TQTextDocument a) -> Ptr (TQEvent t1) -> IO CBool
instance Qevent_h (QTextDocumentSc a) ((QEvent t1)) where
event_h x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTextDocument_event cobj_x0 cobj_x1
instance QsetHandler (QTextDocument ()) (QTextDocument x0 -> QObject t1 -> QEvent t2 -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QTextDocument5 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QTextDocument5_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QTextDocument_setHandler5 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQTextDocument x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)
setHandlerWrapper x0 x1 x2
= do x0obj <- qTextDocumentFromPtr x0
x1obj <- qObjectFromPtr x1
x2obj <- objectFromPtr_nf x2
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1obj x2obj
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QTextDocument_setHandler5" qtc_QTextDocument_setHandler5 :: Ptr (TQTextDocument a) -> CWString -> Ptr (Ptr (TQTextDocument x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QTextDocument5 :: (Ptr (TQTextDocument x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)) -> IO (FunPtr (Ptr (TQTextDocument x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)))
foreign import ccall "wrapper" wrapSetHandler_QTextDocument5_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QTextDocumentSc a) (QTextDocument x0 -> QObject t1 -> QEvent t2 -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QTextDocument5 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QTextDocument5_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QTextDocument_setHandler5 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQTextDocument x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)
setHandlerWrapper x0 x1 x2
= do x0obj <- qTextDocumentFromPtr x0
x1obj <- qObjectFromPtr x1
x2obj <- objectFromPtr_nf x2
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1obj x2obj
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QeventFilter_h (QTextDocument ()) ((QObject t1, QEvent t2)) where
eventFilter_h x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QTextDocument_eventFilter cobj_x0 cobj_x1 cobj_x2
foreign import ccall "qtc_QTextDocument_eventFilter" qtc_QTextDocument_eventFilter :: Ptr (TQTextDocument a) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO CBool
instance QeventFilter_h (QTextDocumentSc a) ((QObject t1, QEvent t2)) where
eventFilter_h x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QTextDocument_eventFilter cobj_x0 cobj_x1 cobj_x2
| keera-studios/hsQt | Qtc/Gui/QTextDocument_h.hs | bsd-2-clause | 28,767 | 0 | 18 | 6,549 | 9,473 | 4,506 | 4,967 | -1 | -1 |
module NametableSpec where
import Test.Hspec
import Test.QuickCheck
import Test.QuickCheck.Monadic
import Test.Test
import Grin.Nametable
import Grin.Pretty
runTests :: IO ()
runTests = hspec spec
spec :: Spec
spec = do
describe "Property" $ do
it "restore . convert == id" $ property $
forAll genProg $ \p ->
let p' = restore $ convert p
in (PP p') `shouldBe` (PP p)
| andorp/grin | grin/test/NametableSpec.hs | bsd-3-clause | 400 | 0 | 17 | 92 | 135 | 71 | 64 | 16 | 1 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
module TitleScene (
titleScene
) where
import GameEngine.Scene
import GameEngine.Sprite
import GameEngine.Sprite.Label
import GameEngine.Sprite.Colored
import Control.Lens
import Control.Monad (when)
import Control.Monad.IO.Class
import Control.Monad.State
import Control.Monad.Except
import Data.Color.Names
import qualified Graphics.UI.GLFW as GLFW
import System.Random
data SceneState = SceneState { _title1 :: LabelSprite
, _title2 :: LabelSprite
, _enterKeyPressed :: Bool
}
makeLenses ''SceneState
titleScene :: Scene ()
titleScene = do
s <- liftIO $ initialSceneState
makeScene s sceneGen
initialSceneState :: IO SceneState
initialSceneState = do
freeSans <- loadFont "font/FreeSans.ttf"
return $ SceneState {
_title1 = configureSprite $ do
text .= "Snake Game Haskell"
color .= white
font .= freeSans
scale .= 2
position.x .= 120
position.y .= 240
, _title2 = configureSprite $ do
text .= "Press enter"
color .= white
font .= freeSans
scale .= 1.5
position.x .= 240
position.y .= 180
, _enterKeyPressed = False
}
sceneGen :: SceneGen SceneState ()
sceneGen = SceneGen { keyHandler = keyHandler'
, stepHandler = stepHandler'
, drawHandler = drawHandler'
}
keyHandler' :: GLFW.Key -> GLFW.KeyState -> GLFW.ModifierKeys -> StateT SceneState IO ()
keyHandler' key _ _ = case key of
GLFW.Key'Enter -> enterKeyPressed .= True
_ -> return ()
stepHandler' :: Double -> ExceptT () (StateT SceneState IO) ()
stepHandler' dt = do
gameStart <- use enterKeyPressed
when gameStart $ exitScene ()
drawHandler' :: (Int, Int) -> SceneState -> IO ()
drawHandler' (w, h) state = do
let draw = drawInWindow w h
draw $ state ^. title1
draw $ state ^. title2
| lotz84/SnakeGameHaskell | src/TitleScene.hs | bsd-3-clause | 2,381 | 0 | 14 | 925 | 551 | 288 | 263 | -1 | -1 |
import Language.Haskell.TH
putLeftRight :: Int -> ExpQ -> ExpQ
putLeftRight 0 ex = leftE `appE` ex
putLeftRight n ex = rightE `appE` putLeftRight (n - 1) ex
rightE, leftE :: ExpQ
rightE = conE $ mkName "Right"
leftE = conE $ mkName "Left"
reduce :: Either Int (Either String (Either Char ()))
-> Either Int (Either String Char)
reduce (Right (Right (Left c))) = Right $ Right c
reduce (Right (Left s)) = Right $ Left s
reduce (Left i) = Left i
data DotList x = x :-: (Either (DotList x) x) deriving Show
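-- Illustrative sketch (not part of the original file): when spliced from
-- another module, putLeftRight nests n 'Right' constructors around a final
-- 'Left', e.g. $(putLeftRight 2 [| 'x' |]) yields Right (Right (Left 'x')),
-- which 'reduce' maps to Right (Right 'x').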
| YoshikuniJujo/papillon | test/templateHaskell/leftRight.hs | bsd-3-clause | 509 | 14 | 10 | 100 | 247 | 130 | 117 | 13 | 1 |
{-|
Copyright : (c) Dave Laing, 2017
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : non-portable
-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Rules.Kind.Infer.SyntaxDirected (
IKSyntax
) where
import Control.Monad (unless)
import Control.Monad.Except (MonadError)
import Control.Monad.Error.Lens (throwing)
import Data.List.NonEmpty (NonEmpty(..))
import Ast.Error.Common
import Data.Functor.Rec
import Rules.Kind.Infer.Common
data IKSyntax
instance MkInferKind IKSyntax where
type MkInferKindConstraint e w s r m ki ty a IKSyntax =
( Eq a
, EqRec ki
, MonadError e m
, AsUnknownKindError e
, AsUnexpectedKind e ki a
, AsExpectedKindEq e ki a
, AsExpectedKindAllEq e ki a
)
type InferKindMonad m ki a IKSyntax =
m
type MkInferKindErrorList ki ty a IKSyntax =
'[]
type MkInferKindWarningList ki ty a IKSyntax =
'[]
mkCheckKind m ki ty a i =
mkCheckKind' i (expectKind m ki ty a i)
expectKind _ _ _ _ _ e@(ExpectedKind ki1) a@(ActualKind ki2) =
unless (ki1 == ki2) $
throwing _UnexpectedKind (e, a)
expectKindEq _ _ _ _ _ ki1 ki2 =
unless (ki1 == ki2) $
throwing _ExpectedKindEq (ki1, ki2)
expectKindAllEq _ _ _ _ _ (ki :| kis) = do
unless (all (== ki) kis) $
throwing _ExpectedKindAllEq (ki :| kis)
return ki
prepareInferKind pm pki pty pa pi ki =
let
i = mkInferKind . kriInferRules $ ki
c = mkCheckKind pm pki pty pa pi i
in
InferKindOutput i c
| dalaing/type-systems | src/Rules/Kind/Infer/SyntaxDirected.hs | bsd-3-clause | 1,701 | 0 | 12 | 398 | 500 | 269 | 231 | -1 | -1 |
{-# LANGUAGE GADTs, MultiParamTypeClasses, FlexibleInstances, Rank2Types, PolyKinds, UndecidableInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Monad.Indexed.Free
-- Copyright : (C) 2013 Fumiaki Kinoshita
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : Fumiaki Kinoshita <[email protected]>
-- Stability : provisional
-- Portability : non-portable
--
----------------------------------------------------------------------------
module Control.Monad.Indexed.Free (IxFree(..), hoistIxFree, module Control.Monad.Indexed.Free.Class) where
import Control.Applicative
import Control.Monad.Indexed
import Control.Monad.Indexed.Free.Class
data IxFree f i j x where
Pure :: a -> IxFree f i i a
Free :: f i j (IxFree f j k a) -> IxFree f i k a
instance IxFunctor f => IxFunctor (IxFree f) where
imap f (Pure a) = Pure (f a)
imap f (Free w) = Free (imap (imap f) w)
instance IxFunctor f => IxPointed (IxFree f) where
ireturn = Pure
instance IxFunctor f => IxApplicative (IxFree f) where
iap (Pure a) (Pure b) = Pure (a b)
iap (Pure a) (Free fb) = Free (imap a `imap` fb)
iap (Free fa) mb = Free $ imap (`iap` mb) fa
instance IxFunctor f => IxMonad (IxFree f) where
ibind k (Pure a) = k a
ibind k (Free fm) = Free $ imap (ibind k) fm
instance IxFunctor f => IxMonadFree f (IxFree f) where
iwrap = Free
instance IxFunctor f => Functor (IxFree f i i) where
fmap = imap
instance IxFunctor f => Applicative (IxFree f i i) where
pure = ireturn
(<*>) = iap
instance IxFunctor f => Monad (IxFree f i i) where
return = ireturn
(>>=) = (>>>=)
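-- Illustrative sketch (an assumption, not part of this module): a single
-- layer of an IxFunctor can be lifted into 'IxFree' by wrapping pure leaves
-- under it; the helper name 'iliftFree' is hypothetical.
--
-- > iliftFree :: IxFunctor f => f i j a -> IxFree f i j a
-- > iliftFree = iwrap . imap ireturn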
hoistIxFree :: (IxFunctor g, IxMonadFree g m) => (forall i j x. f i j x -> g i j x) -> IxFree f i j a -> m i j a
hoistIxFree _ (Pure a) = ireturn a
hoistIxFree f (Free fm) = iwrap $ imap (hoistIxFree f) $ f fm | fumieval/indexed-free | Control/Monad/Indexed/Free.hs | bsd-3-clause | 1,914 | 0 | 10 | 398 | 682 | 361 | 321 | 33 | 1 |
{-# LANGUAGE TypeFamilies, TypeOperators, TupleSections #-}
{-# OPTIONS_GHC -Wall #-}
----------------------------------------------------------------------
-- |
-- Module : FunctorCombo.Holey
-- Copyright : (c) Conal Elliott 2010
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Filling and extracting derivatives (one-hole contexts)
-- Variation on Holey, integrating 'Der'
----------------------------------------------------------------------
module FunctorCombo.DHoley (Holey(..), fill) where
import Control.Arrow (first,second)
import FunctorCombo.Functor
{--------------------------------------------------------------------
Extraction
--------------------------------------------------------------------}
-- | Location, i.e., one-hole context and a value for the hole.
type Loc f a = (Der f a, a)
-- | Alternative interface for 'fillC'.
fill :: Holey f => Loc f a -> f a
fill = uncurry fillC
class Functor f => Holey f where
-- | Derivative, i.e., one-hole context
type Der f :: * -> *
-- | Fill a hole
fillC :: Der f a -> a -> f a
-- | All extractions
extract :: f a -> f (Loc f a)
-- The Functor constraint simplifies several signatures below.
instance Holey (Const x) where
type Der (Const x) = Void
fillC = voidF
extract (Const x) = Const x
instance Holey Id where
type Der Id = Unit
fillC (Const ()) = Id
extract (Id a) = Id (Const (), a)
instance (Holey f, Holey g) => Holey (f :+: g) where
type Der (f :+: g) = Der f :+: Der g
fillC (InL df) = InL . fillC df
fillC (InR df) = InR . fillC df
extract (InL fa) = InL ((fmap.first) InL (extract fa))
extract (InR ga) = InR ((fmap.first) InR (extract ga))
{-
InL fa :: (f :+: g) a
fa :: f a
extract fa :: f (Loc f a)
extract fa :: f (Der f a, a)
(fmap.first) InL (extract fa) :: f ((Der f :+: Der g) a, a)
(fmap.first) InL (extract fa) :: f ((Der (f :+: g) a), a)
InL ((fmap.first) InL (extract fa)) :: (f :+: g) ((Der (f :+: g) a), a)
-}
-- Der (f :*: g) = Der f :*: g :+: f :*: Der g
instance (Holey f, Holey g) => Holey (f :*: g) where
type Der (f :*: g) = Der f :*: g :+: f :*: Der g
fillC (InL (dfa :*: ga)) = (:*: ga) . fillC dfa
fillC (InR ( fa :*: dga)) = (fa :*:) . fillC dga
extract (fa :*: ga) = (fmap.first) (InL . (:*: ga)) (extract fa) :*:
(fmap.first) (InR . (fa :*:)) (extract ga)
{-
fa :*: ga :: (f :*: g) a
fa :: f a
extract fa :: f (Loc f a)
(fmap.first) (:*: ga) (extract fa) :: f ((Der f :*: g) a, a)
(fmap.first) (InL . (:*: ga)) (extract fa)
:: f (((Der f :*: g) :+: (f :*: Der g)) a, a)
(fmap.first) (InL . (:*: ga)) (extract fa) :: f ((Der (f :*: g)) a, a)
(fmap.first) (InR . (fa :*:)) (extract ga) :: g ((Der (f :*: g)) a, a)
(fmap.first) (InL . (:*: ga)) (extract fa) :*: (fmap.first) (InR . (fa :*:)) (extract ga)
:: (f :*: g) (Der (f :*: g) a, a)
-}
-- type instance Der (g :. f) = Der g :. f :*: Der f
{-
lassoc :: (p,(q,r)) -> ((p,q),r)
lassoc (p,(q,r)) = ((p,q),r)
squishP :: Functor f => (a, f b) -> f (a,b)
squishP (a,fb) = fmap (a,) fb
tweak1 :: Functor f => (dg (fa), f (dfa, a)) -> f ((dg (fa), dfa), a)
tweak1 = fmap lassoc . squishP
chainRule :: (dg (f a), df a) -> ((dg :. f) :*: df) a
chainRule (dgfa, dfa) = O dgfa :*: dfa
tweak2 :: Functor f => (dg (f a), f (df a, a)) -> f (((dg :. f) :*: df) a, a)
tweak2 = (fmap.first) chainRule . tweak1
-}
-- Sjoerd Visscher wrote <http://conal.net/blog/posts/another-angle-on-zippers/#comment-51328>:
-- At first it was a bit disappointing that extract is so complicated for
-- functor composition, but I played a bit with the code and tweak2 can be
-- simplified (if I didn't make a mistake) to:
-- tweak2 (dgfa, fl) = (fmap.first) (O dgfa :*:) fl
-- It's interesting that (tweak2 . second extract) is very much like down!
-- Probably because Fix f is like repeated functor composition of f.
tweak2 :: Functor f => (dg (f a), f (df a, a)) -> f (((dg :. f) :*: df) a, a)
tweak2 (dgfa, fl) = (fmap.first) (O dgfa :*:) fl
-- And more specifically,
--
-- tweak2 :: Functor f => (Der g (f a), f (Loc f a)) -> f (((Der g :. f) :*: Der f) a, a)
-- tweak2 :: Functor f => (Der g (f a), f (Loc f a)) -> f (Der (g :. f) a, a)
-- tweak2 :: Functor f => (Der g (f a), f (Loc f a)) -> f (Loc (g :. f) a)
{-
(dg fa, f (dfa,a))
f (dg fa, (df,a))
f ((dg fa, dfa), a)
-}
extractGF :: (Holey f, Holey g) =>
g (f a) -> g (f (Loc (g :. f) a))
extractGF = fmap (tweak2 . second extract) . extract
{-
gfa :: g (f a)
extract gfa :: g (Der g (f a), f a)
fmap (second extract) (extract gfa) :: g (Der g (f a), f (Loc f a))
fmap (tweak2 . second extract) (extract gfa)
:: g (f (Loc (g :. f)) a)
-}
-- Der (g :. f) = Der g :. f :*: Der f
instance (Holey f, Holey g) => Holey (g :. f) where
type Der (g :. f) = Der g :. f :*: Der f
  fillC (O dgfa :*: dfa) = O . fillC dgfa . fillC dfa
extract = inO extractGF
-- extract (O gfa) = O (extractGF gfa)
{-
O dgfa :*: dfa :: Der (g :. f) a
O dgfa :*: dfa :: (Der g :. f :*: Der f) a
dgfa :: Der g (f a)
dfa :: Der f a
fillC dfa a :: f a
fillC dgfa (fillC dfa a) :: g (f a)
O (fillC dgfa (fillC dfa a)) :: (g :. f) a
-}
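-- Illustrative sketch (an assumption, not stated explicitly above): the
-- intended round trip is that refilling an extracted location reproduces the
-- original structure, i.e. for every (d, x) occurring in 'extract' fa,
--
-- > fillC d x == fa
--
-- For example, with f = Id :*: Id, extract (Id 'a' :*: Id 'b') yields one
-- location per hole, and 'fill' of either location gives back
-- Id 'a' :*: Id 'b'.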
| conal/functor-combo | src/FunctorCombo/DHoley.hs | bsd-3-clause | 5,280 | 0 | 13 | 1,294 | 963 | 520 | 443 | 41 | 1 |
{-# LANGUAGE
OverloadedStrings
, FlexibleContexts
#-}
module Application where
import Template.Main (htmlLight, page)
import Application.Types (MonadApp)
import Network.Wai.Trans (ApplicationT, MiddlewareT)
import Network.Wai (pathInfo)
import Network.Wai.Middleware.ContentType
import Network.Wai.Middleware.Verbs (get)
import Network.HTTP.Types (status200, status404)
import Web.Page.Lucid (template)
import Web.Routes.Nested (RouterT, route, matchHere, match, matchAny, action)
import Lucid
server :: MonadApp m => MiddlewareT m
server = route routes
where
routes :: MonadApp m => RouterT (MiddlewareT m) sec m ()
routes = do
matchHere $ action $ get $ htmlLight status200 $ template page $ p_ "Sup!"
matchAny $ action $ get $ htmlLight status404 $ template page $ p_ "404 d:"
defApp :: MonadApp m => ApplicationT m
defApp _ respond = respond $ textOnly "404 d:" status404 []
| Debatable-Online/backend | src/Application.hs | bsd-3-clause | 917 | 0 | 13 | 156 | 286 | 156 | 130 | 22 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Handles @deriving@ clauses on @data@ declarations.
-}
{-# LANGUAGE CPP #-}
module TcDeriv ( tcDeriving, DerivInfo(..), mkDerivInfos ) where
#include "HsVersions.h"
import HsSyn
import DynFlags
import TcRnMonad
import FamInst
import TcErrors( reportAllUnsolved )
import TcValidity( validDerivPred )
import TcClassDcl( tcATDefault, tcMkDeclCtxt )
import TcEnv
import TcGenDeriv -- Deriv stuff
import TcGenGenerics
import InstEnv
import Inst
import FamInstEnv
import TcHsType
import TcMType
import TcSimplify
import TcUnify( buildImplicationFor )
import LoadIface( loadInterfaceForName )
import Module( getModule )
import RnNames( extendGlobalRdrEnvRn )
import RnBinds
import RnEnv
import RnSource ( addTcgDUs )
import HscTypes
import Avail
import Unify( tcUnifyTy )
import Class
import Type
import ErrUtils
import DataCon
import Maybes
import RdrName
import Name
import NameSet
import TyCon
import TcType
import Var
import VarEnv
import VarSet
import PrelNames
import THNames ( liftClassKey )
import SrcLoc
import Util
import Outputable
import FastString
import Bag
import Pair
import qualified GHC.LanguageExtensions as LangExt
import Control.Monad
import Data.List
{-
************************************************************************
* *
Overview
* *
************************************************************************
Overall plan
~~~~~~~~~~~~
1. Convert the decls (i.e. data/newtype deriving clauses,
plus standalone deriving) to [EarlyDerivSpec]
2. Infer the missing contexts for the InferTheta's
3. Add the derived bindings, generating InstInfos
-}
-- DerivSpec is purely local to this module
data DerivSpec theta = DS { ds_loc :: SrcSpan
, ds_name :: Name -- DFun name
, ds_tvs :: [TyVar]
, ds_theta :: theta
, ds_cls :: Class
, ds_tys :: [Type]
, ds_tc :: TyCon
, ds_overlap :: Maybe OverlapMode
, ds_newtype :: Maybe Type } -- The newtype rep type
-- This spec implies a dfun declaration of the form
-- df :: forall tvs. theta => C tys
-- The Name is the name for the DFun we'll build
-- The tyvars bind all the variables in the theta
-- For type families, the tycon in
-- in ds_tys is the *family* tycon
-- in ds_tc is the *representation* type
-- For non-family tycons, both are the same
-- the theta is either the given and final theta, in standalone deriving,
-- or the not-yet-simplified list of constraints together with their origin
-- ds_newtype = Just rep_ty <=> Generalised Newtype Deriving (GND)
-- Nothing <=> Vanilla deriving
{-
Example:
newtype instance T [a] = MkT (Tree a) deriving( C s )
==>
axiom T [a] = :RTList a
axiom :RTList a = Tree a
DS { ds_tvs = [a,s], ds_cls = C, ds_tys = [s, T [a]]
, ds_tc = :RTList, ds_newtype = Just (Tree a) }
-}
type DerivContext = Maybe ThetaType
-- Nothing <=> Vanilla deriving; infer the context of the instance decl
-- Just theta <=> Standalone deriving: context supplied by programmer
data PredOrigin = PredOrigin PredType CtOrigin TypeOrKind
type ThetaOrigin = [PredOrigin]
mkPredOrigin :: CtOrigin -> TypeOrKind -> PredType -> PredOrigin
mkPredOrigin origin t_or_k pred = PredOrigin pred origin t_or_k
mkThetaOrigin :: CtOrigin -> TypeOrKind -> ThetaType -> ThetaOrigin
mkThetaOrigin origin t_or_k = map (mkPredOrigin origin t_or_k)
data EarlyDerivSpec = InferTheta (DerivSpec ThetaOrigin)
| GivenTheta (DerivSpec ThetaType)
-- InferTheta ds => the context for the instance should be inferred
-- In this case ds_theta is the list of all the constraints
-- needed, such as (Eq [a], Eq a), together with a suitable CtLoc
-- to get good error messages.
-- The inference process is to reduce this to a
-- simpler form (e.g. Eq a)
--
-- GivenTheta ds => the exact context for the instance is supplied
-- by the programmer; it is ds_theta
-- See Note [Inferring the instance context]
earlyDSLoc :: EarlyDerivSpec -> SrcSpan
earlyDSLoc (InferTheta spec) = ds_loc spec
earlyDSLoc (GivenTheta spec) = ds_loc spec
splitEarlyDerivSpec :: [EarlyDerivSpec] -> ([DerivSpec ThetaOrigin], [DerivSpec ThetaType])
splitEarlyDerivSpec [] = ([],[])
splitEarlyDerivSpec (InferTheta spec : specs) =
case splitEarlyDerivSpec specs of (is, gs) -> (spec : is, gs)
splitEarlyDerivSpec (GivenTheta spec : specs) =
case splitEarlyDerivSpec specs of (is, gs) -> (is, spec : gs)
pprDerivSpec :: Outputable theta => DerivSpec theta -> SDoc
pprDerivSpec (DS { ds_loc = l, ds_name = n, ds_tvs = tvs,
ds_cls = c, ds_tys = tys, ds_theta = rhs })
= hang (text "DerivSpec")
2 (vcat [ text "ds_loc =" <+> ppr l
, text "ds_name =" <+> ppr n
, text "ds_tvs =" <+> ppr tvs
, text "ds_cls =" <+> ppr c
, text "ds_tys =" <+> ppr tys
, text "ds_theta =" <+> ppr rhs ])
instance Outputable theta => Outputable (DerivSpec theta) where
ppr = pprDerivSpec
instance Outputable EarlyDerivSpec where
ppr (InferTheta spec) = ppr spec <+> text "(Infer)"
ppr (GivenTheta spec) = ppr spec <+> text "(Given)"
instance Outputable PredOrigin where
ppr (PredOrigin ty _ _) = ppr ty -- The origin is not so interesting when debugging
{- Note [Inferring the instance context]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There are two sorts of 'deriving':
* InferTheta: the deriving clause for a data type
data T a = T1 a deriving( Eq )
Here we must infer an instance context,
and generate instance declaration
instance Eq a => Eq (T a) where ...
* CheckTheta: standalone deriving
deriving instance Eq a => Eq (T a)
Here we only need to fill in the bindings;
the instance context is user-supplied
For a deriving clause (InferTheta) we must figure out the
instance context (inferConstraints). Suppose we are inferring
the instance context for
C t1 .. tn (T s1 .. sm)
There are two cases
* (T s1 .. sm) :: * (the normal case)
Then we behave like Eq and guess (C t1 .. tn t)
for each data constructor arg of type t. More
details below.
* (T s1 .. sm) :: * -> * (the functor-like case)
Then we behave like Functor.
In both cases we produce a bunch of un-simplified constraints
and them simplify them in simplifyInstanceContexts; see
Note [Simplifying the instance context].
Note [Data decl contexts]
~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
data (RealFloat a) => Complex a = !a :+ !a deriving( Read )
We will need an instance decl like:
instance (Read a, RealFloat a) => Read (Complex a) where
...
The RealFloat in the context is because the read method for Complex is bound
to construct a Complex, and doing that requires that the argument type is
in RealFloat.
But this ain't true for Show, Eq, Ord, etc, since they don't construct
a Complex; they only take them apart.
Our approach: identify the offending classes, and add the data type
context to the instance decl. The "offending classes" are
Read, Enum?
FURTHER NOTE ADDED March 2002. In fact, Haskell98 now requires that
pattern matching against a constructor from a data type with a context
gives rise to the constraints for that context -- or at least the thinned
version. So now all classes are "offending".
Note [Newtype deriving]
~~~~~~~~~~~~~~~~~~~~~~~
Consider this:
class C a b
instance C [a] Char
newtype T = T Char deriving( C [a] )
Notice the free 'a' in the deriving. We have to fill this out to
newtype T = T Char deriving( forall a. C [a] )
And then translate it to:
instance C [a] Char => C [a] T where ...
Note [Newtype deriving superclasses]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
(See also Trac #1220 for an interesting exchange on newtype
deriving and superclasses.)
The 'tys' here come from the partial application in the deriving
clause. The last arg is the new instance type.
We must pass the superclasses; the newtype might be an instance
of them in a different way than the representation type
E.g. newtype Foo a = Foo a deriving( Show, Num, Eq )
Then the Show instance is not done via Coercible; it shows
Foo 3 as "Foo 3"
The Num instance is derived via Coercible, but the Show superclass
dictionary must be the Show instance for Foo, *not* the Show dictionary
gotten from the Num dictionary. So we must build a whole new dictionary
not just use the Num one. The instance we want is something like:
instance (Num a, Show (Foo a), Eq (Foo a)) => Num (Foo a) where
(+) = ((+)@a)
...etc...
There may be a coercion needed which we get from the tycon for the newtype
when the dict is constructed in TcInstDcls.tcInstDecl2
Note [Unused constructors and deriving clauses]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See Trac #3221. Consider
data T = T1 | T2 deriving( Show )
Are T1 and T2 unused? Well, no: the deriving clause expands to mention
both of them. So we gather defs/uses from deriving just like anything else.
-}
-- | Stuff needed to process a `deriving` clause
data DerivInfo = DerivInfo { di_rep_tc :: TyCon
-- ^ The data tycon for normal datatypes,
-- or the *representation* tycon for data families
, di_preds :: [LHsSigType Name]
, di_ctxt :: SDoc -- ^ error context
}
-- | Extract `deriving` clauses of proper data type (skips data families)
mkDerivInfos :: [TyClGroup Name] -> TcM [DerivInfo]
mkDerivInfos tycls = concatMapM mk_derivs tycls
where
mk_derivs (TyClGroup { group_tyclds = decls })
= concatMapM (mk_deriv . unLoc) decls
mk_deriv decl@(DataDecl { tcdLName = L _ data_name
, tcdDataDefn =
HsDataDefn { dd_derivs = Just (L _ preds) } })
= do { tycon <- tcLookupTyCon data_name
; return [DerivInfo { di_rep_tc = tycon, di_preds = preds
, di_ctxt = tcMkDeclCtxt decl }] }
mk_deriv _ = return []
{-
************************************************************************
* *
\subsection[TcDeriv-driver]{Top-level function for \tr{derivings}}
* *
************************************************************************
-}
tcDeriving :: [DerivInfo] -- All `deriving` clauses
-> [LDerivDecl Name] -- All stand-alone deriving declarations
-> TcM (TcGblEnv, Bag (InstInfo Name), HsValBinds Name)
tcDeriving deriv_infos deriv_decls
= recoverM (do { g <- getGblEnv
; return (g, emptyBag, emptyValBindsOut)}) $
do { -- Fish the "deriving"-related information out of the TcEnv
-- And make the necessary "equations".
is_boot <- tcIsHsBootOrSig
; traceTc "tcDeriving" (ppr is_boot)
; early_specs <- makeDerivSpecs is_boot deriv_infos deriv_decls
; traceTc "tcDeriving 1" (ppr early_specs)
; let (infer_specs, given_specs) = splitEarlyDerivSpec early_specs
; insts1 <- mapM genInst given_specs
-- the stand-alone derived instances (@insts1@) are used when inferring
-- the contexts for "deriving" clauses' instances (@infer_specs@)
; final_specs <- extendLocalInstEnv (map (iSpec . fstOf3) insts1) $
simplifyInstanceContexts infer_specs
; insts2 <- mapM genInst final_specs
; let (inst_infos, deriv_stuff, maybe_fvs) = unzip3 (insts1 ++ insts2)
; loc <- getSrcSpanM
; let (binds, famInsts, extraInstances) =
genAuxBinds loc (unionManyBags deriv_stuff)
; dflags <- getDynFlags
; (inst_info, rn_binds, rn_dus) <-
renameDeriv is_boot (inst_infos ++ (bagToList extraInstances)) binds
; unless (isEmptyBag inst_info) $
liftIO (dumpIfSet_dyn dflags Opt_D_dump_deriv "Derived instances"
(ddump_deriving inst_info rn_binds famInsts))
; gbl_env <- tcExtendLocalFamInstEnv (bagToList famInsts) $
tcExtendLocalInstEnv (map iSpec (bagToList inst_info)) getGblEnv
; let all_dus = rn_dus `plusDU` usesOnly (mkFVs $ catMaybes maybe_fvs)
; return (addTcgDUs gbl_env all_dus, inst_info, rn_binds) }
where
ddump_deriving :: Bag (InstInfo Name) -> HsValBinds Name
-> Bag FamInst -- ^ Rep type family instances
-> SDoc
ddump_deriving inst_infos extra_binds repFamInsts
= hang (text "Derived instances:")
2 (vcat (map (\i -> pprInstInfoDetails i $$ text "") (bagToList inst_infos))
$$ ppr extra_binds)
$$ hangP "GHC.Generics representation types:"
(vcat (map pprRepTy (bagToList repFamInsts)))
hangP s x = text "" $$ hang (ptext (sLit s)) 2 x
-- Prints the representable type family instance
pprRepTy :: FamInst -> SDoc
pprRepTy fi@(FamInst { fi_tys = lhs })
= text "type" <+> ppr (mkTyConApp (famInstTyCon fi) lhs) <+>
equals <+> ppr rhs
where rhs = famInstRHS fi
renameDeriv :: Bool
-> [InstInfo RdrName]
-> Bag (LHsBind RdrName, LSig RdrName)
-> TcM (Bag (InstInfo Name), HsValBinds Name, DefUses)
renameDeriv is_boot inst_infos bagBinds
| is_boot -- If we are compiling a hs-boot file, don't generate any derived bindings
-- The inst-info bindings will all be empty, but it's easier to
-- just use rn_inst_info to change the type appropriately
= do { (rn_inst_infos, fvs) <- mapAndUnzipM rn_inst_info inst_infos
; return ( listToBag rn_inst_infos
, emptyValBindsOut, usesOnly (plusFVs fvs)) }
| otherwise
= discardWarnings $ -- Discard warnings about unused bindings etc
setXOptM LangExt.EmptyCase $ -- Derived decls (for empty types) can have
-- case x of {}
setXOptM LangExt.ScopedTypeVariables $ -- Derived decls (for newtype-deriving) can
setXOptM LangExt.KindSignatures $ -- used ScopedTypeVariables & KindSignatures
do {
-- Bring the extra deriving stuff into scope
-- before renaming the instances themselves
; traceTc "rnd" (vcat (map (\i -> pprInstInfoDetails i $$ text "") inst_infos))
; (aux_binds, aux_sigs) <- mapAndUnzipBagM return bagBinds
; let aux_val_binds = ValBindsIn aux_binds (bagToList aux_sigs)
; rn_aux_lhs <- rnTopBindsLHS emptyFsEnv aux_val_binds
; let bndrs = collectHsValBinders rn_aux_lhs
; envs <- extendGlobalRdrEnvRn (map avail bndrs) emptyFsEnv ;
; setEnvs envs $
do { (rn_aux, dus_aux) <- rnValBindsRHS (TopSigCtxt (mkNameSet bndrs)) rn_aux_lhs
; (rn_inst_infos, fvs_insts) <- mapAndUnzipM rn_inst_info inst_infos
; return (listToBag rn_inst_infos, rn_aux,
dus_aux `plusDU` usesOnly (plusFVs fvs_insts)) } }
where
rn_inst_info :: InstInfo RdrName -> TcM (InstInfo Name, FreeVars)
rn_inst_info
inst_info@(InstInfo { iSpec = inst
, iBinds = InstBindings
{ ib_binds = binds
, ib_tyvars = tyvars
, ib_pragmas = sigs
, ib_extensions = exts -- Only for type-checking
, ib_derived = sa } })
= ASSERT( null sigs )
bindLocalNamesFV tyvars $
do { (rn_binds,_, fvs) <- rnMethodBinds False (is_cls_nm inst) [] binds []
; let binds' = InstBindings { ib_binds = rn_binds
, ib_tyvars = tyvars
, ib_pragmas = []
, ib_extensions = exts
, ib_derived = sa }
; return (inst_info { iBinds = binds' }, fvs) }
{-
Note [Newtype deriving and unused constructors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this (see Trac #1954):
module Bug(P) where
newtype P a = MkP (IO a) deriving Monad
If you compile with -Wunused-binds you do not expect the warning
"Defined but not used: data consructor MkP". Yet the newtype deriving
code does not explicitly mention MkP, but it should behave as if you
had written
instance Monad P where
return x = MkP (return x)
...etc...
So we want to register a use of the data constructor 'MkP'.
This is the reason behind the (Maybe Name) part of the return type
of genInst.
Note [Why we don't pass rep_tc into deriveTyData]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Down in the bowels of mkEqnHelp, we need to convert the fam_tc back into
the rep_tc by means of a lookup. And yet we have the rep_tc right here!
Why look it up again? Answer: it's just easier this way.
We drop some number of arguments from the end of the datatype definition
in deriveTyData. The arguments are dropped from the fam_tc.
This action may drop a *different* number of arguments
passed to the rep_tc, depending on how many free variables, etc., the
dropped patterns have.
Also, this technique carries over the kind substitution from deriveTyData
nicely.
************************************************************************
* *
From HsSyn to DerivSpec
* *
************************************************************************
@makeDerivSpecs@ fishes around to find the info about needed derived instances.
-}
makeDerivSpecs :: Bool
-> [DerivInfo]
-> [LDerivDecl Name]
-> TcM [EarlyDerivSpec]
makeDerivSpecs is_boot deriv_infos deriv_decls
= do { eqns1 <- concatMapM (recoverM (return []) . deriveDerivInfo) deriv_infos
; eqns2 <- concatMapM (recoverM (return []) . deriveStandalone) deriv_decls
; let eqns = eqns1 ++ eqns2
; if is_boot then -- No 'deriving' at all in hs-boot files
do { unless (null eqns) (add_deriv_err (head eqns))
; return [] }
else return eqns }
where
add_deriv_err eqn
= setSrcSpan (earlyDSLoc eqn) $
addErr (hang (text "Deriving not permitted in hs-boot file")
2 (text "Use an instance declaration instead"))
------------------------------------------------------------------
-- | Process a `deriving` clause
deriveDerivInfo :: DerivInfo -> TcM [EarlyDerivSpec]
deriveDerivInfo (DerivInfo { di_rep_tc = rep_tc, di_preds = preds
, di_ctxt = err_ctxt })
= addErrCtxt err_ctxt $
concatMapM (deriveTyData tvs tc tys) preds
where
tvs = tyConTyVars rep_tc
(tc, tys) = case tyConFamInstSig_maybe rep_tc of
-- data family:
Just (fam_tc, pats, _) -> (fam_tc, pats)
-- NB: deriveTyData wants the *user-specified*
-- name. See Note [Why we don't pass rep_tc into deriveTyData]
_ -> (rep_tc, mkTyVarTys tvs) -- datatype
------------------------------------------------------------------
deriveStandalone :: LDerivDecl Name -> TcM [EarlyDerivSpec]
-- Standalone deriving declarations
-- e.g. deriving instance Show a => Show (T a)
-- Rather like tcLocalInstDecl
deriveStandalone (L loc (DerivDecl deriv_ty overlap_mode))
= setSrcSpan loc $
addErrCtxt (standaloneCtxt deriv_ty) $
do { traceTc "Standalone deriving decl for" (ppr deriv_ty)
; (tvs, theta, cls, inst_tys) <- tcHsClsInstType TcType.InstDeclCtxt deriv_ty
; traceTc "Standalone deriving;" $ vcat
[ text "tvs:" <+> ppr tvs
, text "theta:" <+> ppr theta
, text "cls:" <+> ppr cls
, text "tys:" <+> ppr inst_tys ]
-- C.f. TcInstDcls.tcLocalInstDecl1
; checkTc (not (null inst_tys)) derivingNullaryErr
; let cls_tys = take (length inst_tys - 1) inst_tys
inst_ty = last inst_tys
; traceTc "Standalone deriving:" $ vcat
[ text "class:" <+> ppr cls
, text "class types:" <+> ppr cls_tys
, text "type:" <+> ppr inst_ty ]
; case tcSplitTyConApp_maybe inst_ty of
Just (tc, tc_args)
| className cls == typeableClassName
-> do warnUselessTypeable
return []
| isAlgTyCon tc || isDataFamilyTyCon tc -- All other classes
-> do { spec <- mkEqnHelp (fmap unLoc overlap_mode)
tvs cls cls_tys tc tc_args
(Just theta)
; return [spec] }
_ -> -- Complain about functions, primitive types, etc,
failWithTc $ derivingThingErr False cls cls_tys inst_ty $
text "The last argument of the instance must be a data or newtype application"
}
warnUselessTypeable :: TcM ()
warnUselessTypeable
= do { warn <- woptM Opt_WarnDerivingTypeable
; when warn $ addWarnTc (Reason Opt_WarnDerivingTypeable)
$ text "Deriving" <+> quotes (ppr typeableClassName) <+>
text "has no effect: all types now auto-derive Typeable" }
------------------------------------------------------------------
deriveTyData :: [TyVar] -> TyCon -> [Type] -- LHS of data or data instance
-- Can be a data instance, hence [Type] args
-> LHsSigType Name -- The deriving predicate
-> TcM [EarlyDerivSpec]
-- The deriving clause of a data or newtype declaration
-- I.e. not standalone deriving
deriveTyData tvs tc tc_args deriv_pred
= setSrcSpan (getLoc (hsSigType deriv_pred)) $ -- Use loc of the 'deriving' item
do { (deriv_tvs, cls, cls_tys, cls_arg_kind)
<- tcExtendTyVarEnv tvs $
tcHsDeriv deriv_pred
-- Deriving preds may (now) mention
-- the type variables for the type constructor, hence tcExtendTyVarEnv
-- The "deriv_pred" is a LHsType to take account of the fact that for
-- newtype deriving we allow deriving (forall a. C [a]).
-- Typeable is special, because Typeable :: forall k. k -> Constraint
-- so the argument kind 'k' is not decomposable by splitFunTys
-- as is the case for all other derivable type classes
; if className cls == typeableClassName
then do warnUselessTypeable
return []
else
do { -- Given data T a b c = ... deriving( C d ),
-- we want to drop type variables from T so that (C d (T a)) is well-kinded
let (arg_kinds, _) = splitFunTys cls_arg_kind
n_args_to_drop = length arg_kinds
n_args_to_keep = tyConArity tc - n_args_to_drop
(tc_args_to_keep, args_to_drop)
= splitAt n_args_to_keep tc_args
inst_ty_kind = typeKind (mkTyConApp tc tc_args_to_keep)
-- Use exactTyCoVarsOfTypes, not tyCoVarsOfTypes, so that we
-- don't mistakenly grab a type variable mentioned in a type
-- synonym that drops it.
-- See Note [Eta-reducing type synonyms].
dropped_tvs = exactTyCoVarsOfTypes args_to_drop
-- Match up the kinds, and apply the resulting kind substitution
-- to the types. See Note [Unify kinds in deriving]
-- We are assuming the tycon tyvars and the class tyvars are distinct
mb_match = tcUnifyTy inst_ty_kind cls_arg_kind
Just kind_subst = mb_match
all_tkvs = varSetElemsWellScoped $
mkVarSet deriv_tvs `unionVarSet`
tyCoVarsOfTypes tc_args_to_keep
unmapped_tkvs = filter (`notElemTCvSubst` kind_subst) all_tkvs
(subst, tkvs) = mapAccumL substTyVarBndr
kind_subst unmapped_tkvs
final_tc_args = substTys subst tc_args_to_keep
final_cls_tys = substTys subst cls_tys
; traceTc "derivTyData1" (vcat [ pprTvBndrs tvs, ppr tc, ppr tc_args, ppr deriv_pred
, pprTvBndrs (tyCoVarsOfTypesList tc_args)
, ppr n_args_to_keep, ppr n_args_to_drop
, ppr inst_ty_kind, ppr cls_arg_kind, ppr mb_match
, ppr final_tc_args, ppr final_cls_tys ])
-- Check that the result really is well-kinded
; checkTc (n_args_to_keep >= 0 && isJust mb_match)
(derivingKindErr tc cls cls_tys cls_arg_kind)
; traceTc "derivTyData2" (vcat [ ppr tkvs ])
; checkTc (allDistinctTyVars args_to_drop && -- (a) and (b)
not (any (`elemVarSet` dropped_tvs) tkvs)) -- (c)
(derivingEtaErr cls final_cls_tys (mkTyConApp tc final_tc_args))
-- Check that
-- (a) The args to drop are all type variables; eg reject:
-- data instance T a Int = .... deriving( Monad )
-- (b) The args to drop are all *distinct* type variables; eg reject:
-- class C (a :: * -> * -> *) where ...
-- data instance T a a = ... deriving( C )
-- (c) The type class args, or remaining tycon args,
-- do not mention any of the dropped type variables
-- newtype T a s = ... deriving( ST s )
-- newtype instance K a a = ... deriving( Monad )
; spec <- mkEqnHelp Nothing tkvs
cls final_cls_tys tc final_tc_args Nothing
; traceTc "derivTyData" (ppr spec)
; return [spec] } }
{-
Note [Unify kinds in deriving]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider (Trac #8534)
data T a b = MkT a deriving( Functor )
-- where Functor :: (*->*) -> Constraint
So T :: forall k. * -> k -> *. We want to get
instance Functor (T * (a:*)) where ...
Notice the '*' argument to T.
Moreover, as well as instantiating T's kind arguments, we may need to instantiate
C's kind args. Consider (Trac #8865):
newtype T a b = MkT (Either a b) deriving( Category )
where
Category :: forall k. (k -> k -> *) -> Constraint
We need to generate the instance
instance Category * Either => Category * T where ...
Notice the '*' argument to Category.
So we need to
* drop arguments from (T a b) to match the number of
arrows in the (last argument of the) class;
* and then *unify* kind of the remaining type against the
expected kind, to figure out how to instantiate C's and T's
kind arguments.
In the two examples,
* we unify kind-of( T k (a:k) ) ~ kind-of( Functor )
i.e. (k -> *) ~ (* -> *), yielding k:=*
* we unify kind-of( Either ) ~ kind-of( Category )
i.e. (* -> * -> *) ~ (k -> k -> *)
yielding k:=*
Now we get a kind substitution. We then need to:
1. Remove the substituted-out kind variables from the quantified kind vars
2. Apply the substitution to the kinds of quantified *type* vars
(and extend the substitution to reflect this change)
3. Apply that extended substitution to the non-dropped args (types and
kinds) of the type and class
Forgetting step (2) caused Trac #8893:
data V a = V [a] deriving Functor
data P (x::k->*) (a::k) = P (x a) deriving Functor
data C (x::k->*) (a::k) = C (V (P x a)) deriving Functor
When deriving Functor for P, we unify k to *, but we then want
an instance $df :: forall (x:*->*). Functor x => Functor (P * (x:*->*))
and similarly for C. Notice the modified kind of x, both at binding
and occurrence sites.
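For reference, here is a compile-ready variant of the first example (the
module name and the explicit kind annotation are ours, added only for
illustration):

    {-# LANGUAGE PolyKinds, KindSignatures, DeriveFunctor #-}
    module KindUnifyDemo where

    -- With PolyKinds, T :: forall k. * -> k -> *
    data T a (b :: k) = MkT a deriving Functor

    -- Deriving unifies k with *, so the generated instance is essentially
    --     instance Functor (T a) where fmap _ (MkT x) = MkT x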
Note [Eta-reducing type synonyms]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
One can instantiate a type in a data family instance with a type synonym that
mentions other type variables:
type Const a b = a
data family Fam (f :: * -> *) (a :: *)
newtype instance Fam f (Const a f) = Fam (f a) deriving Functor
With -XTypeInType, it is also possible to define kind synonyms, and they can
mention other types in a datatype declaration. For example,
type Const a b = a
newtype T f (a :: Const * f) = T (f a) deriving Functor
When deriving, we need to perform eta-reduction analysis to ensure that none of
the eta-reduced type variables are mentioned elsewhere in the declaration. But
we need to be careful, because if we don't expand through the Const type
synonym, we will mistakenly believe that f is an eta-reduced type variable and
fail to derive Functor, even though the code above is correct (see Trac #11416,
where this was first noticed).
For this reason, we call exactTyCoVarsOfTypes on the eta-reduced types so that
we only consider the type variables that remain after expanding through type
synonyms.
-}
mkEqnHelp :: Maybe OverlapMode
-> [TyVar]
-> Class -> [Type]
-> TyCon -> [Type]
-> DerivContext -- Just => context supplied (standalone deriving)
-- Nothing => context inferred (deriving on data decl)
-> TcRn EarlyDerivSpec
-- Make the EarlyDerivSpec for an instance
-- forall tvs. theta => cls (tys ++ [ty])
-- where the 'theta' is optional (that's the Maybe part)
-- Assumes that this declaration is well-kinded
mkEqnHelp overlap_mode tvs cls cls_tys tycon tc_args mtheta
= do { -- Find the instance of a data family
-- Note [Looking up family instances for deriving]
fam_envs <- tcGetFamInstEnvs
; let (rep_tc, rep_tc_args, _co) = tcLookupDataFamInst fam_envs tycon tc_args
-- If it's still a data family, the lookup failed; i.e. no instance exists
; when (isDataFamilyTyCon rep_tc)
(bale_out (text "No family instance for" <+> quotes (pprTypeApp tycon tc_args)))
-- For standalone deriving (mtheta /= Nothing),
-- check that all the data constructors are in scope.
; rdr_env <- getGlobalRdrEnv
; let data_con_names = map dataConName (tyConDataCons rep_tc)
hidden_data_cons = not (isWiredInName (tyConName rep_tc)) &&
(isAbstractTyCon rep_tc ||
any not_in_scope data_con_names)
not_in_scope dc = null (lookupGRE_Name rdr_env dc)
; addUsedDataCons rdr_env rep_tc
; unless (isNothing mtheta || not hidden_data_cons)
(bale_out (derivingHiddenErr tycon))
; dflags <- getDynFlags
; if isDataTyCon rep_tc then
mkDataTypeEqn dflags overlap_mode tvs cls cls_tys
tycon tc_args rep_tc rep_tc_args mtheta
else
mkNewTypeEqn dflags overlap_mode tvs cls cls_tys
tycon tc_args rep_tc rep_tc_args mtheta }
where
bale_out msg = failWithTc (derivingThingErr False cls cls_tys (mkTyConApp tycon tc_args) msg)
{-
Note [Looking up family instances for deriving]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
tcLookupFamInstExact is an auxiliary lookup wrapper which requires
that looked-up family instances exist. If called with a vanilla
tycon, the old type application is simply returned.
If we have
data instance F () = ... deriving Eq
data instance F () = ... deriving Eq
then tcLookupFamInstExact will be confused by the two matches;
but that can't happen because tcInstDecls1 doesn't call tcDeriving
if there are any overlaps.
There are two other things that might go wrong with the lookup.
First, we might see a standalone deriving clause
deriving Eq (F ())
when there is no data instance F () in scope.
Note that it's OK to have
data instance F [a] = ...
deriving Eq (F [(a,b)])
where the match is not exact; the same holds for ordinary data types
with standalone deriving declarations.
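To make the non-exact case concrete, here is a small self-contained sketch
(module and constructor names are invented for illustration):

    {-# LANGUAGE TypeFamilies, StandaloneDeriving, FlexibleInstances #-}
    module FamDerivDemo where

    data family F a
    data instance F [a] = FList [a]

    -- The standalone-deriving head is more specific than the instance LHS;
    -- the lookup still finds the F [a] instance and instantiates it:
    deriving instance (Eq a, Eq b) => Eq (F [(a, b)])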
Note [Deriving, type families, and partial applications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When there are no type families, it's quite easy:
newtype S a = MkS [a]
-- :CoS :: S ~ [] -- Eta-reduced
instance Eq [a] => Eq (S a) -- by coercion sym (Eq (:CoS a)) : Eq [a] ~ Eq (S a)
instance Monad [] => Monad S -- by coercion sym (Monad :CoS) : Monad [] ~ Monad S
When type families are involved it's trickier:
data family T a b
newtype instance T Int a = MkT [a] deriving( Eq, Monad )
-- :RT is the representation type for (T Int a)
-- :Co:RT :: :RT ~ [] -- Eta-reduced!
-- :CoF:RT a :: T Int a ~ :RT a -- Also eta-reduced!
instance Eq [a] => Eq (T Int a) -- easy by coercion
-- d1 :: Eq [a]
-- d2 :: Eq (T Int a) = d1 |> Eq (sym (:Co:RT a ; :coF:RT a))
instance Monad [] => Monad (T Int) -- only if we can eta reduce???
-- d1 :: Monad []
-- d2 :: Monad (T Int) = d1 |> Monad (sym (:Co:RT ; :coF:RT))
Note the need for the eta-reduced rule axioms. After all, we can
write it out
instance Monad [] => Monad (T Int) -- only if we can eta reduce???
return x = MkT [x]
... etc ...
See Note [Eta reduction for data families] in FamInstEnv
%************************************************************************
%* *
Deriving data types
* *
************************************************************************
-}
mkDataTypeEqn :: DynFlags
-> Maybe OverlapMode
-> [TyVar] -- Universally quantified type variables in the instance
-> Class -- Class for which we need to derive an instance
-> [Type] -- Other parameters to the class except the last
-> TyCon -- Type constructor for which the instance is requested
-- (last parameter to the type class)
-> [Type] -- Parameters to the type constructor
-> TyCon -- rep of the above (for type families)
-> [Type] -- rep of the above
-> DerivContext -- Context of the instance, for standalone deriving
-> TcRn EarlyDerivSpec -- Fails with failWithTc on error
mkDataTypeEqn dflags overlap_mode tvs cls cls_tys
tycon tc_args rep_tc rep_tc_args mtheta
= case checkSideConditions dflags mtheta cls cls_tys rep_tc rep_tc_args of
-- NB: pass the *representation* tycon to checkSideConditions
NonDerivableClass msg -> bale_out (nonStdErr cls $$ msg)
DerivableClassError msg -> bale_out msg
CanDerive -> go_for_it
DerivableViaInstance -> go_for_it
where
go_for_it = mk_data_eqn overlap_mode tvs cls cls_tys tycon tc_args rep_tc rep_tc_args mtheta
bale_out msg = failWithTc (derivingThingErr False cls cls_tys (mkTyConApp tycon tc_args) msg)
mk_data_eqn :: Maybe OverlapMode -> [TyVar] -> Class -> [Type]
-> TyCon -> [TcType] -> TyCon -> [TcType] -> DerivContext
-> TcM EarlyDerivSpec
mk_data_eqn overlap_mode tvs cls cls_tys tycon tc_args rep_tc rep_tc_args mtheta
= do loc <- getSrcSpanM
dfun_name <- newDFunName' cls tycon
case mtheta of
Nothing -> do --Infer context
inferred_constraints <- inferConstraints cls cls_tys inst_ty rep_tc rep_tc_args
return $ InferTheta $ DS
{ ds_loc = loc
, ds_name = dfun_name, ds_tvs = tvs
, ds_cls = cls, ds_tys = inst_tys
, ds_tc = rep_tc
, ds_theta = inferred_constraints
, ds_overlap = overlap_mode
, ds_newtype = Nothing }
Just theta -> do -- Specified context
return $ GivenTheta $ DS
{ ds_loc = loc
, ds_name = dfun_name, ds_tvs = tvs
, ds_cls = cls, ds_tys = inst_tys
, ds_tc = rep_tc
, ds_theta = theta
, ds_overlap = overlap_mode
, ds_newtype = Nothing }
where
inst_ty = mkTyConApp tycon tc_args
inst_tys = cls_tys ++ [inst_ty]
----------------------
inferConstraints :: Class -> [TcType] -> TcType
-> TyCon -> [TcType]
-> TcM ThetaOrigin
-- inferConstraints figures out the constraints needed for the
-- instance declaration generated by a 'deriving' clause on a
-- data type declaration.
-- See Note [Inferring the instance context]
-- e.g. inferConstraints
-- C Int (T [a]) -- Class and inst_tys
-- :RTList a -- Rep tycon and its arg tys
-- where T [a] ~R :RTList a
--
-- Generate a sufficiently large set of constraints that typechecking the
-- generated method definitions should succeed. This set will be simplified
-- before being used in the instance declaration
inferConstraints main_cls cls_tys inst_ty rep_tc rep_tc_args
| main_cls `hasKey` genClassKey -- Generic constraints are easy
= return []
| main_cls `hasKey` gen1ClassKey -- Gen1 needs Functor
= ASSERT( length rep_tc_tvs > 0 ) -- See Note [Getting base classes]
ASSERT( null cls_tys )
do { functorClass <- tcLookupClass functorClassName
; return (con_arg_constraints (get_gen1_constraints functorClass)) }
| otherwise -- The others are a bit more complicated
= ASSERT2( equalLength rep_tc_tvs all_rep_tc_args
, ppr main_cls <+> ppr rep_tc
$$ ppr rep_tc_tvs $$ ppr all_rep_tc_args )
do { traceTc "inferConstraints" (vcat [ppr main_cls <+> ppr inst_tys, ppr arg_constraints])
; return (stupid_constraints ++ extra_constraints
++ sc_constraints
++ arg_constraints) }
where
tc_binders = tyConBinders rep_tc
choose_level bndr
| isNamedBinder bndr = KindLevel
| otherwise = TypeLevel
t_or_ks = map choose_level tc_binders ++ repeat TypeLevel
-- want to report *kind* errors when possible
arg_constraints = con_arg_constraints get_std_constrained_tys
-- Constraints arising from the arguments of each constructor
con_arg_constraints :: (CtOrigin -> TypeOrKind -> Type -> [PredOrigin])
-> [PredOrigin]
con_arg_constraints get_arg_constraints
= [ pred
| data_con <- tyConDataCons rep_tc
, (arg_n, arg_t_or_k, arg_ty)
<- zip3 [1..] t_or_ks $
dataConInstOrigArgTys data_con all_rep_tc_args
, not (isUnliftedType arg_ty)
, let orig = DerivOriginDC data_con arg_n
, pred <- get_arg_constraints orig arg_t_or_k arg_ty ]
-- No constraints for unlifted types
-- See Note [Deriving and unboxed types]
-- is_functor_like: see Note [Inferring the instance context]
is_functor_like = typeKind inst_ty `tcEqKind` typeToTypeKind
get_gen1_constraints functor_cls orig t_or_k ty
= mk_functor_like_constraints orig t_or_k functor_cls $
get_gen1_constrained_tys last_tv ty
get_std_constrained_tys :: CtOrigin -> TypeOrKind -> Type -> [PredOrigin]
get_std_constrained_tys orig t_or_k ty
| is_functor_like = mk_functor_like_constraints orig t_or_k main_cls $
deepSubtypesContaining last_tv ty
| otherwise = [mk_cls_pred orig t_or_k main_cls ty]
mk_functor_like_constraints :: CtOrigin -> TypeOrKind
-> Class -> [Type] -> [PredOrigin]
-- 'cls' is usually main_cls (Functor or Traversable etc), but if
-- main_cls = Generic1, then 'cls' can be Functor; see get_gen1_constraints
--
-- For each type, generate two constraints: (cls ty, kind(ty) ~ (*->*))
-- The second constraint checks that the first is well-kinded.
-- Lacking that, as Trac #10561 showed, we can just generate an
-- ill-kinded instance.
mk_functor_like_constraints orig t_or_k cls tys
= [ pred_o
| ty <- tys
, pred_o <- [ mk_cls_pred orig t_or_k cls ty
, mkPredOrigin orig KindLevel
(mkPrimEqPred (typeKind ty) typeToTypeKind) ] ]
rep_tc_tvs = tyConTyVars rep_tc
last_tv = last rep_tc_tvs
all_rep_tc_args | is_functor_like = rep_tc_args ++ [mkTyVarTy last_tv]
| otherwise = rep_tc_args
-- Constraints arising from superclasses
-- See Note [Superclasses of derived instance]
cls_tvs = classTyVars main_cls
inst_tys = cls_tys ++ [inst_ty]
sc_constraints = ASSERT2( equalLength cls_tvs inst_tys, ppr main_cls <+> ppr rep_tc)
mkThetaOrigin DerivOrigin TypeLevel $
substTheta cls_subst (classSCTheta main_cls)
cls_subst = ASSERT( equalLength cls_tvs inst_tys )
zipTvSubst cls_tvs inst_tys
-- Stupid constraints
stupid_constraints = mkThetaOrigin DerivOrigin TypeLevel $
substTheta tc_subst (tyConStupidTheta rep_tc)
tc_subst = ASSERT( equalLength rep_tc_tvs all_rep_tc_args )
zipTvSubst rep_tc_tvs all_rep_tc_args
-- Extra Data constraints
-- The Data class (only) requires that for
-- instance (...) => Data (T t1 t2)
-- IF t1:*, t2:*
-- THEN (Data t1, Data t2) are among the (...) constraints
-- Reason: when the IF holds, we generate a method
-- dataCast2 f = gcast2 f
-- and we need the Data constraints to typecheck the method
extra_constraints
| main_cls `hasKey` dataClassKey
, all (isLiftedTypeKind . typeKind) rep_tc_args
= [ mk_cls_pred DerivOrigin t_or_k main_cls ty
| (t_or_k, ty) <- zip t_or_ks rep_tc_args]
| otherwise
= []
mk_cls_pred orig t_or_k cls ty -- Don't forget to apply to cls_tys too
-- In the awkward Generic1 case, cls_tys is empty
= mkPredOrigin orig t_or_k (mkClassPred cls (cls_tys ++ [ty]))
{- Note [Getting base classes]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Functor and Typeable are defined in package 'base', and that is not available
when compiling 'ghc-prim'. So we must be careful that 'deriving' for stuff in
ghc-prim does not use Functor or Typeable implicitly via these lookups.
Note [Deriving and unboxed types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We have some special hacks to support things like
data T = MkT Int# deriving ( Show )
Specifically, we use TcGenDeriv.box to box the Int# into an Int
(which we know how to show), and append a '#'. Parentheses are not required
for unboxed values (`MkT -3#` is a valid expression).
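Viewed from the user's side, a minimal sketch looks like this (module name is
ours; the output shown is approximate):

    {-# LANGUAGE MagicHash #-}
    module UnboxedShowDemo where

    import GHC.Exts (Int#)

    data T = MkT Int# deriving Show

    -- show (MkT 3#) yields something like "MkT 3#": the Int# is boxed to an
    -- Int for showing, and a '#' is appended to the rendered literal.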
Note [Deriving any class]
~~~~~~~~~~~~~~~~~~~~~~~~~
Classic uses of a deriving clause, or a standalone-deriving declaration, are
for:
* a built-in class like Eq or Show, for which GHC knows how to generate
the instance code
* a newtype, via the mechanism enabled by GeneralizedNewtypeDeriving
The DeriveAnyClass extension adds a third way to derive instances, based on
empty instance declarations.
The canonical use case is in combination with GHC.Generics and default method
signatures. These allow instance declarations to be empty, yet still
useful, e.g.
data T a = ...blah..blah... deriving( Generic )
instance C a => C (T a) -- No 'where' clause
where C is some "random" user-defined class.
This boilerplate code can be replaced by the more compact
data T a = ...blah..blah... deriving( Generic, C )
if DeriveAnyClass is enabled.
This is not restricted to Generics; any class can be derived, simply giving
rise to an empty instance.
Unfortunately, it is not clear how to determine the context (in case of
standard deriving; in standalone deriving, the user provides the context).
GHC uses the same heuristic for figuring out the class context that it uses for
Eq in the case of *-kinded classes, and for Functor in the case of
* -> *-kinded classes. That heuristic may not be optimal, and may even be wrong. But in such
cases, standalone deriving can still be used.
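For concreteness, here is a tiny self-contained example (class and module
names are invented) where DeriveAnyClass is useful even without GHC.Generics,
because the class has an ordinary default method:

    {-# LANGUAGE DeriveAnyClass #-}
    module AnyClassDemo where

    class Greet a where
      greet :: a -> String
      greet _ = "hello"            -- default method, so an empty instance works

    data T = MkT deriving Greet    -- generates:  instance Greet T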
-}
------------------------------------------------------------------
-- Check side conditions that dis-allow derivability for particular classes
-- This is *apart* from the newtype-deriving mechanism
--
-- Here we get the representation tycon in case of family instances as it has
-- the data constructors - but we need to be careful to fall back to the
-- family tycon (with indexes) in error messages.
data DerivStatus = CanDerive -- Standard class, can derive
| DerivableClassError SDoc -- Standard class, but can't do it
| DerivableViaInstance -- See Note [Deriving any class]
| NonDerivableClass SDoc -- Non-standard class
-- A "standard" class is one defined in the Haskell report which GHC knows how
-- to generate code for, such as Eq, Ord, Ix, etc.
checkSideConditions :: DynFlags -> DerivContext -> Class -> [TcType]
-> TyCon -> [Type] -- tycon and its parameters
-> DerivStatus
checkSideConditions dflags mtheta cls cls_tys rep_tc rep_tc_args
| Just cond <- sideConditions mtheta cls
= case (cond (dflags, rep_tc, rep_tc_args)) of
NotValid err -> DerivableClassError err -- Class-specific error
IsValid | null cls_tys -> CanDerive -- All derivable classes are unary, so
-- cls_tys (the type args other than last)
-- should be null
| otherwise -> DerivableClassError (classArgsErr cls cls_tys) -- e.g. deriving( Eq s )
| Just err <- canDeriveAnyClass dflags rep_tc cls
= NonDerivableClass err -- DeriveAnyClass does not work
| otherwise
= DerivableViaInstance -- DeriveAnyClass should work
classArgsErr :: Class -> [Type] -> SDoc
classArgsErr cls cls_tys = quotes (ppr (mkClassPred cls cls_tys)) <+> text "is not a class"
nonStdErr :: Class -> SDoc
nonStdErr cls =
quotes (ppr cls)
<+> text "is not a standard derivable class (Eq, Show, etc.)"
sideConditions :: DerivContext -> Class -> Maybe Condition
-- Side conditions for classes that GHC knows about,
-- that is, "deriviable classes"
-- Returns Nothing for a non-derivable class
sideConditions mtheta cls
| cls_key == eqClassKey = Just (cond_std `andCond` cond_args cls)
| cls_key == ordClassKey = Just (cond_std `andCond` cond_args cls)
| cls_key == showClassKey = Just (cond_std `andCond` cond_args cls)
| cls_key == readClassKey = Just (cond_std `andCond` cond_args cls)
| cls_key == enumClassKey = Just (cond_std `andCond` cond_isEnumeration)
| cls_key == ixClassKey = Just (cond_std `andCond` cond_enumOrProduct cls)
| cls_key == boundedClassKey = Just (cond_std `andCond` cond_enumOrProduct cls)
| cls_key == dataClassKey = Just (checkFlag LangExt.DeriveDataTypeable `andCond`
cond_std `andCond`
cond_args cls)
| cls_key == functorClassKey = Just (checkFlag LangExt.DeriveFunctor `andCond`
cond_vanilla `andCond`
cond_functorOK True False)
| cls_key == foldableClassKey = Just (checkFlag LangExt.DeriveFoldable `andCond`
cond_vanilla `andCond`
cond_functorOK False True)
-- Functor/Fold/Trav works ok
-- for rank-n types
| cls_key == traversableClassKey = Just (checkFlag LangExt.DeriveTraversable `andCond`
cond_vanilla `andCond`
cond_functorOK False False)
| cls_key == genClassKey = Just (checkFlag LangExt.DeriveGeneric `andCond`
cond_vanilla `andCond`
cond_RepresentableOk)
| cls_key == gen1ClassKey = Just (checkFlag LangExt.DeriveGeneric `andCond`
cond_vanilla `andCond`
cond_Representable1Ok)
| cls_key == liftClassKey = Just (checkFlag LangExt.DeriveLift `andCond`
cond_vanilla `andCond`
cond_args cls)
| otherwise = Nothing
where
cls_key = getUnique cls
cond_std = cond_stdOK mtheta False -- Vanilla data constructors, at least one,
-- and monotype arguments
cond_vanilla = cond_stdOK mtheta True -- Vanilla data constructors but
-- allow no data cons or polytype arguments
canDeriveAnyClass :: DynFlags -> TyCon -> Class -> Maybe SDoc
-- Nothing: we can (try to) derive it via an empty instance declaration
-- Just s: we can't, reason s
-- Precondition: the class is not one of the standard ones
canDeriveAnyClass dflags _tycon clas
| not (xopt LangExt.DeriveAnyClass dflags)
= Just (text "Try enabling DeriveAnyClass")
| not (any (target_kind `tcEqKind`) [ liftedTypeKind, typeToTypeKind ])
= Just (text "The last argument of class" <+> quotes (ppr clas)
<+> text "does not have kind * or (* -> *)")
| otherwise
= Nothing -- OK!
where
-- We are making an instance (C t1 .. tn (T s1 .. sm))
-- and we can only do so if the kind of C's last argument
-- is * or (* -> *). Because only then can we make a reasonable
-- guess at the instance context
target_kind = tyVarKind (last (classTyVars clas))
typeToTypeKind :: Kind
typeToTypeKind = liftedTypeKind `mkFunTy` liftedTypeKind
type Condition = (DynFlags, TyCon, [Type]) -> Validity
-- DynFlags carries the active language extensions, which some conditions consult
-- TyCon is the *representation* tycon if the data type is an indexed one
-- [Type] are the type arguments to the (representation) TyCon
-- IsValid => OK; NotValid doc => not OK, with 'doc' saying why
orCond :: Condition -> Condition -> Condition
orCond c1 c2 tc
= case (c1 tc, c2 tc) of
(IsValid, _) -> IsValid -- c1 succeeds
(_, IsValid) -> IsValid -- c2 succeeds
(NotValid x, NotValid y) -> NotValid (x $$ text " or" $$ y)
-- Both fail
andCond :: Condition -> Condition -> Condition
andCond c1 c2 tc = c1 tc `andValid` c2 tc
cond_stdOK :: DerivContext -- Says whether this is standalone deriving or not;
-- if standalone, we just say "yes, go for it"
-> Bool -- True <=> permissive: allow higher rank
-- args and no data constructors
-> Condition
cond_stdOK (Just _) _ _
= IsValid -- Don't check these conservative conditions for
-- standalone deriving; just generate the code
-- and let the typechecker handle the result
cond_stdOK Nothing permissive (_, rep_tc, _)
| null data_cons
, not permissive = NotValid (no_cons_why rep_tc $$ suggestion)
| not (null con_whys) = NotValid (vcat con_whys $$ suggestion)
| otherwise = IsValid
where
suggestion = text "Possible fix: use a standalone deriving declaration instead"
data_cons = tyConDataCons rep_tc
con_whys = getInvalids (map check_con data_cons)
check_con :: DataCon -> Validity
check_con con
| not (isVanillaDataCon con)
= NotValid (badCon con (text "has existentials or constraints in its type"))
| not (permissive || all isTauTy (dataConOrigArgTys con))
= NotValid (badCon con (text "has a higher-rank type"))
| otherwise
= IsValid
no_cons_why :: TyCon -> SDoc
no_cons_why rep_tc = quotes (pprSourceTyCon rep_tc) <+>
text "must have at least one data constructor"
cond_RepresentableOk :: Condition
cond_RepresentableOk (dflags, tc, tc_args) = canDoGenerics dflags tc tc_args
cond_Representable1Ok :: Condition
cond_Representable1Ok (dflags, tc, tc_args) = canDoGenerics1 dflags tc tc_args
cond_enumOrProduct :: Class -> Condition
cond_enumOrProduct cls = cond_isEnumeration `orCond`
(cond_isProduct `andCond` cond_args cls)
cond_args :: Class -> Condition
-- For some classes (eg Eq, Ord) we allow unlifted arg types
-- by generating specialised code. For others (eg Data) we don't.
cond_args cls (_, tc, _)
= case bad_args of
[] -> IsValid
(ty:_) -> NotValid (hang (text "Don't know how to derive" <+> quotes (ppr cls))
2 (text "for type" <+> quotes (ppr ty)))
where
bad_args = [ arg_ty | con <- tyConDataCons tc
, arg_ty <- dataConOrigArgTys con
, isUnliftedType arg_ty
, not (ok_ty arg_ty) ]
cls_key = classKey cls
ok_ty arg_ty
| cls_key == eqClassKey = check_in arg_ty ordOpTbl
| cls_key == ordClassKey = check_in arg_ty ordOpTbl
| cls_key == showClassKey = check_in arg_ty boxConTbl
| cls_key == liftClassKey = check_in arg_ty litConTbl
| otherwise = False -- Read, Ix etc
check_in :: Type -> [(Type,a)] -> Bool
check_in arg_ty tbl = any (eqType arg_ty . fst) tbl
cond_isEnumeration :: Condition
cond_isEnumeration (_, rep_tc, _)
| isEnumerationTyCon rep_tc = IsValid
| otherwise = NotValid why
where
why = sep [ quotes (pprSourceTyCon rep_tc) <+>
text "must be an enumeration type"
, text "(an enumeration consists of one or more nullary, non-GADT constructors)" ]
-- See Note [Enumeration types] in TyCon
cond_isProduct :: Condition
cond_isProduct (_, rep_tc, _)
| isProductTyCon rep_tc = IsValid
| otherwise = NotValid why
where
why = quotes (pprSourceTyCon rep_tc) <+>
text "must have precisely one constructor"
cond_functorOK :: Bool -> Bool -> Condition
-- OK for Functor/Foldable/Traversable class
-- Currently: (a) at least one argument
-- (b) don't use argument contravariantly
-- (c) don't use argument in the wrong place, e.g. data T a = T (X a a)
-- (d) optionally: don't use function types
-- (e) no "stupid context" on data type
cond_functorOK allowFunctions allowExQuantifiedLastTyVar (_, rep_tc, _)
| null tc_tvs
= NotValid (text "Data type" <+> quotes (ppr rep_tc)
<+> text "must have some type parameters")
| not (null bad_stupid_theta)
= NotValid (text "Data type" <+> quotes (ppr rep_tc)
<+> text "must not have a class context:" <+> pprTheta bad_stupid_theta)
| otherwise
= allValid (map check_con data_cons)
where
tc_tvs = tyConTyVars rep_tc
Just (_, last_tv) = snocView tc_tvs
bad_stupid_theta = filter is_bad (tyConStupidTheta rep_tc)
is_bad pred = last_tv `elemVarSet` tyCoVarsOfType pred
data_cons = tyConDataCons rep_tc
check_con con = allValid (check_universal con : foldDataConArgs (ft_check con) con)
check_universal :: DataCon -> Validity
check_universal con
| allowExQuantifiedLastTyVar
= IsValid -- See Note [DeriveFoldable with ExistentialQuantification]
-- in TcGenDeriv
| Just tv <- getTyVar_maybe (last (tyConAppArgs (dataConOrigResTy con)))
, tv `elem` dataConUnivTyVars con
, not (tv `elemVarSet` tyCoVarsOfTypes (dataConTheta con))
= IsValid -- See Note [Check that the type variable is truly universal]
| otherwise
= NotValid (badCon con existential)
ft_check :: DataCon -> FFoldType Validity
ft_check con = FT { ft_triv = IsValid, ft_var = IsValid
, ft_co_var = NotValid (badCon con covariant)
, ft_fun = \x y -> if allowFunctions then x `andValid` y
else NotValid (badCon con functions)
, ft_tup = \_ xs -> allValid xs
, ft_ty_app = \_ x -> x
, ft_bad_app = NotValid (badCon con wrong_arg)
, ft_forall = \_ x -> x }
existential = text "must be truly polymorphic in the last argument of the data type"
covariant = text "must not use the type variable in a function argument"
functions = text "must not contain function types"
wrong_arg = text "must use the type variable only as the last argument of a data type"
checkFlag :: LangExt.Extension -> Condition
checkFlag flag (dflags, _, _)
| xopt flag dflags = IsValid
| otherwise = NotValid why
where
why = text "You need " <> text flag_str
<+> text "to derive an instance for this class"
flag_str = case [ flagSpecName f | f <- xFlags , flagSpecFlag f == flag ] of
[s] -> s
other -> pprPanic "checkFlag" (ppr other)
std_class_via_coercible :: Class -> Bool
-- These standard classes can be derived for a newtype
-- using the coercible trick *even without* -XGeneralizedNewtypeDeriving,
-- because doing so gives the same results as generating the boilerplate
std_class_via_coercible clas
= classKey clas `elem` [eqClassKey, ordClassKey, ixClassKey, boundedClassKey]
-- Not Read/Show/Lift because they respect the type
-- Not Enum, because newtypes are never in Enum
non_coercible_class :: Class -> Bool
-- *Never* derive Read, Show, Typeable, Data, Generic, Generic1, Lift
-- by Coercible, even with -XGeneralizedNewtypeDeriving
-- Also, avoid Traversable, as the Coercible-derived instance and the "normal"-derived
-- instance behave differently if there's a non-lawful Applicative out there.
-- Besides, with roles, Coercible-deriving Traversable is ill-roled.
non_coercible_class cls
= classKey cls `elem` ([ readClassKey, showClassKey, dataClassKey
, genClassKey, gen1ClassKey, typeableClassKey
, traversableClassKey, liftClassKey ])
badCon :: DataCon -> SDoc -> SDoc
badCon con msg = text "Constructor" <+> quotes (ppr con) <+> msg
{-
Note [Check that the type variable is truly universal]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For Functor and Traversable instances, we must check that the *last argument*
of the type constructor is truly universally quantified. Example
data T a b where
T1 :: a -> b -> T a b -- Fine! Vanilla H-98
T2 :: b -> c -> T a b -- Fine! Existential c, but we can still map over 'b'
T3 :: b -> T Int b -- Fine! 'a' is instantiated to Int, but 'b' is still polymorphic
T4 :: Ord b => b -> T a b -- No! 'b' is constrained
T5 :: b -> T b b -- No! 'b' is constrained
T6 :: T a (b,b) -- No! 'b' is constrained
Notice that only the first of these constructors is vanilla H-98. We only
need to take care about the last argument (b in this case). See Trac #8678.
Eg. for T1-T3 we can write
fmap f (T1 a b) = T1 a (f b)
fmap f (T2 b c) = T2 (f b) c
fmap f (T3 x) = T3 (f x)
We need not perform these checks for Foldable instances, however, since
functions in Foldable can only consume existentially quantified type variables,
rather than produce them (as is the case in Functor and Traversable functions).
As a result, T can have a derived Foldable instance:
foldr f z (T1 a b) = f b z
foldr f z (T2 b c) = f b z
foldr f z (T3 x) = f x z
foldr f z (T4 x) = f x z
foldr f z (T5 x) = f x z
foldr _ z T6 = z
See Note [DeriveFoldable with ExistentialQuantification] in TcGenDeriv.
Note [Superclasses of derived instance]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In general, a derived instance decl needs the superclasses of the derived
class too. So if we have
data T a = ...deriving( Ord )
then the initial context for Ord (T a) should include Eq (T a). Often this is
redundant; we'll also generate an Ord constraint for each constructor argument,
and that will probably generate enough constraints to make the Eq (T a) constraint
be satisfied too. But not always; consider:
data S a = S
instance Eq (S a)
instance Ord (S a)
data T a = MkT (S a) deriving( Ord )
instance Num a => Eq (T a)
The derived instance for (Ord (T a)) must have a (Num a) constraint!
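A compile-ready version of that example (the instance bodies are invented just
to make the sketch self-contained):

    data S a = S

    instance Eq  (S a) where _ == _      = True
    instance Ord (S a) where compare _ _ = EQ

    data T a = MkT (S a) deriving Ord

    instance Num a => Eq (T a) where
      MkT _ == MkT _ = True

    -- The inferred context picks up (Num a) via the Eq superclass of Ord:
    --     instance Num a => Ord (T a) where ...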
Similarly consider:
data T a = MkT deriving( Data, Typeable )
Here there *is* no argument field, but we must nevertheless generate
a context for the Data instances:
instance Typeable a => Data (T a) where ...
************************************************************************
* *
Deriving newtypes
* *
************************************************************************
-}
mkNewTypeEqn :: DynFlags -> Maybe OverlapMode -> [TyVar] -> Class
-> [Type] -> TyCon -> [Type] -> TyCon -> [Type]
-> DerivContext
-> TcRn EarlyDerivSpec
mkNewTypeEqn dflags overlap_mode tvs
cls cls_tys tycon tc_args rep_tycon rep_tc_args mtheta
-- Want: instance (...) => cls (cls_tys ++ [tycon tc_args]) where ...
| ASSERT( length cls_tys + 1 == classArity cls )
might_derive_via_coercible && ((newtype_deriving && not deriveAnyClass)
|| std_class_via_coercible cls)
= do traceTc "newtype deriving:" (ppr tycon <+> ppr rep_tys <+> ppr all_preds)
dfun_name <- newDFunName' cls tycon
loc <- getSrcSpanM
case mtheta of
Just theta -> return $ GivenTheta $ DS
{ ds_loc = loc
, ds_name = dfun_name, ds_tvs = varSetElemsWellScoped dfun_tvs
, ds_cls = cls, ds_tys = inst_tys
, ds_tc = rep_tycon
, ds_theta = theta
, ds_overlap = overlap_mode
, ds_newtype = Just rep_inst_ty }
Nothing -> return $ InferTheta $ DS
{ ds_loc = loc
, ds_name = dfun_name, ds_tvs = varSetElemsWellScoped dfun_tvs
, ds_cls = cls, ds_tys = inst_tys
, ds_tc = rep_tycon
, ds_theta = all_preds
, ds_overlap = overlap_mode
, ds_newtype = Just rep_inst_ty }
| otherwise
= case checkSideConditions dflags mtheta cls cls_tys rep_tycon rep_tc_args of
-- Error with standard class
DerivableClassError msg
| might_derive_via_coercible -> bale_out (msg $$ suggest_gnd)
| otherwise -> bale_out msg
-- Must use newtype deriving or DeriveAnyClass
NonDerivableClass _msg
-- Too hard, even with newtype deriving
| newtype_deriving -> bale_out cant_derive_err
-- Try newtype deriving!
-- Here we suggest GeneralizedNewtypeDeriving even in cases where it may
-- not be applicable. See Trac #9600.
| otherwise -> bale_out (non_std $$ suggest_gnd)
-- CanDerive/DerivableViaInstance
_ -> do when (newtype_deriving && deriveAnyClass) $
addWarnTc NoReason
(sep [ text "Both DeriveAnyClass and GeneralizedNewtypeDeriving are enabled"
, text "Defaulting to the DeriveAnyClass strategy for instantiating" <+> ppr cls ])
go_for_it
where
newtype_deriving = xopt LangExt.GeneralizedNewtypeDeriving dflags
deriveAnyClass = xopt LangExt.DeriveAnyClass dflags
go_for_it = mk_data_eqn overlap_mode tvs cls cls_tys tycon tc_args
rep_tycon rep_tc_args mtheta
bale_out = bale_out' newtype_deriving
bale_out' b = failWithTc . derivingThingErr b cls cls_tys inst_ty
non_std = nonStdErr cls
suggest_gnd = text "Try GeneralizedNewtypeDeriving for GHC's newtype-deriving extension"
-- Here is the plan for newtype derivings. We see
-- newtype T a1...an = MkT (t ak+1...an) deriving (.., C s1 .. sm, ...)
-- where t is a type,
-- ak+1...an is a suffix of a1..an, all of them tyvars
-- ak+1...an do not occur free in t, nor in the s1..sm
-- (C s1 ... sm) is a *partial application* of class C
-- with the last parameter missing
-- (T a1 .. ak) matches the kind of C's last argument
-- (and hence so does t)
-- The latter kind-check has been done by deriveTyData already,
-- and tc_args are already trimmed
--
-- We generate the instance
-- instance forall ({a1..ak} u fvs(s1..sm)).
-- C s1 .. sm t => C s1 .. sm (T a1...ak)
-- where T a1...ap is the partial application of
-- the LHS of the correct kind and p >= k
--
-- NB: the variables below are:
-- tc_tvs = [a1, ..., an]
-- tyvars_to_keep = [a1, ..., ak]
-- rep_ty = t ak .. an
-- deriv_tvs = fvs(s1..sm) \ tc_tvs
-- tys = [s1, ..., sm]
-- rep_fn' = t
--
-- Running example: newtype T s a = MkT (ST s a) deriving( Monad )
-- We generate the instance
-- instance Monad (ST s) => Monad (T s) where
nt_eta_arity = newTyConEtadArity rep_tycon
-- For newtype T a b = MkT (S a a b), the TyCon machinery already
-- eta-reduces the representation type, so we know that
-- T a ~ S a a
-- That's convenient here, because we may have to apply
-- it to fewer than its original complement of arguments
-- Note [Newtype representation]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Need newTyConRhs (*not* a recursive representation finder)
-- to get the representation type. For example
-- newtype B = MkB Int
-- newtype A = MkA B deriving( Num )
-- We want the Num instance of B, *not* the Num instance of Int,
-- when making the Num instance of A!
rep_inst_ty = newTyConInstRhs rep_tycon rep_tc_args
rep_tys = cls_tys ++ [rep_inst_ty]
rep_pred = mkClassPred cls rep_tys
rep_pred_o = mkPredOrigin DerivOrigin TypeLevel rep_pred
-- rep_pred is the representation dictionary, from where
-- we are going to get all the methods for the newtype
-- dictionary
-- Next we figure out what superclass dictionaries to use
-- See Note [Newtype deriving superclasses] above
cls_tyvars = classTyVars cls
dfun_tvs = tyCoVarsOfTypes inst_tys
inst_ty = mkTyConApp tycon tc_args
inst_tys = cls_tys ++ [inst_ty]
sc_theta = mkThetaOrigin DerivOrigin TypeLevel $
substTheta (zipTvSubst cls_tyvars inst_tys) $
classSCTheta cls
-- Next we collect Coercible constraints between
-- the Class method types, instantiated with the representation and the
-- newtype type; precisely the constraints required for the
-- calls to coerce that we are going to generate.
coercible_constraints =
[ let (Pair t1 t2) = mkCoerceClassMethEqn cls (varSetElemsWellScoped dfun_tvs) inst_tys rep_inst_ty meth
in mkPredOrigin (DerivOriginCoerce meth t1 t2) TypeLevel
(mkReprPrimEqPred t1 t2)
| meth <- classMethods cls ]
-- If there are no tyvars, there's no need
-- to abstract over the dictionaries we need
-- Example: newtype T = MkT Int deriving( C )
-- We get the derived instance
-- instance C T
-- rather than
-- instance C Int => C T
all_preds = rep_pred_o : coercible_constraints ++ sc_theta -- NB: rep_pred comes first
-------------------------------------------------------------------
-- Figuring out whether we can do this newtype-deriving thing
-- See Note [Determining whether newtype-deriving is appropriate]
might_derive_via_coercible
= not (non_coercible_class cls)
&& eta_ok
&& ats_ok
-- && not (isRecursiveTyCon tycon) -- Note [Recursive newtypes]
-- Check that eta reduction is OK
eta_ok = nt_eta_arity <= length rep_tc_args
-- The newtype can be eta-reduced to match the number
-- of type arguments actually supplied
-- newtype T a b = MkT (S [a] b) deriving( Monad )
-- Here the 'b' must be the same in the rep type (S [a] b)
-- And the [a] must not mention 'b'. That's all handled
-- by nt_eta_arity.
ats_ok = null (classATs cls)
-- No associated types for the class, because we don't
-- currently generate type 'instance' decls; and cannot do
-- so for 'data' instance decls
cant_derive_err
= vcat [ ppUnless eta_ok eta_msg
, ppUnless ats_ok ats_msg ]
eta_msg = text "cannot eta-reduce the representation type enough"
ats_msg = text "the class has associated types"
{-
Note [Recursive newtypes]
~~~~~~~~~~~~~~~~~~~~~~~~~
Newtype deriving works fine, even if the newtype is recursive.
e.g. newtype S1 = S1 [T1 ()]
newtype T1 a = T1 (StateT S1 IO a ) deriving( Monad )
Remember, too, that type families are currently (conservatively) given
a recursive flag, so this also allows newtype deriving to work
for type families.
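A version of that example which compiles with a modern GHC (nowadays the
Functor and Applicative instances must be derived alongside Monad; the module
name and import are ours):

    {-# LANGUAGE GeneralizedNewtypeDeriving #-}
    module RecNewtypeDemo where

    import Control.Monad.Trans.State (StateT)

    newtype S1   = S1 [T1 ()]
    newtype T1 a = T1 (StateT S1 IO a)
      deriving (Functor, Applicative, Monad)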
We used to exclude recursive types, because we had a rather simple
minded way of generating the instance decl:
newtype A = MkA [A]
instance Eq [A] => Eq A -- Makes typechecker loop!
But now we require a simple context, so it's ok.
Note [Determining whether newtype-deriving is appropriate]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we see
newtype NT = MkNT Foo
deriving C
we have to decide how to perform the deriving. Do we do newtype deriving,
or do we do normal deriving? In general, we prefer to do newtype deriving
wherever possible. So, we try newtype deriving unless there's a glaring
reason not to.
Note that newtype deriving might fail, even after we commit to it. This
is because the derived instance uses `coerce`, which must satisfy its
`Coercible` constraint. This is different from other deriving scenarios,
where we're sure that the resulting instance will type-check.
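As a concrete sketch of what "uses coerce" means here (names invented; the
real code is generated by gen_Newtype_binds):

    {-# LANGUAGE GeneralizedNewtypeDeriving #-}
    module GNDDemo where

    newtype Age = MkAge Int deriving (Eq, Ord, Num)

    -- The Num instance is morally
    --     instance Num Age where
    --       (+) = coerce ((+) :: Int -> Int -> Int)
    --       ...
    -- and is only accepted if those Coercible constraints can be solved.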
************************************************************************
* *
Finding the fixed point of deriving equations
* *
************************************************************************
Note [Simplifying the instance context]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
data T a b = C1 (Foo a) (Bar b)
| C2 Int (T b a)
| C3 (T a a)
deriving (Eq)
We want to come up with an instance declaration of the form
instance (Ping a, Pong b, ...) => Eq (T a b) where
x == y = ...
It is pretty easy, albeit tedious, to fill in the code "...". The
trick is to figure out what the context for the instance decl is,
namely Ping, Pong and friends.
Let's call the context required for the T instance of class C at types
(a,b, ...)  "C (T a b)".  Thus:
Eq (T a b) = (Ping a, Pong b, ...)
Now we can get a (recursive) equation from the data decl. This part
is done by inferConstraints.
Eq (T a b) = Eq (Foo a) u Eq (Bar b) -- From C1
u Eq (T b a) u Eq Int -- From C2
u Eq (T a a) -- From C3
Foo and Bar may have explicit instances for Eq, in which case we can
just substitute for them. Alternatively, either or both may have
their Eq instances given by deriving clauses, in which case they
form part of the system of equations.
Now all we need do is simplify and solve the equations, iterating to
find the least fixpoint. This is done by simplifyInstanceContexts.
Notice that the order of the arguments can
switch around, as here in the recursive calls to T.
Let's suppose Eq (Foo a) = Eq a, and Eq (Bar b) = Ping b.
We start with:
Eq (T a b) = {} -- The empty set
Next iteration:
Eq (T a b) = Eq (Foo a) u Eq (Bar b) -- From C1
u Eq (T b a) u Eq Int -- From C2
u Eq (T a a) -- From C3
After simplification:
= Eq a u Ping b u {} u {} u {}
= Eq a u Ping b
Next iteration:
Eq (T a b) = Eq (Foo a) u Eq (Bar b) -- From C1
u Eq (T b a) u Eq Int -- From C2
u Eq (T a a) -- From C3
After simplification:
= Eq a u Ping b
u (Eq b u Ping a)
u (Eq a u Ping a)
= Eq a u Ping b u Eq b u Ping a
The next iteration gives the same result, so this is the fixpoint. We
need to make a canonical form of the RHS to ensure convergence. We do
this by simplifying the RHS to a form in which
- the classes constrain only tyvars
- the list is sorted by tyvar (major key) and then class (minor key)
- no duplicates, of course
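Stripped of the real constraint types, the loop below (iterate_deriv) has this
shape (a standalone sketch, not GHC code):

    iterateToFixpoint :: Eq soln => Int -> (soln -> soln) -> soln -> soln
    iterateToFixpoint n step s
      | n > 20    = error "probable loop"   -- cf. iterate_deriv's cut-off
      | s' == s   = s
      | otherwise = iterateToFixpoint (n+1) step s'
      where s' = step s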
-}
simplifyInstanceContexts :: [DerivSpec ThetaOrigin] -> TcM [DerivSpec ThetaType]
-- Used only for deriving clauses (InferTheta)
-- not for standalone deriving
-- See Note [Simplifying the instance context]
simplifyInstanceContexts [] = return []
simplifyInstanceContexts infer_specs
= do { traceTc "simplifyInstanceContexts" $ vcat (map pprDerivSpec infer_specs)
; iterate_deriv 1 initial_solutions }
where
------------------------------------------------------------------
-- The initial solutions for the equations claim that each
-- instance has an empty context; this solution is certainly
-- in canonical form.
initial_solutions :: [ThetaType]
initial_solutions = [ [] | _ <- infer_specs ]
------------------------------------------------------------------
-- iterate_deriv calculates the next batch of solutions,
-- compares it with the current one; finishes if they are the
-- same, otherwise recurses with the new solutions.
-- It fails if any iteration fails
iterate_deriv :: Int -> [ThetaType] -> TcM [DerivSpec ThetaType]
iterate_deriv n current_solns
| n > 20 -- Looks as if we are in an infinite loop
-- This can happen if we have -XUndecidableInstances
-- (See TcSimplify.tcSimplifyDeriv.)
= pprPanic "solveDerivEqns: probable loop"
(vcat (map pprDerivSpec infer_specs) $$ ppr current_solns)
| otherwise
= do { -- Extend the inst info from the explicit instance decls
-- with the current set of solutions, and simplify each RHS
inst_specs <- zipWithM newDerivClsInst current_solns infer_specs
; new_solns <- checkNoErrs $
extendLocalInstEnv inst_specs $
mapM gen_soln infer_specs
; if (current_solns `eqSolution` new_solns) then
return [ spec { ds_theta = soln }
| (spec, soln) <- zip infer_specs current_solns ]
else
iterate_deriv (n+1) new_solns }
eqSolution = eqListBy (eqListBy eqType)
------------------------------------------------------------------
gen_soln :: DerivSpec ThetaOrigin -> TcM ThetaType
gen_soln (DS { ds_loc = loc, ds_tvs = tyvars
, ds_cls = clas, ds_tys = inst_tys, ds_theta = deriv_rhs })
= setSrcSpan loc $
addErrCtxt (derivInstCtxt the_pred) $
do { theta <- simplifyDeriv the_pred tyvars deriv_rhs
-- checkValidInstance tyvars theta clas inst_tys
-- Not necessary; see Note [Exotic derived instance contexts]
; traceTc "TcDeriv" (ppr deriv_rhs $$ ppr theta)
-- Claim: the result instance declaration is guaranteed valid
-- Hence no need to call:
-- checkValidInstance tyvars theta clas inst_tys
; return (sortBy cmpType theta) } -- Canonicalise before returning the solution
where
the_pred = mkClassPred clas inst_tys
------------------------------------------------------------------
newDerivClsInst :: ThetaType -> DerivSpec theta -> TcM ClsInst
newDerivClsInst theta (DS { ds_name = dfun_name, ds_overlap = overlap_mode
, ds_tvs = tvs, ds_cls = clas, ds_tys = tys })
= newClsInst overlap_mode dfun_name tvs theta clas tys
extendLocalInstEnv :: [ClsInst] -> TcM a -> TcM a
-- Add new locally-defined instances; don't bother to check
-- for functional dependency errors -- that'll happen in TcInstDcls
extendLocalInstEnv dfuns thing_inside
= do { env <- getGblEnv
; let inst_env' = extendInstEnvList (tcg_inst_env env) dfuns
env' = env { tcg_inst_env = inst_env' }
; setGblEnv env' thing_inside }
{-
***********************************************************************************
* *
* Simplify derived constraints
* *
***********************************************************************************
-}
simplifyDeriv :: PredType
-> [TyVar]
-> ThetaOrigin -- Wanted
-> TcM ThetaType -- Needed
-- Given instance (wanted) => C inst_ty
-- Simplify 'wanted' as much as possible
-- Fail if not possible
simplifyDeriv pred tvs theta
= do { (skol_subst, tvs_skols) <- tcInstSkolTyVars tvs -- Skolemize
-- The constraint solving machinery
-- expects *TcTyVars* not TyVars.
-- We use *non-overlappable* (vanilla) skolems
-- See Note [Overlap and deriving]
; let skol_set = mkVarSet tvs_skols
skol_info = DerivSkol pred
doc = text "deriving" <+> parens (ppr pred)
mk_ct (PredOrigin t o t_or_k)
= newWanted o (Just t_or_k) (substTy skol_subst t)
; (wanted, tclvl) <- pushTcLevelM (mapM mk_ct theta)
; traceTc "simplifyDeriv" $
vcat [ pprTvBndrs tvs $$ ppr theta $$ ppr wanted, doc ]
; residual_wanted <- simplifyWantedsTcM wanted
-- Result is zonked
; let residual_simple = wc_simple residual_wanted
(good, bad) = partitionBagWith get_good residual_simple
unsolved = residual_wanted { wc_simple = bad }
-- See Note [Exotic derived instance contexts]
get_good :: Ct -> Either PredType Ct
get_good ct | validDerivPred skol_set p
, isWantedCt ct
= Left p
-- NB: residual_wanted may contain unsolved
-- Derived and we stick them into the bad set
-- so that reportUnsolved may decide what to do with them
| otherwise
= Right ct
where p = ctPred ct
; traceTc "simplifyDeriv 2" $
vcat [ ppr tvs_skols, ppr residual_simple, ppr good, ppr bad ]
-- If we are deferring type errors, simply ignore any insoluble
-- constraints. They'll come up again when we typecheck the
-- generated instance declaration
; defer <- goptM Opt_DeferTypeErrors
; (implic, _) <- buildImplicationFor tclvl skol_info tvs_skols [] unsolved
-- The buildImplicationFor is just to bind the skolems,
-- in case they are mentioned in error messages
-- See Trac #11347
; unless defer (reportAllUnsolved (mkImplicWC implic))
; let min_theta = mkMinimalBySCs (bagToList good)
subst_skol = zipTvSubst tvs_skols $ mkTyVarTys tvs
-- The reverse substitution (sigh)
; return (substTheta subst_skol min_theta) }
{-
Note [Overlap and deriving]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider some overlapping instances:
instance Show a => Show [a] where ...
instance Show [Char] where ...
Now a data type with deriving:
data T a = MkT [a] deriving( Show )
We want to get the derived instance
instance Show [a] => Show (T a) where...
and NOT
instance Show a => Show (T a) where...
so that the (Show (T Char)) instance does the Right Thing
It's very like the situation when we're inferring the type
of a function
f x = show [x]
and we want to infer
f :: Show [a] => a -> String
BOTTOM LINE: use vanilla, non-overlappable skolems when inferring
the context for the derived instance.
Hence tcInstSkolTyVars not tcInstSuperSkolTyVars
Note [Exotic derived instance contexts]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In a 'derived' instance declaration, we *infer* the context. It's a
bit unclear what rules we should apply for this; the Haskell report is
silent. Obviously, constraints like (Eq a) are fine, but what about
data T f a = MkT (f a) deriving( Eq )
where we'd get an Eq (f a) constraint. That's probably fine too.
One could go further: consider
data T a b c = MkT (Foo a b c) deriving( Eq )
instance (C Int a, Eq b, Eq c) => Eq (Foo a b c)
Notice that this instance (just) satisfies the Paterson termination
conditions. Then we *could* derive an instance decl like this:
instance (C Int a, Eq b, Eq c) => Eq (T a b c)
even though there is no instance for (C Int a), because there just
*might* be an instance for, say, (C Int Bool) at a site where we
need the equality instance for T.
However, this seems pretty exotic, and it's quite tricky to allow
this, and yet give sensible error messages in the (much more common)
case where we really want that instance decl for C.
So for now we simply require that the derived instance context
should have only type-variable constraints.
Here is another example:
data Fix f = In (f (Fix f)) deriving( Eq )
Here, if we are prepared to allow -XUndecidableInstances we
could derive the instance
instance Eq (f (Fix f)) => Eq (Fix f)
but this is so delicate that I don't think it should happen inside
'deriving'. If you want this, write it yourself!
NB: if you want to lift this condition, make sure you still meet the
termination conditions! If not, the deriving mechanism generates
larger and larger constraints. Example:
data Succ a = S a
data Seq a = Cons a (Seq (Succ a)) | Nil deriving Show
Note the lack of a Show instance for Succ. First we'll generate
instance (Show (Succ a), Show a) => Show (Seq a)
and then
instance (Show (Succ (Succ a)), Show (Succ a), Show a) => Show (Seq a)
and so on. Instead we want to complain of no instance for (Show (Succ a)).
The bottom line
~~~~~~~~~~~~~~~
Allow constraints which consist only of type variables, with no repeats.
************************************************************************
* *
\subsection[TcDeriv-normal-binds]{Bindings for the various classes}
* *
************************************************************************
After all the trouble to figure out the required context for the
derived instance declarations, all that's left is to chug along to
produce them. They will then be shoved into @tcInstDecls2@, which
will do all its usual business.
There are lots of possibilities for code to generate. Here are
various general remarks.
PRINCIPLES:
\begin{itemize}
\item
We want derived instances of @Eq@ and @Ord@ (both very common) to be
``you-couldn't-do-better-by-hand'' efficient.
\item
Deriving @Show@---also pretty common--- should also be reasonably good code.
\item
Deriving for the other classes isn't that common or that big a deal.
\end{itemize}
PRAGMATICS:
\begin{itemize}
\item
Deriving @Ord@ is done mostly with the 1.3 @compare@ method.
\item
Deriving @Eq@ also uses @compare@, if we're deriving @Ord@, too.
\item
We {\em normally} generate code only for the non-defaulted methods;
there are some exceptions for @Eq@ and (especially) @Ord@...
\item
Sometimes we use a @_con2tag_<tycon>@ function, which returns a data
constructor's numeric (@Int#@) tag. These are generated by
@gen_tag_n_con_binds@, and the heuristic for deciding if one of
these is around is given by @hasCon2TagFun@.
The examples under the different sections below will make this
clearer.
\item
Much less often (really just for deriving @Ix@), we use a
@_tag2con_<tycon>@ function. See the examples.
\item
We use the renamer!!! Reason: we're supposed to be
producing @LHsBinds Name@ for the methods, but that means
producing correctly-uniquified code on the fly. This is entirely
possible (the @TcM@ monad has a @UniqueSupply@), but it is painful.
So, instead, we produce @MonoBinds RdrName@ then heave 'em through
the renamer. What a great hack!
\end{itemize}
-}
-- Generate the InstInfo for the required instance paired with the
-- *representation* tycon for that instance,
-- plus any auxiliary bindings required
--
-- Representation tycons differ from the tycon in the instance signature in
-- case of instances for indexed families.
--
genInst :: DerivSpec ThetaType
-> TcM (InstInfo RdrName, BagDerivStuff, Maybe Name)
genInst spec@(DS { ds_tvs = tvs, ds_tc = rep_tycon
, ds_theta = theta, ds_newtype = is_newtype, ds_tys = tys
, ds_name = dfun_name, ds_cls = clas, ds_loc = loc })
| Just rhs_ty <- is_newtype -- See Note [Bindings for Generalised Newtype Deriving]
= do { inst_spec <- newDerivClsInst theta spec
; traceTc "genInst/is_newtype" (vcat [ppr loc, ppr clas, ppr tvs, ppr tys, ppr rhs_ty])
; return ( InstInfo
{ iSpec = inst_spec
, iBinds = InstBindings
{ ib_binds = gen_Newtype_binds loc clas tvs tys rhs_ty
, ib_tyvars = map Var.varName tvs -- Scope over bindings
, ib_pragmas = []
, ib_extensions = [ LangExt.ImpredicativeTypes
, LangExt.RankNTypes ]
, ib_derived = True } }
, emptyBag
, Just $ getName $ head $ tyConDataCons rep_tycon ) }
-- See Note [Newtype deriving and unused constructors]
| otherwise
= do { (meth_binds, deriv_stuff) <- genDerivStuff loc clas
dfun_name rep_tycon
tys tvs
; inst_spec <- newDerivClsInst theta spec
; traceTc "newder" (ppr inst_spec)
; let inst_info = InstInfo { iSpec = inst_spec
, iBinds = InstBindings
{ ib_binds = meth_binds
, ib_tyvars = map Var.varName tvs
, ib_pragmas = []
, ib_extensions = []
, ib_derived = True } }
; return ( inst_info, deriv_stuff, Nothing ) }
-- Generate the bindings needed for a derived class that isn't handled by
-- -XGeneralizedNewtypeDeriving.
genDerivStuff :: SrcSpan -> Class -> Name -> TyCon -> [Type] -> [TyVar]
-> TcM (LHsBinds RdrName, BagDerivStuff)
genDerivStuff loc clas dfun_name tycon inst_tys tyvars
-- Special case for DeriveGeneric
| let ck = classKey clas
, ck `elem` [genClassKey, gen1ClassKey]
= let gk = if ck == genClassKey then Gen0 else Gen1
-- TODO NSF: correctly identify when we're building Both instead of One
in do
(binds, faminst) <- gen_Generic_binds gk tycon (nameModule dfun_name)
return (binds, unitBag (DerivFamInst faminst))
-- Not deriving Generic(1), so we first check if the compiler has built-in
-- support for deriving the class in question.
| otherwise
= do { dflags <- getDynFlags
; fix_env <- getDataConFixityFun tycon
; case hasBuiltinDeriving dflags fix_env clas of
Just gen_fn -> return (gen_fn loc tycon)
Nothing -> genDerivAnyClass dflags }
where
genDerivAnyClass :: DynFlags -> TcM (LHsBinds RdrName, BagDerivStuff)
genDerivAnyClass dflags =
do { -- If there isn't compiler support for deriving the class, our last
-- resort is -XDeriveAnyClass (since -XGeneralizedNewtypeDeriving
-- fell through).
let mini_env = mkVarEnv (classTyVars clas `zip` inst_tys)
mini_subst = mkTvSubst (mkInScopeSet (mkVarSet tyvars)) mini_env
; tyfam_insts <-
ASSERT2( isNothing (canDeriveAnyClass dflags tycon clas)
, ppr "genDerivStuff: bad derived class" <+> ppr clas )
mapM (tcATDefault False loc mini_subst emptyNameSet)
(classATItems clas)
; return ( emptyBag -- No method bindings are needed...
, listToBag (map DerivFamInst (concat tyfam_insts))
-- ...but we may need to generate bindings for associated type
-- family default instances.
-- See Note [DeriveAnyClass and default family instances]
) }
getDataConFixityFun :: TyCon -> TcM (Name -> Fixity)
-- If the TyCon is locally defined, we want the local fixity env;
-- but if it is imported (which happens for standalone deriving)
-- we need to get the fixity env from the interface file
-- c.f. RnEnv.lookupFixity, and Trac #9830
getDataConFixityFun tc
= do { this_mod <- getModule
; if nameIsLocalOrFrom this_mod name
then do { fix_env <- getFixityEnv
; return (lookupFixity fix_env) }
else do { iface <- loadInterfaceForName doc name
-- Should already be loaded!
; return (mi_fix iface . nameOccName) } }
where
name = tyConName tc
doc = text "Data con fixities for" <+> ppr name
{-
Note [Bindings for Generalised Newtype Deriving]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
class Eq a => C a where
f :: a -> a
newtype N a = MkN [a] deriving( C )
instance Eq (N a) where ...
The 'deriving C' clause generates, in effect
instance (C [a], Eq a) => C (N a) where
f = coerce (f :: [a] -> [a])
This generates a cast for each method, but allows the superclasses to
be worked out in the usual way. In this case the superclass (Eq (N
a)) will be solved by the explicit Eq (N a) instance. We do *not*
create the superclasses by casting the superclass dictionaries for the
representation type.
See the paper "Safe zero-cost coercions for Haskell".
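A minimal user-level sketch of what this machinery supports (illustrative
only, not part of this module; it assumes GeneralizedNewtypeDeriving and
FlexibleInstances are enabled):
    class Eq a => C a where
      f :: a -> a
    instance C [Int] where
      f = reverse
    newtype N = MkN [Int] deriving (Eq, C)
The generated (C N) instance behaves as if each method had been written
    f = coerce (f :: [Int] -> [Int])
and its Eq superclass is discharged by the derived (Eq N) instance in the
ordinary way, not by casting the superclass dictionary of (C [Int]).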
Note [DeriveAnyClass and default family instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When a class has an associated type family with a default instance, e.g.:
class C a where
type T a
type T a = Char
then there are a couple of scenarios in which a user would expect T a to
default to Char. One is when an instance declaration for C is given without
an implementation for T:
instance C Int
Another scenario in which this can occur is when the -XDeriveAnyClass extension
is used:
data Example = Example deriving (C, Generic)
In the latter case, we must take care to check if C has any associated type
families with default instances, because -XDeriveAnyClass will never provide
an implementation for them. We "fill in" the default instances using the
tcATDefault function from TcClsDcl (which is also used in TcInstDcls to handle
the empty instance declaration case).
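A small illustrative example of the -XDeriveAnyClass scenario (it assumes
TypeFamilies, DeriveGeneric and DeriveAnyClass, with GHC.Generics imported):
    class C a where
      type T a
      type T a = Char
      describe :: a -> String
      describe _ = "<no description>"
    data Example = Example deriving (Generic, C)
The derived (C Example) instance is empty, so tcATDefault fills in
    type T Example = Char
from the class's default.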
************************************************************************
* *
\subsection[TcDeriv-taggery-Names]{What con2tag/tag2con functions are available?}
* *
************************************************************************
-}
derivingNullaryErr :: MsgDoc
derivingNullaryErr = text "Cannot derive instances for nullary classes"
derivingKindErr :: TyCon -> Class -> [Type] -> Kind -> MsgDoc
derivingKindErr tc cls cls_tys cls_kind
= hang (text "Cannot derive well-kinded instance of form"
<+> quotes (pprClassPred cls cls_tys <+> parens (ppr tc <+> text "...")))
2 (text "Class" <+> quotes (ppr cls)
<+> text "expects an argument of kind" <+> quotes (pprKind cls_kind))
derivingEtaErr :: Class -> [Type] -> Type -> MsgDoc
derivingEtaErr cls cls_tys inst_ty
= sep [text "Cannot eta-reduce to an instance of form",
nest 2 (text "instance (...) =>"
<+> pprClassPred cls (cls_tys ++ [inst_ty]))]
derivingThingErr :: Bool -> Class -> [Type] -> Type -> MsgDoc -> MsgDoc
derivingThingErr newtype_deriving clas tys ty why
= sep [(hang (text "Can't make a derived instance of")
2 (quotes (ppr pred))
$$ nest 2 extra) <> colon,
nest 2 why]
where
extra | newtype_deriving = text "(even with cunning GeneralizedNewtypeDeriving)"
| otherwise = Outputable.empty
pred = mkClassPred clas (tys ++ [ty])
derivingHiddenErr :: TyCon -> SDoc
derivingHiddenErr tc
= hang (text "The data constructors of" <+> quotes (ppr tc) <+> ptext (sLit "are not all in scope"))
2 (text "so you cannot derive an instance for it")
standaloneCtxt :: LHsSigType Name -> SDoc
standaloneCtxt ty = hang (text "In the stand-alone deriving instance for")
2 (quotes (ppr ty))
derivInstCtxt :: PredType -> MsgDoc
derivInstCtxt pred
= text "When deriving the instance for" <+> parens (ppr pred)
| mcschroeder/ghc | compiler/typecheck/TcDeriv.hs | bsd-3-clause | 96,356 | 50 | 18 | 29,216 | 12,270 | 6,465 | 5,805 | -1 | -1 |
module Test where
import Data.List
main :: IO ()
main = return ()
f::(Ord b, Show b, Eq a) => b -> a
f = undefined
| sebastiaanvisser/ghc-goals | tests/Test.hs | bsd-3-clause | 120 | 0 | 6 | 31 | 64 | 35 | 29 | 6 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
TcSplice: Template Haskell splices
-}
{-# LANGUAGE CPP, FlexibleInstances, MagicHash, ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module TcSplice(
-- These functions are defined in stage1 and stage2
-- They raise civilised errors in stage1
tcSpliceExpr, tcTypedBracket, tcUntypedBracket,
-- runQuasiQuoteExpr, runQuasiQuotePat,
-- runQuasiQuoteDecl, runQuasiQuoteType,
runAnnotation,
#ifdef GHCI
-- These ones are defined only in stage2, and are
-- called only in stage2 (ie GHCI is on)
runMetaE, runMetaP, runMetaT, runMetaD, runQuasi,
tcTopSpliceExpr, lookupThName_maybe,
defaultRunMeta, runMeta'
#endif
) where
#include "HsVersions.h"
import HsSyn
import Annotations
import Name
import TcRnMonad
import TcType
#ifdef GHCI
import HscMain
-- These imports are the reason that TcSplice
-- is very high up the module hierarchy
import RnSplice( traceSplice, SpliceInfo(..) )
import RdrName
import HscTypes
import Convert
import RnExpr
import RnEnv
import RnTypes
import TcExpr
import TcHsSyn
import TcSimplify
import TcUnify
import Type
import Kind
import NameSet
import TcEnv
import TcMType
import TcHsType
import TcIface
import TypeRep
import FamInst
import FamInstEnv
import InstEnv
import NameEnv
import PrelNames
import OccName
import Hooks
import Var
import Module
import LoadIface
import Class
import Inst
import TyCon
import CoAxiom
import PatSyn ( patSynName )
import ConLike
import DataCon
import TcEvidence( TcEvBinds(..) )
import Id
import IdInfo
import DsExpr
import DsMonad
import Serialized
import ErrUtils
import SrcLoc
import Util
import Data.List ( mapAccumL )
import Unique
import VarSet ( isEmptyVarSet )
import Data.Maybe
import BasicTypes hiding( SuccessFlag(..) )
import Maybes( MaybeErr(..) )
import DynFlags
import Panic
import Lexeme
import FastString
import Outputable
import DsMeta
import qualified Language.Haskell.TH as TH
-- THSyntax gives access to internal functions and data types
import qualified Language.Haskell.TH.Syntax as TH
-- Because GHC.Desugar might not be in the base library of the bootstrapping compiler
import GHC.Desugar ( AnnotationWrapper(..) )
import qualified Data.Map as Map
import Data.Dynamic ( fromDynamic, toDyn )
import Data.Typeable ( typeOf )
import Data.Data (Data)
import GHC.Exts ( unsafeCoerce# )
#endif
{-
************************************************************************
* *
\subsection{Main interface + stubs for the non-GHCI case
* *
************************************************************************
-}
tcTypedBracket :: HsBracket Name -> TcRhoType -> TcM (HsExpr TcId)
tcUntypedBracket :: HsBracket Name -> [PendingRnSplice] -> TcRhoType -> TcM (HsExpr TcId)
tcSpliceExpr :: HsSplice Name -> TcRhoType -> TcM (HsExpr TcId)
-- None of these functions add constraints to the LIE
-- runQuasiQuoteExpr :: HsQuasiQuote RdrName -> RnM (LHsExpr RdrName)
-- runQuasiQuotePat :: HsQuasiQuote RdrName -> RnM (LPat RdrName)
-- runQuasiQuoteType :: HsQuasiQuote RdrName -> RnM (LHsType RdrName)
-- runQuasiQuoteDecl :: HsQuasiQuote RdrName -> RnM [LHsDecl RdrName]
runAnnotation :: CoreAnnTarget -> LHsExpr Name -> TcM Annotation
#ifndef GHCI
tcTypedBracket x _ = failTH x "Template Haskell bracket"
tcUntypedBracket x _ _ = failTH x "Template Haskell bracket"
tcSpliceExpr e _ = failTH e "Template Haskell splice"
-- runQuasiQuoteExpr q = failTH q "quasiquote"
-- runQuasiQuotePat q = failTH q "pattern quasiquote"
-- runQuasiQuoteType q = failTH q "type quasiquote"
-- runQuasiQuoteDecl q = failTH q "declaration quasiquote"
runAnnotation _ q = failTH q "annotation"
#else
-- The whole of the rest of the file is the else-branch (ie stage2 only)
{-
Note [How top-level splices are handled]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Top-level splices (those not inside a [| .. |] quotation bracket) are handled
very straightforwardly:
1. tcTopSpliceExpr: typecheck the body e of the splice $(e)
2. runMetaT: desugar, compile, run it, and convert result back to
HsSyn RdrName (of the appropriate flavour, eg HsType RdrName,
HsExpr RdrName etc)
3. treat the result as if that's what you saw in the first place
e.g for HsType, rename and kind-check
for HsExpr, rename and type-check
(The last step is different for decls, because they can *only* be
top-level: we return the result of step 2.)
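A concrete (illustrative) example of those three steps, assuming
TemplateHaskell is enabled and the quoted definition lives in an
already-compiled module:
    -- module Defs (hypothetical)
    mkId :: Q Exp
    mkId = [| \x -> x |]
    -- a client module
    f :: Int -> Int
    f = $(mkId)
Step 1 typechecks (mkId) against (Q Exp); step 2 compiles and runs it,
converting the resulting TH.Exp back into an HsExpr RdrName; step 3 then
renames and typechecks that expression exactly as if the user had written
(\x -> x) in place of the splice.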
Note [How brackets and nested splices are handled]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Nested splices (those inside a [| .. |] quotation bracket),
are treated quite differently.
Remember, there are two forms of bracket
typed [|| e ||]
and untyped [| e |]
The life cycle of a typed bracket:
* Starts as HsBracket
* When renaming:
* Set the ThStage to (Brack s RnPendingTyped)
* Rename the body
* Result is still a HsBracket
* When typechecking:
* Set the ThStage to (Brack s (TcPending ps_var lie_var))
* Typecheck the body, and throw away the elaborated result
* Nested splices (which must be typed) are typechecked, and
the results accumulated in ps_var; their constraints
accumulate in lie_var
* Result is a HsTcBracketOut rn_brack pending_splices
where rn_brack is the incoming renamed bracket
The life cycle of an untyped bracket:
* Starts as HsBracket
* When renaming:
* Set the ThStage to (Brack s (RnPendingUntyped ps_var))
* Rename the body
* Nested splices (which must be untyped) are renamed, and the
results accumulated in ps_var
* Result is still (HsRnBracketOut rn_body pending_splices)
* When typechecking a HsRnBracketOut
* Typecheck the pending_splices individually
* Ignore the body of the bracket; just check that the context
expects a bracket of that type (e.g. a [p| pat |] bracket should
be in a context needing a (Q Pat))
* Result is a HsTcBracketOut rn_brack pending_splices
where rn_brack is the incoming renamed bracket
In both cases, desugaring happens like this:
* HsTcBracketOut is desugared by DsMeta.dsBracket. It
a) Extends the ds_meta environment with the PendingSplices
attached to the bracket
b) Converts the quoted (HsExpr Name) to a CoreExpr that, when
run, will produce a suitable TH expression/type/decl. This
is why we leave the *renamed* expression attached to the bracket:
the quoted expression should not be decorated with all the goop
added by the type checker
* Each splice carries a unique Name, called a "splice point", thus
${n}(e). The name is initialised to an (Unqual "splice") when the
splice is created; the renamer gives it a unique.
* When DsMeta (used to desugar the body of the bracket) comes across
a splice, it looks up the splice's Name, n, in the ds_meta envt,
to find an (HsExpr Id) that should be substituted for the splice;
it just desugars it to get a CoreExpr (DsMeta.repSplice).
Example:
Source: f = [| Just $(g 3) |]
The [| |] part is a HsBracket
Typechecked: f = [| Just ${s7}(g 3) |]{s7 = g Int 3}
The [| |] part is a HsBracketOut, containing *renamed*
(not typechecked) expression
The "s7" is the "splice point"; the (g Int 3) part
is a typechecked expression
Desugared: f = do { s7 <- g Int 3
; return (ConE "Data.Maybe.Just" s7) }
Note [Template Haskell state diagram]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Here are the ThStages, s, their corresponding level numbers
(the result of (thLevel s)), and their state transitions.
The top level of the program is stage Comp:
Start here
|
V
----------- $ ------------ $
| Comp | ---------> | Splice | -----|
| 1 | | 0 | <----|
----------- ------------
^ | ^ |
$ | | [||] $ | | [||]
| v | v
-------------- ----------------
| Brack Comp | | Brack Splice |
| 2 | | 1 |
-------------- ----------------
* Normal top-level declarations start in state Comp
(which has level 1).
Annotations start in state Splice, since they are
treated very like a splice (only without a '$')
* Code compiled in state Splice (and only such code)
will be *run at compile time*, with the result replacing
the splice
* The original paper used level -1 instead of 0, etc.
* The original paper did not allow a splice within a
splice, but there is no reason not to. This is the
$ transition in the top right.
Note [Template Haskell levels]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* Imported things are impLevel (= 0)
* However things at level 0 are not *necessarily* imported.
eg $( \b -> ... ) here b is bound at level 0
* In GHCi, variables bound by a previous command are treated
as impLevel, because we have bytecode for them.
* Variables are bound at the "current level"
* The current level starts off at outerLevel (= 1)
* The level is decremented by splicing $(..)
incremented by brackets [| |]
incremented by name-quoting 'f
When a variable is used, we compare
bind: binding level, and
use: current level at usage site
Generally
bind > use Always error (bound later than used)
[| \x -> $(f x) |]
bind = use Always OK (bound same stage as used)
[| \x -> $(f [| x |]) |]
bind < use Inside brackets, it depends
Inside splice, OK
Inside neither, OK
For (bind < use) inside brackets, there are three cases:
- Imported things OK f = [| map |]
- Top-level things OK g = [| f |]
- Non-top-level Only if there is a liftable instance
h = \(x:Int) -> [| x |]
To track top-level-ness we use the ThBindEnv in TcLclEnv
For example:
f = ...
g1 = $(map ...) is OK
g2 = $(f ...) is not OK; because we haven't compiled f yet
************************************************************************
* *
\subsection{Quoting an expression}
* *
************************************************************************
-}
-- See Note [How brackets and nested splices are handled]
-- tcTypedBracket :: HsBracket Name -> TcRhoType -> TcM (HsExpr TcId)
tcTypedBracket brack@(TExpBr expr) res_ty
= addErrCtxt (quotationCtxtDoc brack) $
do { cur_stage <- getStage
; ps_ref <- newMutVar []
; lie_var <- getConstraintVar -- Any constraints arising from nested splices
-- should get thrown into the constraint set
-- from outside the bracket
-- Typecheck expr to make sure it is valid,
-- Throw away the typechecked expression but return its type.
-- We'll typecheck it again when we splice it in somewhere
; (_tc_expr, expr_ty) <- setStage (Brack cur_stage (TcPending ps_ref lie_var)) $
tcInferRhoNC expr
-- NC for no context; tcBracket does that
; meta_ty <- tcTExpTy expr_ty
; co <- unifyType meta_ty res_ty
; ps' <- readMutVar ps_ref
; texpco <- tcLookupId unsafeTExpCoerceName
; return (mkHsWrapCo co (unLoc (mkHsApp (nlHsTyApp texpco [expr_ty])
(noLoc (HsTcBracketOut brack ps'))))) }
tcTypedBracket other_brack _
= pprPanic "tcTypedBracket" (ppr other_brack)
-- tcUntypedBracket :: HsBracket Name -> [PendingRnSplice] -> TcRhoType -> TcM (HsExpr TcId)
tcUntypedBracket brack ps res_ty
= do { traceTc "tc_bracket untyped" (ppr brack $$ ppr ps)
; ps' <- mapM tcPendingSplice ps
; meta_ty <- tcBrackTy brack
; co <- unifyType meta_ty res_ty
; traceTc "tc_bracket done untyped" (ppr meta_ty)
; return (mkHsWrapCo co (HsTcBracketOut brack ps')) }
---------------
tcBrackTy :: HsBracket Name -> TcM TcType
tcBrackTy (VarBr _ _) = tcMetaTy nameTyConName -- Result type is Var (not Q-monadic)
tcBrackTy (ExpBr _) = tcMetaTy expQTyConName -- Result type is ExpQ (= Q Exp)
tcBrackTy (TypBr _) = tcMetaTy typeQTyConName -- Result type is Type (= Q Typ)
tcBrackTy (DecBrG _) = tcMetaTy decsQTyConName -- Result type is Q [Dec]
tcBrackTy (PatBr _) = tcMetaTy patQTyConName -- Result type is PatQ (= Q Pat)
tcBrackTy (DecBrL _) = panic "tcBrackTy: Unexpected DecBrL"
tcBrackTy (TExpBr _) = panic "tcUntypedBracket: Unexpected TExpBr"
---------------
tcPendingSplice :: PendingRnSplice -> TcM PendingTcSplice
tcPendingSplice (PendingRnSplice flavour splice_name expr)
= do { res_ty <- tcMetaTy meta_ty_name
; expr' <- tcMonoExpr expr res_ty
; return (PendingTcSplice splice_name expr') }
where
meta_ty_name = case flavour of
UntypedExpSplice -> expQTyConName
UntypedPatSplice -> patQTyConName
UntypedTypeSplice -> typeQTyConName
UntypedDeclSplice -> decsQTyConName
---------------
-- Takes a type tau and returns the type Q (TExp tau)
tcTExpTy :: TcType -> TcM TcType
tcTExpTy tau
= do { q <- tcLookupTyCon qTyConName
; texp <- tcLookupTyCon tExpTyConName
; return (mkTyConApp q [mkTyConApp texp [tau]]) }
{-
************************************************************************
* *
\subsection{Splicing an expression}
* *
************************************************************************
-}
tcSpliceExpr splice@(HsTypedSplice name expr) res_ty
= addErrCtxt (spliceCtxtDoc splice) $
setSrcSpan (getLoc expr) $ do
{ stage <- getStage
; case stage of
Splice {} -> tcTopSplice expr res_ty
Comp -> tcTopSplice expr res_ty
Brack pop_stage pend -> tcNestedSplice pop_stage pend name expr res_ty }
tcSpliceExpr splice _
= pprPanic "tcSpliceExpr" (ppr splice)
tcNestedSplice :: ThStage -> PendingStuff -> Name
-> LHsExpr Name -> TcRhoType -> TcM (HsExpr Id)
-- See Note [How brackets and nested splices are handled]
-- A splice inside brackets
tcNestedSplice pop_stage (TcPending ps_var lie_var) splice_name expr res_ty
= do { meta_exp_ty <- tcTExpTy res_ty
; expr' <- setStage pop_stage $
setConstraintVar lie_var $
tcMonoExpr expr meta_exp_ty
; untypeq <- tcLookupId unTypeQName
; let expr'' = mkHsApp (nlHsTyApp untypeq [res_ty]) expr'
; ps <- readMutVar ps_var
; writeMutVar ps_var (PendingTcSplice splice_name expr'' : ps)
-- The returned expression is ignored; it's in the pending splices
; return (panic "tcSpliceExpr") }
tcNestedSplice _ _ splice_name _ _
= pprPanic "tcNestedSplice: rename stage found" (ppr splice_name)
tcTopSplice :: LHsExpr Name -> TcRhoType -> TcM (HsExpr Id)
tcTopSplice expr res_ty
= do { -- Typecheck the expression,
-- making sure it has type Q (T res_ty)
meta_exp_ty <- tcTExpTy res_ty
; zonked_q_expr <- tcTopSpliceExpr True $
tcMonoExpr expr meta_exp_ty
-- Run the expression
; expr2 <- runMetaE zonked_q_expr
; traceSplice (SpliceInfo { spliceDescription = "expression"
, spliceIsDecl = False
, spliceSource = Just expr
, spliceGenerated = ppr expr2 })
-- Rename and typecheck the spliced-in expression,
-- making sure it has type res_ty
-- These steps should never fail; this is a *typed* splice
; addErrCtxt (spliceResultDoc expr) $ do
{ (exp3, _fvs) <- rnLExpr expr2
; exp4 <- tcMonoExpr exp3 res_ty
; return (unLoc exp4) } }
{-
************************************************************************
* *
\subsection{Error messages}
* *
************************************************************************
-}
quotationCtxtDoc :: HsBracket Name -> SDoc
quotationCtxtDoc br_body
= hang (ptext (sLit "In the Template Haskell quotation"))
2 (ppr br_body)
spliceCtxtDoc :: HsSplice Name -> SDoc
spliceCtxtDoc splice
= hang (ptext (sLit "In the Template Haskell splice"))
2 (pprSplice splice)
spliceResultDoc :: LHsExpr Name -> SDoc
spliceResultDoc expr
= sep [ ptext (sLit "In the result of the splice:")
, nest 2 (char '$' <> pprParendExpr expr)
, ptext (sLit "To see what the splice expanded to, use -ddump-splices")]
-------------------
tcTopSpliceExpr :: Bool -> TcM (LHsExpr Id) -> TcM (LHsExpr Id)
-- Note [How top-level splices are handled]
-- Type check an expression that is the body of a top-level splice
-- (the caller will compile and run it)
-- Note that we set the level to Splice, regardless of the original level,
-- before typechecking the expression. For example:
-- f x = $( ...$(g 3) ... )
-- The recursive call to tcMonoExpr will simply expand the
-- inner escape before dealing with the outer one
tcTopSpliceExpr isTypedSplice tc_action
= checkNoErrs $ -- checkNoErrs: must not try to run the thing
-- if the type checker fails!
unsetGOptM Opt_DeferTypeErrors $
-- Don't defer type errors. Not only are we
-- going to run this code, but we do an unsafe
-- coerce, so we get a seg-fault if, say we
-- splice a type into a place where an expression
-- is expected (Trac #7276)
setStage (Splice isTypedSplice) $
do { -- Typecheck the expression
(expr', lie) <- captureConstraints tc_action
-- Solve the constraints
; const_binds <- simplifyTop lie
-- Zonk it and tie the knot of dictionary bindings
; zonkTopLExpr (mkHsDictLet (EvBinds const_binds) expr') }
{-
************************************************************************
* *
Annotations
* *
************************************************************************
-}
runAnnotation target expr = do
-- Find the classes we want instances for in order to call toAnnotationWrapper
loc <- getSrcSpanM
data_class <- tcLookupClass dataClassName
to_annotation_wrapper_id <- tcLookupId toAnnotationWrapperName
-- Check the instances we require live in another module (we want to execute it..)
-- and check identifiers live in other modules using TH stage checks. tcSimplifyStagedExpr
-- also resolves the LIE constraints to detect e.g. instance ambiguity
zonked_wrapped_expr' <- tcTopSpliceExpr False $
do { (expr', expr_ty) <- tcInferRhoNC expr
-- We manually wrap the typechecked expression in a call to toAnnotationWrapper
-- By instantiating the call >here< it gets registered in the
-- LIE consulted by tcTopSpliceExpr
-- and hence ensures the appropriate dictionary is bound by const_binds
; wrapper <- instCall AnnOrigin [expr_ty] [mkClassPred data_class [expr_ty]]
; let specialised_to_annotation_wrapper_expr
= L loc (HsWrap wrapper (HsVar to_annotation_wrapper_id))
; return (L loc (HsApp specialised_to_annotation_wrapper_expr expr')) }
-- Run the appropriately wrapped expression to get the value of
-- the annotation and its dictionaries. The return value is of
-- type AnnotationWrapper by construction, so this conversion is
-- safe
serialized <- runMetaAW zonked_wrapped_expr'
return Annotation {
ann_target = target,
ann_value = serialized
}
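-- For orientation: this is the machinery behind source annotations such as
-- (illustrative only)
--   {-# ANN f (Just "some metadata" :: Maybe String) #-}
-- The annotation expression is typechecked, wrapped in a call to
-- toAnnotationWrapper, run at compile time, and the resulting value is
-- serialized into the 'Annotation' returned above.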
convertAnnotationWrapper :: AnnotationWrapper -> Either MsgDoc Serialized
convertAnnotationWrapper annotation_wrapper = Right $
case annotation_wrapper of
AnnotationWrapper value | let serialized = toSerialized serializeWithData value ->
-- Got the value and dictionaries: build the serialized value and
-- call it a day. We ensure that we seq the entire serialized value
-- in order that any errors in the user-written code for the
-- annotation are exposed at this point. This is also why we are
-- doing all this stuff inside the context of runMeta: it has the
-- facilities to deal with user error in a meta-level expression
seqSerialized serialized `seq` serialized
{-
************************************************************************
* *
\subsection{Running an expression}
* *
************************************************************************
-}
runQuasi :: TH.Q a -> TcM a
runQuasi act = TH.runQ act
runQResult :: (a -> String) -> (SrcSpan -> a -> b) -> SrcSpan -> TH.Q a -> TcM b
runQResult show_th f expr_span hval
= do { th_result <- TH.runQ hval
; traceTc "Got TH result:" (text (show_th th_result))
; return (f expr_span th_result) }
-----------------
runMeta :: (MetaHook TcM -> LHsExpr Id -> TcM hs_syn)
-> LHsExpr Id
-> TcM hs_syn
runMeta unwrap e
= do { h <- getHooked runMetaHook defaultRunMeta
; unwrap h e }
defaultRunMeta :: MetaHook TcM
defaultRunMeta (MetaE r)
= fmap r . runMeta' True ppr (runQResult TH.pprint convertToHsExpr)
defaultRunMeta (MetaP r)
= fmap r . runMeta' True ppr (runQResult TH.pprint convertToPat)
defaultRunMeta (MetaT r)
= fmap r . runMeta' True ppr (runQResult TH.pprint convertToHsType)
defaultRunMeta (MetaD r)
= fmap r . runMeta' True ppr (runQResult TH.pprint convertToHsDecls)
defaultRunMeta (MetaAW r)
= fmap r . runMeta' False (const empty) (const (return . convertAnnotationWrapper))
-- We turn off showing the code in meta-level exceptions because doing so exposes
-- the toAnnotationWrapper function that we slap around the user's code
----------------
runMetaAW :: LHsExpr Id -- Of type AnnotationWrapper
-> TcM Serialized
runMetaAW = runMeta metaRequestAW
runMetaE :: LHsExpr Id -- Of type (Q Exp)
-> TcM (LHsExpr RdrName)
runMetaE = runMeta metaRequestE
runMetaP :: LHsExpr Id -- Of type (Q Pat)
-> TcM (LPat RdrName)
runMetaP = runMeta metaRequestP
runMetaT :: LHsExpr Id -- Of type (Q Type)
-> TcM (LHsType RdrName)
runMetaT = runMeta metaRequestT
runMetaD :: LHsExpr Id -- Of type Q [Dec]
-> TcM [LHsDecl RdrName]
runMetaD = runMeta metaRequestD
---------------
runMeta' :: Bool -- Whether code should be printed in the exception message
-> (hs_syn -> SDoc) -- how to print the code
-> (SrcSpan -> x -> TcM (Either MsgDoc hs_syn)) -- How to run x
-> LHsExpr Id -- Of type x; typically x = Q TH.Exp, or something like that
-> TcM hs_syn -- Of type t
runMeta' show_code ppr_hs run_and_convert expr
= do { traceTc "About to run" (ppr expr)
; recordThSpliceUse -- seems to be the best place to do this,
-- we catch all kinds of splices and annotations.
-- Check that we've had no errors of any sort so far.
-- For example, if we found an error in an earlier defn f, but
-- recovered giving it type f :: forall a.a, it'd be very dodgy
-- to carry on. Mind you, the staging restrictions mean we won't
-- actually run f, but it still seems wrong. And, more concretely,
-- see Trac #5358 for an example that fell over when trying to
-- reify a function with a "?" kind in it. (These don't occur
-- in type-correct programs.)
; failIfErrsM
-- Desugar
; ds_expr <- initDsTc (dsLExpr expr)
-- Compile and link it; might fail if linking fails
; hsc_env <- getTopEnv
; src_span <- getSrcSpanM
; traceTc "About to run (desugared)" (ppr ds_expr)
; either_hval <- tryM $ liftIO $
HscMain.hscCompileCoreExpr hsc_env src_span ds_expr
; case either_hval of {
Left exn -> fail_with_exn "compile and link" exn ;
Right hval -> do
{ -- Coerce it to Q t, and run it
-- Running might fail if it throws an exception of any kind (hence tryAllM)
-- including, say, a pattern-match exception in the code we are running
--
-- We also do the TH -> HS syntax conversion inside the same
-- exception-catching thing so that if there are any lurking
-- exceptions in the data structure returned by hval, we'll
-- encounter them inside the try
--
-- See Note [Exceptions in TH]
let expr_span = getLoc expr
; either_tval <- tryAllM $
setSrcSpan expr_span $ -- Set the span so that qLocation can
-- see where this splice is
do { mb_result <- run_and_convert expr_span (unsafeCoerce# hval)
; case mb_result of
Left err -> failWithTc err
Right result -> do { traceTc "Got HsSyn result:" (ppr_hs result)
; return $! result } }
; case either_tval of
Right v -> return v
Left se -> case fromException se of
Just IOEnvFailure -> failM -- Error already in Tc monad
_ -> fail_with_exn "run" se -- Exception
}}}
where
-- see Note [Concealed TH exceptions]
fail_with_exn phase exn = do
exn_msg <- liftIO $ Panic.safeShowException exn
let msg = vcat [text "Exception when trying to" <+> text phase <+> text "compile-time code:",
nest 2 (text exn_msg),
if show_code then text "Code:" <+> ppr expr else empty]
failWithTc msg
{-
Note [Exceptions in TH]
~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have something like this
$( f 4 )
where
f :: Int -> Q [Dec]
f n | n>3 = fail "Too many declarations"
| otherwise = ...
The 'fail' is a user-generated failure, and should be displayed as a
perfectly ordinary compiler error message, not a panic or anything
like that. Here's how it's processed:
* 'fail' is the monad fail. The monad instance for Q in TH.Syntax
effectively transforms (fail s) to
qReport True s >> fail
where 'qReport' comes from the Quasi class and fail from its monad
superclass.
* The TcM monad is an instance of Quasi (see TcSplice), and it implements
(qReport True s) by using addErr to add an error message to the bag of errors.
The 'fail' in TcM raises an IOEnvFailure exception
* 'qReport' forces the message to ensure any exception hidden in unevaluated
thunk doesn't get into the bag of errors. Otherwise the following splice
will trigger a panic (Trac #8987):
$(fail undefined)
See also Note [Concealed TH exceptions]
* So, when running a splice, we catch all exceptions; then for
- an IOEnvFailure exception, we assume the error is already
in the error-bag (above)
- other errors, we add an error to the bag
and then fail
Note [Concealed TH exceptions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When displaying the error message contained in an exception originated from TH
code, we need to make sure that the error message itself does not contain an
exception. For example, when executing the following splice:
$( error ("foo " ++ error "bar") )
the message for the outer exception is a thunk which will throw the inner
exception when evaluated.
For this reason, we display the message of a TH exception using the
'safeShowException' function, which recursively catches any exception thrown
when showing an error message.
To call runQ in the Tc monad, we need to make TcM an instance of Quasi:
-}
instance TH.Quasi (IOEnv (Env TcGblEnv TcLclEnv)) where
qNewName s = do { u <- newUnique
; let i = getKey u
; return (TH.mkNameU s i) }
-- 'msg' is forced to ensure exceptions don't escape,
-- see Note [Exceptions in TH]
qReport True msg = seqList msg $ addErr (text msg)
qReport False msg = seqList msg $ addWarn (text msg)
qLocation = do { m <- getModule
; l <- getSrcSpanM
; r <- case l of
UnhelpfulSpan _ -> pprPanic "qLocation: Unhelpful location"
(ppr l)
RealSrcSpan s -> return s
; return (TH.Loc { TH.loc_filename = unpackFS (srcSpanFile r)
, TH.loc_module = moduleNameString (moduleName m)
, TH.loc_package = packageKeyString (modulePackageKey m)
, TH.loc_start = (srcSpanStartLine r, srcSpanStartCol r)
, TH.loc_end = (srcSpanEndLine r, srcSpanEndCol r) }) }
qLookupName = lookupName
qReify = reify
qReifyInstances = reifyInstances
qReifyRoles = reifyRoles
qReifyAnnotations = reifyAnnotations
qReifyModule = reifyModule
-- For qRecover, discard error messages if
-- the recovery action is chosen. Otherwise
-- we'll only fail higher up. c.f. tryTcLIE_
qRecover recover main = do { (msgs, mb_res) <- tryTcErrs main
; case mb_res of
Just val -> do { addMessages msgs -- There might be warnings
; return val }
Nothing -> recover -- Discard all msgs
}
qRunIO io = liftIO io
qAddDependentFile fp = do
ref <- fmap tcg_dependent_files getGblEnv
dep_files <- readTcRef ref
writeTcRef ref (fp:dep_files)
qAddTopDecls thds = do
l <- getSrcSpanM
let either_hval = convertToHsDecls l thds
ds <- case either_hval of
Left exn -> pprPanic "qAddTopDecls: can't convert top-level declarations" exn
Right ds -> return ds
mapM_ (checkTopDecl . unLoc) ds
th_topdecls_var <- fmap tcg_th_topdecls getGblEnv
updTcRef th_topdecls_var (\topds -> ds ++ topds)
where
checkTopDecl :: HsDecl RdrName -> TcM ()
checkTopDecl (ValD binds)
= mapM_ bindName (collectHsBindBinders binds)
checkTopDecl (SigD _)
= return ()
checkTopDecl (ForD (ForeignImport (L _ name) _ _ _))
= bindName name
checkTopDecl _
= addErr $ text "Only function, value, and foreign import declarations may be added with addTopDecl"
bindName :: RdrName -> TcM ()
bindName (Exact n)
= do { th_topnames_var <- fmap tcg_th_topnames getGblEnv
; updTcRef th_topnames_var (\ns -> extendNameSet ns n)
}
bindName name =
addErr $
hang (ptext (sLit "The binder") <+> quotes (ppr name) <+> ptext (sLit "is not a NameU."))
2 (text "Probable cause: you used mkName instead of newName to generate a binding.")
qAddModFinalizer fin = do
th_modfinalizers_var <- fmap tcg_th_modfinalizers getGblEnv
updTcRef th_modfinalizers_var (\fins -> fin:fins)
qGetQ = do
th_state_var <- fmap tcg_th_state getGblEnv
th_state <- readTcRef th_state_var
let x = Map.lookup (typeOf x) th_state >>= fromDynamic
return x
qPutQ x = do
th_state_var <- fmap tcg_th_state getGblEnv
updTcRef th_state_var (\m -> Map.insert (typeOf x) (toDyn x) m)
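-- qGetQ and qPutQ back Template Haskell's getQ/putQ, which let splices in
-- a single module share typed state; a hypothetical use (illustrative only):
--   x = $(do { putQ (1 :: Int); [| () |] })
--   y = $(do { mb <- getQ; lift (maybe 0 id (mb :: Maybe Int)) })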
{-
************************************************************************
* *
Instance Testing
* *
************************************************************************
-}
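-- A typical (illustrative) use of this from Template Haskell client code:
--   showInstancesForInt :: Q [Dec]
--   showInstancesForInt = reifyInstances ''Show [ConT ''Int]
-- which returns the instance declarations whose heads match (Show Int).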
reifyInstances :: TH.Name -> [TH.Type] -> TcM [TH.Dec]
reifyInstances th_nm th_tys
= addErrCtxt (ptext (sLit "In the argument of reifyInstances:")
<+> ppr_th th_nm <+> sep (map ppr_th th_tys)) $
do { loc <- getSrcSpanM
; rdr_ty <- cvt loc (mkThAppTs (TH.ConT th_nm) th_tys)
-- #9262 says to bring vars into scope, like in HsForAllTy case
-- of rnHsTyKi
; let (kvs, tvs) = extractHsTyRdrTyVars rdr_ty
tv_bndrs = userHsTyVarBndrs loc tvs
hs_tvbs = mkHsQTvs tv_bndrs
-- Rename to HsType Name
; ((rn_tvbs, rn_ty), _fvs)
<- bindHsTyVars doc Nothing kvs hs_tvbs $ \ rn_tvbs ->
do { (rn_ty, fvs) <- rnLHsType doc rdr_ty
; return ((rn_tvbs, rn_ty), fvs) }
; (ty, _kind) <- tcHsTyVarBndrs rn_tvbs $ \ _tvs ->
tcLHsType rn_ty
; ty <- zonkTcTypeToType emptyZonkEnv ty
-- Substitute out the meta type variables
-- In particular, the type might have kind
-- variables inside it (Trac #7477)
; traceTc "reifyInstances" (ppr ty $$ ppr (typeKind ty))
; case splitTyConApp_maybe ty of -- This expands any type synonyms
Just (tc, tys) -- See Trac #7910
| Just cls <- tyConClass_maybe tc
-> do { inst_envs <- tcGetInstEnvs
; let (matches, unifies, _) = lookupInstEnv inst_envs cls tys
; traceTc "reifyInstances1" (ppr matches)
; reifyClassInstances cls (map fst matches ++ unifies) }
| isOpenFamilyTyCon tc
-> do { inst_envs <- tcGetFamInstEnvs
; let matches = lookupFamInstEnv inst_envs tc tys
; traceTc "reifyInstances2" (ppr matches)
; reifyFamilyInstances tc (map fim_instance matches) }
_ -> bale_out (hang (ptext (sLit "reifyInstances:") <+> quotes (ppr ty))
2 (ptext (sLit "is not a class constraint or type family application"))) }
where
doc = ClassInstanceCtx
bale_out msg = failWithTc msg
cvt :: SrcSpan -> TH.Type -> TcM (LHsType RdrName)
cvt loc th_ty = case convertToHsType loc th_ty of
Left msg -> failWithTc msg
Right ty -> return ty
{-
************************************************************************
* *
Reification
* *
************************************************************************
-}
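-- lookupName backs Template Haskell's lookupValueName / lookupTypeName,
-- e.g. (illustrative only):
--   $(do { Just n <- lookupValueName "map"; varE n })
-- which elaborates to a reference to whichever 'map' is in scope at the
-- splice site.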
lookupName :: Bool -- True <=> type namespace
-- False <=> value namespace
-> String -> TcM (Maybe TH.Name)
lookupName is_type_name s
= do { lcl_env <- getLocalRdrEnv
; case lookupLocalRdrEnv lcl_env rdr_name of
Just n -> return (Just (reifyName n))
Nothing -> do { mb_nm <- lookupGlobalOccRn_maybe rdr_name
; return (fmap reifyName mb_nm) } }
where
th_name = TH.mkName s -- Parses M.x into a base of 'x' and a module of 'M'
occ_fs :: FastString
occ_fs = mkFastString (TH.nameBase th_name)
occ :: OccName
occ | is_type_name
= if isLexCon occ_fs then mkTcOccFS occ_fs
else mkTyVarOccFS occ_fs
| otherwise
= if isLexCon occ_fs then mkDataOccFS occ_fs
else mkVarOccFS occ_fs
rdr_name = case TH.nameModule th_name of
Nothing -> mkRdrUnqual occ
Just mod -> mkRdrQual (mkModuleName mod) occ
getThing :: TH.Name -> TcM TcTyThing
getThing th_name
= do { name <- lookupThName th_name
; traceIf (text "reify" <+> text (show th_name) <+> brackets (ppr_ns th_name) <+> ppr name)
; tcLookupTh name }
-- ToDo: this tcLookup could fail, which would give a
-- rather unhelpful error message
where
ppr_ns (TH.Name _ (TH.NameG TH.DataName _pkg _mod)) = text "data"
ppr_ns (TH.Name _ (TH.NameG TH.TcClsName _pkg _mod)) = text "tc"
ppr_ns (TH.Name _ (TH.NameG TH.VarName _pkg _mod)) = text "var"
ppr_ns _ = panic "reify/ppr_ns"
reify :: TH.Name -> TcM TH.Info
reify th_name
= do { traceTc "reify 1" (text (TH.showName th_name))
; thing <- getThing th_name
; traceTc "reify 2" (ppr thing)
; reifyThing thing }
lookupThName :: TH.Name -> TcM Name
lookupThName th_name = do
mb_name <- lookupThName_maybe th_name
case mb_name of
Nothing -> failWithTc (notInScope th_name)
Just name -> return name
lookupThName_maybe :: TH.Name -> TcM (Maybe Name)
lookupThName_maybe th_name
= do { names <- mapMaybeM lookup (thRdrNameGuesses th_name)
-- Pick the first that works
-- E.g. reify (mkName "A") will pick the class A in preference to the data constructor A
; return (listToMaybe names) }
where
lookup rdr_name
= do { -- Repeat much of lookupOccRn, because we want
-- to report errors in a TH-relevant way
; rdr_env <- getLocalRdrEnv
; case lookupLocalRdrEnv rdr_env rdr_name of
Just name -> return (Just name)
Nothing -> lookupGlobalOccRn_maybe rdr_name }
tcLookupTh :: Name -> TcM TcTyThing
-- This is a specialised version of TcEnv.tcLookup; specialised mainly in that
-- it gives a reify-related error message on failure, whereas in the normal
-- tcLookup, failure is a bug.
tcLookupTh name
= do { (gbl_env, lcl_env) <- getEnvs
; case lookupNameEnv (tcl_env lcl_env) name of {
Just thing -> return thing;
Nothing ->
case lookupNameEnv (tcg_type_env gbl_env) name of {
Just thing -> return (AGlobal thing);
Nothing ->
if nameIsLocalOrFrom (tcg_mod gbl_env) name
then -- It's defined in this module
failWithTc (notInEnv name)
else
do { mb_thing <- tcLookupImported_maybe name
; case mb_thing of
Succeeded thing -> return (AGlobal thing)
Failed msg -> failWithTc msg
}}}}
notInScope :: TH.Name -> SDoc
notInScope th_name = quotes (text (TH.pprint th_name)) <+>
ptext (sLit "is not in scope at a reify")
-- Ugh! Rather an indirect way to display the name
notInEnv :: Name -> SDoc
notInEnv name = quotes (ppr name) <+>
ptext (sLit "is not in the type environment at a reify")
------------------------------
reifyRoles :: TH.Name -> TcM [TH.Role]
reifyRoles th_name
= do { thing <- getThing th_name
; case thing of
AGlobal (ATyCon tc) -> return (map reify_role (tyConRoles tc))
_ -> failWithTc (ptext (sLit "No roles associated with") <+> (ppr thing))
}
where
reify_role Nominal = TH.NominalR
reify_role Representational = TH.RepresentationalR
reify_role Phantom = TH.PhantomR
------------------------------
reifyThing :: TcTyThing -> TcM TH.Info
-- The only reason this is monadic is for error reporting,
-- which in turn is mainly for the case when TH can't express
-- some random GHC extension
reifyThing (AGlobal (AnId id))
= do { ty <- reifyType (idType id)
; fix <- reifyFixity (idName id)
; let v = reifyName id
; case idDetails id of
ClassOpId cls -> return (TH.ClassOpI v ty (reifyName cls) fix)
_ -> return (TH.VarI v ty Nothing fix)
}
reifyThing (AGlobal (ATyCon tc)) = reifyTyCon tc
reifyThing (AGlobal (AConLike (RealDataCon dc)))
= do { let name = dataConName dc
; ty <- reifyType (idType (dataConWrapId dc))
; fix <- reifyFixity name
; return (TH.DataConI (reifyName name) ty
(reifyName (dataConOrigTyCon dc)) fix)
}
reifyThing (AGlobal (AConLike (PatSynCon ps)))
= noTH (sLit "pattern synonyms") (ppr $ patSynName ps)
reifyThing (ATcId {tct_id = id})
= do { ty1 <- zonkTcType (idType id) -- Make use of all the info we have, even
-- though it may be incomplete
; ty2 <- reifyType ty1
; fix <- reifyFixity (idName id)
; return (TH.VarI (reifyName id) ty2 Nothing fix) }
reifyThing (ATyVar tv tv1)
= do { ty1 <- zonkTcTyVar tv1
; ty2 <- reifyType ty1
; return (TH.TyVarI (reifyName tv) ty2) }
reifyThing thing = pprPanic "reifyThing" (pprTcTyThingCategory thing)
-------------------------------------------
reifyAxBranch :: CoAxBranch -> TcM TH.TySynEqn
reifyAxBranch (CoAxBranch { cab_lhs = args, cab_rhs = rhs })
-- remove kind patterns (#8884)
= do { args' <- mapM reifyType (filter (not . isKind) args)
; rhs' <- reifyType rhs
; return (TH.TySynEqn args' rhs') }
reifyTyCon :: TyCon -> TcM TH.Info
reifyTyCon tc
| Just cls <- tyConClass_maybe tc
= reifyClass cls
| isFunTyCon tc
= return (TH.PrimTyConI (reifyName tc) 2 False)
| isPrimTyCon tc
= return (TH.PrimTyConI (reifyName tc) (tyConArity tc) (isUnLiftedTyCon tc))
| isFamilyTyCon tc
= do { let tvs = tyConTyVars tc
kind = tyConKind tc
-- we need the *result kind* (see #8884)
(kvs, mono_kind) = splitForAllTys kind
-- tyConArity includes *kind* params
(_, res_kind) = splitKindFunTysN (tyConArity tc - length kvs)
mono_kind
; kind' <- fmap Just (reifyKind res_kind)
; tvs' <- reifyTyVars tvs
; flav' <- reifyFamFlavour tc
; case flav' of
{ Left flav -> -- open type/data family
do { fam_envs <- tcGetFamInstEnvs
; instances <- reifyFamilyInstances tc
(familyInstances fam_envs tc)
; return (TH.FamilyI
(TH.FamilyD flav (reifyName tc) tvs' kind')
instances) }
; Right eqns -> -- closed type family
return (TH.FamilyI
(TH.ClosedTypeFamilyD (reifyName tc) tvs' kind' eqns)
[]) } }
| Just (tvs, rhs) <- synTyConDefn_maybe tc -- Vanilla type synonym
= do { rhs' <- reifyType rhs
; tvs' <- reifyTyVars tvs
; return (TH.TyConI
(TH.TySynD (reifyName tc) tvs' rhs'))
}
| otherwise
= do { cxt <- reifyCxt (tyConStupidTheta tc)
; let tvs = tyConTyVars tc
; cons <- mapM (reifyDataCon (mkTyVarTys tvs)) (tyConDataCons tc)
; r_tvs <- reifyTyVars tvs
; let name = reifyName tc
deriv = [] -- Don't know about deriving
decl | isNewTyCon tc = TH.NewtypeD cxt name r_tvs (head cons) deriv
| otherwise = TH.DataD cxt name r_tvs cons deriv
; return (TH.TyConI decl) }
reifyDataCon :: [Type] -> DataCon -> TcM TH.Con
-- For GADTs etc, see Note [Reifying data constructors]
reifyDataCon tys dc
= do { let (tvs, theta, arg_tys, _) = dataConSig dc
subst = mkTopTvSubst (tvs `zip` tys) -- Discard ex_tvs
(subst', ex_tvs') = mapAccumL substTyVarBndr subst (dropList tys tvs)
theta' = substTheta subst' theta
arg_tys' = substTys subst' arg_tys
stricts = map reifyStrict (dataConSrcBangs dc)
fields = dataConFieldLabels dc
name = reifyName dc
; r_arg_tys <- reifyTypes arg_tys'
; let main_con | not (null fields)
= TH.RecC name (zip3 (map reifyName fields) stricts r_arg_tys)
| dataConIsInfix dc
= ASSERT( length arg_tys == 2 )
TH.InfixC (s1,r_a1) name (s2,r_a2)
| otherwise
= TH.NormalC name (stricts `zip` r_arg_tys)
[r_a1, r_a2] = r_arg_tys
[s1, s2] = stricts
; ASSERT( length arg_tys == length stricts )
if null ex_tvs' && null theta then
return main_con
else do
{ cxt <- reifyCxt theta'
; ex_tvs'' <- reifyTyVars ex_tvs'
; return (TH.ForallC ex_tvs'' cxt main_con) } }
------------------------------
reifyClass :: Class -> TcM TH.Info
reifyClass cls
= do { cxt <- reifyCxt theta
; inst_envs <- tcGetInstEnvs
; insts <- reifyClassInstances cls (InstEnv.classInstances inst_envs cls)
; ops <- concatMapM reify_op op_stuff
; tvs' <- reifyTyVars tvs
; let dec = TH.ClassD cxt (reifyName cls) tvs' fds' ops
; return (TH.ClassI dec insts ) }
where
(tvs, fds, theta, _, _, op_stuff) = classExtraBigSig cls
fds' = map reifyFunDep fds
reify_op (op, def_meth)
= do { ty <- reifyType (idType op)
; let nm' = reifyName op
; case def_meth of
GenDefMeth gdm_nm ->
do { gdm_id <- tcLookupId gdm_nm
; gdm_ty <- reifyType (idType gdm_id)
; return [TH.SigD nm' ty, TH.DefaultSigD nm' gdm_ty] }
_ -> return [TH.SigD nm' ty] }
------------------------------
-- | Annotate (with TH.SigT) a type if the first parameter is True
-- and if the type contains a free variable.
-- This is used to annotate type patterns for poly-kinded tyvars in
-- reifying class and type instances. See #8953 and th/T8953.
annotThType :: Bool -- True <=> annotate
-> TypeRep.Type -> TH.Type -> TcM TH.Type
-- tiny optimization: if the type is annotated, don't annotate again.
annotThType _ _ th_ty@(TH.SigT {}) = return th_ty
annotThType True ty th_ty
| not $ isEmptyVarSet $ tyVarsOfType ty
= do { let ki = typeKind ty
; th_ki <- reifyKind ki
; return (TH.SigT th_ty th_ki) }
annotThType _ _ th_ty = return th_ty
-- | For every *type* variable (not *kind* variable) in the input,
-- report whether or not the tv is poly-kinded. This is used to eventually
-- feed into 'annotThType'.
mkIsPolyTvs :: [TyVar] -> [Bool]
mkIsPolyTvs tvs = [ is_poly_tv tv | tv <- tvs
, not (isKindVar tv) ]
where
is_poly_tv tv = not $ isEmptyVarSet $ tyVarsOfType $ tyVarKind tv
------------------------------
reifyClassInstances :: Class -> [ClsInst] -> TcM [TH.Dec]
reifyClassInstances cls insts
= mapM (reifyClassInstance (mkIsPolyTvs tvs)) insts
where
tvs = classTyVars cls
reifyClassInstance :: [Bool] -- True <=> the corresponding tv is poly-kinded
-- this list contains flags only for *type*
-- variables, not *kind* variables
-> ClsInst -> TcM TH.Dec
reifyClassInstance is_poly_tvs i
= do { cxt <- reifyCxt theta
; let types_only = filterOut isKind types
; thtypes <- reifyTypes types_only
; annot_thtypes <- zipWith3M annotThType is_poly_tvs types_only thtypes
; let head_ty = mkThAppTs (TH.ConT (reifyName cls)) annot_thtypes
; return $ (TH.InstanceD cxt head_ty []) }
where
(_tvs, theta, cls, types) = tcSplitDFunTy (idType dfun)
dfun = instanceDFunId i
------------------------------
reifyFamilyInstances :: TyCon -> [FamInst] -> TcM [TH.Dec]
reifyFamilyInstances fam_tc fam_insts
= mapM (reifyFamilyInstance (mkIsPolyTvs fam_tvs)) fam_insts
where
fam_tvs = tyConTyVars fam_tc
reifyFamilyInstance :: [Bool] -- True <=> the corresponding tv is poly-kinded
-- this list contains flags only for *type*
-- variables, not *kind* variables
-> FamInst -> TcM TH.Dec
reifyFamilyInstance is_poly_tvs (FamInst { fi_flavor = flavor
, fi_fam = fam
, fi_tys = lhs
, fi_rhs = rhs })
= case flavor of
SynFamilyInst ->
-- remove kind patterns (#8884)
do { let lhs_types_only = filterOut isKind lhs
; th_lhs <- reifyTypes lhs_types_only
; annot_th_lhs <- zipWith3M annotThType is_poly_tvs lhs_types_only
th_lhs
; th_rhs <- reifyType rhs
; return (TH.TySynInstD (reifyName fam)
(TH.TySynEqn annot_th_lhs th_rhs)) }
DataFamilyInst rep_tc ->
do { let tvs = tyConTyVars rep_tc
fam' = reifyName fam
-- eta-expand lhs types, because sometimes data/newtype
-- instances are eta-reduced; See Trac #9692
-- See Note [Eta reduction for data family axioms]
-- in TcInstDcls
(_rep_tc, rep_tc_args) = splitTyConApp rhs
etad_tyvars = dropList rep_tc_args tvs
eta_expanded_lhs = lhs `chkAppend` mkTyVarTys etad_tyvars
; cons <- mapM (reifyDataCon (mkTyVarTys tvs)) (tyConDataCons rep_tc)
; let types_only = filterOut isKind eta_expanded_lhs
; th_tys <- reifyTypes types_only
; annot_th_tys <- zipWith3M annotThType is_poly_tvs types_only th_tys
; return (if isNewTyCon rep_tc
then TH.NewtypeInstD [] fam' annot_th_tys (head cons) []
else TH.DataInstD [] fam' annot_th_tys cons []) }
------------------------------
reifyType :: TypeRep.Type -> TcM TH.Type
-- Monadic only because of failure
reifyType ty@(ForAllTy _ _) = reify_for_all ty
reifyType (LitTy t) = do { r <- reifyTyLit t; return (TH.LitT r) }
reifyType (TyVarTy tv) = return (TH.VarT (reifyName tv))
reifyType (TyConApp tc tys) = reify_tc_app tc tys -- Do not expand type synonyms here
reifyType (AppTy t1 t2) = do { [r1,r2] <- reifyTypes [t1,t2] ; return (r1 `TH.AppT` r2) }
reifyType ty@(FunTy t1 t2)
| isPredTy t1 = reify_for_all ty -- Types like ((?x::Int) => Char -> Char)
| otherwise = do { [r1,r2] <- reifyTypes [t1,t2] ; return (TH.ArrowT `TH.AppT` r1 `TH.AppT` r2) }
reify_for_all :: TypeRep.Type -> TcM TH.Type
reify_for_all ty
= do { cxt' <- reifyCxt cxt
; tau' <- reifyType tau
; tvs' <- reifyTyVars tvs
; return (TH.ForallT tvs' cxt' tau') }
where
(tvs, cxt, tau) = tcSplitSigmaTy ty
reifyTyLit :: TypeRep.TyLit -> TcM TH.TyLit
reifyTyLit (NumTyLit n) = return (TH.NumTyLit n)
reifyTyLit (StrTyLit s) = return (TH.StrTyLit (unpackFS s))
reifyTypes :: [Type] -> TcM [TH.Type]
reifyTypes = mapM reifyType
reifyKind :: Kind -> TcM TH.Kind
reifyKind ki
= do { let (kis, ki') = splitKindFunTys ki
; ki'_rep <- reifyNonArrowKind ki'
; kis_rep <- mapM reifyKind kis
; return (foldr (TH.AppT . TH.AppT TH.ArrowT) ki'_rep kis_rep) }
where
reifyNonArrowKind k | isLiftedTypeKind k = return TH.StarT
| isConstraintKind k = return TH.ConstraintT
reifyNonArrowKind (TyVarTy v) = return (TH.VarT (reifyName v))
reifyNonArrowKind (ForAllTy _ k) = reifyKind k
reifyNonArrowKind (TyConApp kc kis) = reify_kc_app kc kis
reifyNonArrowKind (AppTy k1 k2) = do { k1' <- reifyKind k1
; k2' <- reifyKind k2
; return (TH.AppT k1' k2')
}
reifyNonArrowKind k = noTH (sLit "this kind") (ppr k)
reify_kc_app :: TyCon -> [TypeRep.Kind] -> TcM TH.Kind
reify_kc_app kc kis
= fmap (mkThAppTs r_kc) (mapM reifyKind kis)
where
r_kc | Just tc <- isPromotedTyCon_maybe kc
, isTupleTyCon tc = TH.TupleT (tyConArity kc)
| kc `hasKey` listTyConKey = TH.ListT
| otherwise = TH.ConT (reifyName kc)
reifyCxt :: [PredType] -> TcM [TH.Pred]
reifyCxt = mapM reifyPred
reifyFunDep :: ([TyVar], [TyVar]) -> TH.FunDep
reifyFunDep (xs, ys) = TH.FunDep (map reifyName xs) (map reifyName ys)
reifyFamFlavour :: TyCon -> TcM (Either TH.FamFlavour [TH.TySynEqn])
reifyFamFlavour tc
| isOpenTypeFamilyTyCon tc = return $ Left TH.TypeFam
| isDataFamilyTyCon tc = return $ Left TH.DataFam
-- this doesn't really handle abstract closed families, but let's not worry
-- about that now
| Just ax <- isClosedSynFamilyTyCon_maybe tc
= do { eqns <- brListMapM reifyAxBranch $ coAxiomBranches ax
; return $ Right eqns }
| otherwise
= panic "TcSplice.reifyFamFlavour: not a type family"
reifyTyVars :: [TyVar]
-> TcM [TH.TyVarBndr]
reifyTyVars tvs = mapM reify_tv $ filter isTypeVar tvs
where
-- even if the kind is *, we need to include a kind annotation,
-- in case a poly-kind would be inferred without the annotation.
-- See #8953 or test th/T8953
reify_tv tv = TH.KindedTV name <$> reifyKind kind
where
kind = tyVarKind tv
name = reifyName tv
{-
Note [Kind annotations on TyConApps]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A poly-kinded tycon sometimes needs a kind annotation to be unambiguous.
For example:
type family F a :: k
type instance F Int = (Proxy :: * -> *)
type instance F Bool = (Proxy :: (* -> *) -> *)
It's hard to figure out where these annotations should appear, so we do this:
Suppose the tycon is applied to n arguments. We strip off the first n
arguments of the tycon's kind. If there are any variables left in the result
kind, we put on a kind annotation. But we must be slightly careful: it's
possible that the tycon's kind will have fewer than n arguments, in the case
that the concrete application instantiates a result kind variable with an
arrow kind. So, if we run out of arguments, we conservatively put on a kind
annotation anyway. This should be a rare case, indeed. Here is an example:
data T1 :: k1 -> k2 -> *
data T2 :: k1 -> k2 -> *
type family G (a :: k) :: k
type instance G T1 = T2
type instance F Char = (G T1 Bool :: (* -> *) -> *) -- F from above
Here G's kind is (forall k. k -> k), and the desugared RHS of that last
instance of F is (G (* -> (* -> *) -> *) (T1 * (* -> *)) Bool). According to
the algorithm above, there are 3 arguments to G so we should peel off 3
arguments in G's kind. But G's kind has only two arguments. This is the
rare special case, and we conservatively choose to put the annotation
in.
See #8953 and test th/T8953.
-}
reify_tc_app :: TyCon -> [TypeRep.Type] -> TcM TH.Type
reify_tc_app tc tys
= do { tys' <- reifyTypes (removeKinds tc_kind tys)
; maybe_sig_t (mkThAppTs r_tc tys') }
where
arity = tyConArity tc
tc_kind = tyConKind tc
r_tc | isTupleTyCon tc = if isPromotedDataCon tc
then TH.PromotedTupleT arity
else TH.TupleT arity
| tc `hasKey` listTyConKey = TH.ListT
| tc `hasKey` nilDataConKey = TH.PromotedNilT
| tc `hasKey` consDataConKey = TH.PromotedConsT
| tc `hasKey` eqTyConKey = TH.EqualityT
| otherwise = TH.ConT (reifyName tc)
-- See Note [Kind annotations on TyConApps]
maybe_sig_t th_type
| needs_kind_sig
= do { let full_kind = typeKind (mkTyConApp tc tys)
; th_full_kind <- reifyKind full_kind
; return (TH.SigT th_type th_full_kind) }
| otherwise
= return th_type
needs_kind_sig
| Just result_ki <- peel_off_n_args tc_kind (length tys)
= not $ isEmptyVarSet $ kiVarsOfKind result_ki
| otherwise
= True
peel_off_n_args :: Kind -> Arity -> Maybe Kind
peel_off_n_args k 0 = Just k
peel_off_n_args k n
| Just (_, res_k) <- splitForAllTy_maybe k
= peel_off_n_args res_k (n-1)
| Just (_, res_k) <- splitFunTy_maybe k
= peel_off_n_args res_k (n-1)
| otherwise
= Nothing
removeKinds :: Kind -> [TypeRep.Type] -> [TypeRep.Type]
removeKinds (FunTy k1 k2) (h:t)
| isSuperKind k1 = removeKinds k2 t
| otherwise = h : removeKinds k2 t
removeKinds (ForAllTy v k) (h:t)
| isSuperKind (varType v) = removeKinds k t
| otherwise = h : removeKinds k t
removeKinds _ tys = tys
reifyPred :: TypeRep.PredType -> TcM TH.Pred
reifyPred ty
  -- We could reify the implicit parameter as a class but it seems
-- nicer to support them properly...
| isIPPred ty = noTH (sLit "implicit parameters") (ppr ty)
| otherwise = reifyType ty
------------------------------
reifyName :: NamedThing n => n -> TH.Name
reifyName thing
| isExternalName name = mk_varg pkg_str mod_str occ_str
| otherwise = TH.mkNameU occ_str (getKey (getUnique name))
-- Many of the things we reify have local bindings, and
-- NameL's aren't supposed to appear in binding positions, so
-- we use NameU. When/if we start to reify nested things, that
-- have free variables, we may need to generate NameL's for them.
where
name = getName thing
mod = ASSERT( isExternalName name ) nameModule name
pkg_str = packageKeyString (modulePackageKey mod)
mod_str = moduleNameString (moduleName mod)
occ_str = occNameString occ
occ = nameOccName name
mk_varg | OccName.isDataOcc occ = TH.mkNameG_d
| OccName.isVarOcc occ = TH.mkNameG_v
| OccName.isTcOcc occ = TH.mkNameG_tc
| otherwise = pprPanic "reifyName" (ppr name)
------------------------------
reifyFixity :: Name -> TcM TH.Fixity
reifyFixity name
= do { fix <- lookupFixityRn name
; return (conv_fix fix) }
where
conv_fix (BasicTypes.Fixity i d) = TH.Fixity i (conv_dir d)
conv_dir BasicTypes.InfixR = TH.InfixR
conv_dir BasicTypes.InfixL = TH.InfixL
conv_dir BasicTypes.InfixN = TH.InfixN
reifyStrict :: DataCon.HsSrcBang -> TH.Strict
reifyStrict HsNoBang = TH.NotStrict
reifyStrict (HsSrcBang _ _ False) = TH.NotStrict
reifyStrict (HsSrcBang _ (Just True) True) = TH.Unpacked
reifyStrict (HsSrcBang _ _ True) = TH.IsStrict
reifyStrict HsStrict = TH.IsStrict
reifyStrict (HsUnpack {}) = TH.Unpacked
------------------------------
lookupThAnnLookup :: TH.AnnLookup -> TcM CoreAnnTarget
lookupThAnnLookup (TH.AnnLookupName th_nm) = fmap NamedTarget (lookupThName th_nm)
lookupThAnnLookup (TH.AnnLookupModule (TH.Module pn mn))
= return $ ModuleTarget $
mkModule (stringToPackageKey $ TH.pkgString pn) (mkModuleName $ TH.modString mn)
reifyAnnotations :: Data a => TH.AnnLookup -> TcM [a]
reifyAnnotations th_name
= do { name <- lookupThAnnLookup th_name
; topEnv <- getTopEnv
; epsHptAnns <- liftIO $ prepareAnnotations topEnv Nothing
; tcg <- getGblEnv
; let selectedEpsHptAnns = findAnns deserializeWithData epsHptAnns name
; let selectedTcgAnns = findAnns deserializeWithData (tcg_ann_env tcg) name
; return (selectedEpsHptAnns ++ selectedTcgAnns) }
------------------------------
modToTHMod :: Module -> TH.Module
modToTHMod m = TH.Module (TH.PkgName $ packageKeyString $ modulePackageKey m)
(TH.ModName $ moduleNameString $ moduleName m)
reifyModule :: TH.Module -> TcM TH.ModuleInfo
reifyModule (TH.Module (TH.PkgName pkgString) (TH.ModName mString)) = do
this_mod <- getModule
let reifMod = mkModule (stringToPackageKey pkgString) (mkModuleName mString)
if (reifMod == this_mod) then reifyThisModule else reifyFromIface reifMod
where
reifyThisModule = do
usages <- fmap (map modToTHMod . moduleEnvKeys . imp_mods) getImports
return $ TH.ModuleInfo usages
reifyFromIface reifMod = do
iface <- loadInterfaceForModule (ptext (sLit "reifying module from TH for") <+> ppr reifMod) reifMod
let usages = [modToTHMod m | usage <- mi_usages iface,
Just m <- [usageToModule (modulePackageKey reifMod) usage] ]
return $ TH.ModuleInfo usages
usageToModule :: PackageKey -> Usage -> Maybe Module
usageToModule _ (UsageFile {}) = Nothing
usageToModule this_pkg (UsageHomeModule { usg_mod_name = mn }) = Just $ mkModule this_pkg mn
usageToModule _ (UsagePackageModule { usg_mod = m }) = Just m
------------------------------
mkThAppTs :: TH.Type -> [TH.Type] -> TH.Type
mkThAppTs fun_ty arg_tys = foldl TH.AppT fun_ty arg_tys
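-- For intuition (illustrative only, not part of the original source): the left
-- fold nests applications to the left, e.g.
--
-- > mkThAppTs f [a, b]  ==  TH.AppT (TH.AppT f a) b
-- > mkThAppTs (TH.ConT ''Maybe) [TH.ConT ''Int]  ==  TH.AppT (TH.ConT ''Maybe) (TH.ConT ''Int)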
noTH :: LitString -> SDoc -> TcM a
noTH s d = failWithTc (hsep [ptext (sLit "Can't represent") <+> ptext s <+>
ptext (sLit "in Template Haskell:"),
nest 2 d])
ppr_th :: TH.Ppr a => a -> SDoc
ppr_th x = text (TH.pprint x)
{-
Note [Reifying data constructors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Template Haskell syntax is rich enough to express even GADTs,
provided we do so in the equality-predicate form. So a GADT
like
data T a where
MkT1 :: a -> T [a]
MkT2 :: T Int
will appear in TH syntax like this
data T a = forall b. (a ~ [b]) => MkT1 b
| (a ~ Int) => MkT2
-}
#endif /* GHCI */
| gcampax/ghc | compiler/typecheck/TcSplice.hs | bsd-3-clause | 63,028 | 0 | 10 | 19,324 | 571 | 359 | 212 | 18 | 1 |
module Main where
import System.Environment
import Server
main :: IO ()
main = do
args <- getArgs
if length args /= 1
then putStr help
else case args of
[cfg] -> startServer cfg
_ -> putStr help
-- | Help message
help :: String
help = "Students Big Brother Server v0.1.0 \n\
\Usage: \n\
\ students-big-brother-server <server_config.json>\n"
| geo2a/students-big-brother | students-big-brother-server/app/Main.hs | bsd-3-clause | 385 | 0 | 11 | 101 | 91 | 48 | 43 | 13 | 3 |
{-# LANGUAGE TemplateHaskell #-}
module Client.CEntityT where
import Control.Lens (makeLenses)
import Linear (V3(..))
import Game.EntityStateT
import Types
makeLenses ''CEntityT
newCEntityT :: CEntityT
newCEntityT = CEntityT
{ _ceBaseline = newEntityStateT Nothing
, _ceCurrent = newEntityStateT Nothing
, _cePrev = newEntityStateT Nothing
, _ceServerFrame = 0
, _ceTrailCount = 0
, _ceLerpOrigin = V3 0 0 0
, _ceFlyStopTime = 0
}
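-- The 'makeLenses' splice above derives one lens per underscore-prefixed
-- field. An illustrative use (assuming 'Control.Lens.over' is in scope and
-- '_ceTrailCount' is numeric; not part of the original module):
--
-- > bumpTrailCount :: CEntityT -> CEntityT
-- > bumpTrailCount = over ceTrailCount (+ 1)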
| ksaveljev/hake-2 | src/Client/CEntityT.hs | bsd-3-clause | 539 | 0 | 7 | 168 | 115 | 68 | 47 | 16 | 1 |
{-|
Module : $Header$
Description : Adapter for communicating with Slack via the webhook based Events API
Copyright : (c) Justus Adam, 2016
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : POSIX
See http://marvin.readthedocs.io/en/latest/adapters.html#events-api for documentation about this adapter.
-}
module Marvin.Adapter.Slack.EventsAPI
( SlackAdapter, EventsAPI
, SlackUserId, SlackChannelId
, MkSlack
, SlackRemoteFile(..), SlackLocalFile(..)
, HasTitle(..), HasPublicPermalink(..), HasEditable(..), HasPublic(..), HasUser(..), HasPrivateUrl(..), HasComment(..)
) where
import Control.Concurrent.Async.Lifted
import Control.Concurrent.Chan.Lifted
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Logger
import Data.Aeson
import Data.Aeson.Types
import Data.Maybe (fromMaybe)
import qualified Data.Text as T
import qualified Data.Text.Lazy as L
import qualified Data.Text.Lazy.Encoding as L
import Lens.Micro.Platform
import Marvin.Adapter
import Marvin.Adapter.Slack.Internal.Common
import Marvin.Adapter.Slack.Internal.Types
import Marvin.Interpolate.All
import Network.HTTP.Types
import Network.Wai
import Network.Wai.Handler.Warp
import Network.Wai.Handler.WarpTLS
import Network.Wreq
eventAPIeventParser :: Value -> Parser (T.Text, Either L.Text (InternalType EventsAPI))
eventAPIeventParser = withObject "expected object" $ \o -> do
token <- o .: "token"
type_ <- o .: "type"
(token,) <$> case (type_ :: T.Text) of
"url_verification" -> Left <$> o .: "challenge"
"event_callback" -> Right <$> (o .: "event" >>= eventParser)
_ -> fail "unknown wrapper event type"
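-- For orientation, a sketch of the two wrapper payloads this parser accepts
-- (the field values below are made up for illustration):
--
-- > {"token":"t0k3n","type":"url_verification","challenge":"abc"}
-- >     parses to ("t0k3n", Left "abc")
-- > {"token":"t0k3n","type":"event_callback","event":{...}}
-- >     parses to ("t0k3n", Right ev), where the inner object is handed to 'eventParser'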
runEventReceiver :: Chan (InternalType EventsAPI) -> AdapterM (SlackAdapter EventsAPI) ()
runEventReceiver evChan = do
useTLS <- fromMaybe True <$> lookupFromAdapterConfig "use-tls"
server <- if useTLS
then do
certfile <- requireFromAdapterConfig "certfile"
keyfile <- requireFromAdapterConfig "keyfile"
return $ runTLS $ tlsSettings certfile keyfile
else return runSettings
port <- fromMaybe 7000 <$> lookupFromAdapterConfig "port"
expectedToken <- requireFromAdapterConfig "token"
let warpSet = setPort port defaultSettings
logFn <- askLoggerIO
liftIO $ server warpSet $ \req resp -> flip runLoggingT logFn $
let
respond status rheaders body = liftIO $ resp $ responseLBS status rheaders body
in if requestMethod req == methodPost
then do
bod <- liftIO $ lazyRequestBody req
case eitherDecode bod >>= parseEither eventAPIeventParser of
Left err -> do
logErrorN $(isT "Unreadable JSON event: '#{err}'")
respond notAcceptable406 [] ""
Right (token,_) | token /= expectedToken -> do
logErrorN $(isT "Recieved incorrect token: '#{token}'")
respond unauthorized401 [] ""
Right (_, Left challenge) -> do
logInfoN $(isT "Recieved challenge event: '#{challenge}'")
respond ok200 [] (L.encodeUtf8 challenge)
Right (_, Right ev) -> do
writeChan evChan ev
respond ok200 [] ""
else respond methodNotAllowed405 [] ""
sendMessageLoop :: AdapterM (SlackAdapter EventsAPI) ()
sendMessageLoop = do
outChan <- view (adapter.outChannel)
forever $ do
(SlackChannelId chan, msg) <- readChan outChan
either (\err -> logErrorN $(isT "Sending message failed: #{err}")) (const $ return ()) =<<
execAPIMethod
(const $ return ())
"chat.postMessage"
[ "channel" := chan
, "text" := msg
]
-- | Receive events as a server via HTTP webhook
data EventsAPI
instance MkSlack EventsAPI where
mkAdapterId = "slack-events"
initIOConnections inChan = do
a <- async $ runEventReceiver inChan
link a
sendMessageLoop
| JustusAdam/marvin | src/Marvin/Adapter/Slack/EventsAPI.hs | bsd-3-clause | 4,540 | 0 | 24 | 1,466 | 1,007 | 524 | 483 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Image_Loader where
import Graphics.Blank
import Wiki -- (578,400)
main :: IO ()
main = do
blankCanvas 3000 $ \ context -> do
url1 <- staticURL context "type/jpeg" "images/Haskell.jpg"
url2 <- staticURL context "type/jpeg" "images/House.jpg"
send context $ do
img1 <- newImage url1
img2 <- newImage url2
drawImage(img1,[69,50,97,129])
drawImage(img2,[200,50])
wiki $ snapShot context "images/Image_Loader.png"
wiki $ close context
| ku-fpg/blank-canvas | wiki-suite/Image_Loader.hs | bsd-3-clause | 584 | 0 | 16 | 174 | 166 | 82 | 84 | 16 | 1 |
#!/usr/bin/runghc
import Distribution.Simple
main = defaultMain
| baldo/derive-trie | Setup.hs | bsd-3-clause | 65 | 1 | 4 | 7 | 15 | 7 | 8 | 2 | 1 |
module Deflisp.Core.Show where
import Deflisp.Core.Types
import Debug.Trace
import qualified Data.Map as Map
instance Show LispFunk where
show (LibraryFunction name _) = "library function: " ++ name
show (VarArgFunction _ _ _ _) = "vararg function"
show (UserFunction _ _ _) = "user function"
show (Macros _ _) = "macros"
show (VariadicMacros _ _ _) = "variadic macros"
instance Show LispExpression where
show (LispNumber n) = show n
show (LispSymbol n) = n
show (LispKeyword n) = n
show (ReservedKeyword n) = show n
show (LispList n) = "(" ++ (unwords (map show n)) ++ ")"
show (LispMap n) = "(" ++ (unwords (map show (Map.toList n))) ++ ")"
show (LispVector n) = "[" ++ (unwords (map show n)) ++ "]"
show (LispString n) = n
show (LispBool n) = show n
show (LispFunction a) = show a
show (LispNil) = "nil"
show (LispIO _) = "io"
-- show a | (trace a) False = undefined
-- instance Show (IO LispExpression) where
-- show (IO (LispNumber n)) = show n
-- instance Eq LispExpression where
-- (LispNumber a) == (LispNumber b) = a == b
-- (LispBool a) == (LispBool b) = a == b
-- (LispSymbol a) == (LispSymbol b) = a == b
-- (LispList a) == (LispList b) = a == b
-- (LispVector a) == (LispVector b) = a == b
-- (LispString a) == (LispString b) = a == b
-- LispNil == LispNil = True
-- _ == _ = False
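-- A few renderings implied by the instance above (illustrative; assumes the
-- value wrapped by 'LispNumber' prints as a plain integer):
--
-- > show (LispList [LispSymbol "+", LispNumber 1, LispNumber 2]) == "(+ 1 2)"
-- > show (LispVector [LispBool True, LispNil]) == "[True nil]"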
| ifesdjeen/deflisp | src/DefLisp/Core/Show.hs | bsd-3-clause | 1,361 | 0 | 14 | 312 | 397 | 208 | 189 | 23 | 0 |
module Language
( initialInterpreterState
, parse
, interpret
, setInterpreterVariables
, updateSystemVars
, module Language.Ast
) where
import Control.Monad ( forM_ )
import Control.Monad.Trans ( liftIO )
import qualified Data.Map.Strict as M
import Lens.Simple ( set )
import Gfx.Context ( GfxContext )
import Language.Ast ( Identifier
, Program
, Value(..)
)
import Language.Ast.Transformers ( transform )
import Language.Interpreter ( interpretLanguage )
import Language.Interpreter.StdLib ( addStdLib )
import qualified Language.Interpreter.Types as LT
import Language.Interpreter.Types ( InterpreterState
, externals
, getGlobalNames
, gfxContext
, runInterpreterM
, setGlobal
, setSystemVars
, systemVars
)
import Language.Parser ( parseProgram )
import Language.Parser.Errors ( ParserError )
import Logging ( logInfo )
parse :: String -> Either ParserError Program
parse = parseProgram
initialInterpreterState
:: [(Identifier, Value)]
-> [(FilePath, Program)]
-> GfxContext
-> IO InterpreterState
initialInterpreterState systemVariables userCode ctx =
let langState = set gfxContext ctx LT.empty
setup = do
setSystemVars systemVariables
addStdLib
globals <- getGlobalNames
mapM (load globals) userCode
in snd <$> runInterpreterM setup langState
where
load globals (fp, code) = do
liftIO $ logInfo ("Loading " ++ fp)
interpretLanguage $ transform globals code
updateSystemVars
:: [(Identifier, Value)] -> InterpreterState -> InterpreterState
updateSystemVars newSysVars = set systemVars (M.fromList newSysVars)
setInterpreterVariables
:: [(Identifier, Value)]
-> M.Map String Value
-> InterpreterState
-> IO InterpreterState
setInterpreterVariables globals externalVars is =
let setVars = forM_ globals (uncurry setGlobal)
in do
(_, newState) <- runInterpreterM setVars is
return $ set externals externalVars newState
interpret
:: InterpreterState -> Program -> IO (Either String Value, InterpreterState)
interpret initialState program =
let run = do
globals <- getGlobalNames
interpretLanguage (transform globals program)
in runInterpreterM run initialState
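-- A rough end-to-end sketch of how these pieces fit together (illustrative
-- only; 'ctx' is an existing 'GfxContext', 'src' is some program text, and
-- 'Show' instances are assumed for printing):
--
-- > runOnce :: GfxContext -> String -> IO ()
-- > runOnce ctx src = case parse src of
-- >   Left err -> print err
-- >   Right prog -> do
-- >     st <- initialInterpreterState [] [] ctx
-- >     (result, _) <- interpret st prog
-- >     print result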
| rumblesan/improviz | src/Language.hs | bsd-3-clause | 3,026 | 0 | 13 | 1,221 | 598 | 326 | 272 | 68 | 1 |
module System.Build.Access.Version where
class Version r where
setVersion ::
r
-> r
unsetVersion ::
r
-> r
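-- A sketch of what an instance typically looks like (the 'JavacOptions'
-- record and its 'versionFlag' field are hypothetical, for illustration only):
--
-- > instance Version JavacOptions where
-- >   setVersion o   = o { versionFlag = True }
-- >   unsetVersion o = o { versionFlag = False }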
| tonymorris/lastik | System/Build/Access/Version.hs | bsd-3-clause | 130 | 0 | 7 | 40 | 35 | 20 | 15 | 8 | 0 |
{-# LANGUAGE JavaScriptFFI #-}
module GHCJS.Three.HasName
(HasName(..)
) where
import GHCJS.Types
import GHCJS.Three.Monad
import Data.JSString (pack, unpack)
-- | get name
foreign import javascript unsafe "($1)['name']"
thr_getName :: JSVal -> Three JSString
-- | set name
foreign import javascript unsafe "($2)['name'] = $1"
thr_setName :: JSString -> JSVal -> Three ()
class ThreeJSVal o => HasName o where
getName :: o -> Three String
getName o = unpack <$> thr_getName (toJSVal o)
setName :: String -> o -> Three ()
setName n o = thr_setName (pack n) (toJSVal o)
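-- An illustrative use of the default methods (the 'Mesh' type is only an
-- assumption here; any 'ThreeJSVal' instance of 'HasName' works the same way):
--
-- > relabel :: Mesh -> Three String
-- > relabel m = setName "ground" m >> getName m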
| manyoo/ghcjs-three | src/GHCJS/Three/HasName.hs | bsd-3-clause | 605 | 11 | 8 | 126 | 181 | 99 | 82 | 15 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TypeFamilies #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Lens.Equality
-- Copyright : (C) 2012-14 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : provisional
-- Portability : Rank2Types
--
----------------------------------------------------------------------------
module Control.Lens.Equality
(
-- * Type Equality
Equality, Equality'
, AnEquality, AnEquality'
, runEq
, substEq
, mapEq
, fromEq
, simply
-- * Implementation Details
, Identical(..)
) where
import Control.Lens.Type
import Data.Functor.Identity
#ifdef HLINT
{-# ANN module "HLint: ignore Use id" #-}
{-# ANN module "HLint: ignore Eta reduce" #-}
#endif
-- $setup
-- >>> import Control.Lens
-----------------------------------------------------------------------------
-- Equality
-----------------------------------------------------------------------------
-- | Provides witness that @(s ~ a, b ~ t)@ holds.
data Identical a b s t where
Identical :: Identical a b a b
-- | When you see this as an argument to a function, it expects an 'Equality'.
type AnEquality s t a b = Identical a (Identity b) a (Identity b) -> Identical a (Identity b) s (Identity t)
-- | A 'Simple' 'AnEquality'.
type AnEquality' s a = AnEquality s s a a
-- | Extract a witness of type 'Equality'.
runEq :: AnEquality s t a b -> Identical s t a b
runEq l = case l Identical of Identical -> Identical
{-# INLINE runEq #-}
-- | Substituting types with 'Equality'.
substEq :: AnEquality s t a b -> ((s ~ a, t ~ b) => r) -> r
substEq l = case runEq l of
Identical -> \r -> r
{-# INLINE substEq #-}
-- | We can use 'Equality' to do substitution into anything.
mapEq :: AnEquality s t a b -> f s -> f a
mapEq l r = substEq l r
{-# INLINE mapEq #-}
-- | 'Equality' is symmetric.
fromEq :: AnEquality s t a b -> Equality b a t s
fromEq l = substEq l id
{-# INLINE fromEq #-}
-- | This is an adverb that can be used to modify many other 'Lens' combinators to make them require
-- simple lenses, simple traversals, simple prisms or simple isos as input.
simply :: (Optic' p f s a -> r) -> Optic' p f s a -> r
simply = id
{-# INLINE simply #-}
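-- A doctest-style illustration of 'simply' (added for clarity; since 'simply'
-- is 'id', it only pins the combinator to the 'Simple' case):
--
-- >>> simply view _1 (1,2)
-- 1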
| hvr/lens | src/Control/Lens/Equality.hs | bsd-3-clause | 2,358 | 0 | 10 | 452 | 433 | 250 | 183 | 36 | 1 |
module Distribution.Client.Dependency.Modular
( modularResolver, SolverConfig(..)) where
-- Here, we try to map between the external epm solver
-- interface and the internal interface that the solver actually
-- expects. There are a number of type conversions to perform: we
-- have to convert the package indices to the uniform index used
-- by the solver; we also have to convert the initial constraints;
-- and finally, we have to convert back the resulting install
-- plan.
import Data.Map as M
( fromListWith )
import Distribution.Client.Dependency.Modular.Assignment
( Assignment, toCPs )
import Distribution.Client.Dependency.Modular.Dependency
( RevDepMap )
import Distribution.Client.Dependency.Modular.ConfiguredConversion
( convCP )
import Distribution.Client.Dependency.Modular.IndexConversion
( convPIs )
import Distribution.Client.Dependency.Modular.Log
( logToProgress )
import Distribution.Client.Dependency.Modular.Package
( PN )
import Distribution.Client.Dependency.Modular.Solver
( SolverConfig(..), solve )
import Distribution.Client.Dependency.Types
( DependencyResolver, PackageConstraint(..) )
import Distribution.Client.InstallPlan
( PlanPackage )
import Distribution.System
( Platform(..) )
-- | Ties the two worlds together: classic epm vs. the modular
-- solver. Performs the necessary translations before and after.
modularResolver :: SolverConfig -> DependencyResolver
modularResolver sc (Platform arch os) cinfo iidx sidx pprefs pcs pns =
fmap (uncurry postprocess) $ -- convert install plan
logToProgress (maxBackjumps sc) $ -- convert log format into progress format
solve sc idx pprefs gcs pns
where
-- Indices have to be converted into solver-specific uniform index.
idx = convPIs os arch cinfo (shadowPkgs sc) (strongFlags sc) iidx sidx
-- Constraints have to be converted into a finite map indexed by PN.
gcs = M.fromListWith (++) (map (\ pc -> (pcName pc, [pc])) pcs)
-- Results have to be converted into an install plan.
postprocess :: Assignment -> RevDepMap -> [PlanPackage]
postprocess a rdm = map (convCP iidx sidx) (toCPs a rdm)
-- Helper function to extract the PN from a constraint.
pcName :: PackageConstraint -> PN
pcName (PackageConstraintVersion pn _) = pn
pcName (PackageConstraintInstalled pn ) = pn
pcName (PackageConstraintSource pn ) = pn
pcName (PackageConstraintFlags pn _) = pn
pcName (PackageConstraintStanzas pn _) = pn
| typelead/epm | epm/Distribution/Client/Dependency/Modular.hs | bsd-3-clause | 2,611 | 0 | 13 | 551 | 476 | 281 | 195 | 39 | 5 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-2006
\section[RnEnv]{Environment manipulation for the renamer monad}
-}
{-# LANGUAGE CPP, MultiWayIf #-}
module RnEnv (
newTopSrcBinder,
lookupLocatedTopBndrRn, lookupTopBndrRn,
lookupLocatedOccRn, lookupOccRn, lookupOccRn_maybe,
lookupLocalOccRn_maybe, lookupInfoOccRn,
lookupLocalOccThLvl_maybe,
lookupTypeOccRn, lookupKindOccRn,
lookupGlobalOccRn, lookupGlobalOccRnExport, lookupGlobalOccRn_maybe,
lookupOccRn_overloaded, lookupGlobalOccRn_overloaded,
reportUnboundName, unknownNameSuggestions,
addNameClashErrRn,
HsSigCtxt(..), lookupLocalTcNames, lookupSigOccRn,
lookupSigCtxtOccRn,
lookupFixityRn, lookupFixityRn_help,
lookupFieldFixityRn, lookupTyFixityRn,
lookupInstDeclBndr, lookupRecFieldOcc, lookupFamInstName,
lookupConstructorFields,
lookupSyntaxName, lookupSyntaxNames, lookupIfThenElse,
lookupGreAvailRn,
getLookupOccRn,mkUnboundName, mkUnboundNameRdr, isUnboundName,
addUsedGRE, addUsedGREs, addUsedDataCons,
newLocalBndrRn, newLocalBndrsRn,
bindLocalNames, bindLocalNamesFV,
MiniFixityEnv,
addLocalFixities,
bindLocatedLocalsFV, bindLocatedLocalsRn,
extendTyVarEnvFVRn,
-- Role annotations
RoleAnnotEnv, emptyRoleAnnotEnv, mkRoleAnnotEnv,
lookupRoleAnnot, getRoleAnnots,
checkDupRdrNames, checkShadowedRdrNames,
checkDupNames, checkDupAndShadowedNames, dupNamesErr,
checkTupSize,
addFvRn, mapFvRn, mapMaybeFvRn, mapFvRnCPS,
warnUnusedMatches, warnUnusedTypePatterns,
warnUnusedTopBinds, warnUnusedLocalBinds,
mkFieldEnv,
dataTcOccs, kindSigErr, perhapsForallMsg, unknownSubordinateErr,
HsDocContext(..), pprHsDocContext,
inHsDocContext, withHsDocContext
) where
#include "HsVersions.h"
import LoadIface ( loadInterfaceForName, loadSrcInterface_maybe )
import IfaceEnv
import HsSyn
import RdrName
import HscTypes
import TcEnv
import TcRnMonad
import RdrHsSyn ( setRdrNameSpace )
import TysWiredIn ( starKindTyConName, unicodeStarKindTyConName )
import Name
import NameSet
import NameEnv
import Avail
import Module
import ConLike
import DataCon
import TyCon
import PrelNames ( mkUnboundName, isUnboundName, rOOT_MAIN, forall_tv_RDR )
import ErrUtils ( MsgDoc )
import BasicTypes ( Fixity(..), FixityDirection(..), minPrecedence, defaultFixity )
import SrcLoc
import Outputable
import Util
import Maybes
import BasicTypes ( TopLevelFlag(..) )
import ListSetOps ( removeDups )
import DynFlags
import FastString
import Control.Monad
import Data.List
import Data.Function ( on )
import ListSetOps ( minusList )
import Constants ( mAX_TUPLE_SIZE )
import qualified GHC.LanguageExtensions as LangExt
{-
*********************************************************
* *
Source-code binders
* *
*********************************************************
Note [Signature lazy interface loading]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
GHC's lazy interface loading can be a bit confusing, so this Note is an
empirical description of what happens in one interesting case. When
compiling a signature module against its implementation, we do NOT
load interface files associated with its names until after the type
checking phase. For example:
module ASig where
data T
f :: T -> T
Suppose we compile this with -sig-of "A is ASig":
module B where
data T = T
f T = T
module A(module B) where
import B
During type checking, we'll load A.hi because we need to know what the
RdrEnv for the module is, but we DO NOT load the interface for B.hi!
It's wholly unnecessary: our local definition 'data T' in ASig is all
the information we need to finish type checking. This is in contrast to
type checking of ordinary Haskell files, in which we would not have the
local definition "data T" and would need to consult B.hi immediately.
(Also, this situation never occurs for hs-boot files, since you're not
allowed to reexport from another module.)
After type checking, we then check that the types we provided are
consistent with the backing implementation (in checkHiBootOrHsigIface).
At this point, B.hi is loaded, because we need something to compare
against.
I discovered this behavior when trying to figure out why type class
instances for Data.Map weren't in the EPS when I was type checking a
test very much like ASig (sigof02dm): the associated interface hadn't
been loaded yet! (The larger issue is a moot point, since an instance
declared in a signature can never be a duplicate.)
This behavior might change in the future. Consider this
alternate module B:
module B where
{-# DEPRECATED T, f "Don't use" #-}
data T = T
f T = T
One might conceivably want to report deprecation warnings when compiling
ASig with -sig-of B, in which case we need to look at B.hi to find the
deprecation warnings during renaming. At the moment, you don't get any
warning until you use the identifier further downstream. This would
require adjusting addUsedGRE so that during signature compilation,
we do not report deprecation warnings for LocalDef. See also
Note [Handling of deprecations]
-}
newTopSrcBinder :: Located RdrName -> RnM Name
newTopSrcBinder (L loc rdr_name)
| Just name <- isExact_maybe rdr_name
= -- This is here to catch
-- (a) Exact-name binders created by Template Haskell
-- (b) The PrelBase defn of (say) [] and similar, for which
-- the parser reads the special syntax and returns an Exact RdrName
-- We are at a binding site for the name, so check first that it
-- the current module is the correct one; otherwise GHC can get
-- very confused indeed. This test rejects code like
-- data T = (,) Int Int
-- unless we are in GHC.Tup
if isExternalName name then
do { this_mod <- getModule
; unless (this_mod == nameModule name)
(addErrAt loc (badOrigBinding rdr_name))
; return name }
else -- See Note [Binders in Template Haskell] in Convert.hs
do { this_mod <- getModule
; externaliseName this_mod name }
| Just (rdr_mod, rdr_occ) <- isOrig_maybe rdr_name
= do { this_mod <- getModule
; unless (rdr_mod == this_mod || rdr_mod == rOOT_MAIN)
(addErrAt loc (badOrigBinding rdr_name))
-- When reading External Core we get Orig names as binders,
-- but they should agree with the module gotten from the monad
--
-- We can get built-in syntax showing up here too, sadly. If you type
-- data T = (,,,)
-- the constructor is parsed as a type, and then RdrHsSyn.tyConToDataCon
        -- uses setRdrNameSpace to make it into a data constructor. At that point
-- the nice Exact name for the TyCon gets swizzled to an Orig name.
-- Hence the badOrigBinding error message.
--
-- Except for the ":Main.main = ..." definition inserted into
-- the Main module; ugh!
-- Because of this latter case, we call newGlobalBinder with a module from
-- the RdrName, not from the environment. In principle, it'd be fine to
-- have an arbitrary mixture of external core definitions in a single module,
-- (apart from module-initialisation issues, perhaps).
; newGlobalBinder rdr_mod rdr_occ loc }
| otherwise
= do { unless (not (isQual rdr_name))
(addErrAt loc (badQualBndrErr rdr_name))
-- Binders should not be qualified; if they are, and with a different
        -- module name, we get a confusing "M.T is not in scope" error later
; stage <- getStage
; env <- getGblEnv
; if isBrackStage stage then
-- We are inside a TH bracket, so make an *Internal* name
-- See Note [Top-level Names in Template Haskell decl quotes] in RnNames
do { uniq <- newUnique
; return (mkInternalName uniq (rdrNameOcc rdr_name) loc) }
else case tcg_impl_rdr_env env of
Just gr ->
-- We're compiling --sig-of, so resolve with respect to this
-- module.
-- See Note [Signature parameters in TcGblEnv and DynFlags]
do { case lookupGlobalRdrEnv gr (rdrNameOcc rdr_name) of
-- Be sure to override the loc so that we get accurate
-- information later
[GRE{ gre_name = n }] -> do
-- NB: Just adding this line will not work:
-- addUsedGRE True gre
-- see Note [Signature lazy interface loading] for
-- more details.
return (setNameLoc n loc)
_ -> do
{ -- NB: cannot use reportUnboundName rdr_name
-- because it looks up in the wrong RdrEnv
-- ToDo: more helpful error messages
; addErr (unknownNameErr (pprNonVarNameSpace
(occNameSpace (rdrNameOcc rdr_name))) rdr_name)
; return (mkUnboundNameRdr rdr_name)
}
}
Nothing ->
-- Normal case
do { this_mod <- getModule
; traceRn (text "newTopSrcBinder" <+> (ppr this_mod $$ ppr rdr_name $$ ppr loc))
; newGlobalBinder this_mod (rdrNameOcc rdr_name) loc } }
{-
*********************************************************
* *
Source code occurrences
* *
*********************************************************
Looking up a name in the RnEnv.
Note [Type and class operator definitions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We want to reject all of these unless we have -XTypeOperators (Trac #3265)
data a :*: b = ...
class a :*: b where ...
data (:*:) a b = ....
class (:*:) a b where ...
The latter two mean that we are not just looking for a
*syntactically-infix* declaration, but one that uses an operator
OccName. We use OccName.isSymOcc to detect that case, which isn't
terribly efficient, but there seems to be no better way.
-}
lookupTopBndrRn :: RdrName -> RnM Name
lookupTopBndrRn n = do nopt <- lookupTopBndrRn_maybe n
case nopt of
Just n' -> return n'
Nothing -> do traceRn $ (text "lookupTopBndrRn fail" <+> ppr n)
unboundName WL_LocalTop n
lookupLocatedTopBndrRn :: Located RdrName -> RnM (Located Name)
lookupLocatedTopBndrRn = wrapLocM lookupTopBndrRn
lookupTopBndrRn_maybe :: RdrName -> RnM (Maybe Name)
-- Look up a top-level source-code binder. We may be looking up an unqualified 'f',
-- and there may be several imported 'f's too, which must not confuse us.
-- For example, this is OK:
-- import Foo( f )
-- infix 9 f -- The 'f' here does not need to be qualified
-- f x = x -- Nor here, of course
-- So we have to filter out the non-local ones.
--
-- A separate function (importsFromLocalDecls) reports duplicate top level
-- decls, so here it's safe just to choose an arbitrary one.
--
-- There should never be a qualified name in a binding position in Haskell,
-- but there can be if we have read in an external-Core file.
-- The Haskell parser checks for the illegal qualified name in Haskell
-- source files, so we don't need to do so here.
lookupTopBndrRn_maybe rdr_name
| Just name <- isExact_maybe rdr_name
= do { name' <- lookupExactOcc name; return (Just name') }
| Just (rdr_mod, rdr_occ) <- isOrig_maybe rdr_name
-- This deals with the case of derived bindings, where
-- we don't bother to call newTopSrcBinder first
-- We assume there is no "parent" name
= do { loc <- getSrcSpanM
; n <- newGlobalBinder rdr_mod rdr_occ loc
; return (Just n)}
| otherwise
= do { -- Check for operators in type or class declarations
-- See Note [Type and class operator definitions]
let occ = rdrNameOcc rdr_name
; when (isTcOcc occ && isSymOcc occ)
(do { op_ok <- xoptM LangExt.TypeOperators
; unless op_ok (addErr (opDeclErr rdr_name)) })
; env <- getGlobalRdrEnv
; case filter isLocalGRE (lookupGRE_RdrName rdr_name env) of
[gre] -> return (Just (gre_name gre))
_ -> return Nothing -- Ambiguous (can't happen) or unbound
}
-----------------------------------------------
-- | Lookup an @Exact@ @RdrName@. See Note [Looking up Exact RdrNames].
-- This adds an error if the name cannot be found.
lookupExactOcc :: Name -> RnM Name
lookupExactOcc name
= do { result <- lookupExactOcc_either name
; case result of
Left err -> do { addErr err
; return name }
Right name' -> return name' }
-- | Lookup an @Exact@ @RdrName@. See Note [Looking up Exact RdrNames].
-- This never adds an error, but it may return one.
lookupExactOcc_either :: Name -> RnM (Either MsgDoc Name)
-- See Note [Looking up Exact RdrNames]
lookupExactOcc_either name
| Just thing <- wiredInNameTyThing_maybe name
, Just tycon <- case thing of
ATyCon tc -> Just tc
AConLike (RealDataCon dc) -> Just (dataConTyCon dc)
_ -> Nothing
, isTupleTyCon tycon
= do { checkTupSize (tyConArity tycon)
; return (Right name) }
| isExternalName name
= return (Right name)
| otherwise
= do { env <- getGlobalRdrEnv
; let -- See Note [Splicing Exact names]
main_occ = nameOccName name
demoted_occs = case demoteOccName main_occ of
Just occ -> [occ]
Nothing -> []
gres = [ gre | occ <- main_occ : demoted_occs
, gre <- lookupGlobalRdrEnv env occ
, gre_name gre == name ]
; case gres of
[gre] -> return (Right (gre_name gre))
[] -> -- See Note [Splicing Exact names]
do { lcl_env <- getLocalRdrEnv
; if name `inLocalRdrEnvScope` lcl_env
then return (Right name)
else
#ifdef GHCI
do { th_topnames_var <- fmap tcg_th_topnames getGblEnv
; th_topnames <- readTcRef th_topnames_var
; if name `elemNameSet` th_topnames
then return (Right name)
else return (Left exact_nm_err)
}
#else /* !GHCI */
return (Left exact_nm_err)
#endif /* !GHCI */
}
gres -> return (Left (sameNameErr gres)) -- Ugh! See Note [Template Haskell ambiguity]
}
where
exact_nm_err = hang (text "The exact Name" <+> quotes (ppr name) <+> ptext (sLit "is not in scope"))
2 (vcat [ text "Probable cause: you used a unique Template Haskell name (NameU), "
, text "perhaps via newName, but did not bind it"
, text "If that's it, then -ddump-splices might be useful" ])
sameNameErr :: [GlobalRdrElt] -> MsgDoc
sameNameErr [] = panic "addSameNameErr: empty list"
sameNameErr gres@(_ : _)
= hang (text "Same exact name in multiple name-spaces:")
2 (vcat (map pp_one sorted_names) $$ th_hint)
where
sorted_names = sortWith nameSrcLoc (map gre_name gres)
pp_one name
= hang (pprNameSpace (occNameSpace (getOccName name))
<+> quotes (ppr name) <> comma)
2 (text "declared at:" <+> ppr (nameSrcLoc name))
th_hint = vcat [ text "Probable cause: you bound a unique Template Haskell name (NameU),"
, text "perhaps via newName, in different name-spaces."
, text "If that's it, then -ddump-splices might be useful" ]
-----------------------------------------------
lookupInstDeclBndr :: Name -> SDoc -> RdrName -> RnM Name
-- This is called on the method name on the left-hand side of an
-- instance declaration binding. eg. instance Functor T where
-- fmap = ...
-- ^^^^ called on this
-- Regardless of how many unqualified fmaps are in scope, we want
-- the one that comes from the Functor class.
--
-- Furthermore, note that we take no account of whether the
-- name is only in scope qualified. I.e. even if method op is
-- in scope as M.op, we still allow plain 'op' on the LHS of
-- an instance decl
--
-- The "what" parameter says "method" or "associated type",
-- depending on what we are looking up
lookupInstDeclBndr cls what rdr
= do { when (isQual rdr)
(addErr (badQualBndrErr rdr))
-- In an instance decl you aren't allowed
-- to use a qualified name for the method
-- (Although it'd make perfect sense.)
; mb_name <- lookupSubBndrOcc
False -- False => we don't give deprecated
-- warnings when a deprecated class
-- method is defined. We only warn
-- when it's used
cls doc rdr
; case mb_name of
Left err -> do { addErr err; return (mkUnboundNameRdr rdr) }
Right nm -> return nm }
where
doc = what <+> text "of class" <+> quotes (ppr cls)
-----------------------------------------------
lookupFamInstName :: Maybe Name -> Located RdrName -> RnM (Located Name)
-- Used for TyData and TySynonym family instances only,
-- See Note [Family instance binders]
lookupFamInstName (Just cls) tc_rdr -- Associated type; c.f RnBinds.rnMethodBind
= wrapLocM (lookupInstDeclBndr cls (text "associated type")) tc_rdr
lookupFamInstName Nothing tc_rdr -- Family instance; tc_rdr is an *occurrence*
= lookupLocatedOccRn tc_rdr
-----------------------------------------------
lookupConstructorFields :: Name -> RnM [FieldLabel]
-- Look up the fields of a given constructor
-- * For constructors from this module, use the record field env,
-- which is itself gathered from the (as yet un-typechecked)
-- data type decls
--
-- * For constructors from imported modules, use the *type* environment
-- since imported modules are already compiled, the info is conveniently
-- right there
lookupConstructorFields con_name
= do { this_mod <- getModule
; if nameIsLocalOrFrom this_mod con_name then
do { field_env <- getRecFieldEnv
; traceTc "lookupCF" (ppr con_name $$ ppr (lookupNameEnv field_env con_name) $$ ppr field_env)
; return (lookupNameEnv field_env con_name `orElse` []) }
else
do { con <- tcLookupDataCon con_name
; traceTc "lookupCF 2" (ppr con)
; return (dataConFieldLabels con) } }
-----------------------------------------------
-- Used for record construction and pattern matching
-- When the -XDisambiguateRecordFields flag is on, take account of the
-- constructor name to disambiguate which field to use; it's just the
-- same as for instance decls
--
-- NB: Consider this:
-- module Foo where { data R = R { fld :: Int } }
-- module Odd where { import Foo; fld x = x { fld = 3 } }
-- Arguably this should work, because the reference to 'fld' is
-- unambiguous because there is only one field id 'fld' in scope.
-- But currently it's rejected.
lookupRecFieldOcc :: Maybe Name -- Nothing => just look it up as usual
-- Just tycon => use tycon to disambiguate
-> SDoc -> RdrName
-> RnM Name
lookupRecFieldOcc parent doc rdr_name
| Just tc_name <- parent
= do { mb_name <- lookupSubBndrOcc True tc_name doc rdr_name
; case mb_name of
Left err -> do { addErr err; return (mkUnboundNameRdr rdr_name) }
Right n -> return n }
| otherwise
= lookupGlobalOccRn rdr_name
lookupSubBndrOcc :: Bool
-> Name -- Parent
-> SDoc
-> RdrName
-> RnM (Either MsgDoc Name)
-- Find all the things the rdr-name maps to
-- and pick the one with the right parent name
lookupSubBndrOcc warn_if_deprec the_parent doc rdr_name
| Just n <- isExact_maybe rdr_name -- This happens in derived code
= do { n <- lookupExactOcc n
; return (Right n) }
| Just (rdr_mod, rdr_occ) <- isOrig_maybe rdr_name
= do { n <- lookupOrig rdr_mod rdr_occ
; return (Right n) }
| isUnboundName the_parent
-- Avoid an error cascade from malformed decls:
-- instance Int where { foo = e }
-- We have already generated an error in rnLHsInstDecl
= return (Right (mkUnboundNameRdr rdr_name))
| otherwise
= do { env <- getGlobalRdrEnv
; let gres = lookupGlobalRdrEnv env (rdrNameOcc rdr_name)
-- NB: lookupGlobalRdrEnv, not lookupGRE_RdrName!
-- The latter does pickGREs, but we want to allow 'x'
-- even if only 'M.x' is in scope
; traceRn (text "lookupSubBndrOcc" <+> vcat [ppr the_parent, ppr rdr_name, ppr gres, ppr (pick_gres rdr_name gres)])
; case pick_gres rdr_name gres of
(gre:_) -> do { addUsedGRE warn_if_deprec gre
-- Add a usage; this is an *occurrence* site
-- Note [Usage for sub-bndrs]
; return (Right (gre_name gre)) }
-- If there is more than one local GRE for the
-- same OccName 'f', that will be reported separately
-- as a duplicate top-level binding for 'f'
[] -> do { ns <- lookupQualifiedNameGHCi rdr_name
; case ns of
(n:_) -> return (Right n) -- Unlikely to be more than one...?
[] -> return (Left (unknownSubordinateErr doc rdr_name))
} }
where
-- If Parent = NoParent, just do a normal lookup
-- If Parent = Parent p then find all GREs that
-- (a) have parent p
-- (b) for Unqual, are in scope qualified or unqualified
-- for Qual, are in scope with that qualification
pick_gres rdr_name gres
| isUnqual rdr_name = filter right_parent gres
| otherwise = filter right_parent (pickGREs rdr_name gres)
right_parent (GRE { gre_par = p })
| ParentIs parent <- p = parent == the_parent
| FldParent { par_is = parent } <- p = parent == the_parent
| otherwise = False
{-
Note [Family instance binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
data family F a
data instance F T = X1 | X2
The 'data instance' decl has an *occurrence* of F (and T), and *binds*
X1 and X2. (This is unlike a normal data type declaration which would
bind F too.) So we want an AvailTC F [X1,X2].
Now consider a similar pair:
class C a where
data G a
instance C S where
data G S = Y1 | Y2
The 'data G S' *binds* Y1 and Y2, and has an *occurrence* of G.
But there is a small complication: in an instance decl, we don't use
qualified names on the LHS; instead we use the class to disambiguate.
Thus:
module M where
import Blib( G )
class C a where
data G a
instance C S where
data G S = Y1 | Y2
Even though there are two G's in scope (M.G and Blib.G), the occurrence
of 'G' in the 'instance C S' decl is unambiguous, because C has only
one associated type called G. This is exactly what happens for methods,
and it is only consistent to do the same thing for types. That's the
role of the function lookupTcdName; the (Maybe Name) gives the class of
the enclosing instance decl, if any.
Note [Looking up Exact RdrNames]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Exact RdrNames are generated by Template Haskell. See Note [Binders
in Template Haskell] in Convert.
For data types and classes have Exact system Names in the binding
positions for constructors, TyCons etc. For example
[d| data T = MkT Int |]
when we splice in and Convert to HsSyn RdrName, we'll get
data (Exact (system Name "T")) = (Exact (system Name "MkT")) ...
These System names are generated by Convert.thRdrName
But, constructors and the like need External Names, not System Names!
So we do the following
* In RnEnv.newTopSrcBinder we spot Exact RdrNames that wrap a
non-External Name, and make an External name for it. This is
the name that goes in the GlobalRdrEnv
* When looking up an occurrence of an Exact name, done in
RnEnv.lookupExactOcc, we find the Name with the right unique in the
GlobalRdrEnv, and use the one from the envt -- it will be an
External Name in the case of the data type/constructor above.
* Exact names are also used for purely local binders generated
by TH, such as \x_33. x_33
Both binder and occurrence are Exact RdrNames. The occurrence
gets looked up in the LocalRdrEnv by RnEnv.lookupOccRn, and
misses, because lookupLocalRdrEnv always returns Nothing for
an Exact Name. Now we fall through to lookupExactOcc, which
will find the Name is not in the GlobalRdrEnv, so we just use
the Exact supplied Name.
Note [Splicing Exact names]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider the splice $(do { x <- newName "x"; return (VarE x) })
This will generate a (HsExpr RdrName) term that mentions the
Exact RdrName "x_56" (or whatever), but does not bind it. So
when looking such Exact names we want to check that it's in scope,
otherwise the type checker will get confused. To do this we need to
keep track of all the Names in scope, and the LocalRdrEnv does just that;
we consult it with RdrName.inLocalRdrEnvScope.
There is another wrinkle. With TH and -XDataKinds, consider
$( [d| data Nat = Zero
data T = MkT (Proxy 'Zero) |] )
After splicing, but before renaming we get this:
data Nat_77{tc} = Zero_78{d}
data T_79{tc} = MkT_80{d} (Proxy 'Zero_78{tc}) |] )
The occurrence of 'Zero in the data type for T has the right unique,
but it has a TcClsName name-space in its OccName. (This is set by
the ctxt_ns argument of Convert.thRdrName.) When we check that it is
in scope in the GlobalRdrEnv, we need to look up the DataName namespace
too. (An alternative would be to make the GlobalRdrEnv also have
a Name -> GRE mapping.)
Note [Template Haskell ambiguity]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The GlobalRdrEnv invariant says that if
occ -> [gre1, ..., gren]
then the gres have distinct Names (INVARIANT 1 of GlobalRdrEnv).
This is guaranteed by extendGlobalRdrEnvRn (the dups check in add_gre).
So how can we get multiple gres in lookupExactOcc_maybe? Because in
TH we might use the same TH NameU in two different name spaces.
eg (Trac #7241):
$(newName "Foo" >>= \o -> return [DataD [] o [] [RecC o []] [''Show]])
Here we generate a type constructor and data constructor with the same
unique, but different name spaces.
It'd be nicer to rule this out in extendGlobalRdrEnvRn, but that would
mean looking up the OccName in every name-space, just in case, and that
seems a bit brutal. So it's just done here on lookup. But we might
need to revisit that choice.
Note [Usage for sub-bndrs]
~~~~~~~~~~~~~~~~~~~~~~~~~~
If you have this
import qualified M( C( f ) )
instance M.C T where
f x = x
then is the qualified import M.f used? Obviously yes.
But the RdrName used in the instance decl is unqualified. In effect,
we fill in the qualification by looking for f's whose class is M.C
But when adding to the UsedRdrNames we must make that qualification
explicit (saying "used M.f"), otherwise we get "Redundant import of M.f".
So we make up a suitable (fake) RdrName. But be careful
  import qualified M
import M( C(f) )
instance C T where
f x = x
Here we want to record a use of 'f', not of 'M.f', otherwise
we'll miss the fact that the qualified import is redundant.
--------------------------------------------------
-- Occurrences
--------------------------------------------------
-}
getLookupOccRn :: RnM (Name -> Maybe Name)
getLookupOccRn
= do local_env <- getLocalRdrEnv
return (lookupLocalRdrOcc local_env . nameOccName)
mkUnboundNameRdr :: RdrName -> Name
mkUnboundNameRdr rdr = mkUnboundName (rdrNameOcc rdr)
lookupLocatedOccRn :: Located RdrName -> RnM (Located Name)
lookupLocatedOccRn = wrapLocM lookupOccRn
lookupLocalOccRn_maybe :: RdrName -> RnM (Maybe Name)
-- Just look in the local environment
lookupLocalOccRn_maybe rdr_name
= do { local_env <- getLocalRdrEnv
; return (lookupLocalRdrEnv local_env rdr_name) }
lookupLocalOccThLvl_maybe :: Name -> RnM (Maybe (TopLevelFlag, ThLevel))
-- Just look in the local environment
lookupLocalOccThLvl_maybe name
= do { lcl_env <- getLclEnv
; return (lookupNameEnv (tcl_th_bndrs lcl_env) name) }
-- lookupOccRn looks up an occurrence of a RdrName
lookupOccRn :: RdrName -> RnM Name
lookupOccRn rdr_name
= do { mb_name <- lookupOccRn_maybe rdr_name
; case mb_name of
Just name -> return name
Nothing -> reportUnboundName rdr_name }
lookupKindOccRn :: RdrName -> RnM Name
-- Looking up a name occurring in a kind
lookupKindOccRn rdr_name
= do { typeintype <- xoptM LangExt.TypeInType
; if | typeintype -> lookupTypeOccRn rdr_name
-- With -XNoTypeInType, treat any usage of * in kinds as in scope
-- this is a dirty hack, but then again so was the old * kind.
| is_star rdr_name -> return starKindTyConName
| is_uni_star rdr_name -> return unicodeStarKindTyConName
| otherwise -> lookupOccRn rdr_name }
-- lookupPromotedOccRn looks up an optionally promoted RdrName.
lookupTypeOccRn :: RdrName -> RnM Name
-- see Note [Demotion]
lookupTypeOccRn rdr_name
= do { mb_name <- lookupOccRn_maybe rdr_name
; case mb_name of {
Just name -> return name ;
Nothing -> do { dflags <- getDynFlags
; lookup_demoted rdr_name dflags } } }
lookup_demoted :: RdrName -> DynFlags -> RnM Name
lookup_demoted rdr_name dflags
| Just demoted_rdr <- demoteRdrName rdr_name
-- Maybe it's the name of a *data* constructor
= do { data_kinds <- xoptM LangExt.DataKinds
; mb_demoted_name <- lookupOccRn_maybe demoted_rdr
; case mb_demoted_name of
Nothing -> unboundNameX WL_Any rdr_name star_info
Just demoted_name
| data_kinds ->
do { whenWOptM Opt_WarnUntickedPromotedConstructors $
addWarn (Reason Opt_WarnUntickedPromotedConstructors)
(untickedPromConstrWarn demoted_name)
; return demoted_name }
| otherwise -> unboundNameX WL_Any rdr_name suggest_dk }
| otherwise
= reportUnboundName rdr_name
where
suggest_dk = text "A data constructor of that name is in scope; did you mean DataKinds?"
untickedPromConstrWarn name =
text "Unticked promoted constructor" <> colon <+> quotes (ppr name) <> dot
$$
hsep [ text "Use"
, quotes (char '\'' <> ppr name)
, text "instead of"
, quotes (ppr name) <> dot ]
star_info
| is_star rdr_name || is_uni_star rdr_name
= if xopt LangExt.TypeInType dflags
then text "NB: With TypeInType, you must import" <+>
ppr rdr_name <+> text "from Data.Kind"
else empty
| otherwise
= empty
is_star, is_uni_star :: RdrName -> Bool
is_star = (fsLit "*" ==) . occNameFS . rdrNameOcc
is_uni_star = (fsLit "★" ==) . occNameFS . rdrNameOcc
{-
Note [Demotion]
~~~~~~~~~~~~~~~
When the user writes:
data Nat = Zero | Succ Nat
foo :: f Zero -> Int
'Zero' in the type signature of 'foo' is parsed as:
HsTyVar ("Zero", TcClsName)
When the renamer hits this occurrence of 'Zero' it's going to realise
that it's not in scope. But because it is renaming a type, it knows
that 'Zero' might be a promoted data constructor, so it will demote
its namespace to DataName and do a second lookup.
The final result (after the renamer) will be:
HsTyVar ("Zero", DataName)
-}
-- Use this version to get tracing
--
-- lookupOccRn_maybe, lookupOccRn_maybe' :: RdrName -> RnM (Maybe Name)
-- lookupOccRn_maybe rdr_name
-- = do { mb_res <- lookupOccRn_maybe' rdr_name
-- ; gbl_rdr_env <- getGlobalRdrEnv
-- ; local_rdr_env <- getLocalRdrEnv
-- ; traceRn $ text "lookupOccRn_maybe" <+>
-- vcat [ ppr rdr_name <+> ppr (getUnique (rdrNameOcc rdr_name))
-- , ppr mb_res
-- , text "Lcl env" <+> ppr local_rdr_env
-- , text "Gbl env" <+> ppr [ (getUnique (nameOccName (gre_name (head gres'))),gres') | gres <- occEnvElts gbl_rdr_env
-- , let gres' = filter isLocalGRE gres, not (null gres') ] ]
-- ; return mb_res }
lookupOccRn_maybe :: RdrName -> RnM (Maybe Name)
-- lookupOccRn looks up an occurrence of a RdrName
lookupOccRn_maybe rdr_name
= do { local_env <- getLocalRdrEnv
; case lookupLocalRdrEnv local_env rdr_name of {
Just name -> return (Just name) ;
Nothing -> do
; lookupGlobalOccRn_maybe rdr_name } }
lookupGlobalOccRn_maybe :: RdrName -> RnM (Maybe Name)
-- Looks up a RdrName occurrence in the top-level
-- environment, including using lookupQualifiedNameGHCi
-- for the GHCi case
-- No filter function; does not report an error on failure
-- Uses addUsedRdrName to record use and deprecations
lookupGlobalOccRn_maybe rdr_name
| Just n <- isExact_maybe rdr_name -- This happens in derived code
= do { n' <- lookupExactOcc n; return (Just n') }
| Just (rdr_mod, rdr_occ) <- isOrig_maybe rdr_name
= do { n <- lookupOrig rdr_mod rdr_occ
; return (Just n) }
| otherwise
= do { mb_gre <- lookupGreRn_maybe rdr_name
; case mb_gre of {
Just gre -> return (Just (gre_name gre)) ;
Nothing ->
do { ns <- lookupQualifiedNameGHCi rdr_name
-- This test is not expensive,
-- and only happens for failed lookups
; case ns of
(n:_) -> return (Just n) -- Unlikely to be more than one...?
[] -> return Nothing } } }
lookupGlobalOccRn :: RdrName -> RnM Name
-- lookupGlobalOccRn is like lookupOccRn, except that it looks in the global
-- environment. Adds an error message if the RdrName is not in scope.
lookupGlobalOccRn rdr_name
= do { mb_name <- lookupGlobalOccRn_maybe rdr_name
; case mb_name of
Just n -> return n
Nothing -> do { traceRn (text "lookupGlobalOccRn" <+> ppr rdr_name)
; unboundName WL_Global rdr_name } }
-- like lookupGlobalOccRn but suggests adding 'type' keyword
-- to export type constructors mistaken for data constructors
lookupGlobalOccRnExport :: RdrName -> RnM Name
lookupGlobalOccRnExport rdr_name
= do { mb_name <- lookupGlobalOccRn_maybe rdr_name
; case mb_name of
Just n -> return n
Nothing -> do { env <- getGlobalRdrEnv
; let tycon = setOccNameSpace tcClsName (rdrNameOcc rdr_name)
msg = case lookupOccEnv env tycon of
Just (gre : _) -> make_msg gre
_ -> Outputable.empty
make_msg gre = hang
(hsep [text "Note: use",
quotes (text "type"),
text "keyword to export type constructor",
quotes (ppr (gre_name gre))])
2 (vcat [pprNameProvenance gre,
text "(requires TypeOperators extension)"])
; unboundNameX WL_Global rdr_name msg } }
lookupInfoOccRn :: RdrName -> RnM [Name]
-- lookupInfoOccRn is intended for use in GHCi's ":info" command
-- It finds all the GREs that RdrName could mean, not complaining
-- about ambiguity, but rather returning them all
-- C.f. Trac #9881
lookupInfoOccRn rdr_name
| Just n <- isExact_maybe rdr_name -- e.g. (->)
= return [n]
| Just (rdr_mod, rdr_occ) <- isOrig_maybe rdr_name
= do { n <- lookupOrig rdr_mod rdr_occ
; return [n] }
| otherwise
= do { rdr_env <- getGlobalRdrEnv
; let ns = map gre_name (lookupGRE_RdrName rdr_name rdr_env)
; qual_ns <- lookupQualifiedNameGHCi rdr_name
; return (ns ++ (qual_ns `minusList` ns)) }
-- | Like 'lookupOccRn_maybe', but with a more informative result if
-- the 'RdrName' happens to be a record selector:
--
-- * Nothing -> name not in scope (no error reported)
-- * Just (Left x) -> name uniquely refers to x,
-- or there is a name clash (reported)
-- * Just (Right xs) -> name refers to one or more record selectors;
-- if overload_ok was False, this list will be
-- a singleton.
lookupOccRn_overloaded :: Bool -> RdrName -> RnM (Maybe (Either Name [FieldOcc Name]))
lookupOccRn_overloaded overload_ok rdr_name
= do { local_env <- getLocalRdrEnv
; case lookupLocalRdrEnv local_env rdr_name of {
Just name -> return (Just (Left name)) ;
Nothing -> do
{ mb_name <- lookupGlobalOccRn_overloaded overload_ok rdr_name
; case mb_name of {
Just name -> return (Just name) ;
Nothing -> do
{ ns <- lookupQualifiedNameGHCi rdr_name
-- This test is not expensive,
-- and only happens for failed lookups
; case ns of
(n:_) -> return $ Just $ Left n -- Unlikely to be more than one...?
[] -> return Nothing } } } } }
lookupGlobalOccRn_overloaded :: Bool -> RdrName -> RnM (Maybe (Either Name [FieldOcc Name]))
lookupGlobalOccRn_overloaded overload_ok rdr_name
| Just n <- isExact_maybe rdr_name -- This happens in derived code
= do { n' <- lookupExactOcc n; return (Just (Left n')) }
| Just (rdr_mod, rdr_occ) <- isOrig_maybe rdr_name
= do { n <- lookupOrig rdr_mod rdr_occ
; return (Just (Left n)) }
| otherwise
= do { env <- getGlobalRdrEnv
; case lookupGRE_RdrName rdr_name env of
[] -> return Nothing
[gre] | isRecFldGRE gre
-> do { addUsedGRE True gre
; let
fld_occ :: FieldOcc Name
fld_occ
= FieldOcc (noLoc rdr_name) (gre_name gre)
; return (Just (Right [fld_occ])) }
| otherwise
-> do { addUsedGRE True gre
; return (Just (Left (gre_name gre))) }
gres | all isRecFldGRE gres && overload_ok
-- Don't record usage for ambiguous selectors
-- until we know which is meant
-> return
(Just (Right
(map (FieldOcc (noLoc rdr_name) . gre_name)
gres)))
gres -> do { addNameClashErrRn rdr_name gres
; return (Just (Left (gre_name (head gres)))) } }
--------------------------------------------------
-- Lookup in the Global RdrEnv of the module
--------------------------------------------------
lookupGreRn_maybe :: RdrName -> RnM (Maybe GlobalRdrElt)
-- Look up the RdrName in the GlobalRdrEnv
-- Exactly one binding: records it as "used", return (Just gre)
-- No bindings: return Nothing
-- Many bindings: report "ambiguous", return an arbitrary (Just gre)
-- (This API is a bit strange; lookupGRERn2_maybe is simpler.
-- But it works and I don't want to fiddle too much.)
-- Uses addUsedRdrName to record use and deprecations
lookupGreRn_maybe rdr_name
= do { env <- getGlobalRdrEnv
; case lookupGRE_RdrName rdr_name env of
[] -> return Nothing
[gre] -> do { addUsedGRE True gre
; return (Just gre) }
gres -> do { addNameClashErrRn rdr_name gres
; traceRn (text "name clash" <+> (ppr rdr_name $$ ppr gres $$ ppr env))
; return (Just (head gres)) } }
lookupGreRn2_maybe :: RdrName -> RnM (Maybe GlobalRdrElt)
-- Look up the RdrName in the GlobalRdrEnv
-- Exactly one binding: record it as "used", return (Just gre)
-- No bindings: report "not in scope", return Nothing
-- Many bindings: report "ambiguous", return Nothing
-- Uses addUsedRdrName to record use and deprecations
lookupGreRn2_maybe rdr_name
= do { env <- getGlobalRdrEnv
; case lookupGRE_RdrName rdr_name env of
[] -> do { _ <- unboundName WL_Global rdr_name
; return Nothing }
[gre] -> do { addUsedGRE True gre
; return (Just gre) }
gres -> do { addNameClashErrRn rdr_name gres
; traceRn (text "name clash" <+> (ppr rdr_name $$ ppr gres $$ ppr env))
; return Nothing } }
lookupGreAvailRn :: RdrName -> RnM (Name, AvailInfo)
-- Used in export lists
-- If not found or ambiguous, add error message, and fake with UnboundName
-- Uses addUsedRdrName to record use and deprecations
lookupGreAvailRn rdr_name
= do { mb_gre <- lookupGreRn2_maybe rdr_name
; case mb_gre of {
Just gre -> return (gre_name gre, availFromGRE gre) ;
Nothing ->
do { traceRn (text "lookupGreRn" <+> ppr rdr_name)
; let name = mkUnboundNameRdr rdr_name
; return (name, avail name) } } }
{-
*********************************************************
* *
Deprecations
* *
*********************************************************
Note [Handling of deprecations]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* We report deprecations at each *occurrence* of the deprecated thing
(see Trac #5867)
* We do not report deprecations for locally-defined names. For a
start, we may be exporting a deprecated thing. Also we may use a
  deprecated thing in the defn of another deprecated thing. We may
even use a deprecated thing in the defn of a non-deprecated thing,
when changing a module's interface.
* addUsedGREs: we do not report deprecations for sub-binders:
- the ".." completion for records
- the ".." in an export item 'T(..)'
- the things exported by a module export 'module M'
-}
addUsedDataCons :: GlobalRdrEnv -> TyCon -> RnM ()
-- Remember use of in-scope data constructors (Trac #7969)
addUsedDataCons rdr_env tycon
= addUsedGREs [ gre
| dc <- tyConDataCons tycon
, gre : _ <- [lookupGRE_Name rdr_env (dataConName dc) ] ]
addUsedGRE :: Bool -> GlobalRdrElt -> RnM ()
-- Called for both local and imported things
-- Add usage *and* warn if deprecated
addUsedGRE warn_if_deprec gre
= do { when warn_if_deprec (warnIfDeprecated gre)
; unless (isLocalGRE gre) $
do { env <- getGblEnv
; traceRn (text "addUsedGRE" <+> ppr gre)
; updMutVar (tcg_used_gres env) (gre :) } }
addUsedGREs :: [GlobalRdrElt] -> RnM ()
-- Record uses of any *imported* GREs
-- Used for recording used sub-bndrs
-- NB: no call to warnIfDeprecated; see Note [Handling of deprecations]
addUsedGREs gres
| null imp_gres = return ()
| otherwise = do { env <- getGblEnv
; traceRn (text "addUsedGREs" <+> ppr imp_gres)
; updMutVar (tcg_used_gres env) (imp_gres ++) }
where
imp_gres = filterOut isLocalGRE gres
warnIfDeprecated :: GlobalRdrElt -> RnM ()
warnIfDeprecated gre@(GRE { gre_name = name, gre_imp = iss })
| (imp_spec : _) <- iss
= do { dflags <- getDynFlags
; this_mod <- getModule
; when (wopt Opt_WarnWarningsDeprecations dflags &&
not (nameIsLocalOrFrom this_mod name)) $
-- See Note [Handling of deprecations]
do { iface <- loadInterfaceForName doc name
; case lookupImpDeprec iface gre of
Just txt -> addWarn (Reason Opt_WarnWarningsDeprecations)
(mk_msg imp_spec txt)
Nothing -> return () } }
| otherwise
= return ()
where
occ = greOccName gre
name_mod = ASSERT2( isExternalName name, ppr name ) nameModule name
doc = text "The name" <+> quotes (ppr occ) <+> ptext (sLit "is mentioned explicitly")
mk_msg imp_spec txt
= sep [ sep [ text "In the use of"
<+> pprNonVarNameSpace (occNameSpace occ)
<+> quotes (ppr occ)
, parens imp_msg <> colon ]
, ppr txt ]
where
imp_mod = importSpecModule imp_spec
imp_msg = text "imported from" <+> ppr imp_mod <> extra
extra | imp_mod == moduleName name_mod = Outputable.empty
| otherwise = text ", but defined in" <+> ppr name_mod
lookupImpDeprec :: ModIface -> GlobalRdrElt -> Maybe WarningTxt
lookupImpDeprec iface gre
= mi_warn_fn iface (greOccName gre) `mplus` -- Bleat if the thing,
case gre_par gre of -- or its parent, is warn'd
ParentIs p -> mi_warn_fn iface (nameOccName p)
FldParent { par_is = p } -> mi_warn_fn iface (nameOccName p)
NoParent -> Nothing
PatternSynonym -> Nothing
{-
Note [Used names with interface not loaded]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's (just) possible to find a used
Name whose interface hasn't been loaded:
a) It might be a WiredInName; in that case we may not load
its interface (although we could).
b) It might be GHC.Real.fromRational, or GHC.Num.fromInteger
     These are seen as "used" by the renamer (if -XRebindableSyntax
     is on), but the typechecker may discard their uses
if in fact the in-scope fromRational is GHC.Read.fromRational,
(see tcPat.tcOverloadedLit), and the typechecker sees that the type
is fixed, say, to GHC.Base.Float (see Inst.lookupSimpleInst).
In that obscure case it won't force the interface in.
In both cases we simply don't permit deprecations;
this is, after all, wired-in stuff.
*********************************************************
* *
GHCi support
* *
*********************************************************
A qualified name on the command line can refer to any module at
all: we try to load the interface if we don't already have it, just
as if there was an "import qualified M" declaration for every
module.
If we fail we just return Nothing, rather than bleating
about "attempting to use module ‘D’ (./D.hs) which is not loaded"
which is what loadSrcInterface does.
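
For illustration, a made-up GHCi session:

    ghci> Data.List.permutations "ab"
    ["ab","ba"]

This works even though Data.List was never imported in the session:
lookupQualifiedNameGHCi loads its interface on the fly and returns every
exported Name whose OccName matches 'permutations'.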
Note [Safe Haskell and GHCi]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We DON'T do this under Safe Haskell, as we need to check imports. We can
and should instead check the qualified import, but at the moment
this requires some refactoring, so we leave it as a TODO.
-}
lookupQualifiedNameGHCi :: RdrName -> RnM [Name]
lookupQualifiedNameGHCi rdr_name
= -- We want to behave as we would for a source file import here,
-- and respect hiddenness of modules/packages, hence loadSrcInterface.
do { dflags <- getDynFlags
; is_ghci <- getIsGHCi
; go_for_it dflags is_ghci }
where
go_for_it dflags is_ghci
| Just (mod,occ) <- isQual_maybe rdr_name
, is_ghci
, gopt Opt_ImplicitImportQualified dflags -- Enables this GHCi behaviour
, not (safeDirectImpsReq dflags) -- See Note [Safe Haskell and GHCi]
= do { res <- loadSrcInterface_maybe doc mod False Nothing
; case res of
Succeeded iface
-> return [ name
| avail <- mi_exports iface
, name <- availNames avail
, nameOccName name == occ ]
_ -> -- Either we couldn't load the interface, or
-- we could but we didn't find the name in it
do { traceRn (text "lookupQualifiedNameGHCi" <+> ppr rdr_name)
; return [] } }
| otherwise
      = do { traceRn (text "lookupQualifiedNameGHCi: off" <+> ppr rdr_name)
; return [] }
doc = text "Need to find" <+> ppr rdr_name
{-
Note [Looking up signature names]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
lookupSigOccRn is used for type signatures and pragmas
Is this valid?
module A
import M( f )
f :: Int -> Int
f x = x
It's clear that the 'f' in the signature must refer to A.f
The Haskell98 report does not stipulate this, but it will!
So we must treat the 'f' in the signature in the same way
as the binding occurrence of 'f', using lookupBndrRn
However, consider this case:
import M( f )
f :: Int -> Int
g x = x
We don't want to say 'f' is out of scope; instead, we want to
return the imported 'f', so that later on the renamer will
correctly report "misplaced type sig".
Note [Signatures for top level things]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
data HsSigCtxt = ... | TopSigCtxt NameSet | ....
* The NameSet says what is bound in this group of bindings.
We can't use isLocalGRE from the GlobalRdrEnv, because of this:
f x = x
$( ...some TH splice... )
f :: Int -> Int
When we encounter the signature for 'f', the binding for 'f'
will be in the GlobalRdrEnv, and will be a LocalDef. Yet the
signature is mis-placed
* For type signatures the NameSet should be the names bound by the
value bindings; for fixity declarations, the NameSet should also
include class sigs and record selectors
infix 3 `f` -- Yes, ok
f :: C a => a -> a -- No, not ok
class C a where
f :: a -> a
-}
data HsSigCtxt
= TopSigCtxt NameSet -- At top level, binding these names
-- See Note [Signatures for top level things]
| LocalBindCtxt NameSet -- In a local binding, binding these names
| ClsDeclCtxt Name -- Class decl for this class
| InstDeclCtxt NameSet -- Instance decl whose user-written method
-- bindings are for these methods
| HsBootCtxt NameSet -- Top level of a hs-boot file, binding these names
| RoleAnnotCtxt NameSet -- A role annotation, with the names of all types
-- in the group
lookupSigOccRn :: HsSigCtxt
-> Sig RdrName
-> Located RdrName -> RnM (Located Name)
lookupSigOccRn ctxt sig = lookupSigCtxtOccRn ctxt (hsSigDoc sig)
-- | Lookup a name in relation to the names in a 'HsSigCtxt'
lookupSigCtxtOccRn :: HsSigCtxt
-> SDoc -- ^ description of thing we're looking up,
-- like "type family"
-> Located RdrName -> RnM (Located Name)
lookupSigCtxtOccRn ctxt what
= wrapLocM $ \ rdr_name ->
do { mb_name <- lookupBindGroupOcc ctxt what rdr_name
; case mb_name of
Left err -> do { addErr err; return (mkUnboundNameRdr rdr_name) }
Right name -> return name }
lookupBindGroupOcc :: HsSigCtxt
-> SDoc
-> RdrName -> RnM (Either MsgDoc Name)
-- Looks up the RdrName, expecting it to resolve to one of the
-- bound names passed in. If not, return an appropriate error message
--
-- See Note [Looking up signature names]
lookupBindGroupOcc ctxt what rdr_name
| Just n <- isExact_maybe rdr_name
= lookupExactOcc_either n -- allow for the possibility of missing Exacts;
-- see Note [dataTcOccs and Exact Names]
-- Maybe we should check the side conditions
-- but it's a pain, and Exact things only show
-- up when you know what you are doing
| Just (rdr_mod, rdr_occ) <- isOrig_maybe rdr_name
= do { n' <- lookupOrig rdr_mod rdr_occ
; return (Right n') }
| otherwise
= case ctxt of
HsBootCtxt ns -> lookup_top (`elemNameSet` ns)
TopSigCtxt ns -> lookup_top (`elemNameSet` ns)
RoleAnnotCtxt ns -> lookup_top (`elemNameSet` ns)
LocalBindCtxt ns -> lookup_group ns
ClsDeclCtxt cls -> lookup_cls_op cls
InstDeclCtxt ns -> lookup_top (`elemNameSet` ns)
where
lookup_cls_op cls
= lookupSubBndrOcc True cls doc rdr_name
where
doc = text "method of class" <+> quotes (ppr cls)
lookup_top keep_me
= do { env <- getGlobalRdrEnv
; let all_gres = lookupGlobalRdrEnv env (rdrNameOcc rdr_name)
; case filter (keep_me . gre_name) all_gres of
[] | null all_gres -> bale_out_with Outputable.empty
| otherwise -> bale_out_with local_msg
(gre:_) -> return (Right (gre_name gre)) }
lookup_group bound_names -- Look in the local envt (not top level)
= do { local_env <- getLocalRdrEnv
; case lookupLocalRdrEnv local_env rdr_name of
Just n
| n `elemNameSet` bound_names -> return (Right n)
| otherwise -> bale_out_with local_msg
Nothing -> bale_out_with Outputable.empty }
bale_out_with msg
= return (Left (sep [ text "The" <+> what
<+> text "for" <+> quotes (ppr rdr_name)
, nest 2 $ text "lacks an accompanying binding"]
$$ nest 2 msg))
local_msg = parens $ text "The" <+> what <+> ptext (sLit "must be given where")
<+> quotes (ppr rdr_name) <+> text "is declared"
---------------
lookupLocalTcNames :: HsSigCtxt -> SDoc -> RdrName -> RnM [(RdrName, Name)]
-- GHC extension: look up both the tycon and data con or variable.
-- Used for top-level fixity signatures and deprecations.
-- Complain if neither is in scope.
-- See Note [Fixity signature lookup]
lookupLocalTcNames ctxt what rdr_name
= do { mb_gres <- mapM lookup (dataTcOccs rdr_name)
; let (errs, names) = splitEithers mb_gres
; when (null names) $ addErr (head errs) -- Bleat about one only
; return names }
where
lookup rdr = do { name <- lookupBindGroupOcc ctxt what rdr
; return (fmap ((,) rdr) name) }
dataTcOccs :: RdrName -> [RdrName]
-- Return both the given name and the same name promoted to the TcClsName
-- namespace. This is useful when we aren't sure which we are looking at.
-- See also Note [dataTcOccs and Exact Names]
dataTcOccs rdr_name
| isDataOcc occ || isVarOcc occ
= [rdr_name, rdr_name_tc]
| otherwise
= [rdr_name]
where
occ = rdrNameOcc rdr_name
rdr_name_tc = setRdrNameSpace rdr_name tcName
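-- For example (illustrative only): for the fixity declaration
--
--      infixr 5 :+:
--
-- the reader name ':+:' sits in the data-constructor namespace, so
-- dataTcOccs returns it both as given and promoted to TcClsName, letting a
-- data constructor ':+:' and a type operator ':+:' both be found.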
{-
Note [dataTcOccs and Exact Names]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Exact RdrNames can occur in code generated by Template Haskell, and generally
those references are, well, exact. However, the TH `Name` type isn't expressive
enough to always track the correct namespace information, so we sometimes get
the right Unique but wrong namespace. Thus, we still have to do the double-lookup
for Exact RdrNames.
There is also an awkward situation for built-in syntax. Example in GHCi
:info []
This parses as the Exact RdrName for nilDataCon, but we also want
the list type constructor.
Note that setRdrNameSpace on an Exact name requires the Name to be External,
which it always is for built in syntax.
*********************************************************
* *
Fixities
* *
*********************************************************
Note [Fixity signature lookup]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A fixity declaration like
infixr 2 ?
can refer to a value-level operator, e.g.:
(?) :: String -> String -> String
or a type-level operator, like:
data (?) a b = A a | B b
so we extend the lookup of the reader name '?' to the TcClsName namespace, as
well as the original namespace.
The extended lookup is also used in other places, like resolution of
deprecation declarations, and lookup of names in GHCi.
-}
--------------------------------
type MiniFixityEnv = FastStringEnv (Located Fixity)
-- Mini fixity env for the names we're about
-- to bind, in a single binding group
--
-- It is keyed by the *FastString*, not the *OccName*, because
-- the single fixity decl infix 3 T
-- affects both the data constructor T and the type constructor T
--
-- We keep the location so that if we find
-- a duplicate, we can report it sensibly
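--
-- For illustration (hypothetical input):
--      data T = T Int
--      infixr 3 `T`
-- yields a single entry keyed on the FastString "T", which addLocalFixities
-- below turns into FixItem entries for both Names called T.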
--------------------------------
-- Used for nested fixity decls to bind names along with their fixities.
-- the fixities are given as a UFM from an OccName's FastString to a fixity decl
addLocalFixities :: MiniFixityEnv -> [Name] -> RnM a -> RnM a
addLocalFixities mini_fix_env names thing_inside
= extendFixityEnv (mapMaybe find_fixity names) thing_inside
where
find_fixity name
= case lookupFsEnv mini_fix_env (occNameFS occ) of
Just (L _ fix) -> Just (name, FixItem occ fix)
Nothing -> Nothing
where
occ = nameOccName name
{-
--------------------------------
lookupFixity is a bit strange.
* Nested local fixity decls are put in the local fixity env, which we
  find with getFixityEnv
* Imported fixities are found in the HIT or PIT
* Top-level fixity decls in this module may be for Names that are
either Global (constructors, class operations)
or Local/Exported (everything else)
(See notes with RnNames.getLocalDeclBinders for why we have this split.)
We put them all in the local fixity environment
-}
lookupFixityRn :: Name -> RnM Fixity
lookupFixityRn name = lookupFixityRn' name (nameOccName name)
lookupFixityRn' :: Name -> OccName -> RnM Fixity
lookupFixityRn' name = fmap snd . lookupFixityRn_help' name
-- | 'lookupFixityRn_help' returns @(True, fixity)@ if it finds a 'Fixity'
-- in a local environment or from an interface file. Otherwise, it returns
-- @(False, fixity)@ (e.g., for unbound 'Name's or 'Name's without
-- user-supplied fixity declarations).
lookupFixityRn_help :: Name
-> RnM (Bool, Fixity)
lookupFixityRn_help name =
lookupFixityRn_help' name (nameOccName name)
lookupFixityRn_help' :: Name
-> OccName
-> RnM (Bool, Fixity)
lookupFixityRn_help' name occ
| isUnboundName name
= return (False, Fixity (show minPrecedence) minPrecedence InfixL)
      -- Minimise errors from unbound names; eg
-- a>0 `foo` b>0
-- where 'foo' is not in scope, should not give an error (Trac #7937)
| otherwise
= do { local_fix_env <- getFixityEnv
; case lookupNameEnv local_fix_env name of {
Just (FixItem _ fix) -> return (True, fix) ;
Nothing ->
do { this_mod <- getModule
; if nameIsLocalOrFrom this_mod name
-- Local (and interactive) names are all in the
-- fixity env, and don't have entries in the HPT
then return (False, defaultFixity)
else lookup_imported } } }
where
lookup_imported
-- For imported names, we have to get their fixities by doing a
      -- loadInterfaceForName, and consulting the Iface that comes back
      -- from that, because the interface file for the Name might not
      -- have been loaded yet. Why not? Suppose you import module A,
      -- which exports a function 'f', thus:
-- module CurrentModule where
-- import A( f )
-- module A( f ) where
-- import B( f )
-- Then B isn't loaded right away (after all, it's possible that
-- nothing from B will be used). When we come across a use of
-- 'f', we need to know its fixity, and it's then, and only
-- then, that we load B.hi. That is what's happening here.
--
-- loadInterfaceForName will find B.hi even if B is a hidden module,
-- and that's what we want.
= do { iface <- loadInterfaceForName doc name
; let mb_fix = mi_fix_fn iface occ
; let msg = case mb_fix of
Nothing ->
text "looking up name" <+> ppr name
<+> text "in iface, but found no fixity for it."
<+> text "Using default fixity instead."
Just f ->
text "looking up name in iface and found:"
<+> vcat [ppr name, ppr f]
; traceRn (text "lookupFixityRn_either:" <+> msg)
; return (maybe (False, defaultFixity) (\f -> (True, f)) mb_fix) }
doc = text "Checking fixity for" <+> ppr name
---------------
lookupTyFixityRn :: Located Name -> RnM Fixity
lookupTyFixityRn (L _ n) = lookupFixityRn n
-- | Look up the fixity of a (possibly ambiguous) occurrence of a record field
-- selector. We use 'lookupFixityRn'' so that we can specify the 'OccName' as
-- the field label, which might be different to the 'OccName' of the selector
-- 'Name' if @DuplicateRecordFields@ is in use (Trac #1173). If there are
-- multiple possible selectors with different fixities, generate an error.
lookupFieldFixityRn :: AmbiguousFieldOcc Name -> RnM Fixity
lookupFieldFixityRn (Unambiguous (L _ rdr) n)
= lookupFixityRn' n (rdrNameOcc rdr)
lookupFieldFixityRn (Ambiguous (L _ rdr) _) = get_ambiguous_fixity rdr
where
get_ambiguous_fixity :: RdrName -> RnM Fixity
get_ambiguous_fixity rdr_name = do
traceRn $ text "get_ambiguous_fixity" <+> ppr rdr_name
rdr_env <- getGlobalRdrEnv
let elts = lookupGRE_RdrName rdr_name rdr_env
fixities <- groupBy ((==) `on` snd) . zip elts
<$> mapM lookup_gre_fixity elts
case fixities of
-- There should always be at least one fixity.
-- Something's very wrong if there are no fixity candidates, so panic
[] -> panic "get_ambiguous_fixity: no candidates for a given RdrName"
[ (_, fix):_ ] -> return fix
ambigs -> addErr (ambiguous_fixity_err rdr_name ambigs)
>> return (Fixity(show minPrecedence) minPrecedence InfixL)
lookup_gre_fixity gre = lookupFixityRn' (gre_name gre) (greOccName gre)
ambiguous_fixity_err rn ambigs
= vcat [ text "Ambiguous fixity for record field" <+> quotes (ppr rn)
, hang (text "Conflicts: ") 2 . vcat .
map format_ambig $ concat ambigs ]
format_ambig (elt, fix) = hang (ppr fix)
2 (pprNameProvenance elt)
{- *********************************************************************
* *
Role annotations
* *
********************************************************************* -}
type RoleAnnotEnv = NameEnv (LRoleAnnotDecl Name)
mkRoleAnnotEnv :: [LRoleAnnotDecl Name] -> RoleAnnotEnv
mkRoleAnnotEnv role_annot_decls
= mkNameEnv [ (name, ra_decl)
| ra_decl <- role_annot_decls
, let name = roleAnnotDeclName (unLoc ra_decl)
, not (isUnboundName name) ]
-- Some of the role annots will be unbound;
-- we don't wish to include these
emptyRoleAnnotEnv :: RoleAnnotEnv
emptyRoleAnnotEnv = emptyNameEnv
lookupRoleAnnot :: RoleAnnotEnv -> Name -> Maybe (LRoleAnnotDecl Name)
lookupRoleAnnot = lookupNameEnv
getRoleAnnots :: [Name] -> RoleAnnotEnv -> ([LRoleAnnotDecl Name], RoleAnnotEnv)
getRoleAnnots bndrs role_env
= ( mapMaybe (lookupRoleAnnot role_env) bndrs
, delListFromNameEnv role_env bndrs )
{-
************************************************************************
* *
Rebindable names
Dealing with rebindable syntax is driven by the
Opt_RebindableSyntax dynamic flag.
In "deriving" code we don't want to use rebindable syntax
so we switch off the flag locally
* *
************************************************************************
Haskell 98 says that when you say "3" you get the "fromInteger" from the
Standard Prelude, regardless of what is in scope. However, to experiment
with having a language that is less coupled to the standard prelude, we're
trying a non-standard extension that instead gives you whatever "Prelude.fromInteger"
happens to be in scope. Then you can
import Prelude ()
import MyPrelude as Prelude
to get the desired effect.
At the moment this just happens for
* fromInteger, fromRational on literals (in expressions and patterns)
* negate (in expressions)
* minus (arising from n+k patterns)
* "do" notation
We store the relevant Name in the HsSyn tree, in
* HsIntegral/HsFractional/HsIsString
* NegApp
* NPlusKPat
* HsDo
respectively. Initially, we just store the "standard" name (PrelNames.fromIntegerName,
fromRationalName etc), but the renamer changes this to the appropriate user
name if RebindableSyntax is on. That is what lookupSyntaxName does.
We treat the original (standard) names as free-vars too, because the type checker
checks the type of the user thing against the type of the standard thing.
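
For illustration (not code from this module): with RebindableSyntax on,
renaming the literal 3 roughly amounts to

    lookupSyntaxName fromIntegerName
    -- looks up whatever unqualified 'fromInteger' is in scope and returns
    -- (mkRnSyntaxExpr user_fromInteger, unitFV user_fromInteger)

whereas with the extension off it returns the standard name unchanged,
together with emptyFVs.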
-}
lookupIfThenElse :: RnM (Maybe (SyntaxExpr Name), FreeVars)
-- Different to lookupSyntaxName because in the non-rebindable
-- case we desugar directly rather than calling an existing function
-- Hence the (Maybe (SyntaxExpr Name)) return type
lookupIfThenElse
= do { rebindable_on <- xoptM LangExt.RebindableSyntax
; if not rebindable_on
then return (Nothing, emptyFVs)
else do { ite <- lookupOccRn (mkVarUnqual (fsLit "ifThenElse"))
; return ( Just (mkRnSyntaxExpr ite)
, unitFV ite ) } }
lookupSyntaxName :: Name -- The standard name
-> RnM (SyntaxExpr Name, FreeVars) -- Possibly a non-standard name
lookupSyntaxName std_name
= do { rebindable_on <- xoptM LangExt.RebindableSyntax
; if not rebindable_on then
return (mkRnSyntaxExpr std_name, emptyFVs)
else
-- Get the similarly named thing from the local environment
do { usr_name <- lookupOccRn (mkRdrUnqual (nameOccName std_name))
; return (mkRnSyntaxExpr usr_name, unitFV usr_name) } }
lookupSyntaxNames :: [Name] -- Standard names
-> RnM ([HsExpr Name], FreeVars) -- See comments with HsExpr.ReboundNames
-- this works with CmdTop, which wants HsExprs, not SyntaxExprs
lookupSyntaxNames std_names
= do { rebindable_on <- xoptM LangExt.RebindableSyntax
; if not rebindable_on then
return (map (HsVar . noLoc) std_names, emptyFVs)
else
do { usr_names <- mapM (lookupOccRn . mkRdrUnqual . nameOccName) std_names
; return (map (HsVar . noLoc) usr_names, mkFVs usr_names) } }
{-
*********************************************************
* *
\subsection{Binding}
* *
*********************************************************
-}
newLocalBndrRn :: Located RdrName -> RnM Name
-- Used for non-top-level binders. These should
-- never be qualified.
newLocalBndrRn (L loc rdr_name)
| Just name <- isExact_maybe rdr_name
= return name -- This happens in code generated by Template Haskell
-- See Note [Binders in Template Haskell] in Convert.hs
| otherwise
= do { unless (isUnqual rdr_name)
(addErrAt loc (badQualBndrErr rdr_name))
; uniq <- newUnique
; return (mkInternalName uniq (rdrNameOcc rdr_name) loc) }
newLocalBndrsRn :: [Located RdrName] -> RnM [Name]
newLocalBndrsRn = mapM newLocalBndrRn
---------------------
bindLocatedLocalsRn :: [Located RdrName]
-> ([Name] -> RnM a)
-> RnM a
bindLocatedLocalsRn rdr_names_w_loc enclosed_scope
= do { checkDupRdrNames rdr_names_w_loc
; checkShadowedRdrNames rdr_names_w_loc
-- Make fresh Names and extend the environment
; names <- newLocalBndrsRn rdr_names_w_loc
; bindLocalNames names (enclosed_scope names) }
bindLocalNames :: [Name] -> RnM a -> RnM a
bindLocalNames names enclosed_scope
= do { lcl_env <- getLclEnv
; let th_level = thLevel (tcl_th_ctxt lcl_env)
th_bndrs' = extendNameEnvList (tcl_th_bndrs lcl_env)
[ (n, (NotTopLevel, th_level)) | n <- names ]
rdr_env' = extendLocalRdrEnvList (tcl_rdr lcl_env) names
; setLclEnv (lcl_env { tcl_th_bndrs = th_bndrs'
, tcl_rdr = rdr_env' })
enclosed_scope }
bindLocalNamesFV :: [Name] -> RnM (a, FreeVars) -> RnM (a, FreeVars)
bindLocalNamesFV names enclosed_scope
= do { (result, fvs) <- bindLocalNames names enclosed_scope
; return (result, delFVs names fvs) }
-------------------------------------
-- bindLocatedLocalsFV is the same as bindLocatedLocalsRn
-- except that it deals with free vars
bindLocatedLocalsFV :: [Located RdrName]
-> ([Name] -> RnM (a,FreeVars)) -> RnM (a, FreeVars)
bindLocatedLocalsFV rdr_names enclosed_scope
= bindLocatedLocalsRn rdr_names $ \ names ->
do (thing, fvs) <- enclosed_scope names
return (thing, delFVs names fvs)
-------------------------------------
extendTyVarEnvFVRn :: [Name] -> RnM (a, FreeVars) -> RnM (a, FreeVars)
-- This function is used only in rnSourceDecl on InstDecl
extendTyVarEnvFVRn tyvars thing_inside = bindLocalNamesFV tyvars thing_inside
-------------------------------------
checkDupRdrNames :: [Located RdrName] -> RnM ()
-- Check for duplicated names in a binding group
checkDupRdrNames rdr_names_w_loc
= mapM_ (dupNamesErr getLoc) dups
where
(_, dups) = removeDups (\n1 n2 -> unLoc n1 `compare` unLoc n2) rdr_names_w_loc
checkDupNames :: [Name] -> RnM ()
-- Check for duplicated names in a binding group
checkDupNames names = check_dup_names (filterOut isSystemName names)
-- See Note [Binders in Template Haskell] in Convert
check_dup_names :: [Name] -> RnM ()
check_dup_names names
= mapM_ (dupNamesErr nameSrcSpan) dups
where
(_, dups) = removeDups (\n1 n2 -> nameOccName n1 `compare` nameOccName n2) names
---------------------
checkShadowedRdrNames :: [Located RdrName] -> RnM ()
checkShadowedRdrNames loc_rdr_names
= do { envs <- getRdrEnvs
; checkShadowedOccs envs get_loc_occ filtered_rdrs }
where
filtered_rdrs = filterOut (isExact . unLoc) loc_rdr_names
-- See Note [Binders in Template Haskell] in Convert
get_loc_occ (L loc rdr) = (loc,rdrNameOcc rdr)
checkDupAndShadowedNames :: (GlobalRdrEnv, LocalRdrEnv) -> [Name] -> RnM ()
checkDupAndShadowedNames envs names
= do { check_dup_names filtered_names
; checkShadowedOccs envs get_loc_occ filtered_names }
where
filtered_names = filterOut isSystemName names
-- See Note [Binders in Template Haskell] in Convert
get_loc_occ name = (nameSrcSpan name, nameOccName name)
-------------------------------------
checkShadowedOccs :: (GlobalRdrEnv, LocalRdrEnv)
-> (a -> (SrcSpan, OccName))
-> [a] -> RnM ()
checkShadowedOccs (global_env,local_env) get_loc_occ ns
= whenWOptM Opt_WarnNameShadowing $
do { traceRn (text "shadow" <+> ppr (map get_loc_occ ns))
; mapM_ check_shadow ns }
where
check_shadow n
| startsWithUnderscore occ = return () -- Do not report shadowing for "_x"
-- See Trac #3262
| Just n <- mb_local = complain [text "bound at" <+> ppr (nameSrcLoc n)]
| otherwise = do { gres' <- filterM is_shadowed_gre gres
; complain (map pprNameProvenance gres') }
where
(loc,occ) = get_loc_occ n
mb_local = lookupLocalRdrOcc local_env occ
gres = lookupGRE_RdrName (mkRdrUnqual occ) global_env
-- Make an Unqualified RdrName and look that up, so that
-- we don't find any GREs that are in scope qualified-only
complain [] = return ()
complain pp_locs = addWarnAt (Reason Opt_WarnNameShadowing)
loc
(shadowedNameWarn occ pp_locs)
is_shadowed_gre :: GlobalRdrElt -> RnM Bool
-- Returns False for record selectors that are shadowed, when
-- punning or wild-cards are on (cf Trac #2723)
is_shadowed_gre gre | isRecFldGRE gre
= do { dflags <- getDynFlags
; return $ not (xopt LangExt.RecordPuns dflags
|| xopt LangExt.RecordWildCards dflags) }
is_shadowed_gre _other = return True
{-
************************************************************************
* *
What to do when a lookup fails
* *
************************************************************************
-}
data WhereLooking = WL_Any -- Any binding
| WL_Global -- Any top-level binding (local or imported)
| WL_LocalTop -- Any top-level binding in this module
reportUnboundName :: RdrName -> RnM Name
reportUnboundName rdr = unboundName WL_Any rdr
unboundName :: WhereLooking -> RdrName -> RnM Name
unboundName wl rdr = unboundNameX wl rdr Outputable.empty
unboundNameX :: WhereLooking -> RdrName -> SDoc -> RnM Name
unboundNameX where_look rdr_name extra
= do { dflags <- getDynFlags
; let show_helpful_errors = gopt Opt_HelpfulErrors dflags
what = pprNonVarNameSpace (occNameSpace (rdrNameOcc rdr_name))
err = unknownNameErr what rdr_name $$ extra
; if not show_helpful_errors
then addErr err
else do { local_env <- getLocalRdrEnv
; global_env <- getGlobalRdrEnv
; impInfo <- getImports
; let suggestions = unknownNameSuggestions_ where_look
dflags global_env local_env impInfo rdr_name
; addErr (err $$ suggestions) }
; return (mkUnboundNameRdr rdr_name) }
unknownNameErr :: SDoc -> RdrName -> SDoc
unknownNameErr what rdr_name
= vcat [ hang (text "Not in scope:")
2 (what <+> quotes (ppr rdr_name))
, extra ]
where
extra | rdr_name == forall_tv_RDR = perhapsForallMsg
| otherwise = Outputable.empty
type HowInScope = Either SrcSpan ImpDeclSpec
-- Left loc => locally bound at loc
-- Right ispec => imported as specified by ispec
-- | Called from the typechecker (TcErrors) when we find an unbound variable
unknownNameSuggestions :: DynFlags
-> GlobalRdrEnv -> LocalRdrEnv -> ImportAvails
-> RdrName -> SDoc
unknownNameSuggestions = unknownNameSuggestions_ WL_Any
unknownNameSuggestions_ :: WhereLooking -> DynFlags
-> GlobalRdrEnv -> LocalRdrEnv -> ImportAvails
-> RdrName -> SDoc
unknownNameSuggestions_ where_look dflags global_env local_env imports tried_rdr_name =
similarNameSuggestions where_look dflags global_env local_env tried_rdr_name $$
importSuggestions dflags imports tried_rdr_name
similarNameSuggestions :: WhereLooking -> DynFlags
-> GlobalRdrEnv -> LocalRdrEnv
-> RdrName -> SDoc
similarNameSuggestions where_look dflags global_env
local_env tried_rdr_name
= case suggest of
[] -> Outputable.empty
[p] -> perhaps <+> pp_item p
ps -> sep [ perhaps <+> text "one of these:"
, nest 2 (pprWithCommas pp_item ps) ]
where
all_possibilities :: [(String, (RdrName, HowInScope))]
all_possibilities
= [ (showPpr dflags r, (r, Left loc))
| (r,loc) <- local_possibilities local_env ]
++ [ (showPpr dflags r, rp) | (r, rp) <- global_possibilities global_env ]
suggest = fuzzyLookup (showPpr dflags tried_rdr_name) all_possibilities
perhaps = text "Perhaps you meant"
pp_item :: (RdrName, HowInScope) -> SDoc
pp_item (rdr, Left loc) = pp_ns rdr <+> quotes (ppr rdr) <+> loc' -- Locally defined
where loc' = case loc of
UnhelpfulSpan l -> parens (ppr l)
RealSrcSpan l -> parens (text "line" <+> int (srcSpanStartLine l))
pp_item (rdr, Right is) = pp_ns rdr <+> quotes (ppr rdr) <+> -- Imported
parens (text "imported from" <+> ppr (is_mod is))
pp_ns :: RdrName -> SDoc
pp_ns rdr | ns /= tried_ns = pprNameSpace ns
| otherwise = Outputable.empty
where ns = rdrNameSpace rdr
tried_occ = rdrNameOcc tried_rdr_name
tried_is_sym = isSymOcc tried_occ
tried_ns = occNameSpace tried_occ
tried_is_qual = isQual tried_rdr_name
correct_name_space occ = nameSpacesRelated (occNameSpace occ) tried_ns
&& isSymOcc occ == tried_is_sym
-- Treat operator and non-operators as non-matching
-- This heuristic avoids things like
-- Not in scope 'f'; perhaps you meant '+' (from Prelude)
local_ok = case where_look of { WL_Any -> True; _ -> False }
local_possibilities :: LocalRdrEnv -> [(RdrName, SrcSpan)]
local_possibilities env
| tried_is_qual = []
| not local_ok = []
| otherwise = [ (mkRdrUnqual occ, nameSrcSpan name)
| name <- localRdrEnvElts env
, let occ = nameOccName name
, correct_name_space occ]
gre_ok :: GlobalRdrElt -> Bool
gre_ok = case where_look of
WL_LocalTop -> isLocalGRE
_ -> \_ -> True
global_possibilities :: GlobalRdrEnv -> [(RdrName, (RdrName, HowInScope))]
global_possibilities global_env
| tried_is_qual = [ (rdr_qual, (rdr_qual, how))
| gre <- globalRdrEnvElts global_env
, gre_ok gre
, let name = gre_name gre
occ = nameOccName name
, correct_name_space occ
, (mod, how) <- quals_in_scope gre
, let rdr_qual = mkRdrQual mod occ ]
| otherwise = [ (rdr_unqual, pair)
| gre <- globalRdrEnvElts global_env
, gre_ok gre
, let name = gre_name gre
occ = nameOccName name
rdr_unqual = mkRdrUnqual occ
, correct_name_space occ
, pair <- case (unquals_in_scope gre, quals_only gre) of
(how:_, _) -> [ (rdr_unqual, how) ]
([], pr:_) -> [ pr ] -- See Note [Only-quals]
([], []) -> [] ]
-- Note [Only-quals]
-- The second alternative returns those names with the same
-- OccName as the one we tried, but live in *qualified* imports
-- e.g. if you have:
--
-- > import qualified Data.Map as Map
-- > foo :: Map
--
-- then we suggest @Map.Map@.
--------------------
unquals_in_scope :: GlobalRdrElt -> [HowInScope]
unquals_in_scope (GRE { gre_name = n, gre_lcl = lcl, gre_imp = is })
| lcl = [ Left (nameSrcSpan n) ]
| otherwise = [ Right ispec
| i <- is, let ispec = is_decl i
, not (is_qual ispec) ]
--------------------
quals_in_scope :: GlobalRdrElt -> [(ModuleName, HowInScope)]
-- Ones for which the qualified version is in scope
quals_in_scope (GRE { gre_name = n, gre_lcl = lcl, gre_imp = is })
| lcl = case nameModule_maybe n of
Nothing -> []
Just m -> [(moduleName m, Left (nameSrcSpan n))]
| otherwise = [ (is_as ispec, Right ispec)
| i <- is, let ispec = is_decl i ]
--------------------
quals_only :: GlobalRdrElt -> [(RdrName, HowInScope)]
-- Ones for which *only* the qualified version is in scope
quals_only (GRE { gre_name = n, gre_imp = is })
= [ (mkRdrQual (is_as ispec) (nameOccName n), Right ispec)
| i <- is, let ispec = is_decl i, is_qual ispec ]
-- | Generate helpful suggestions if a qualified name Mod.foo is not in scope.
importSuggestions :: DynFlags -> ImportAvails -> RdrName -> SDoc
importSuggestions _dflags imports rdr_name
| not (isQual rdr_name || isUnqual rdr_name) = Outputable.empty
| null interesting_imports
, Just name <- mod_name
= hsep
[ text "No module named"
, quotes (ppr name)
, text "is imported."
]
| is_qualified
, null helpful_imports
, [(mod,_)] <- interesting_imports
= hsep
[ text "Module"
, quotes (ppr mod)
, text "does not export"
, quotes (ppr occ_name) <> dot
]
| is_qualified
, null helpful_imports
, mods <- map fst interesting_imports
= hsep
[ text "Neither"
, quotedListWithNor (map ppr mods)
, text "exports"
, quotes (ppr occ_name) <> dot
]
| [(mod,imv)] <- helpful_imports_non_hiding
= fsep
[ text "Perhaps you want to add"
, quotes (ppr occ_name)
, text "to the import list"
, text "in the import of"
, quotes (ppr mod)
, parens (ppr (imv_span imv)) <> dot
]
| not (null helpful_imports_non_hiding)
= fsep
[ text "Perhaps you want to add"
, quotes (ppr occ_name)
, text "to one of these import lists:"
]
$$
nest 2 (vcat
[ quotes (ppr mod) <+> parens (ppr (imv_span imv))
| (mod,imv) <- helpful_imports_non_hiding
])
| [(mod,imv)] <- helpful_imports_hiding
= fsep
[ text "Perhaps you want to remove"
, quotes (ppr occ_name)
, text "from the explicit hiding list"
, text "in the import of"
, quotes (ppr mod)
, parens (ppr (imv_span imv)) <> dot
]
| not (null helpful_imports_hiding)
= fsep
[ text "Perhaps you want to remove"
, quotes (ppr occ_name)
, text "from the hiding clauses"
, text "in one of these imports:"
]
$$
nest 2 (vcat
[ quotes (ppr mod) <+> parens (ppr (imv_span imv))
| (mod,imv) <- helpful_imports_hiding
])
| otherwise
= Outputable.empty
where
is_qualified = isQual rdr_name
(mod_name, occ_name) = case rdr_name of
Unqual occ_name -> (Nothing, occ_name)
Qual mod_name occ_name -> (Just mod_name, occ_name)
_ -> error "importSuggestions: dead code"
-- What import statements provide "Mod" at all
-- or, if this is an unqualified name, are not qualified imports
interesting_imports = [ (mod, imp)
| (mod, mod_imports) <- moduleEnvToList (imp_mods imports)
, Just imp <- return $ pick mod_imports
]
-- We want to keep only one for each original module; preferably one with an
-- explicit import list (for no particularly good reason)
pick :: [ImportedModsVal] -> Maybe ImportedModsVal
pick = listToMaybe . sortBy (compare `on` prefer) . filter select
where select imv = case mod_name of Just name -> imv_name imv == name
Nothing -> not (imv_qualified imv)
prefer imv = (imv_is_hiding imv, imv_span imv)
-- Which of these would export a 'foo'
-- (all of these are restricted imports, because if they were not, we
-- wouldn't have an out-of-scope error in the first place)
helpful_imports = filter helpful interesting_imports
where helpful (_,imv)
= not . null $ lookupGlobalRdrEnv (imv_all_exports imv) occ_name
-- Which of these do that because of an explicit hiding list resp. an
-- explicit import list
(helpful_imports_hiding, helpful_imports_non_hiding)
= partition (imv_is_hiding . snd) helpful_imports
{-
************************************************************************
* *
\subsection{Free variable manipulation}
* *
************************************************************************
-}
-- A useful utility
addFvRn :: FreeVars -> RnM (thing, FreeVars) -> RnM (thing, FreeVars)
addFvRn fvs1 thing_inside = do { (res, fvs2) <- thing_inside
; return (res, fvs1 `plusFV` fvs2) }
mapFvRn :: (a -> RnM (b, FreeVars)) -> [a] -> RnM ([b], FreeVars)
mapFvRn f xs = do stuff <- mapM f xs
case unzip stuff of
(ys, fvs_s) -> return (ys, plusFVs fvs_s)
mapMaybeFvRn :: (a -> RnM (b, FreeVars)) -> Maybe a -> RnM (Maybe b, FreeVars)
mapMaybeFvRn _ Nothing = return (Nothing, emptyFVs)
mapMaybeFvRn f (Just x) = do { (y, fvs) <- f x; return (Just y, fvs) }
-- because some of the rename functions are CPSed:
-- maps the function across the list from left to right;
-- collects all the free vars into one set
mapFvRnCPS :: (a -> (b -> RnM c) -> RnM c)
-> [a] -> ([b] -> RnM c) -> RnM c
mapFvRnCPS _ [] cont = cont []
mapFvRnCPS f (x:xs) cont = f x $ \ x' ->
mapFvRnCPS f xs $ \ xs' ->
cont (x':xs')
{-
************************************************************************
* *
\subsection{Envt utility functions}
* *
************************************************************************
-}
warnUnusedTopBinds :: [GlobalRdrElt] -> RnM ()
warnUnusedTopBinds gres
= whenWOptM Opt_WarnUnusedTopBinds
$ do env <- getGblEnv
let isBoot = tcg_src env == HsBootFile
let noParent gre = case gre_par gre of
NoParent -> True
PatternSynonym -> True
_ -> False
-- Don't warn about unused bindings with parents in
-- .hs-boot files, as you are sometimes required to give
-- unused bindings (trac #3449).
-- HOWEVER, in a signature file, you are never obligated to put a
-- definition in the main text. Thus, if you define something
-- and forget to export it, we really DO want to warn.
gres' = if isBoot then filter noParent gres
else gres
warnUnusedGREs gres'
warnUnusedLocalBinds, warnUnusedMatches, warnUnusedTypePatterns
:: [Name] -> FreeVars -> RnM ()
warnUnusedLocalBinds = check_unused Opt_WarnUnusedLocalBinds
warnUnusedMatches = check_unused Opt_WarnUnusedMatches
warnUnusedTypePatterns = check_unused Opt_WarnUnusedTypePatterns
check_unused :: WarningFlag -> [Name] -> FreeVars -> RnM ()
check_unused flag bound_names used_names
= whenWOptM flag (warnUnused flag (filterOut (`elemNameSet` used_names)
bound_names))
-------------------------
-- Helpers
warnUnusedGREs :: [GlobalRdrElt] -> RnM ()
warnUnusedGREs gres = mapM_ warnUnusedGRE gres
warnUnused :: WarningFlag -> [Name] -> RnM ()
warnUnused flag names = do
fld_env <- mkFieldEnv <$> getGlobalRdrEnv
mapM_ (warnUnused1 flag fld_env) names
warnUnused1 :: WarningFlag -> NameEnv (FieldLabelString, Name) -> Name -> RnM ()
warnUnused1 flag fld_env name
= when (reportable name) $
addUnusedWarning flag
occ (nameSrcSpan name)
(text "Defined but not used")
where
occ = case lookupNameEnv fld_env name of
Just (fl, _) -> mkVarOccFS fl
Nothing -> nameOccName name
warnUnusedGRE :: GlobalRdrElt -> RnM ()
warnUnusedGRE gre@(GRE { gre_name = name, gre_lcl = lcl, gre_imp = is })
| lcl = do fld_env <- mkFieldEnv <$> getGlobalRdrEnv
warnUnused1 Opt_WarnUnusedTopBinds fld_env name
| otherwise = when (reportable name) (mapM_ warn is)
where
occ = greOccName gre
warn spec = addUnusedWarning Opt_WarnUnusedTopBinds occ span msg
where
span = importSpecLoc spec
pp_mod = quotes (ppr (importSpecModule spec))
msg = text "Imported from" <+> pp_mod <+> ptext (sLit "but not used")
-- | Make a map from selector names to field labels and parent tycon
-- names, to be used when reporting unused record fields.
mkFieldEnv :: GlobalRdrEnv -> NameEnv (FieldLabelString, Name)
mkFieldEnv rdr_env = mkNameEnv [ (gre_name gre, (lbl, par_is (gre_par gre)))
| gres <- occEnvElts rdr_env
, gre <- gres
, Just lbl <- [greLabel gre]
]
reportable :: Name -> Bool
reportable name
| isWiredInName name = False -- Don't report unused wired-in names
-- Otherwise we get a zillion warnings
-- from Data.Tuple
| otherwise = not (startsWithUnderscore (nameOccName name))
addUnusedWarning :: WarningFlag -> OccName -> SrcSpan -> SDoc -> RnM ()
addUnusedWarning flag occ span msg
= addWarnAt (Reason flag) span $
sep [msg <> colon,
nest 2 $ pprNonVarNameSpace (occNameSpace occ)
<+> quotes (ppr occ)]
addNameClashErrRn :: RdrName -> [GlobalRdrElt] -> RnM ()
addNameClashErrRn rdr_name gres
| all isLocalGRE gres && not (all isRecFldGRE gres)
-- If there are two or more *local* defns, we'll have reported
= return () -- that already, and we don't want an error cascade
| otherwise
= addErr (vcat [text "Ambiguous occurrence" <+> quotes (ppr rdr_name),
text "It could refer to" <+> vcat (msg1 : msgs)])
where
(np1:nps) = gres
msg1 = ptext (sLit "either") <+> mk_ref np1
msgs = [text " or" <+> mk_ref np | np <- nps]
mk_ref gre = sep [nom <> comma, pprNameProvenance gre]
where nom = case gre_par gre of
FldParent { par_lbl = Just lbl } -> text "the field" <+> quotes (ppr lbl)
_ -> quotes (ppr (gre_name gre))
shadowedNameWarn :: OccName -> [SDoc] -> SDoc
shadowedNameWarn occ shadowed_locs
= sep [text "This binding for" <+> quotes (ppr occ)
<+> text "shadows the existing binding" <> plural shadowed_locs,
nest 2 (vcat shadowed_locs)]
perhapsForallMsg :: SDoc
perhapsForallMsg
= vcat [ text "Perhaps you intended to use ExplicitForAll or similar flag"
, text "to enable explicit-forall syntax: forall <tvs>. <type>"]
unknownSubordinateErr :: SDoc -> RdrName -> SDoc
unknownSubordinateErr doc op -- Doc is "method of class" or
-- "field of constructor"
= quotes (ppr op) <+> text "is not a (visible)" <+> doc
badOrigBinding :: RdrName -> SDoc
badOrigBinding name
= text "Illegal binding of built-in syntax:" <+> ppr (rdrNameOcc name)
-- The rdrNameOcc is because we don't want to print Prelude.(,)
dupNamesErr :: Outputable n => (n -> SrcSpan) -> [n] -> RnM ()
dupNamesErr get_loc names
= addErrAt big_loc $
vcat [text "Conflicting definitions for" <+> quotes (ppr (head names)),
locations]
where
locs = map get_loc names
big_loc = foldr1 combineSrcSpans locs
locations = text "Bound at:" <+> vcat (map ppr (sort locs))
kindSigErr :: Outputable a => a -> SDoc
kindSigErr thing
= hang (text "Illegal kind signature for" <+> quotes (ppr thing))
2 (text "Perhaps you intended to use KindSignatures")
badQualBndrErr :: RdrName -> SDoc
badQualBndrErr rdr_name
= text "Qualified name in binding position:" <+> ppr rdr_name
opDeclErr :: RdrName -> SDoc
opDeclErr n
= hang (text "Illegal declaration of a type or class operator" <+> quotes (ppr n))
       2 (text "Use TypeOperators to declare operators in types and declarations")
checkTupSize :: Int -> RnM ()
checkTupSize tup_size
| tup_size <= mAX_TUPLE_SIZE
= return ()
| otherwise
= addErr (sep [text "A" <+> int tup_size <> ptext (sLit "-tuple is too large for GHC"),
nest 2 (parens (text "max size is" <+> int mAX_TUPLE_SIZE)),
nest 2 (text "Workaround: use nested tuples or define a data type")])
{-
************************************************************************
* *
\subsection{Contexts for renaming errors}
* *
************************************************************************
-}
-- AZ:TODO: Change these all to be Name instead of RdrName.
-- Merge TcType.UserTypeContext in to it.
data HsDocContext
= TypeSigCtx SDoc
| PatCtx
| SpecInstSigCtx
| DefaultDeclCtx
| ForeignDeclCtx (Located RdrName)
| DerivDeclCtx
| RuleCtx FastString
| TyDataCtx (Located RdrName)
| TySynCtx (Located RdrName)
| TyFamilyCtx (Located RdrName)
| FamPatCtx (Located RdrName) -- The patterns of a type/data family instance
| ConDeclCtx [Located Name]
| ClassDeclCtx (Located RdrName)
| ExprWithTySigCtx
| TypBrCtx
| HsTypeCtx
| GHCiCtx
| SpliceTypeCtx (LHsType RdrName)
| ClassInstanceCtx
| VectDeclCtx (Located RdrName)
| GenericCtx SDoc -- Maybe we want to use this more!
withHsDocContext :: HsDocContext -> SDoc -> SDoc
withHsDocContext ctxt doc = doc $$ inHsDocContext ctxt
inHsDocContext :: HsDocContext -> SDoc
inHsDocContext ctxt = text "In" <+> pprHsDocContext ctxt
pprHsDocContext :: HsDocContext -> SDoc
pprHsDocContext (GenericCtx doc) = doc
pprHsDocContext (TypeSigCtx doc) = text "the type signature for" <+> doc
pprHsDocContext PatCtx = text "a pattern type-signature"
pprHsDocContext SpecInstSigCtx = text "a SPECIALISE instance pragma"
pprHsDocContext DefaultDeclCtx = text "a `default' declaration"
pprHsDocContext DerivDeclCtx = text "a deriving declaration"
pprHsDocContext (RuleCtx name) = text "the transformation rule" <+> ftext name
pprHsDocContext (TyDataCtx tycon) = text "the data type declaration for" <+> quotes (ppr tycon)
pprHsDocContext (FamPatCtx tycon) = text "a type pattern of family instance for" <+> quotes (ppr tycon)
pprHsDocContext (TySynCtx name) = text "the declaration for type synonym" <+> quotes (ppr name)
pprHsDocContext (TyFamilyCtx name) = text "the declaration for type family" <+> quotes (ppr name)
pprHsDocContext (ClassDeclCtx name) = text "the declaration for class" <+> quotes (ppr name)
pprHsDocContext ExprWithTySigCtx = text "an expression type signature"
pprHsDocContext TypBrCtx = text "a Template-Haskell quoted type"
pprHsDocContext HsTypeCtx = text "a type argument"
pprHsDocContext GHCiCtx = text "GHCi input"
pprHsDocContext (SpliceTypeCtx hs_ty) = text "the spliced type" <+> quotes (ppr hs_ty)
pprHsDocContext ClassInstanceCtx = text "TcSplice.reifyInstances"
pprHsDocContext (ForeignDeclCtx name)
= text "the foreign declaration for" <+> quotes (ppr name)
pprHsDocContext (ConDeclCtx [name])
= text "the definition of data constructor" <+> quotes (ppr name)
pprHsDocContext (ConDeclCtx names)
= text "the definition of data constructors" <+> interpp'SP names
pprHsDocContext (VectDeclCtx tycon)
= text "the VECTORISE pragma for type constructor" <+> quotes (ppr tycon)
| tjakway/ghcjvm | compiler/rename/RnEnv.hs | bsd-3-clause | 97,750 | 601 | 39 | 28,916 | 15,586 | 8,250 | 7,336 | 1,207 | 10 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeApplications #-}
module Test.Pos.Chain.Txp.Bi
( tests
) where
import Universum
import qualified Data.Map as M
import Data.Typeable (typeRep)
import Hedgehog (Gen, Property)
import qualified Hedgehog as H
import qualified Hedgehog.Gen as Gen
import Pos.Binary.Class (Bi, Case (..), LengthOf, SizeOverride (..),
szCases)
import Pos.Chain.Txp (Tx (..), TxAux (..), TxIn (..),
TxInWitness (..), TxOut (..), TxOutAux (..),
TxSigData (..))
import Pos.Core.Attributes (Attributes (..), mkAttributes)
import Pos.Core.Common (AddrAttributes (..), Script (..))
import Pos.Crypto (ProtocolMagic (..), ProtocolMagicId (..),
RequiresNetworkMagic (..), SignTag (..), Signature, sign)
import Test.Pos.Binary.Helpers (SizeTestConfig (..), scfg, sizeTest)
import Test.Pos.Binary.Helpers.GoldenRoundTrip (goldenTestBi,
roundTripsBiBuildable, roundTripsBiShow)
import Test.Pos.Chain.Txp.Example (exampleHashTx,
exampleRedeemSignature, exampleTxId, exampleTxInList,
exampleTxInUnknown, exampleTxInUtxo, exampleTxOut,
exampleTxOutList, exampleTxProof, exampleTxSig,
exampleTxSigData, exampleTxWitness)
import Test.Pos.Chain.Txp.Gen (genTx, genTxAttributes, genTxAux,
genTxHash, genTxId, genTxIn, genTxInList, genTxInWitness,
genTxOut, genTxOutAux, genTxOutList, genTxPayload,
genTxProof, genTxSig, genTxSigData, genTxWitness)
import Test.Pos.Core.ExampleHelpers (examplePublicKey,
exampleRedeemPublicKey, exampleSecretKey, feedPM)
import Test.Pos.Util.Golden (discoverGolden, eachOf)
import Test.Pos.Util.Tripping (discoverRoundTrip)
--------------------------------------------------------------------------------
-- Tx
--------------------------------------------------------------------------------
golden_Tx :: Property
golden_Tx = goldenTestBi tx "test/golden/bi/txp/Tx"
where
tx = UnsafeTx exampleTxInList exampleTxOutList (mkAttributes ())
roundTripTx :: Property
roundTripTx = eachOf 50 genTx roundTripsBiBuildable
--------------------------------------------------------------------------------
-- TxAttributes
--------------------------------------------------------------------------------
golden_TxAttributes :: Property
golden_TxAttributes = goldenTestBi txA "test/golden/bi/txp/TxAttributes"
where
txA = mkAttributes ()
roundTripTxAttributes :: Property
roundTripTxAttributes = eachOf 10 genTxAttributes roundTripsBiBuildable
--------------------------------------------------------------------------------
-- TxAux
--------------------------------------------------------------------------------
roundTripTxAux :: Property
roundTripTxAux = eachOf 100 (feedPM genTxAux) roundTripsBiBuildable
--------------------------------------------------------------------------------
-- Tx Hash
--------------------------------------------------------------------------------
golden_HashTx :: Property
golden_HashTx = goldenTestBi exampleHashTx "test/golden/bi/txp/HashTx"
roundTripHashTx :: Property
roundTripHashTx = eachOf 50 genTxHash roundTripsBiBuildable
--------------------------------------------------------------------------------
-- TxIn
--------------------------------------------------------------------------------
golden_TxInUtxo :: Property
golden_TxInUtxo = goldenTestBi exampleTxInUtxo "test/golden/bi/txp/TxIn_Utxo"
golden_TxInUnknown :: Property
golden_TxInUnknown = goldenTestBi exampleTxInUnknown "test/golden/bi/txp/TxIn_Unknown"
roundTripTxIn :: Property
roundTripTxIn = eachOf 100 genTxIn roundTripsBiBuildable
--------------------------------------------------------------------------------
-- TxId
--------------------------------------------------------------------------------
golden_TxId :: Property
golden_TxId = goldenTestBi exampleTxId "test/golden/bi/txp/TxId"
roundTripTxId :: Property
roundTripTxId = eachOf 50 genTxId roundTripsBiBuildable
--------------------------------------------------------------------------------
-- TxInList
--------------------------------------------------------------------------------
golden_TxInList :: Property
golden_TxInList = goldenTestBi exampleTxInList "test/golden/bi/txp/TxInList"
roundTripTxInList :: Property
roundTripTxInList = eachOf 50 genTxInList roundTripsBiShow
--------------------------------------------------------------------------------
-- TxInWitness
--------------------------------------------------------------------------------
golden_PkWitness :: Property
golden_PkWitness = goldenTestBi pkWitness "test/golden/bi/txp/TxInWitness_PkWitness"
where
pkWitness = PkWitness examplePublicKey exampleTxSig
golden_ScriptWitness :: Property
golden_ScriptWitness = goldenTestBi scriptWitness "test/golden/bi/txp/TxInWitness_ScriptWitness"
where
scriptWitness = ScriptWitness validatorScript redeemerScript
validatorScript = Script 47 "serialized script"
redeemerScript = Script 47 "serialized script"
golden_RedeemWitness :: Property
golden_RedeemWitness = goldenTestBi redeemWitness "test/golden/bi/txp/TxInWitness_RedeemWitness"
where
redeemWitness = RedeemWitness exampleRedeemPublicKey exampleRedeemSignature
golden_UnknownWitnessType :: Property
golden_UnknownWitnessType = goldenTestBi unkWitType "test/golden/bi/txp/TxInWitness_UnknownWitnessType"
where
unkWitType = UnknownWitnessType 47 "forty seven"
roundTripTxInWitness :: Property
roundTripTxInWitness = eachOf 50 (feedPM genTxInWitness) roundTripsBiBuildable
--------------------------------------------------------------------------------
-- TxOutList
--------------------------------------------------------------------------------
golden_TxOutList :: Property
golden_TxOutList = goldenTestBi exampleTxOutList "test/golden/bi/txp/TxOutList"
roundTripTxOutList :: Property
roundTripTxOutList = eachOf 50 genTxOutList roundTripsBiShow
--------------------------------------------------------------------------------
-- TxOut
--------------------------------------------------------------------------------
golden_TxOut :: Property
golden_TxOut = goldenTestBi exampleTxOut "test/golden/bi/txp/TxOut"
roundTripTxOut :: Property
roundTripTxOut = eachOf 50 genTxOut roundTripsBiBuildable
--------------------------------------------------------------------------------
-- TxOutAux
--------------------------------------------------------------------------------
golden_TxOutAux :: Property
golden_TxOutAux = goldenTestBi txOutAux "test/golden/bi/txp/TxOutAux"
where
txOutAux = TxOutAux exampleTxOut
roundTripTxOutAux :: Property
roundTripTxOutAux = eachOf 50 genTxOutAux roundTripsBiBuildable
--------------------------------------------------------------------------------
-- TxPayload
--------------------------------------------------------------------------------
roundTripTxPayload :: Property
roundTripTxPayload = eachOf 50 (feedPM genTxPayload) roundTripsBiShow
--------------------------------------------------------------------------------
-- TxProof
--------------------------------------------------------------------------------
golden_TxProof :: Property
golden_TxProof = goldenTestBi exampleTxProof "test/golden/bi/txp/TxProof"
roundTripTxProof :: Property
roundTripTxProof = eachOf 50 (feedPM genTxProof) roundTripsBiBuildable
--------------------------------------------------------------------------------
-- TxSig
--------------------------------------------------------------------------------
golden_TxSig :: Property
golden_TxSig = goldenTestBi txSigGold "test/golden/bi/txp/TxSig"
where
txSigGold = sign pm SignForTestingOnly
exampleSecretKey exampleTxSigData
pm = ProtocolMagic { getProtocolMagicId = ProtocolMagicId 0
, getRequiresNetworkMagic = RequiresNoMagic
}
roundTripTxSig :: Property
roundTripTxSig = eachOf 50 (feedPM genTxSig) roundTripsBiBuildable
--------------------------------------------------------------------------------
-- TxSigData
--------------------------------------------------------------------------------
golden_TxSigData :: Property
golden_TxSigData = goldenTestBi exampleTxSigData "test/golden/bi/txp/TxSigData"
roundTripTxSigData :: Property
roundTripTxSigData = eachOf 50 genTxSigData roundTripsBiShow
--------------------------------------------------------------------------------
-- TxWitness
--------------------------------------------------------------------------------
golden_TxWitness :: Property
golden_TxWitness = goldenTestBi exampleTxWitness "test/golden/bi/txp/TxWitness"
roundTripTxWitness :: Property
roundTripTxWitness = eachOf 20 (feedPM genTxWitness) roundTripsBiShow
sizeEstimates :: H.Group
sizeEstimates =
let check :: (Show a, Bi a) => Gen a -> Property
check g = sizeTest $ scfg { gen = g }
pm = ProtocolMagic { getProtocolMagicId = ProtocolMagicId 0
, getRequiresNetworkMagic = RequiresNoMagic
}
knownTxIn (TxInUnknown _ _) = False
knownTxIn _ = True
-- Explicit bounds for types, based on the generators from Gen.
attrUnitSize = (typeRep (Proxy @(Attributes ()))
, SizeConstant 1)
attrAddrSize = (typeRep (Proxy @(Attributes AddrAttributes)),
SizeConstant (szCases [ Case "min" 1, Case "max" 1024 ]))
txSigSize = (typeRep (Proxy @(Signature TxSigData))
, SizeConstant 66)
scriptSize = (typeRep (Proxy @Script),
SizeConstant $ szCases [ Case "loScript" 1
, Case "hiScript" 255 ])
in H.Group "Encoded size bounds for core types."
[ ("TxId" , check genTxId)
, ("Tx" , sizeTest $ scfg
{ gen = genTx
, addlCtx = M.fromList [ attrUnitSize, attrAddrSize ]
, computedCtx = \tx -> M.fromList
[ (typeRep (Proxy @(LengthOf [TxIn])),
SizeConstant (fromIntegral $ length $ _txInputs tx))
, (typeRep (Proxy @(LengthOf [TxOut])),
SizeConstant (fromIntegral $ length $ _txOutputs tx))
]
})
, ("TxIn" , check (Gen.filter knownTxIn genTxIn))
, ("TxOut" , sizeTest $ scfg
{ gen = genTxOut
, addlCtx = M.fromList [ attrAddrSize ]
})
, ("TxAux" , sizeTest $ scfg
{ gen = genTxAux pm
, addlCtx = M.fromList [ attrUnitSize
, attrAddrSize
, scriptSize
, txSigSize ]
, computedCtx = \(TxAux tx witness) -> M.fromList
[ (typeRep (Proxy @(LengthOf [TxIn])),
SizeConstant (fromIntegral $ length $ _txInputs tx))
, (typeRep (Proxy @(LengthOf (Vector TxInWitness))),
SizeConstant (fromIntegral $ length witness))
, (typeRep (Proxy @(LengthOf [TxOut])),
SizeConstant (fromIntegral $ length $ _txOutputs tx))
]
})
, ("TxInWitness" , sizeTest $ scfg
{ gen = genTxInWitness pm
, addlCtx = M.fromList [ txSigSize, scriptSize ]
})
, ("TxSigData" , check genTxSigData)
, ("Signature TxSigData" , sizeTest $ scfg
{ gen = genTxSig pm
, addlCtx = M.fromList [ txSigSize ]
})
]
-----------------------------------------------------------------------
-- Main test export
-----------------------------------------------------------------------
tests :: IO Bool
tests = and <$> sequence
[ H.checkSequential $$discoverGolden
, H.checkParallel $$discoverRoundTrip
, H.checkParallel sizeEstimates
]
| input-output-hk/pos-haskell-prototype | chain/test/Test/Pos/Chain/Txp/Bi.hs | mit | 12,445 | 0 | 24 | 2,650 | 2,044 | 1,177 | 867 | 172 | 2 |
-- |
-- Copyright : (c) 2010, 2011 Benedikt Schmidt & Simon Meier
-- License : GPL v3 (see LICENSE)
--
-- Maintainer : Simon Meier <[email protected]>
-- Portability : GHC only
--
-- Support for interaction with the console: argument parsing.
module Main.Console (
defaultMain
-- * Static information about the Tamarin prover
, programName
-- * Constructing interaction modes for Tamarin prover
, TamarinMode
, tamarinMode
, helpAndExit
-- * Argument parsing
, Arguments
, ArgKey
, ArgVal
-- ** Setting arguments
, updateArg
, addEmptyArg
, helpFlag
-- ** Retrieving arguments
, getArg
, findArg
, argExists
-- * Pretty printing and console output
, lineWidth
, shortLineWidth
, renderDoc
) where
import Data.Maybe
import Data.Version (showVersion)
import Safe
import Control.Monad
import System.Console.CmdArgs.Explicit
import System.Console.CmdArgs.Text
import System.Exit
import qualified Text.PrettyPrint.Class as PP
import Paths_tamarin_prover (version)
------------------------------------------------------------------------------
-- Static constants for the tamarin-prover
------------------------------------------------------------------------------
-- | Program name
programName :: String
programName = "tamarin-prover"
-- | Version string
versionStr :: String
versionStr = unlines
[ concat
[ programName
, " "
, showVersion version
, ", (C) Benedikt Schmidt, Simon Meier, Jannik Dreier, Ralf Sasse, ETH Zurich 2010-2015"
]
, ""
, "This program comes with ABSOLUTELY NO WARRANTY. It is free software, and you"
, "are welcome to redistribute it according to its LICENSE, see"
, "'https://github.com/tamarin-prover/tamarin-prover/blob/master/LICENSE'."
]
-- | Line width to use.
lineWidth :: Int
lineWidth = 110
shortLineWidth :: Int
shortLineWidth = 78
------------------------------------------------------------------------------
-- A simple generic representation of arguments
------------------------------------------------------------------------------
-- | A name of an argument.
type ArgKey = String
-- | A value of an argument.
type ArgVal = String
-- | It is most convenient to view arguments just as 'String' based key-value
-- pairs. If there are multiple values for the same key, then the left-most
-- one is preferred.
type Arguments = [(ArgKey,ArgVal)]
-- | Does an argument exist.
argExists :: String -> Arguments -> Bool
argExists a = isJust . findArg a
-- | Find the value(s) corresponding to the given key.
findArg :: MonadPlus m => ArgKey -> Arguments -> m ArgVal
findArg a' as = msum [ return v | (a,v) <- as, a == a' ]
-- | Find the value corresponding to the given key. Throw an error if no value
-- exists.
getArg :: ArgKey -> Arguments -> ArgVal
getArg a =
fromMaybe (error $ "getArg: argument '" ++ a ++ "' not found") . findArg a
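-- An illustrative sketch (the argument values below are made up, not taken
-- from the original source): with multiple values for the same key,
-- 'findArg' at type 'Maybe' returns the left-most one, while the list
-- instance returns all of them in order; 'getArg' behaves like the 'Maybe'
-- case but fails hard when the key is missing.
--
-- > let as = [("mode","prove"), ("in","a.spthy"), ("in","b.spthy")]
-- > findArg "in" as :: Maybe ArgVal   -- Just "a.spthy"
-- > findArg "in" as :: [ArgVal]       -- ["a.spthy","b.spthy"]
-- > getArg "mode" as                  -- "prove"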
-- | Add an argument to the front of the list of arguments.
addArg :: ArgKey -> ArgVal -> Arguments -> Arguments
addArg a v = ((a,v):)
-- | Add an argument with the empty string as the value.
addEmptyArg :: String -> Arguments -> Arguments
addEmptyArg a = addArg a ""
-- | Update an argument.
updateArg :: ArgKey -> ArgVal -> Arguments -> Either a Arguments
updateArg a v = Right . addArg a v
-- | Add the help flag.
helpFlag :: Flag Arguments
helpFlag = flagHelpSimple (addEmptyArg "help")
------------------------------------------------------------------------------
-- Modes for using the Tamarin prover
------------------------------------------------------------------------------
-- | A representation of an interaction mode with the Tamarin prover.
data TamarinMode = TamarinMode
{ tmName :: String
  , tmCmdArgsMode :: Mode Arguments
  , tmRun         :: TamarinMode -> Arguments -> IO ()
    -- ^ 'tmRun' is given a reference to its own mode. This enables changing the
    -- static information of a mode and keeping the same 'run' function.
    -- We use this for implementing the 'main' mode.
, tmIsMainMode :: Bool
}
-- | Smart constructor for a 'TamarinMode'.
tamarinMode :: String -> Help
-> (Mode Arguments -> Mode Arguments) -- ^ Changes to default mode.
-> (TamarinMode -> Arguments -> IO ())
-> TamarinMode
tamarinMode name help adaptMode run0 = TamarinMode
{ tmName = name
, tmCmdArgsMode = adaptMode $ Mode
{ modeGroupModes = toGroup []
, modeNames = [name]
, modeValue = []
, modeCheck = updateArg "mode" name
, modeExpandAt = False
      , modeReform     = const Nothing -- no reform possibility
, modeHelp = help
, modeHelpSuffix = []
, modeArgs = ([], Nothing) -- no positional arguments
, modeGroupFlags = toGroup [] -- no flags
}
, tmRun = run
, tmIsMainMode = False
}
where
run thisMode as
| argExists "help" as = helpAndExit thisMode Nothing
| argExists "version" as = putStrLn versionStr
| otherwise = run0 thisMode as
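-- An illustrative sketch (the mode name, help text and action are
-- hypothetical, not part of the original source): a minimal mode built with
-- 'tamarinMode' that leaves the default 'Mode' unchanged and simply prints
-- the parsed arguments. The @--help@ and @--version@ handling shown above is
-- inherited automatically.
--
-- > debugMode :: TamarinMode
-- > debugMode = tamarinMode "debug"
-- >     "Print the parsed arguments (illustration only)."
-- >     id                            -- keep the default mode unchanged
-- >     (\_thisMode as -> print as)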
-- | Display the help message of a tamarin mode and exit.
helpAndExit :: TamarinMode -> Maybe String -> IO ()
helpAndExit tmode mayMsg = do
putStrLn $ showText (Wrap lineWidth)
$ helpText header HelpFormatOne (tmCmdArgsMode tmode)
-- output example info
when (tmIsMainMode tmode) $ do
putStrLn $ unlines
[ separator
, "See 'https://github.com/tamarin-prover/tamarin-prover/blob/master/README.md'"
, "for usage instructions and pointers to examples."
, separator
]
end
where
separator = replicate shortLineWidth '-'
(header, end) = case mayMsg of
Nothing -> ([], return ())
Just msg -> (["error: " ++ msg], exitFailure)
-- | Main function.
defaultMain :: TamarinMode -> [TamarinMode] -> IO ()
defaultMain firstMode otherModes = do
as <- processArgs $ tmCmdArgsMode mainMode
case findArg "mode" as of
Nothing -> error $ "defaultMain: impossible - mode not set"
Just name -> headNote "defaultMain: impossible - no mode found" $ do
tmode <- (mainMode : otherModes)
guard (tmName tmode == name)
return $ tmRun tmode tmode as
where
mainMode = firstMode
{ tmName = programName
, tmCmdArgsMode = (tmCmdArgsMode firstMode)
{ modeNames = [programName]
, modeCheck = updateArg "mode" programName
, modeGroupModes = toGroup (map tmCmdArgsMode $ otherModes)
, modeGroupFlags = (modeGroupFlags $ tmCmdArgsMode firstMode)
{ groupNamed =
[ ("About"
, [ helpFlag
, flagVersion (addEmptyArg "version")
] )
]
}
}
, tmIsMainMode = True
}
------------------------------------------------------------------------------
-- Pretty printing
------------------------------------------------------------------------------
-- | Render a pretty-printing document.
renderDoc :: PP.Doc -> String
renderDoc = PP.renderStyle (PP.defaultStyle { PP.lineLength = lineWidth })
| ekr/tamarin-prover | src/Main/Console.hs | gpl-3.0 | 7,294 | 0 | 18 | 1,849 | 1,296 | 728 | 568 | 128 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.AutoScaling.EnterStandby
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Moves the specified instances into 'Standby' mode.
--
-- For more information, see
-- <http://docs.aws.amazon.com/AutoScaling/latest/DeveloperGuide/AutoScalingInServiceState.html Auto Scaling InService State>
-- in the /Auto Scaling Developer Guide/.
--
-- /See:/ <http://docs.aws.amazon.com/AutoScaling/latest/APIReference/API_EnterStandby.html AWS API Reference> for EnterStandby.
module Network.AWS.AutoScaling.EnterStandby
(
-- * Creating a Request
enterStandby
, EnterStandby
-- * Request Lenses
, esInstanceIds
, esAutoScalingGroupName
, esShouldDecrementDesiredCapacity
-- * Destructuring the Response
, enterStandbyResponse
, EnterStandbyResponse
-- * Response Lenses
, ersActivities
, ersResponseStatus
) where
import Network.AWS.AutoScaling.Types
import Network.AWS.AutoScaling.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'enterStandby' smart constructor.
data EnterStandby = EnterStandby'
{ _esInstanceIds :: !(Maybe [Text])
, _esAutoScalingGroupName :: !Text
, _esShouldDecrementDesiredCapacity :: !Bool
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'EnterStandby' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'esInstanceIds'
--
-- * 'esAutoScalingGroupName'
--
-- * 'esShouldDecrementDesiredCapacity'
enterStandby
:: Text -- ^ 'esAutoScalingGroupName'
-> Bool -- ^ 'esShouldDecrementDesiredCapacity'
-> EnterStandby
enterStandby pAutoScalingGroupName_ pShouldDecrementDesiredCapacity_ =
EnterStandby'
{ _esInstanceIds = Nothing
, _esAutoScalingGroupName = pAutoScalingGroupName_
, _esShouldDecrementDesiredCapacity = pShouldDecrementDesiredCapacity_
}
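-- An illustrative sketch (the Auto Scaling group name and instance id below
-- are made up): build a request with the two required fields and then
-- override the optional instance-id list via its lens, using the usual lens
-- operators ('&' and '.~'):
--
-- > enterStandby "my-asg" True
-- >     & esInstanceIds .~ ["i-0123456789abcdef0"]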
-- | One or more instances to move into 'Standby' mode. You must specify at
-- least one instance ID.
esInstanceIds :: Lens' EnterStandby [Text]
esInstanceIds = lens _esInstanceIds (\ s a -> s{_esInstanceIds = a}) . _Default . _Coerce;
-- | The name of the Auto Scaling group.
esAutoScalingGroupName :: Lens' EnterStandby Text
esAutoScalingGroupName = lens _esAutoScalingGroupName (\ s a -> s{_esAutoScalingGroupName = a});
-- | Specifies whether the instances moved to 'Standby' mode count as part of
-- the Auto Scaling group\'s desired capacity. If set, the desired capacity
-- for the Auto Scaling group decrements by the number of instances moved
-- to 'Standby' mode.
esShouldDecrementDesiredCapacity :: Lens' EnterStandby Bool
esShouldDecrementDesiredCapacity = lens _esShouldDecrementDesiredCapacity (\ s a -> s{_esShouldDecrementDesiredCapacity = a});
instance AWSRequest EnterStandby where
type Rs EnterStandby = EnterStandbyResponse
request = postQuery autoScaling
response
= receiveXMLWrapper "EnterStandbyResult"
(\ s h x ->
EnterStandbyResponse' <$>
(x .@? "Activities" .!@ mempty >>=
may (parseXMLList "member"))
<*> (pure (fromEnum s)))
instance ToHeaders EnterStandby where
toHeaders = const mempty
instance ToPath EnterStandby where
toPath = const "/"
instance ToQuery EnterStandby where
toQuery EnterStandby'{..}
= mconcat
["Action" =: ("EnterStandby" :: ByteString),
"Version" =: ("2011-01-01" :: ByteString),
"InstanceIds" =:
toQuery (toQueryList "member" <$> _esInstanceIds),
"AutoScalingGroupName" =: _esAutoScalingGroupName,
"ShouldDecrementDesiredCapacity" =:
_esShouldDecrementDesiredCapacity]
-- | /See:/ 'enterStandbyResponse' smart constructor.
data EnterStandbyResponse = EnterStandbyResponse'
{ _ersActivities :: !(Maybe [Activity])
, _ersResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'EnterStandbyResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ersActivities'
--
-- * 'ersResponseStatus'
enterStandbyResponse
:: Int -- ^ 'ersResponseStatus'
-> EnterStandbyResponse
enterStandbyResponse pResponseStatus_ =
EnterStandbyResponse'
{ _ersActivities = Nothing
, _ersResponseStatus = pResponseStatus_
}
-- | The activities related to moving instances into 'Standby' mode.
ersActivities :: Lens' EnterStandbyResponse [Activity]
ersActivities = lens _ersActivities (\ s a -> s{_ersActivities = a}) . _Default . _Coerce;
-- | The response status code.
ersResponseStatus :: Lens' EnterStandbyResponse Int
ersResponseStatus = lens _ersResponseStatus (\ s a -> s{_ersResponseStatus = a});
| fmapfmapfmap/amazonka | amazonka-autoscaling/gen/Network/AWS/AutoScaling/EnterStandby.hs | mpl-2.0 | 5,587 | 0 | 15 | 1,154 | 752 | 448 | 304 | 93 | 1 |
module Examples.UsablePath
( upExamples
) where
import Utils
import Algebra.Matrix
import Algebra.Semiring
import Algebra.Optimum
import Policy.UsablePath
upExamples :: Int -> Matrix UsablePath
upExamples 0 = M (toArray 5 [(U 0), (U 0), (U 0), (U 1), (U 0)
,(U 0), (U 0), (U 0), (U 1), (U 0)
,(U 0), (U 0), (U 0), (U 0), (U 1)
,(U 1), (U 1), (U 0), (U 0), (U 0)
,(U 0), (U 0), (U 1), (U 0), (U 0)])
upExamples 1 = M (toArray 5 [(U 0), (U 0), (U 0), (U 1), (U 0)
,(U 1), (U 0), (U 1), (U 0), (U 0)
,(U 0), (U 0), (U 0), (U 1), (U 1)
,(U 0), (U 0), (U 0), (U 0), (U 1)
,(U 0), (U 1), (U 0), (U 0), (U 0)])
upExamples 2 = M (toArray 7 [(U 0), (U 0), (U 0), (U 0), (U 0), (U 0), (U 0)
,(U 1), (U 0), (U 0), (U 1), (U 1), (U 0), (U 0)
,(U 0), (U 0), (U 0), (U 0), (U 0), (U 0), (U 0)
,(U 0), (U 1), (U 1), (U 0), (U 1), (U 1), (U 0)
,(U 0), (U 0), (U 0), (U 0), (U 0), (U 0), (U 0)
,(U 0), (U 0), (U 1), (U 1), (U 0), (U 0), (U 1)
,(U 0), (U 0), (U 0), (U 0), (U 1), (U 1), (U 0)])
upExamples 3 = M (toArray 7 [(U 0), (U 1), (U 1), (U 0), (U 0), (U 0), (U 0)
,(U 0), (U 0), (U 0), (U 0), (U 0), (U 0), (U 0)
,(U 1), (U 0), (U 0), (U 1), (U 0), (U 1), (U 0)
,(U 0), (U 0), (U 0), (U 0), (U 0), (U 0), (U 0)
,(U 0), (U 1), (U 0), (U 1), (U 0), (U 0), (U 1)
,(U 0), (U 0), (U 1), (U 1), (U 0), (U 0), (U 1)
,(U 0), (U 0), (U 0), (U 0), (U 0), (U 0), (U 0)])
upExamples _ = error "Undefined example of UsablePath"
| sdynerow/Semirings-Library | haskell/Examples/UsablePath.hs | apache-2.0 | 1,609 | 0 | 10 | 558 | 1,465 | 808 | 657 | 33 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="pl-PL">
<title>Customizable HTML Report</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Zawartość</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Indeks</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Szukaj</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Ulubione</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | veggiespam/zap-extensions | addOns/customreport/src/main/javahelp/org/zaproxy/zap/extension/customreport/resources/help_pl_PL/helpset_pl_PL.hs | apache-2.0 | 973 | 79 | 66 | 158 | 416 | 210 | 206 | -1 | -1 |
module HermBanded
where
import Driver
import Monadic
import qualified Test.QuickCheck.BLAS as Test
import Data.Matrix.Herm
import Data.Matrix.Banded
import Data.Vector.Dense
import Data.Vector.Dense.ST
import Data.Matrix.Dense.ST
import Test.Matrix.Herm.Banded
type V = Vector E
type B = Banded E
type HB = Herm Banded E
prop_herm_apply (HermBandedMV (h :: HB) a x) =
h <*> x ~== a <*> x
prop_herm_sapply k (HermBandedMV (h :: HB) a x) =
sapplyVector k h x ~== sapplyVector k a x
prop_herm_herm_apply (HermBandedMV (h :: HB) a x) =
herm h <*> x ~== h <*> x
prop_doSapplyAddVector alpha beta (HermBandedMV (a :: HB) _ x) = monadicST $ do
forAllM (Test.vector (numRows a)) $ \y -> do
y' <- run $ (unsafeThawVector y :: ST s (STVector s E))
y'' <- run $ freezeVector y'
run $ doSApplyAddVector alpha a x beta y'
assert $ y ~== a <*> (alpha *> x) + (beta *> y'')
prop_herm_applyMatrix (HermBandedMM (h :: HB) a b) =
h <**> b ~== a <**> b
prop_herm_sapplyMatrix k (HermBandedMM (h :: HB) a b) =
sapplyMatrix k h b ~== sapplyMatrix k a b
prop_herm_herm_applyMatrix (HermBandedMM (h :: HB) _ b) =
herm h <**> b ~== h <**> b
prop_doSapplyAddMatrix alpha beta (HermBandedMM (a :: HB) _ b) = monadicST $ do
forAllM (Test.matrix (numRows a, numCols b)) $ \c -> do
c' <- run $ unsafeThawMatrix c
c'' <- run $ freezeMatrix c'
run $ doSApplyAddMatrix alpha a b beta c'
assert $ c ~== a <**> (alpha *> b) + (beta *> c'')
tests_HermBanded =
[ testProperty "herm apply" prop_herm_apply
, testProperty "herm sapply" prop_herm_sapply
, testProperty "herm herm apply" prop_herm_herm_apply
, testProperty "doSApplyAddVector" prop_doSapplyAddVector
, testProperty "herm applyMatrix" prop_herm_applyMatrix
, testProperty "herm sapplyMatrix" prop_herm_sapplyMatrix
, testProperty "herm herm applyMatrix" prop_herm_herm_applyMatrix
, testProperty "doSApplyAddMatrix" prop_doSapplyAddMatrix
]
| patperry/hs-linear-algebra | tests-old/HermBanded.hs | bsd-3-clause | 2,038 | 0 | 17 | 468 | 707 | 363 | 344 | -1 | -1 |
{-# LANGUAGE Rank2Types, FlexibleContexts #-}
module Tests.Utils
where
import Data.Word
import Control.Monad.ST
import Foreign.C.Error
import System.Directory
import System.IO
import System.IO.Unsafe (unsafePerformIO)
import System.FilePath
import Test.QuickCheck hiding (numTests)
import Test.QuickCheck.Monadic
import qualified Data.ByteString as BS
import qualified Data.Map as M
import Halfs.BlockMap
import Halfs.Classes
import Halfs.CoreAPI (mount, newfs, unmount)
import Halfs.Directory
import Halfs.Errors
import Halfs.HalfsState
import Halfs.Monad
import Halfs.MonadUtils
import Halfs.Protection
import Halfs.SuperBlock
import Halfs.Utils (divCeil, withDHLock)
import System.Device.BlockDevice
import System.Device.File
import System.Device.Memory
import System.Device.ST
import Tests.Instances
import Tests.Types
-- import Debug.Trace
type DevCtor = BDGeom -> IO (Maybe (BlockDevice IO))
type HalfsM b r l m a = HalfsT HalfsError (Maybe (HalfsState b r l m)) m a
--------------------------------------------------------------------------------
-- Utility functions
fileDev :: DevCtor
fileDev g = withFileStore
True
("./pseudo.dsk")
(bdgSecSz g)
(bdgSecCnt g)
(`newFileBlockDevice` (bdgSecSz g))
memDev :: DevCtor
memDev g = newMemoryBlockDevice (bdgSecCnt g) (bdgSecSz g)
-- | Create an STArray-backed block device. This function transforms
-- the ST-based block device to an IO block device for interface
-- consistency within this module.
staDev :: DevCtor
staDev g =
stToIO (newSTBlockDevice (bdgSecCnt g) (bdgSecSz g)) >>=
return . maybe Nothing (\dev ->
Just BlockDevice {
bdBlockSize = bdBlockSize dev
, bdNumBlocks = bdNumBlocks dev
, bdReadBlock = \i -> stToIO $ bdReadBlock dev i
, bdWriteBlock = \i v -> stToIO $ bdWriteBlock dev i v
, bdFlush = stToIO $ bdFlush dev
, bdShutdown = stToIO $ bdShutdown dev
})
rescaledDev :: BDGeom -- ^ geometry for underlying device
-> BDGeom -- ^ new device geometry
-> DevCtor -- ^ ctor for underlying device
-> IO (Maybe (BlockDevice IO))
rescaledDev oldG newG ctor =
maybe (fail "Invalid BlockDevice") (newRescaledBlockDevice (bdgSecSz newG))
`fmap` ctor oldG
monadicBCMIOProp :: PropertyM (BCM IO) a -> Property
monadicBCMIOProp = monadic (unsafePerformIO . runBCM)
withFileStore :: Bool -> FilePath -> Word64 -> Word64 -> (FilePath -> IO a)
-> IO a
withFileStore temp fp secSize secCnt act = do
(fname, h) <-
if temp
then openBinaryTempFile
(let d = takeDirectory "." in if null d then "." else d)
(takeFileName fp)
else (,) fp `fmap` openBinaryFile fp ReadWriteMode
let chunkSz = 2^(20::Int)
(numChunks, numBytes) = fromIntegral (secSize * secCnt) `divMod` chunkSz
chunk = BS.replicate chunkSz 0
replicateM_ numChunks (BS.hPut h chunk)
BS.hPut h (BS.replicate numBytes 0)
hClose h
rslt <- act fname
when temp $ removeFile fname
return rslt
whenDev :: (Monad m) => (a -> m b) -> (a -> m ()) -> Maybe a -> m b
whenDev act cleanup =
maybe (fail "Invalid BlockDevice") $ \x -> do
y <- act x
cleanup x
return y
mkMemDevExec :: forall m.
Bool
-> String
-> Int
-> String
-> (BDGeom -> BlockDevice IO -> PropertyM IO m)
-> (Args, Property)
mkMemDevExec quick pfx =
let numTests n = (,) $ if quick then stdArgs{maxSuccess = n} else stdArgs
doProp = (`whenDev` run . bdShutdown)
in
\n s pr ->
numTests n $ label (pfx ++ ": " ++ s) $ monadicIO $
forAllM arbBDGeom $ \g ->
run (memDev g) >>= doProp (pr g)
mkNewFS :: HalfsCapable b t r l m =>
BlockDevice m -> PropertyM m (Either HalfsError SuperBlock)
mkNewFS dev = runHNoEnv $ newfs dev rootUser rootGroup rootDirPerms
mountOK :: HalfsCapable b t r l m =>
BlockDevice m
-> PropertyM m (HalfsState b r l m)
mountOK dev = do
runHNoEnv (defaultMount dev)
>>= either (fail . (++) "Unexpected mount failure: " . show) return
unmountOK :: HalfsCapable b t r l m =>
HalfsState b r l m -> PropertyM m ()
unmountOK fs =
runH fs unmount >>=
either (fail . (++) "Unexpected unmount failure: " . show)
(const $ return ())
sreadRef :: HalfsCapable b t r l m => r a -> PropertyM m a
sreadRef = ($!) (run . readRef)
runH :: HalfsCapable b t r l m =>
HalfsState b r l m
-> HalfsM b r l m a
-> PropertyM m (Either HalfsError a)
runH fs = run . runHalfs fs
runHNoEnv :: HalfsCapable b t r l m =>
HalfsM b r l m a
-> PropertyM m (Either HalfsError a)
runHNoEnv = run . runHalfsNoEnv
execE :: (Monad m ,Show a) =>
String -> String -> m (Either a b) -> PropertyM m b
execE nm descrip act =
run act >>= \ea -> case ea of
Left e ->
fail $ "Unexpected error in " ++ nm ++ " ("
++ descrip ++ "): " ++ show e
Right x -> return x
execH :: Monad m =>
String
-> env
-> String
-> HalfsT HalfsError (Maybe env) m b
-> PropertyM m b
execH nm env descrip = execE nm descrip . runHalfs env
execHNoEnv :: Monad m =>
String
-> String
-> HalfsT HalfsError (Maybe env) m b
-> PropertyM m b
execHNoEnv nm descrip = execE nm descrip . runHalfsNoEnv
expectErr :: HalfsCapable b t r l m =>
(HalfsError -> Bool)
-> String
-> HalfsM b r l m a
-> HalfsState b r l m
-> PropertyM m ()
expectErr expectedP rsn act fs =
runH fs act >>= \e -> case e of
Left err | expectedP err -> return ()
Left err -> unexpectedErr err
Right _ -> fail rsn
unexpectedErr :: (Monad m, Show a) => a -> PropertyM m ()
unexpectedErr = fail . (++) "Expected failure, but not: " . show
expectErrno :: Monad m => Errno -> Either HalfsError a -> PropertyM m ()
expectErrno e (Left (HE_ErrnoAnnotated _ errno)) = assert (errno == e)
expectErrno _ _ = assert False
checkFileStat :: (HalfsCapable b t r l m, Integral a) =>
FileStat t
-> a -- expected filesize
-> FileType -- expected filetype
-> FileMode -- expected filemode
-> UserID -- expected userid
-> GroupID -- expected groupid
-> a -- expected allocated block count
-> (t -> Bool) -- access time predicate
-> (t -> Bool) -- modification time predicate
-> (t -> Bool) -- status change time predicate
-> PropertyM m ()
checkFileStat st expFileSz expFileTy expMode
expUsr expGrp expNumBlocks accessp modifyp changep = do
mapM_ assert
[ fsSize st == fromIntegral expFileSz
, fsType st == expFileTy
, fsMode st == expMode
, fsUID st == expUsr
, fsGID st == expGrp
, fsNumBlocks st == fromIntegral expNumBlocks
, accessp (fsAccessTime st)
, modifyp (fsModifyTime st)
, changep (fsChangeTime st)
]
assertMsg :: Monad m => String -> String -> Bool -> PropertyM m ()
assertMsg _ _ True = return ()
assertMsg ctx dtls False = do
fail $ "(" ++ ctx ++ ": " ++ dtls ++ ")"
-- Using the current allocation scheme and inode/cont distinction,
-- determine how many blocks (of the given size, in bytes) are required
-- to store the given data size, in bytes.
calcExpBlockCount :: Integral a =>
Word64 -- block size
-> Word64 -- addresses (#blocks) per inode
-> Word64 -- addresses (#blocks) per cont
-> a -- data size
-> a -- expected number of blocks
calcExpBlockCount bs api apc dataSz = fromIntegral $
if dsz > bpi
then 1 -- inode block
+ api -- number of blocks in full inode
+ (dsz - bpi) `divCeil` bpc -- number of blocks required for conts
         + (dsz - bpi) `divCeil` bs  -- number of blocks required for data
else 1 -- inode block
+ (dsz `divCeil` bs) -- number of blocks required for data
where
dsz = fromIntegral dataSz
bpi = api * bs
bpc = apc * bs
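-- An illustrative worked example (the numbers are chosen here, not taken
-- from the original source): with a 512-byte block size, 10 addresses per
-- inode and 12 addresses per cont, a 6000-byte file overflows the inode
-- (bpi = 10 * 512 = 5120 bytes), so:
--
-- > calcExpBlockCount 512 10 12 (6000 :: Int)
-- >   -- = 1                               (inode block)
-- >   --   + 10                            (blocks addressed by the inode)
-- >   --   + (6000 - 5120) `divCeil` 6144  (= 1 cont block)
-- >   --   + (6000 - 5120) `divCeil` 512   (= 2 data blocks)
-- >   -- = 14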
defaultUser :: UserID
defaultUser = rootUser
defaultGroup :: GroupID
defaultGroup = rootGroup
rootDirPerms, defaultDirPerms, defaultFilePerms :: FileMode
rootDirPerms = FileMode [Read,Write,Execute] [] []
defaultDirPerms = FileMode [Read,Write,Execute] [Read, Execute] [Read, Execute]
defaultFilePerms = FileMode [Read,Write] [Read] [Read]
defaultMount :: HalfsCapable b t r l m =>
BlockDevice m -> HalfsM b r l m (HalfsState b r l m)
defaultMount dev = mount dev defaultUser defaultGroup defaultDirPerms
--------------------------------------------------------------------------------
-- Block utilization checking combinators
rscUtil :: HalfsCapable b t r l m =>
(Word64 -> Word64 -> Bool) -- ^ predicate on after/before block cnts
-> HalfsState b r l m -- ^ the filesystem state
-> PropertyM m a -- ^ the action to check
-> PropertyM m ()
rscUtil p fs act = do b <- getFree fs; _ <- act; a <- getFree fs; assert (p a b)
where getFree = sreadRef . bmNumFree . hsBlockMap
blocksUnallocd :: HalfsCapable b t r l m =>
Word64 -- ^ expected #blocks unallocated
-> HalfsState b r l m -- ^ the filesystem state
-> PropertyM m a -- ^ the action to check
-> PropertyM m ()
blocksUnallocd x = rscUtil (\a b -> a >= b && a - b == x)
blocksAllocd :: HalfsCapable b t r l m =>
Word64 -- ^ expected #blocks unallocated
-> HalfsState b r l m -- ^ the filesystem state
-> PropertyM m a -- ^ the action to check
-> PropertyM m ()
blocksAllocd x = rscUtil (\a b -> b >= a && b - a == x)
zeroOrMoreBlocksAllocd :: HalfsCapable b t r l m =>
HalfsState b r l m -- ^ the filesystem state
-> PropertyM m a -- ^ the action to check
-> PropertyM m ()
zeroOrMoreBlocksAllocd = rscUtil (<=)
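-- An illustrative sketch ('expectedCount', 'fs' and 'someWriteAction' are
-- hypothetical placeholders): wrap a filesystem action in one of the
-- combinators above to assert its effect on the free-block count.
--
-- > blocksAllocd expectedCount fs $ runH fs someWriteAction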
--------------------------------------------------------------------------------
-- Debugging helpers
dumpfs :: HalfsCapable b t r l m =>
HalfsM b r l m String
dumpfs = do
sbRef <- hasks hsSuperBlock
dump <- dumpfs' 2 "/\n" =<< rootDir `fmap` readRef sbRef
return $ "=== fs dump begin ===\n"
++ dump
++ "=== fs dump end ===\n"
where
dumpfs' i ipfx inr = do
contents <- withDirectory inr $ \dh -> do
withDHLock dh $ readRef (dhContents dh)
foldM (\dumpAcc (path, dirEnt) -> do
sub <- if deType dirEnt == Directory
&& path /= "."
&& path /= ".."
then dumpfs' (i+2) "" (deInode dirEnt)
else return ""
return $ dumpAcc
++ replicate i ' '
++ path
++ let inr' = deInode dirEnt in
case deType dirEnt of
RegularFile -> " (" ++ show inr' ++ ") (file)\n"
Directory -> " (" ++ show inr' ++ ") (directory)\n" ++ sub
Symlink -> " (" ++ show inr' ++ ") (symlink)\n"
_ -> error "unknown file type"
)
ipfx (M.toList contents)
| hackern/halfs | test/src/Tests/Utils.hs | bsd-3-clause | 11,787 | 0 | 23 | 3,801 | 3,506 | 1,797 | 1,709 | 269 | 5 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE UndecidableInstances #-}
#if __GLASGOW_HASKELL__ >= 706
{-# LANGUAGE PolyKinds #-}
#endif
#include "overlapping-compat.h"
-- TODO: Drop this when we remove support for Data.Attoparsec.Number
{-# OPTIONS_GHC -fno-warn-deprecations #-}
module Data.Aeson.Types.ToJSON
(
-- * Core JSON classes
ToJSON(..)
-- * Liftings to unary and binary type constructors
, ToJSON1(..)
, toJSON1
, toEncoding1
, ToJSON2(..)
, toJSON2
, toEncoding2
-- * Generic JSON classes
, GToJSON(..)
, GToEncoding(..)
, ToArgs(..)
, genericToJSON
, genericToEncoding
, genericLiftToJSON
, genericLiftToEncoding
-- * Classes and types for map keys
, ToJSONKey(..)
, ToJSONKeyFunction(..)
, toJSONKeyText
, contramapToJSONKeyFunction
-- * Object key-value pairs
, KeyValue(..)
-- * Functions needed for documentation
-- * Encoding functions
, listEncoding
, listValue
) where
import Prelude ()
import Prelude.Compat
import Control.Applicative (Const(..))
import Control.Monad.ST (ST)
import Data.Aeson.Encoding (Encoding, Encoding', Series, dict, emptyArray_)
import Data.Aeson.Encoding.Internal ((>*<), (><))
import Data.Aeson.Internal.Functions (mapHashKeyVal, mapKeyVal)
import Data.Aeson.Types.Generic (AllNullary, False, IsRecord(..), One, ProductSize, Tagged2(..), True, Zero, productSize)
import Data.Aeson.Types.Internal
import Data.Attoparsec.Number (Number(..))
import Data.Bits (unsafeShiftR)
import Data.DList (DList)
import Data.Fixed (Fixed, HasResolution)
import Data.Foldable (toList)
import Data.Functor.Compose (Compose(..))
import Data.Functor.Identity (Identity(..))
import Data.Functor.Product (Product(..))
import Data.Functor.Sum (Sum(..))
import Data.Int (Int16, Int32, Int64, Int8)
import Data.List (intersperse)
import Data.List.NonEmpty (NonEmpty(..))
import Data.Monoid ((<>))
import Data.Proxy (Proxy(..))
import Data.Ratio (Ratio, denominator, numerator)
import Data.Scientific (Scientific)
import Data.Tagged (Tagged(..))
import Data.Text (Text, pack)
import Data.Time (Day, LocalTime, NominalDiffTime, TimeOfDay, UTCTime, ZonedTime)
import Data.Time.Format (FormatTime, formatTime)
import Data.Time.Locale.Compat (defaultTimeLocale)
import Data.Vector (Vector)
import Data.Version (Version, showVersion)
import Data.Word (Word16, Word32, Word64, Word8)
import Foreign.Storable (Storable)
import GHC.Generics
import Numeric.Natural (Natural)
import qualified Data.Aeson.Encoding as E
import qualified Data.Aeson.Encoding.Internal as E (InArray, colon, comma, econcat, empty, retagEncoding, wrapObject)
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
import qualified Data.DList as DList
import qualified Data.HashMap.Strict as H
import qualified Data.HashSet as HashSet
import qualified Data.IntMap as IntMap
import qualified Data.IntSet as IntSet
import qualified Data.List.NonEmpty as NE
import qualified Data.Map as M
import qualified Data.Monoid as Monoid
import qualified Data.Scientific as Scientific
import qualified Data.Semigroup as Semigroup
import qualified Data.Sequence as Seq
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.Lazy as LT
import qualified Data.Tree as Tree
import qualified Data.Vector as V
import qualified Data.Vector.Generic as VG
import qualified Data.Vector.Mutable as VM
import qualified Data.Vector.Primitive as VP
import qualified Data.Vector.Storable as VS
import qualified Data.Vector.Unboxed as VU
#if !(MIN_VERSION_bytestring(0,10,0))
import Foreign.ForeignPtr (withForeignPtr)
import Foreign.Marshal.Utils (copyBytes)
import Foreign.Ptr (plusPtr)
import qualified Data.ByteString.Internal as S
import qualified Data.ByteString.Lazy.Internal as L
#endif
toJSONPair :: (a -> Value) -> (b -> Value) -> (a, b) -> Value
toJSONPair a b = liftToJSON2 a (listValue a) b (listValue b)
{-# INLINE toJSONPair #-}
realFloatToJSON :: RealFloat a => a -> Value
realFloatToJSON d
| isNaN d || isInfinite d = Null
| otherwise = Number $ Scientific.fromFloatDigits d
{-# INLINE realFloatToJSON #-}
-------------------------------------------------------------------------------
-- Generics
-------------------------------------------------------------------------------
-- | Class of generic representation types that can be converted to
-- JSON.
class GToJSON arity f where
-- | This method (applied to 'defaultOptions') is used as the
-- default generic implementation of 'toJSON' (if the @arity@ is 'Zero')
-- or 'liftToJSON' (if the @arity@ is 'One').
gToJSON :: Options -> ToArgs Value arity a -> f a -> Value
-- | Class of generic representation types that can be converted to
-- a JSON 'Encoding'.
class GToEncoding arity f where
-- | This method (applied to 'defaultOptions') can be used as the
-- default generic implementation of 'toEncoding' (if the @arity@ is 'Zero')
-- or 'liftToEncoding' (if the @arity@ is 'One').
gToEncoding :: Options -> ToArgs Encoding arity a -> f a -> Encoding
-- | A 'ToArgs' value either stores nothing (for 'ToJSON') or it stores the two
-- function arguments that encode occurrences of the type parameter (for
-- 'ToJSON1').
data ToArgs res arity a where
NoToArgs :: ToArgs res Zero a
To1Args :: (a -> res) -> ([a] -> res) -> ToArgs res One a
-- | A configurable generic JSON creator. This function applied to
-- 'defaultOptions' is used as the default for 'toJSON' when the type
-- is an instance of 'Generic'.
genericToJSON :: (Generic a, GToJSON Zero (Rep a))
=> Options -> a -> Value
genericToJSON opts = gToJSON opts NoToArgs . from
-- | A configurable generic JSON creator. This function applied to
-- 'defaultOptions' is used as the default for 'liftToJSON' when the type
-- is an instance of 'Generic1'.
genericLiftToJSON :: (Generic1 f, GToJSON One (Rep1 f))
=> Options -> (a -> Value) -> ([a] -> Value)
-> f a -> Value
genericLiftToJSON opts tj tjl = gToJSON opts (To1Args tj tjl) . from1
-- | A configurable generic JSON encoder. This function applied to
-- 'defaultOptions' is used as the default for 'toEncoding' when the type
-- is an instance of 'Generic'.
genericToEncoding :: (Generic a, GToEncoding Zero (Rep a))
=> Options -> a -> Encoding
genericToEncoding opts = gToEncoding opts NoToArgs . from
-- | A configurable generic JSON encoder. This function applied to
-- 'defaultOptions' is used as the default for 'liftToEncoding' when the type
-- is an instance of 'Generic1'.
genericLiftToEncoding :: (Generic1 f, GToEncoding One (Rep1 f))
=> Options -> (a -> Encoding) -> ([a] -> Encoding)
-> f a -> Encoding
genericLiftToEncoding opts te tel = gToEncoding opts (To1Args te tel) . from1
-------------------------------------------------------------------------------
-- Class
-------------------------------------------------------------------------------
-- | A type that can be converted to JSON.
--
-- An example type and instance:
--
-- @
-- \-- Allow ourselves to write 'Text' literals.
-- {-\# LANGUAGE OverloadedStrings #-}
--
-- data Coord = Coord { x :: Double, y :: Double }
--
-- instance ToJSON Coord where
-- toJSON (Coord x y) = 'object' [\"x\" '.=' x, \"y\" '.=' y]
--
-- toEncoding (Coord x y) = 'pairs' (\"x\" '.=' x '<>' \"y\" '.=' y)
-- @
--
-- Instead of manually writing your 'ToJSON' instance, there are two options
-- to do it automatically:
--
-- * "Data.Aeson.TH" provides Template Haskell functions which will derive an
-- instance at compile time. The generated instance is optimized for your type
-- so will probably be more efficient than the following two options:
--
-- * The compiler can provide a default generic implementation for
-- 'toJSON'.
--
-- To use the second, simply add a @deriving 'Generic'@ clause to your
-- datatype and declare a 'ToJSON' instance for your datatype without giving
-- definitions for 'toJSON' or 'toEncoding'.
--
-- For example, the previous example can be simplified to a more
-- minimal instance:
--
-- @
-- {-\# LANGUAGE DeriveGeneric \#-}
--
-- import "GHC.Generics"
--
-- data Coord = Coord { x :: Double, y :: Double } deriving 'Generic'
--
-- instance ToJSON Coord where
-- toEncoding = 'genericToEncoding' 'defaultOptions'
-- @
--
-- Why do we provide an implementation for 'toEncoding' here? The
-- 'toEncoding' function is a relatively new addition to this class.
-- To allow users of older versions of this library to upgrade without
-- having to edit all of their instances or encounter surprising
-- incompatibilities, the default implementation of 'toEncoding' uses
-- 'toJSON'. This produces correct results, but since it performs an
-- intermediate conversion to a 'Value', it will be less efficient
-- than directly emitting an 'Encoding'. Our one-liner definition of
-- 'toEncoding' above bypasses the intermediate 'Value'.
--
-- If @DefaultSignatures@ doesn't give exactly the results you want,
-- you can customize the generic encoding with only a tiny amount of
-- effort, using 'genericToJSON' and 'genericToEncoding' with your
-- preferred 'Options':
--
-- @
-- instance ToJSON Coord where
-- toJSON = 'genericToJSON' 'defaultOptions'
-- toEncoding = 'genericToEncoding' 'defaultOptions'
-- @
class ToJSON a where
-- | Convert a Haskell value to a JSON-friendly intermediate type.
toJSON :: a -> Value
default toJSON :: (Generic a, GToJSON Zero (Rep a)) => a -> Value
toJSON = genericToJSON defaultOptions
-- | Encode a Haskell value as JSON.
--
-- The default implementation of this method creates an
-- intermediate 'Value' using 'toJSON'. This provides
-- source-level compatibility for people upgrading from older
-- versions of this library, but obviously offers no performance
-- advantage.
--
-- To benefit from direct encoding, you /must/ provide an
-- implementation for this method. The easiest way to do so is by
-- having your types implement 'Generic' using the @DeriveGeneric@
-- extension, and then have GHC generate a method body as follows.
--
-- @
-- instance ToJSON Coord where
-- toEncoding = 'genericToEncoding' 'defaultOptions'
-- @
toEncoding :: a -> Encoding
toEncoding = E.value . toJSON
{-# INLINE toEncoding #-}
toJSONList :: [a] -> Value
toJSONList = listValue toJSON
{-# INLINE toJSONList #-}
toEncodingList :: [a] -> Encoding
toEncodingList = listEncoding toEncoding
{-# INLINE toEncodingList #-}
-------------------------------------------------------------------------------
-- Object key-value pairs
-------------------------------------------------------------------------------
-- | A key-value pair for encoding a JSON object.
class KeyValue kv where
(.=) :: ToJSON v => Text -> v -> kv
infixr 8 .=
instance KeyValue Series where
name .= value = E.pair name (toEncoding value)
{-# INLINE (.=) #-}
instance KeyValue Pair where
name .= value = (name, toJSON value)
{-# INLINE (.=) #-}
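-- An illustrative sketch (not part of the original module): the same ('.=')
-- spelling builds either a 'Value' or an 'Encoding', depending on which
-- instance is selected by the surrounding function.
--
-- > object [ "x" .= (1 :: Int), "y" .= True ]   -- builds a 'Value'
-- > E.pairs ("x" .= (1 :: Int) <> "y" .= True)  -- builds an 'Encoding'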
-------------------------------------------------------------------------------
-- Classes and types for map keys
-------------------------------------------------------------------------------
-- | Typeclass for types that can be used as the key of a map-like container
-- (like 'Map' or 'HashMap'). For example, since 'Text' has a 'ToJSONKey'
-- instance and 'Char' has a 'ToJSON' instance, we can encode a value of
-- type 'Map' 'Text' 'Char':
--
-- >>> LBC8.putStrLn $ encode $ Map.fromList [("foo" :: Text, 'a')]
-- {"foo":"a"}
--
-- Since 'Int' also has a 'ToJSONKey' instance, we can similarly write:
--
-- >>> LBC8.putStrLn $ encode $ Map.fromList [(5 :: Int, 'a')]
-- {"5":"a"}
--
-- JSON documents only accept strings as object keys. For any type
-- from @base@ that has a natural textual representation, it can be
-- expected that its 'ToJSONKey' instance will choose that representation.
--
-- For data types that lack a natural textual representation, an alternative
-- is provided. The map-like container is represented as a JSON array
-- instead of a JSON object. Each value in the array is an array with
-- exactly two values. The first is the key and the second is the value.
--
-- For example, values of type '[Text]' cannot be encoded to a
-- string, so a 'Map' with keys of type '[Text]' is encoded as follows:
--
-- >>> LBC8.putStrLn $ encode $ Map.fromList [(["foo","bar","baz" :: Text], 'a')]
-- [[["foo","bar","baz"],"a"]]
--
-- The default implementation of 'ToJSONKey' chooses this method of
-- encoding a key, using the 'ToJSON' instance of the type.
--
-- To use your own data type as the key in a map, all that is needed
-- is to write a 'ToJSONKey' (and possibly a 'FromJSONKey') instance
-- for it. If the type cannot be trivially converted to and from 'Text',
-- it is recommended that 'ToJSONKeyValue' is used. Since the default
-- implementations of the typeclass methods can build this from a
-- 'ToJSON' instance, there is nothing that needs to be written:
--
-- > data Foo = Foo { fooAge :: Int, fooName :: Text }
-- > deriving (Eq,Ord,Generic)
-- > instance ToJSON Foo
-- > instance ToJSONKey Foo
--
-- That's it. We can now write:
--
-- >>> let m = Map.fromList [(Foo 4 "bar",'a'),(Foo 6 "arg",'b')]
-- >>> LBC8.putStrLn $ encode m
-- [[{"fooName":"bar","fooAge":4},"a"],[{"fooName":"arg","fooAge":6},"b"]]
--
-- The next case to consider is if we have a type that is a
-- newtype wrapper around 'Text'. The recommended approach is to use
-- generalized newtype deriving:
--
-- > newtype RecordId = RecordId { getRecordId :: Text}
-- > deriving (Eq,Ord,ToJSONKey)
--
-- Then we may write:
--
-- >>> LBC8.putStrLn $ encode $ Map.fromList [(RecordId "abc",'a')]
-- {"abc":"a"}
--
-- Simple sum types are a final case worth considering. Suppose we have:
--
-- > data Color = Red | Green | Blue
-- > deriving (Show,Read,Eq,Ord)
--
-- It is possible to get the 'ToJSONKey' instance for free as we did
-- with 'Foo'. However, in this case, we have a natural way to go to
-- and from 'Text' that does not require any escape sequences. So, in
-- this example, 'ToJSONKeyText' will be used instead of 'ToJSONKeyValue'.
-- The 'Show' instance can be used to help write 'ToJSONKey':
--
-- > instance ToJSONKey Color where
-- > toJSONKey = ToJSONKeyText f g
-- > where f = Text.pack . show
-- > g = text . Text.pack . show
-- > -- text function is from Data.Aeson.Encoding
--
-- The situation of needing to turn a function @a -> Text@ into
-- a 'ToJSONKeyFunction' is common enough that a special combinator
-- is provided for it. The above instance can be rewritten as:
--
-- > instance ToJSONKey Color where
-- > toJSONKey = toJSONKeyText (Text.pack . show)
--
-- The performance of the above instance can be improved by
-- not using 'String' as an intermediate step when converting to
-- 'Text'. One option for improving performance would be to use
-- template haskell machinery from the @text-show@ package. However,
-- even with that approach, the 'Encoding' (a wrapper around a bytestring
-- builder) is generated by encoding the 'Text' to a 'ByteString',
-- an intermediate step that could be avoided. The fastest possible
-- implementation would be:
--
-- > -- Assuming that OverloadedStrings is enabled
-- > instance ToJSONKey Color where
-- > toJSONKey = ToJSONKeyText f g
-- > where f x = case x of {Red -> "Red";Green ->"Green";Blue -> "Blue"}
-- > g x = case x of {Red -> text "Red";Green -> text "Green";Blue -> text "Blue"}
-- > -- text function is from Data.Aeson.Encoding
--
-- This works because GHC can lift the encoded values out of the case
-- statements, which means that they are only evaluated once. This
-- approach should only be used when there is a serious need to
-- maximize performance.
class ToJSONKey a where
-- | Strategy for rendering the key for a map-like container.
toJSONKey :: ToJSONKeyFunction a
default toJSONKey :: ToJSON a => ToJSONKeyFunction a
toJSONKey = ToJSONKeyValue toJSON toEncoding
    -- | This is similar in spirit to the 'showList' method of 'Show'.
-- It makes it possible to give 'String' keys special treatment
-- without using @OverlappingInstances@. End users should always
-- be able to use the default implementation of this method.
toJSONKeyList :: ToJSONKeyFunction [a]
default toJSONKeyList :: ToJSON a => ToJSONKeyFunction [a]
toJSONKeyList = ToJSONKeyValue toJSON toEncoding
data ToJSONKeyFunction a
= ToJSONKeyText !(a -> Text) !(a -> Encoding' Text)
-- ^ key is encoded to string, produces object
| ToJSONKeyValue !(a -> Value) !(a -> Encoding)
-- ^ key is encoded to value, produces array
-- | Helper for creating textual keys.
--
-- @
-- instance 'ToJSONKey' MyKey where
-- 'toJSONKey' = 'toJSONKeyText' myKeyToText
-- where
-- myKeyToText = Text.pack . show -- or showt from text-show
-- @
toJSONKeyText :: (a -> Text) -> ToJSONKeyFunction a
toJSONKeyText f = ToJSONKeyText f (E.text . f)
-- | TODO: should this be exported?
toJSONKeyTextEnc :: (a -> Encoding' Text) -> ToJSONKeyFunction a
toJSONKeyTextEnc e = ToJSONKeyText tot e
where
-- TODO: dropAround is also used in stringEncoding, which is unfortunate atm
tot = T.dropAround (== '"')
. T.decodeLatin1
. lazyToStrictByteString
. E.encodingToLazyByteString
. e
-- | Contravariant map, as 'ToJSONKeyFunction' is a contravariant functor.
contramapToJSONKeyFunction :: (b -> a) -> ToJSONKeyFunction a -> ToJSONKeyFunction b
contramapToJSONKeyFunction h x = case x of
ToJSONKeyText f g -> ToJSONKeyText (f . h) (g . h)
ToJSONKeyValue f g -> ToJSONKeyValue (f . h) (g . h)
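-- An illustrative sketch (the newtype below is hypothetical): reuse the
-- 'Text' key strategy for a wrapper type by contramapping the unwrapping
-- function.
--
-- > newtype UserName = UserName { unUserName :: Text }
-- > instance ToJSONKey UserName where
-- >   toJSONKey = contramapToJSONKeyFunction unUserName toJSONKey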
-------------------------------------------------------------------------------
-- Liftings of FromJSON and ToJSON to unary and binary type constructors
-------------------------------------------------------------------------------
-- | Lifting of the 'ToJSON' class to unary type constructors.
--
-- Instead of manually writing your 'ToJSON1' instance, there are two options
-- to do it automatically:
--
-- * "Data.Aeson.TH" provides Template Haskell functions which will derive an
-- instance at compile time. The generated instance is optimized for your type
-- so will probably be more efficient than the following two options:
--
-- * The compiler can provide a default generic implementation for
-- 'toJSON1'.
--
-- To use the second, simply add a @deriving 'Generic1'@ clause to your
-- datatype and declare a 'ToJSON1' instance for your datatype without giving
-- definitions for 'liftToJSON' or 'liftToEncoding'.
--
-- For example:
--
-- @
-- {-\# LANGUAGE DeriveGeneric \#-}
--
-- import "GHC.Generics"
--
-- data Pair a b = Pair { pairFst :: a, pairSnd :: b } deriving 'Generic1'
--
-- instance ToJSON a => ToJSON1 (Pair a)
-- @
--
-- If @DefaultSignatures@ doesn't give exactly the results you want,
-- you can customize the generic encoding with only a tiny amount of
-- effort, using 'genericLiftToJSON' and 'genericLiftToEncoding' with
-- your preferred 'Options':
--
-- @
-- instance ToJSON a => ToJSON1 (Pair a) where
-- liftToJSON = 'genericLiftToJSON' 'defaultOptions'
-- liftToEncoding = 'genericLiftToEncoding' 'defaultOptions'
-- @
class ToJSON1 f where
liftToJSON :: (a -> Value) -> ([a] -> Value) -> f a -> Value
default liftToJSON :: (Generic1 f, GToJSON One (Rep1 f))
=> (a -> Value) -> ([a] -> Value) -> f a -> Value
liftToJSON = genericLiftToJSON defaultOptions
liftToJSONList :: (a -> Value) -> ([a] -> Value) -> [f a] -> Value
liftToJSONList f g = listValue (liftToJSON f g)
liftToEncoding :: (a -> Encoding) -> ([a] -> Encoding) -> f a -> Encoding
default liftToEncoding :: (Generic1 f, GToEncoding One (Rep1 f))
=> (a -> Encoding) -> ([a] -> Encoding)
-> f a -> Encoding
liftToEncoding = genericLiftToEncoding defaultOptions
liftToEncodingList :: (a -> Encoding) -> ([a] -> Encoding) -> [f a] -> Encoding
liftToEncodingList f g = listEncoding (liftToEncoding f g)
-- | Lift the standard 'toJSON' function through the type constructor.
toJSON1 :: (ToJSON1 f, ToJSON a) => f a -> Value
toJSON1 = liftToJSON toJSON toJSONList
{-# INLINE toJSON1 #-}
-- | Lift the standard 'toEncoding' function through the type constructor.
toEncoding1 :: (ToJSON1 f, ToJSON a) => f a -> Encoding
toEncoding1 = liftToEncoding toEncoding toEncodingList
{-# INLINE toEncoding1 #-}
-- | Lifting of the 'ToJSON' class to binary type constructors.
--
-- Instead of manually writing your 'ToJSON2' instance, "Data.Aeson.TH"
-- provides Template Haskell functions which will derive an instance at compile time.
--
-- The compiler cannot provide a default generic implementation for 'liftToJSON2',
-- unlike 'toJSON' and 'liftToJSON'.
class ToJSON2 f where
liftToJSON2 :: (a -> Value) -> ([a] -> Value) -> (b -> Value) -> ([b] -> Value) -> f a b -> Value
liftToJSONList2 :: (a -> Value) -> ([a] -> Value) -> (b -> Value) -> ([b] -> Value) -> [f a b] -> Value
liftToJSONList2 fa ga fb gb = listValue (liftToJSON2 fa ga fb gb)
liftToEncoding2 :: (a -> Encoding) -> ([a] -> Encoding) -> (b -> Encoding) -> ([b] -> Encoding) -> f a b -> Encoding
liftToEncodingList2 :: (a -> Encoding) -> ([a] -> Encoding) -> (b -> Encoding) -> ([b] -> Encoding) -> [f a b] -> Encoding
liftToEncodingList2 fa ga fb gb = listEncoding (liftToEncoding2 fa ga fb gb)
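-- An illustrative sketch (the @Pair@ type below is hypothetical): a
-- hand-written 'ToJSON2' instance that encodes both parameters into a
-- two-element array.
--
-- > data Pair a b = Pair a b
-- > instance ToJSON2 Pair where
-- >   liftToJSON2 fa _ fb _ (Pair a b)     = Array (V.fromList [fa a, fb b])
-- >   liftToEncoding2 fa _ fb _ (Pair a b) = E.list id [fa a, fb b]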
-- | Lift the standard 'toJSON' function through the type constructor.
toJSON2 :: (ToJSON2 f, ToJSON a, ToJSON b) => f a b -> Value
toJSON2 = liftToJSON2 toJSON toJSONList toJSON toJSONList
{-# INLINE toJSON2 #-}
-- | Lift the standard 'toEncoding' function through the type constructor.
toEncoding2 :: (ToJSON2 f, ToJSON a, ToJSON b) => f a b -> Encoding
toEncoding2 = liftToEncoding2 toEncoding toEncodingList toEncoding toEncodingList
{-# INLINE toEncoding2 #-}
-------------------------------------------------------------------------------
-- Encoding functions
-------------------------------------------------------------------------------
-- | Helper function to use with 'liftToEncoding'.
-- Useful when writing own 'ToJSON1' instances.
--
-- @
-- newtype F a = F [a]
--
-- -- This instance encodes String as an array of chars
-- instance 'ToJSON1' F where
-- 'liftToJSON' tj _ (F xs) = 'liftToJSON' tj ('listValue' tj) xs
-- 'liftToEncoding' te _ (F xs) = 'liftToEncoding' te ('listEncoding' te) xs
--
-- instance 'Data.Aeson.FromJSON.FromJSON1' F where
-- 'Data.Aeson.FromJSON.liftParseJSON' p _ v = F \<$\> 'Data.Aeson.FromJSON.liftParseJSON' p ('Data.Aeson.FromJSON.listParser' p) v
-- @
listEncoding :: (a -> Encoding) -> [a] -> Encoding
listEncoding = E.list
{-# INLINE listEncoding #-}
-- | Helper function to use with 'liftToJSON', see 'listEncoding'.
listValue :: (a -> Value) -> [a] -> Value
listValue f = Array . V.fromList . map f
{-# INLINE listValue #-}
-------------------------------------------------------------------------------
-- [] instances
-------------------------------------------------------------------------------
-- These are needed for key-class default definitions
instance ToJSON1 [] where
liftToJSON _ to' = to'
{-# INLINE liftToJSON #-}
liftToEncoding _ to' = to'
{-# INLINE liftToEncoding #-}
instance (ToJSON a) => ToJSON [a] where
toJSON = toJSON1
{-# INLINE toJSON #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
-------------------------------------------------------------------------------
-- Generic toJSON / toEncoding
-------------------------------------------------------------------------------
--------------------------------------------------------------------------------
-- Generic toJSON
instance OVERLAPPABLE_ (GToJSON arity a) => GToJSON arity (M1 i c a) where
-- Meta-information, which is not handled elsewhere, is ignored:
gToJSON opts targs = gToJSON opts targs . unM1
instance (ToJSON a) => GToJSON arity (K1 i a) where
-- Constant values are encoded using their ToJSON instance:
gToJSON _opts _ = toJSON . unK1
instance GToJSON One Par1 where
-- Direct occurrences of the last type parameter are encoded with the
-- function passed in as an argument:
gToJSON _opts (To1Args tj _) = tj . unPar1
instance (ToJSON1 f) => GToJSON One (Rec1 f) where
-- Recursive occurrences of the last type parameter are encoded using their
-- ToJSON1 instance:
gToJSON _opts (To1Args tj tjl) = liftToJSON tj tjl . unRec1
instance GToJSON arity U1 where
-- Empty constructors are encoded to an empty array:
gToJSON _opts _ _ = emptyArray
instance (ConsToJSON arity a) => GToJSON arity (C1 c a) where
-- Constructors need to be encoded differently depending on whether they're
-- a record or not. This distinction is made by 'consToJSON':
gToJSON opts targs = consToJSON opts targs . unM1
instance ( WriteProduct arity a, WriteProduct arity b
, ProductSize a, ProductSize b
) => GToJSON arity (a :*: b) where
-- Products are encoded to an array. Here we allocate a mutable vector of
-- the same size as the product and write the product's elements to it using
-- 'writeProduct':
gToJSON opts targs p =
Array $ V.create $ do
mv <- VM.unsafeNew lenProduct
writeProduct opts targs mv 0 lenProduct p
return mv
where
lenProduct = (unTagged2 :: Tagged2 (a :*: b) Int -> Int)
productSize
instance ( AllNullary (a :+: b) allNullary
, SumToJSON arity (a :+: b) allNullary
) => GToJSON arity (a :+: b) where
-- If all constructors of a sum datatype are nullary and the
-- 'allNullaryToStringTag' option is set they are encoded to
-- strings. This distinction is made by 'sumToJSON':
gToJSON opts targs = (unTagged :: Tagged allNullary Value -> Value)
. sumToJSON opts targs
instance (ToJSON1 f, GToJSON One g) => GToJSON One (f :.: g) where
-- If an occurrence of the last type parameter is nested inside two
-- composed types, it is encoded by using the outermost type's ToJSON1
-- instance to generically encode the innermost type:
gToJSON opts targs =
let gtj = gToJSON opts targs in
liftToJSON gtj (listValue gtj) . unComp1
--------------------------------------------------------------------------------
-- Generic toEncoding
instance OVERLAPPABLE_ (GToEncoding arity a) => GToEncoding arity (M1 i c a) where
-- Meta-information, which is not handled elsewhere, is ignored:
gToEncoding opts targs = gToEncoding opts targs . unM1
instance (ToJSON a) => GToEncoding arity (K1 i a) where
-- Constant values are encoded using their ToJSON instance:
gToEncoding _opts _ = toEncoding . unK1
instance GToEncoding One Par1 where
-- Direct occurrences of the last type parameter are encoded with the
-- function passed in as an argument:
gToEncoding _opts (To1Args te _) = te . unPar1
instance (ToJSON1 f) => GToEncoding One (Rec1 f) where
-- Recursive occurrences of the last type parameter are encoded using their
-- ToEncoding1 instance:
gToEncoding _opts (To1Args te tel) = liftToEncoding te tel . unRec1
instance GToEncoding arity U1 where
-- Empty constructors are encoded to an empty array:
gToEncoding _opts _ _ = E.emptyArray_
instance (ConsToEncoding arity a) => GToEncoding arity (C1 c a) where
-- Constructors need to be encoded differently depending on whether they're
-- a record or not. This distinction is made by 'consToEncoding':
gToEncoding opts targs = consToEncoding opts targs . unM1
instance ( EncodeProduct arity a
, EncodeProduct arity b
) => GToEncoding arity (a :*: b) where
    -- Products are encoded to an array. The product's elements are encoded
    -- in sequence by 'encodeProduct' and wrapped in array brackets here:
gToEncoding opts targs p = E.list E.retagEncoding [encodeProduct opts targs p]
instance ( AllNullary (a :+: b) allNullary
, SumToEncoding arity (a :+: b) allNullary
) => GToEncoding arity (a :+: b) where
-- If all constructors of a sum datatype are nullary and the
-- 'allNullaryToStringTag' option is set they are encoded to
-- strings. This distinction is made by 'sumToEncoding':
gToEncoding opts targs
= (unTagged :: Tagged allNullary Encoding -> Encoding)
. sumToEncoding opts targs
instance (ToJSON1 f, GToEncoding One g) => GToEncoding One (f :.: g) where
-- If an occurrence of the last type parameter is nested inside two
-- composed types, it is encoded by using the outermost type's ToJSON1
-- instance to generically encode the innermost type:
gToEncoding opts targs =
let gte = gToEncoding opts targs in
liftToEncoding gte (listEncoding gte) . unComp1
--------------------------------------------------------------------------------
class SumToJSON arity f allNullary where
sumToJSON :: Options -> ToArgs Value arity a
-> f a -> Tagged allNullary Value
instance ( GetConName f
, TaggedObjectPairs arity f
, ObjectWithSingleFieldObj arity f
, TwoElemArrayObj arity f
, UntaggedValueObj arity f
) => SumToJSON arity f True where
sumToJSON opts targs
| allNullaryToStringTag opts = Tagged . String . pack
. constructorTagModifier opts . getConName
| otherwise = Tagged . nonAllNullarySumToJSON opts targs
instance ( TwoElemArrayObj arity f
, TaggedObjectPairs arity f
, ObjectWithSingleFieldObj arity f
, UntaggedValueObj arity f
) => SumToJSON arity f False where
sumToJSON opts targs = Tagged . nonAllNullarySumToJSON opts targs
nonAllNullarySumToJSON :: ( TwoElemArrayObj arity f
, TaggedObjectPairs arity f
, ObjectWithSingleFieldObj arity f
, UntaggedValueObj arity f
) => Options -> ToArgs Value arity a
-> f a -> Value
nonAllNullarySumToJSON opts targs =
case sumEncoding opts of
TaggedObject{..} ->
object . taggedObjectPairs opts targs tagFieldName contentsFieldName
ObjectWithSingleField -> Object . objectWithSingleFieldObj opts targs
TwoElemArray -> Array . twoElemArrayObj opts targs
UntaggedValue -> untaggedValueObj opts targs
--------------------------------------------------------------------------------
class SumToEncoding arity f allNullary where
sumToEncoding :: Options -> ToArgs Encoding arity a
-> f a -> Tagged allNullary Encoding
instance ( GetConName f
, TaggedObjectEnc arity f
, ObjectWithSingleFieldEnc arity f
, TwoElemArrayEnc arity f
, UntaggedValueEnc arity f
) => SumToEncoding arity f True where
sumToEncoding opts targs
| allNullaryToStringTag opts = Tagged . toEncoding .
constructorTagModifier opts . getConName
| otherwise = Tagged . nonAllNullarySumToEncoding opts targs
instance ( TwoElemArrayEnc arity f
, TaggedObjectEnc arity f
, ObjectWithSingleFieldEnc arity f
, UntaggedValueEnc arity f
) => SumToEncoding arity f False where
sumToEncoding opts targs = Tagged . nonAllNullarySumToEncoding opts targs
nonAllNullarySumToEncoding :: ( TwoElemArrayEnc arity f
, TaggedObjectEnc arity f
, ObjectWithSingleFieldEnc arity f
, UntaggedValueEnc arity f
) => Options -> ToArgs Encoding arity a
-> f a -> Encoding
nonAllNullarySumToEncoding opts targs =
case sumEncoding opts of
TaggedObject{..} ->
taggedObjectEnc opts targs tagFieldName contentsFieldName
ObjectWithSingleField -> objectWithSingleFieldEnc opts targs
TwoElemArray -> twoElemArrayEnc opts targs
UntaggedValue -> untaggedValueEnc opts targs
--------------------------------------------------------------------------------
class TaggedObjectPairs arity f where
taggedObjectPairs :: Options -> ToArgs Value arity a
-> String -> String
-> f a -> [Pair]
instance ( TaggedObjectPairs arity a
, TaggedObjectPairs arity b
) => TaggedObjectPairs arity (a :+: b) where
taggedObjectPairs opts targs tagFieldName contentsFieldName (L1 x) =
taggedObjectPairs opts targs tagFieldName contentsFieldName x
taggedObjectPairs opts targs tagFieldName contentsFieldName (R1 x) =
taggedObjectPairs opts targs tagFieldName contentsFieldName x
instance ( IsRecord a isRecord
, TaggedObjectPairs' arity a isRecord
, Constructor c
) => TaggedObjectPairs arity (C1 c a) where
taggedObjectPairs opts targs tagFieldName contentsFieldName =
(pack tagFieldName .= constructorTagModifier opts
(conName (undefined :: t c a p)) :) .
(unTagged :: Tagged isRecord [Pair] -> [Pair]) .
taggedObjectPairs' opts targs contentsFieldName . unM1
class TaggedObjectPairs' arity f isRecord where
taggedObjectPairs' :: Options -> ToArgs Value arity a
-> String -> f a -> Tagged isRecord [Pair]
instance OVERLAPPING_ TaggedObjectPairs' arity U1 False where
taggedObjectPairs' _ _ _ _ = Tagged []
instance (RecordToPairs arity f) => TaggedObjectPairs' arity f True where
taggedObjectPairs' opts targs _ =
Tagged . toList . recordToPairs opts targs
instance (GToJSON arity f) => TaggedObjectPairs' arity f False where
taggedObjectPairs' opts targs contentsFieldName =
Tagged . (:[]) . (pack contentsFieldName .=) . gToJSON opts targs
--------------------------------------------------------------------------------
class TaggedObjectEnc arity f where
taggedObjectEnc :: Options -> ToArgs Encoding arity a
-> String -> String
-> f a -> Encoding
instance ( TaggedObjectEnc arity a
, TaggedObjectEnc arity b
) => TaggedObjectEnc arity (a :+: b) where
taggedObjectEnc opts targs tagFieldName contentsFieldName (L1 x) =
taggedObjectEnc opts targs tagFieldName contentsFieldName x
taggedObjectEnc opts targs tagFieldName contentsFieldName (R1 x) =
taggedObjectEnc opts targs tagFieldName contentsFieldName x
instance ( IsRecord a isRecord
, TaggedObjectEnc' arity a isRecord
, Constructor c
) => TaggedObjectEnc arity (C1 c a) where
taggedObjectEnc opts targs tagFieldName contentsFieldName v = E.pairs (E.pair key val)
where
key :: Text
key = pack tagFieldName
val = toEncoding (constructorTagModifier opts (conName (undefined :: t c a p)))
>< ((unTagged :: Tagged isRecord Encoding -> Encoding) . taggedObjectEnc' opts targs contentsFieldName . unM1 $ v)
class TaggedObjectEnc' arity f isRecord where
taggedObjectEnc' :: Options -> ToArgs Encoding arity a
-> String -> f a -> Tagged isRecord Encoding
instance OVERLAPPING_ TaggedObjectEnc' arity U1 False where
taggedObjectEnc' _ _ _ _ = Tagged E.empty
instance (RecordToEncoding arity f) => TaggedObjectEnc' arity f True where
taggedObjectEnc' opts targs _ = Tagged . (E.comma ><) . fst
. recordToEncoding opts targs
instance (GToEncoding arity f) => TaggedObjectEnc' arity f False where
taggedObjectEnc' opts targs contentsFieldName =
Tagged . (\z -> E.comma >< toEncoding contentsFieldName >< E.colon >< z) .
gToEncoding opts targs
--------------------------------------------------------------------------------
-- | Get the name of the constructor of a sum datatype.
class GetConName f where
getConName :: f a -> String
instance (GetConName a, GetConName b) => GetConName (a :+: b) where
getConName (L1 x) = getConName x
getConName (R1 x) = getConName x
instance (Constructor c) => GetConName (C1 c a) where
getConName = conName
--------------------------------------------------------------------------------
class TwoElemArrayObj arity f where
twoElemArrayObj :: Options -> ToArgs Value arity a
-> f a -> V.Vector Value
instance ( TwoElemArrayObj arity a
, TwoElemArrayObj arity b
) => TwoElemArrayObj arity (a :+: b) where
twoElemArrayObj opts targs (L1 x) = twoElemArrayObj opts targs x
twoElemArrayObj opts targs (R1 x) = twoElemArrayObj opts targs x
instance ( GToJSON arity a
, ConsToJSON arity a
, Constructor c
) => TwoElemArrayObj arity (C1 c a) where
twoElemArrayObj opts targs x = V.create $ do
mv <- VM.unsafeNew 2
VM.unsafeWrite mv 0 $ String $ pack $ constructorTagModifier opts
$ conName (undefined :: t c a p)
VM.unsafeWrite mv 1 $ gToJSON opts targs x
return mv
--------------------------------------------------------------------------------
class TwoElemArrayEnc arity f where
twoElemArrayEnc :: Options -> ToArgs Encoding arity a
-> f a -> Encoding
instance ( TwoElemArrayEnc arity a
, TwoElemArrayEnc arity b
) => TwoElemArrayEnc arity (a :+: b) where
twoElemArrayEnc opts targs (L1 x) = twoElemArrayEnc opts targs x
twoElemArrayEnc opts targs (R1 x) = twoElemArrayEnc opts targs x
instance ( GToEncoding arity a
, ConsToEncoding arity a
, Constructor c
) => TwoElemArrayEnc arity (C1 c a) where
twoElemArrayEnc opts targs x = E.list id
[ toEncoding (constructorTagModifier opts (conName (undefined :: t c a p)))
, gToEncoding opts targs x
]
--------------------------------------------------------------------------------
class ConsToJSON arity f where
consToJSON :: Options -> ToArgs Value arity a
-> f a -> Value
class ConsToJSON' arity f isRecord where
consToJSON' :: Options -> ToArgs Value arity a
-> Bool -- ^ Are we a record with one field?
-> f a -> Tagged isRecord Value
instance ( IsRecord f isRecord
, ConsToJSON' arity f isRecord
) => ConsToJSON arity f where
consToJSON opts targs =
(unTagged :: Tagged isRecord Value -> Value)
. consToJSON' opts targs (isUnary (undefined :: f a))
instance (RecordToPairs arity f) => ConsToJSON' arity f True where
consToJSON' opts targs isUn f = let
vals = toList $ recordToPairs opts targs f
in case (unwrapUnaryRecords opts,isUn,vals) of
(True,True,[(_,val)]) -> Tagged val
_ -> Tagged $ object vals
instance GToJSON arity f => ConsToJSON' arity f False where
consToJSON' opts targs _ = Tagged . gToJSON opts targs
--------------------------------------------------------------------------------
class ConsToEncoding arity f where
consToEncoding :: Options -> ToArgs Encoding arity a
-> f a -> Encoding
class ConsToEncoding' arity f isRecord where
consToEncoding' :: Options -> ToArgs Encoding arity a
-> Bool -- ^ Are we a record with one field?
-> f a -> Tagged isRecord Encoding
instance ( IsRecord f isRecord
, ConsToEncoding' arity f isRecord
) => ConsToEncoding arity f where
consToEncoding opts targs =
(unTagged :: Tagged isRecord Encoding -> Encoding)
. consToEncoding' opts targs (isUnary (undefined :: f a))
instance (RecordToEncoding arity f) => ConsToEncoding' arity f True where
consToEncoding' opts targs isUn x =
let (enc, mbVal) = recordToEncoding opts targs x
in case (unwrapUnaryRecords opts, isUn, mbVal) of
(True, True, Just val) -> Tagged val
_ -> Tagged $ E.wrapObject enc
instance GToEncoding arity f => ConsToEncoding' arity f False where
consToEncoding' opts targs _ = Tagged . gToEncoding opts targs
--------------------------------------------------------------------------------
class RecordToPairs arity f where
recordToPairs :: Options -> ToArgs Value arity a
-> f a -> DList Pair
instance ( RecordToPairs arity a
, RecordToPairs arity b
) => RecordToPairs arity (a :*: b) where
recordToPairs opts targs (a :*: b) = recordToPairs opts targs a <>
recordToPairs opts targs b
instance (Selector s, GToJSON arity a) => RecordToPairs arity (S1 s a) where
recordToPairs = fieldToPair
instance OVERLAPPING_ (Selector s, ToJSON a) =>
RecordToPairs arity (S1 s (K1 i (Maybe a))) where
recordToPairs opts _ (M1 k1) | omitNothingFields opts
, K1 Nothing <- k1 = DList.empty
recordToPairs opts targs m1 = fieldToPair opts targs m1
fieldToPair :: (Selector s, GToJSON arity a)
=> Options -> ToArgs Value arity p
-> S1 s a p -> DList Pair
fieldToPair opts targs m1 = pure ( pack $ fieldLabelModifier opts $ selName m1
, gToJSON opts targs (unM1 m1)
)
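-- For illustration only: given a hypothetical record
-- @data Person = Person { name :: String, age :: Int }@ and options with
-- @fieldLabelModifier = map toUpper@, 'fieldToPair' yields the pairs
-- ("NAME", "alice") and ("AGE", 30), which 'consToJSON' then wraps into
-- the object {"NAME":"alice","AGE":30}.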
--------------------------------------------------------------------------------
class RecordToEncoding arity f where
-- 1st element: whole thing
-- 2nd element: in case the record has only 1 field, just the value
-- of the field (without the key); 'Nothing' otherwise
recordToEncoding :: Options -> ToArgs Encoding arity a
-> f a -> (Encoding, Maybe Encoding)
instance ( RecordToEncoding arity a
, RecordToEncoding arity b
) => RecordToEncoding arity (a :*: b) where
recordToEncoding opts targs (a :*: b) | omitNothingFields opts =
(E.econcat $ intersperse E.comma $
filter (not . E.nullEncoding)
[ fst (recordToEncoding opts targs a)
, fst (recordToEncoding opts targs b) ]
, Nothing)
recordToEncoding opts targs (a :*: b) =
(fst (recordToEncoding opts targs a) >< E.comma ><
fst (recordToEncoding opts targs b),
Nothing)
instance (Selector s, GToEncoding arity a) => RecordToEncoding arity (S1 s a) where
recordToEncoding = fieldToEncoding
instance OVERLAPPING_ (Selector s, ToJSON a) =>
RecordToEncoding arity (S1 s (K1 i (Maybe a))) where
recordToEncoding opts _ (M1 k1) | omitNothingFields opts
, K1 Nothing <- k1 = (E.empty, Nothing)
recordToEncoding opts targs m1 = fieldToEncoding opts targs m1
fieldToEncoding :: (Selector s, GToEncoding arity a)
=> Options -> ToArgs Encoding arity p
-> S1 s a p -> (Encoding, Maybe Encoding)
fieldToEncoding opts targs m1 =
let keyBuilder = toEncoding (fieldLabelModifier opts $ selName m1)
valueBuilder = gToEncoding opts targs (unM1 m1)
in (keyBuilder >< E.colon >< valueBuilder, Just valueBuilder)
--------------------------------------------------------------------------------
class WriteProduct arity f where
writeProduct :: Options
-> ToArgs Value arity a
-> VM.MVector s Value
-> Int -- ^ index
-> Int -- ^ length
-> f a
-> ST s ()
instance ( WriteProduct arity a
, WriteProduct arity b
) => WriteProduct arity (a :*: b) where
writeProduct opts targs mv ix len (a :*: b) = do
writeProduct opts targs mv ix lenL a
writeProduct opts targs mv ixR lenR b
where
lenL = len `unsafeShiftR` 1
lenR = len - lenL
ixR = ix + lenL
instance OVERLAPPABLE_ (GToJSON arity a) => WriteProduct arity a where
writeProduct opts targs mv ix _ =
VM.unsafeWrite mv ix . gToJSON opts targs
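-- A worked sketch of the index arithmetic above, for a constructor with
-- five fields written into a vector of length 5 starting at index 0
-- (assuming the usual balanced product tree built by GHC.Generics):
--
--   (ix 0, len 5) -> left (ix 0, len 2), right (ix 2, len 3)
--   (ix 0, len 2) -> left (ix 0, len 1), right (ix 1, len 1)
--   (ix 2, len 3) -> left (ix 2, len 1), right (ix 3, len 2)
--   (ix 3, len 2) -> left (ix 3, len 1), right (ix 4, len 1)
--
-- so each leaf writes exactly one slot (indices 0..4) and the vector is
-- filled without recomputing lengths at the leaves.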
--------------------------------------------------------------------------------
class EncodeProduct arity f where
encodeProduct :: Options -> ToArgs Encoding arity a
-> f a -> Encoding' E.InArray
instance ( EncodeProduct arity a
, EncodeProduct arity b
) => EncodeProduct arity (a :*: b) where
encodeProduct opts targs (a :*: b) | omitNothingFields opts =
E.econcat $ intersperse E.comma $
filter (not . E.nullEncoding)
[encodeProduct opts targs a, encodeProduct opts targs b]
encodeProduct opts targs (a :*: b) =
encodeProduct opts targs a >*<
encodeProduct opts targs b
instance OVERLAPPABLE_ (GToEncoding arity a) => EncodeProduct arity a where
encodeProduct opts targs a = E.retagEncoding $ gToEncoding opts targs a
--------------------------------------------------------------------------------
class ObjectWithSingleFieldObj arity f where
objectWithSingleFieldObj :: Options -> ToArgs Value arity a
-> f a -> Object
instance ( ObjectWithSingleFieldObj arity a
, ObjectWithSingleFieldObj arity b
) => ObjectWithSingleFieldObj arity (a :+: b) where
objectWithSingleFieldObj opts targs (L1 x) =
objectWithSingleFieldObj opts targs x
objectWithSingleFieldObj opts targs (R1 x) =
objectWithSingleFieldObj opts targs x
instance ( GToJSON arity a
, ConsToJSON arity a
, Constructor c
) => ObjectWithSingleFieldObj arity (C1 c a) where
objectWithSingleFieldObj opts targs = H.singleton typ . gToJSON opts targs
where
typ = pack $ constructorTagModifier opts $
conName (undefined :: t c a p)
--------------------------------------------------------------------------------
class ObjectWithSingleFieldEnc arity f where
objectWithSingleFieldEnc :: Options -> ToArgs Encoding arity a
-> f a -> Encoding
instance ( ObjectWithSingleFieldEnc arity a
, ObjectWithSingleFieldEnc arity b
) => ObjectWithSingleFieldEnc arity (a :+: b) where
objectWithSingleFieldEnc opts targs (L1 x) =
objectWithSingleFieldEnc opts targs x
objectWithSingleFieldEnc opts targs (R1 x) =
objectWithSingleFieldEnc opts targs x
instance ( GToEncoding arity a
, ConsToEncoding arity a
, Constructor c
) => ObjectWithSingleFieldEnc arity (C1 c a) where
objectWithSingleFieldEnc opts targs v = E.pairs (E.pair key val)
where
key :: Text
key = pack (constructorTagModifier opts (conName (undefined :: t c a p)))
val :: Encoding' Value
val = gToEncoding opts targs v
--------------------------------------------------------------------------------
class UntaggedValueObj arity f where
untaggedValueObj :: Options -> ToArgs Value arity a
-> f a -> Value
instance
( UntaggedValueObj arity a
, UntaggedValueObj arity b
) => UntaggedValueObj arity (a :+: b)
where
untaggedValueObj opts targs (L1 x) = untaggedValueObj opts targs x
untaggedValueObj opts targs (R1 x) = untaggedValueObj opts targs x
instance OVERLAPPABLE_
( GToJSON arity a
, ConsToJSON arity a
) => UntaggedValueObj arity (C1 c a) where
untaggedValueObj = gToJSON
instance OVERLAPPING_
( Constructor c )
=> UntaggedValueObj arity (C1 c U1)
where
untaggedValueObj opts _ _ = toJSON $
constructorTagModifier opts $ conName (undefined :: t c U1 p)
--------------------------------------------------------------------------------
class UntaggedValueEnc arity f where
untaggedValueEnc :: Options -> ToArgs Encoding arity a
-> f a -> Encoding
instance
( UntaggedValueEnc arity a
, UntaggedValueEnc arity b
) => UntaggedValueEnc arity (a :+: b)
where
untaggedValueEnc opts targs (L1 x) = untaggedValueEnc opts targs x
untaggedValueEnc opts targs (R1 x) = untaggedValueEnc opts targs x
instance OVERLAPPABLE_
( GToEncoding arity a
, ConsToEncoding arity a
) => UntaggedValueEnc arity (C1 c a)
where
untaggedValueEnc = gToEncoding
instance OVERLAPPING_
( Constructor c )
=> UntaggedValueEnc arity (C1 c U1)
where
untaggedValueEnc opts _ _ = toEncoding $
constructorTagModifier opts $ conName (undefined :: t c U1 p)
-------------------------------------------------------------------------------
-- Instances
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
-- base
-------------------------------------------------------------------------------
instance ToJSON2 Const where
liftToJSON2 t _ _ _ (Const x) = t x
{-# INLINE liftToJSON2 #-}
liftToEncoding2 t _ _ _ (Const x) = t x
{-# INLINE liftToEncoding2 #-}
instance ToJSON a => ToJSON1 (Const a) where
liftToJSON _ _ (Const x) = toJSON x
{-# INLINE liftToJSON #-}
liftToEncoding _ _ (Const x) = toEncoding x
{-# INLINE liftToEncoding #-}
instance ToJSON a => ToJSON (Const a b) where
toJSON (Const x) = toJSON x
{-# INLINE toJSON #-}
toEncoding (Const x) = toEncoding x
{-# INLINE toEncoding #-}
instance ToJSON1 Maybe where
liftToJSON t _ (Just a) = t a
liftToJSON _ _ Nothing = Null
{-# INLINE liftToJSON #-}
liftToEncoding t _ (Just a) = t a
liftToEncoding _ _ Nothing = E.null_
{-# INLINE liftToEncoding #-}
instance (ToJSON a) => ToJSON (Maybe a) where
toJSON = toJSON1
{-# INLINE toJSON #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
instance ToJSON2 Either where
liftToJSON2 toA _ _toB _ (Left a) = Object $ H.singleton "Left" (toA a)
liftToJSON2 _toA _ toB _ (Right b) = Object $ H.singleton "Right" (toB b)
{-# INLINE liftToJSON2 #-}
liftToEncoding2 toA _ _toB _ (Left a) = E.pairs $ E.pair "Left" $ toA a
liftToEncoding2 _toA _ toB _ (Right b) = E.pairs $ E.pair "Right" $ toB b
{-# INLINE liftToEncoding2 #-}
instance (ToJSON a) => ToJSON1 (Either a) where
liftToJSON = liftToJSON2 toJSON toJSONList
{-# INLINE liftToJSON #-}
liftToEncoding = liftToEncoding2 toEncoding toEncodingList
{-# INLINE liftToEncoding #-}
instance (ToJSON a, ToJSON b) => ToJSON (Either a b) where
toJSON = toJSON2
{-# INLINE toJSON #-}
toEncoding = toEncoding2
{-# INLINE toEncoding #-}
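-- For example, @toJSON (Left 1 :: Either Int Bool)@ is the object
-- {"Left":1} and @toJSON (Right True :: Either Int Bool)@ is {"Right":true}.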
instance ToJSON Bool where
toJSON = Bool
{-# INLINE toJSON #-}
toEncoding = E.bool
{-# INLINE toEncoding #-}
instance ToJSONKey Bool where
toJSONKey = toJSONKeyText $ \x -> if x then "true" else "false"
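-- With this key instance a @Map Bool v@ encodes as an object whose keys are
-- the strings "true" and "false", e.g. (key order may vary)
-- {"false":0,"true":1} for @Map.fromList [(False,0),(True,1)]@.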
instance ToJSON Ordering where
toJSON = toJSON . orderingToText
toEncoding = toEncoding . orderingToText
orderingToText :: Ordering -> T.Text
orderingToText o = case o of
LT -> "LT"
EQ -> "EQ"
GT -> "GT"
instance ToJSON () where
toJSON _ = emptyArray
{-# INLINE toJSON #-}
toEncoding _ = emptyArray_
{-# INLINE toEncoding #-}
instance ToJSON Char where
toJSON = String . T.singleton
{-# INLINE toJSON #-}
toJSONList = String . T.pack
{-# INLINE toJSONList #-}
toEncoding = E.string . (:[])
{-# INLINE toEncoding #-}
toEncodingList = E.string
{-# INLINE toEncodingList #-}
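-- 'toJSONList' and 'toEncodingList' are overridden so that a 'String'
-- (that is, ['Char']) encodes as one JSON string rather than an array of
-- one-character strings: @toJSON 'a'@ is @"a"@ and @toJSON "abc"@ is
-- @"abc"@, not @["a","b","c"]@.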
instance ToJSON Double where
toJSON = realFloatToJSON
{-# INLINE toJSON #-}
toEncoding = E.double
{-# INLINE toEncoding #-}
instance ToJSONKey Double where
toJSONKey = toJSONKeyTextEnc E.doubleText
{-# INLINE toJSONKey #-}
instance ToJSON Number where
toJSON (D d) = toJSON d
toJSON (I i) = toJSON i
{-# INLINE toJSON #-}
toEncoding (D d) = toEncoding d
toEncoding (I i) = toEncoding i
{-# INLINE toEncoding #-}
instance ToJSON Float where
toJSON = realFloatToJSON
{-# INLINE toJSON #-}
toEncoding = E.float
{-# INLINE toEncoding #-}
instance ToJSONKey Float where
toJSONKey = toJSONKeyTextEnc E.floatText
{-# INLINE toJSONKey #-}
instance (ToJSON a, Integral a) => ToJSON (Ratio a) where
toJSON r = object [ "numerator" .= numerator r
, "denominator" .= denominator r
]
{-# INLINE toJSON #-}
toEncoding r = E.pairs $
"numerator" .= numerator r <>
"denominator" .= denominator r
{-# INLINE toEncoding #-}
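-- For example, @toJSON (3 % 4 :: Rational)@ is the object
-- {"numerator":3,"denominator":4}.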
instance HasResolution a => ToJSON (Fixed a) where
toJSON = Number . realToFrac
{-# INLINE toJSON #-}
toEncoding = E.scientific . realToFrac
{-# INLINE toEncoding #-}
instance HasResolution a => ToJSONKey (Fixed a) where
toJSONKey = toJSONKeyTextEnc (E.scientificText . realToFrac)
{-# INLINE toJSONKey #-}
instance ToJSON Int where
toJSON = Number . fromIntegral
{-# INLINE toJSON #-}
toEncoding = E.int
{-# INLINE toEncoding #-}
instance ToJSONKey Int where
toJSONKey = toJSONKeyTextEnc E.intText
{-# INLINE toJSONKey #-}
instance ToJSON Integer where
toJSON = Number . fromInteger
{-# INLINE toJSON #-}
toEncoding = E.integer
{-# INLINE toEncoding #-}
instance ToJSONKey Integer where
toJSONKey = toJSONKeyTextEnc E.integerText
{-# INLINE toJSONKey #-}
instance ToJSON Natural where
toJSON = toJSON . toInteger
{-# INLINE toJSON #-}
toEncoding = toEncoding . toInteger
{-# INLINE toEncoding #-}
instance ToJSONKey Natural where
toJSONKey = toJSONKeyTextEnc (E.integerText . toInteger)
{-# INLINE toJSONKey #-}
instance ToJSON Int8 where
toJSON = Number . fromIntegral
{-# INLINE toJSON #-}
toEncoding = E.int8
{-# INLINE toEncoding #-}
instance ToJSONKey Int8 where
toJSONKey = toJSONKeyTextEnc E.int8Text
{-# INLINE toJSONKey #-}
instance ToJSON Int16 where
toJSON = Number . fromIntegral
{-# INLINE toJSON #-}
toEncoding = E.int16
{-# INLINE toEncoding #-}
instance ToJSONKey Int16 where
toJSONKey = toJSONKeyTextEnc E.int16Text
{-# INLINE toJSONKey #-}
instance ToJSON Int32 where
toJSON = Number . fromIntegral
{-# INLINE toJSON #-}
toEncoding = E.int32
{-# INLINE toEncoding #-}
instance ToJSONKey Int32 where
toJSONKey = toJSONKeyTextEnc E.int32Text
{-# INLINE toJSONKey #-}
instance ToJSON Int64 where
toJSON = Number . fromIntegral
{-# INLINE toJSON #-}
toEncoding = E.int64
{-# INLINE toEncoding #-}
instance ToJSONKey Int64 where
toJSONKey = toJSONKeyTextEnc E.int64Text
{-# INLINE toJSONKey #-}
instance ToJSON Word where
toJSON = Number . fromIntegral
{-# INLINE toJSON #-}
toEncoding = E.word
{-# INLINE toEncoding #-}
instance ToJSONKey Word where
toJSONKey = toJSONKeyTextEnc E.wordText
{-# INLINE toJSONKey #-}
instance ToJSON Word8 where
toJSON = Number . fromIntegral
{-# INLINE toJSON #-}
toEncoding = E.word8
{-# INLINE toEncoding #-}
instance ToJSONKey Word8 where
toJSONKey = toJSONKeyTextEnc E.word8Text
{-# INLINE toJSONKey #-}
instance ToJSON Word16 where
toJSON = Number . fromIntegral
{-# INLINE toJSON #-}
toEncoding = E.word16
{-# INLINE toEncoding #-}
instance ToJSONKey Word16 where
toJSONKey = toJSONKeyTextEnc E.word16Text
{-# INLINE toJSONKey #-}
instance ToJSON Word32 where
toJSON = Number . fromIntegral
{-# INLINE toJSON #-}
toEncoding = E.word32
{-# INLINE toEncoding #-}
instance ToJSONKey Word32 where
toJSONKey = toJSONKeyTextEnc E.word32Text
{-# INLINE toJSONKey #-}
instance ToJSON Word64 where
toJSON = Number . fromIntegral
{-# INLINE toJSON #-}
toEncoding = E.word64
{-# INLINE toEncoding #-}
instance ToJSONKey Word64 where
toJSONKey = toJSONKeyTextEnc E.word64Text
{-# INLINE toJSONKey #-}
instance ToJSON Text where
toJSON = String
{-# INLINE toJSON #-}
toEncoding = E.text
{-# INLINE toEncoding #-}
instance ToJSONKey Text where
toJSONKey = toJSONKeyText id
{-# INLINE toJSONKey #-}
instance ToJSON LT.Text where
toJSON = String . LT.toStrict
{-# INLINE toJSON #-}
toEncoding = E.lazyText
{-# INLINE toEncoding #-}
instance ToJSONKey LT.Text where
toJSONKey = toJSONKeyText LT.toStrict
instance ToJSON Version where
toJSON = toJSON . showVersion
{-# INLINE toJSON #-}
toEncoding = toEncoding . showVersion
{-# INLINE toEncoding #-}
instance ToJSONKey Version where
toJSONKey = toJSONKeyText (T.pack . showVersion)
-------------------------------------------------------------------------------
-- semigroups NonEmpty
-------------------------------------------------------------------------------
instance ToJSON1 NonEmpty where
liftToJSON t _ = listValue t . NE.toList
{-# INLINE liftToJSON #-}
liftToEncoding t _ = listEncoding t . NE.toList
{-# INLINE liftToEncoding #-}
instance (ToJSON a) => ToJSON (NonEmpty a) where
toJSON = toJSON1
{-# INLINE toJSON #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
-------------------------------------------------------------------------------
-- scientific
-------------------------------------------------------------------------------
instance ToJSON Scientific where
toJSON = Number
{-# INLINE toJSON #-}
toEncoding = E.scientific
{-# INLINE toEncoding #-}
instance ToJSONKey Scientific where
toJSONKey = toJSONKeyTextEnc E.scientificText
-------------------------------------------------------------------------------
-- DList
-------------------------------------------------------------------------------
instance ToJSON1 DList.DList where
liftToJSON t _ = listValue t . toList
{-# INLINE liftToJSON #-}
liftToEncoding t _ = listEncoding t . toList
{-# INLINE liftToEncoding #-}
instance (ToJSON a) => ToJSON (DList.DList a) where
toJSON = toJSON1
{-# INLINE toJSON #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
-------------------------------------------------------------------------------
-- transformers - Functors
-------------------------------------------------------------------------------
instance ToJSON1 Identity where
liftToJSON t _ (Identity a) = t a
{-# INLINE liftToJSON #-}
liftToJSONList _ tl xs = tl (map runIdentity xs)
{-# INLINE liftToJSONList #-}
liftToEncoding t _ (Identity a) = t a
{-# INLINE liftToEncoding #-}
liftToEncodingList _ tl xs = tl (map runIdentity xs)
{-# INLINE liftToEncodingList #-}
instance (ToJSON a) => ToJSON (Identity a) where
toJSON = toJSON1
{-# INLINE toJSON #-}
toJSONList = liftToJSONList toJSON toJSONList
{-# INLINE toJSONList #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
toEncodingList = liftToEncodingList toEncoding toEncodingList
{-# INLINE toEncodingList #-}
instance (ToJSONKey a, ToJSON a) => ToJSONKey (Identity a) where
toJSONKey = contramapToJSONKeyFunction runIdentity toJSONKey
toJSONKeyList = contramapToJSONKeyFunction (map runIdentity) toJSONKeyList
instance (ToJSON1 f, ToJSON1 g) => ToJSON1 (Compose f g) where
liftToJSON tv tvl (Compose x) = liftToJSON g gl x
where
g = liftToJSON tv tvl
gl = liftToJSONList tv tvl
{-# INLINE liftToJSON #-}
liftToJSONList te tel xs = liftToJSONList g gl (map getCompose xs)
where
g = liftToJSON te tel
gl = liftToJSONList te tel
{-# INLINE liftToJSONList #-}
liftToEncoding te tel (Compose x) = liftToEncoding g gl x
where
g = liftToEncoding te tel
gl = liftToEncodingList te tel
{-# INLINE liftToEncoding #-}
liftToEncodingList te tel xs = liftToEncodingList g gl (map getCompose xs)
where
g = liftToEncoding te tel
gl = liftToEncodingList te tel
{-# INLINE liftToEncodingList #-}
instance (ToJSON1 f, ToJSON1 g, ToJSON a) => ToJSON (Compose f g a) where
toJSON = toJSON1
{-# INLINE toJSON #-}
toJSONList = liftToJSONList toJSON toJSONList
{-# INLINE toJSONList #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
toEncodingList = liftToEncodingList toEncoding toEncodingList
{-# INLINE toEncodingList #-}
instance (ToJSON1 f, ToJSON1 g) => ToJSON1 (Product f g) where
liftToJSON tv tvl (Pair x y) = liftToJSON2 tx txl ty tyl (x, y)
where
tx = liftToJSON tv tvl
txl = liftToJSONList tv tvl
ty = liftToJSON tv tvl
tyl = liftToJSONList tv tvl
liftToEncoding te tel (Pair x y) = liftToEncoding2 tx txl ty tyl (x, y)
where
tx = liftToEncoding te tel
txl = liftToEncodingList te tel
ty = liftToEncoding te tel
tyl = liftToEncodingList te tel
instance (ToJSON1 f, ToJSON1 g, ToJSON a) => ToJSON (Product f g a) where
toJSON = toJSON1
{-# INLINE toJSON #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
instance (ToJSON1 f, ToJSON1 g) => ToJSON1 (Sum f g) where
liftToJSON tv tvl (InL x) = Object $ H.singleton "InL" (liftToJSON tv tvl x)
liftToJSON tv tvl (InR y) = Object $ H.singleton "InR" (liftToJSON tv tvl y)
liftToEncoding te tel (InL x) = E.pairs $ E.pair "InL" $ liftToEncoding te tel x
liftToEncoding te tel (InR y) = E.pairs $ E.pair "InR" $ liftToEncoding te tel y
instance (ToJSON1 f, ToJSON1 g, ToJSON a) => ToJSON (Sum f g a) where
toJSON = toJSON1
{-# INLINE toJSON #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
-------------------------------------------------------------------------------
-- containers
-------------------------------------------------------------------------------
instance ToJSON1 Seq.Seq where
liftToJSON t _ = listValue t . toList
{-# INLINE liftToJSON #-}
liftToEncoding t _ = listEncoding t . toList
{-# INLINE liftToEncoding #-}
instance (ToJSON a) => ToJSON (Seq.Seq a) where
toJSON = toJSON1
{-# INLINE toJSON #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
instance ToJSON1 Set.Set where
liftToJSON t _ = listValue t . Set.toList
{-# INLINE liftToJSON #-}
liftToEncoding t _ = listEncoding t . Set.toList
{-# INLINE liftToEncoding #-}
instance (ToJSON a) => ToJSON (Set.Set a) where
toJSON = toJSON1
{-# INLINE toJSON #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
instance ToJSON IntSet.IntSet where
toJSON = toJSON . IntSet.toList
{-# INLINE toJSON #-}
toEncoding = toEncoding . IntSet.toList
{-# INLINE toEncoding #-}
instance ToJSON1 IntMap.IntMap where
liftToJSON t tol = liftToJSON to' tol' . IntMap.toList
where
to' = liftToJSON2 toJSON toJSONList t tol
tol' = liftToJSONList2 toJSON toJSONList t tol
{-# INLINE liftToJSON #-}
liftToEncoding t tol = liftToEncoding to' tol' . IntMap.toList
where
to' = liftToEncoding2 toEncoding toEncodingList t tol
tol' = liftToEncodingList2 toEncoding toEncodingList t tol
{-# INLINE liftToEncoding #-}
instance ToJSON a => ToJSON (IntMap.IntMap a) where
toJSON = toJSON1
{-# INLINE toJSON #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
instance ToJSONKey k => ToJSON1 (M.Map k) where
liftToJSON g _ = case toJSONKey of
ToJSONKeyText f _ -> Object . mapHashKeyVal f g
ToJSONKeyValue f _ -> Array . V.fromList . map (toJSONPair f g) . M.toList
{-# INLINE liftToJSON #-}
liftToEncoding g _ = case toJSONKey of
ToJSONKeyText _ f -> dict f g M.foldrWithKey
ToJSONKeyValue _ f -> listEncoding (pairEncoding f) . M.toList
where
pairEncoding f (a, b) = E.list id [f a, g b]
{-# INLINE liftToEncoding #-}
instance (ToJSON v, ToJSONKey k) => ToJSON (M.Map k v) where
toJSON = toJSON1
{-# INLINE toJSON #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
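-- The shape of an encoded 'M.Map' therefore depends on the key's
-- 'ToJSONKey' instance.  As an illustration (object key order may vary):
--
--   toJSON (M.fromList [("a",1),("b",2)] :: M.Map Text Int)
--     -- keys via ToJSONKeyText:  {"a":1,"b":2}
--
--   toJSON (M.fromList [((1,2),True)] :: M.Map (Int,Int) Bool)
--     -- keys via ToJSONKeyValue: [[[1,2],true]]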
instance ToJSON1 Tree.Tree where
liftToJSON t tol = go
where
go (Tree.Node root branches) =
liftToJSON2 t tol to' tol' (root, branches)
to' = liftToJSON go (listValue go)
tol' = liftToJSONList go (listValue go)
{-# INLINE liftToJSON #-}
liftToEncoding t tol = go
where
go (Tree.Node root branches) =
liftToEncoding2 t tol to' tol' (root, branches)
to' = liftToEncoding go (listEncoding go)
tol' = liftToEncodingList go (listEncoding go)
{-# INLINE liftToEncoding #-}
instance (ToJSON v) => ToJSON (Tree.Tree v) where
toJSON = toJSON1
{-# INLINE toJSON #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
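-- A 'Tree.Tree' is thus encoded as a two-element array of the root label
-- and the list of recursively encoded sub-trees, e.g.
-- @toJSON (Tree.Node 1 [Tree.Node 2 []])@ is @[1,[[2,[]]]]@.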
-------------------------------------------------------------------------------
-- vector
-------------------------------------------------------------------------------
instance ToJSON1 Vector where
liftToJSON t _ = Array . V.map t
{-# INLINE liftToJSON #-}
liftToEncoding t _ = listEncoding t . V.toList
{-# INLINE liftToEncoding #-}
instance (ToJSON a) => ToJSON (Vector a) where
toJSON = toJSON1
{-# INLINE toJSON #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
encodeVector :: (ToJSON a, VG.Vector v a) => v a -> Encoding
encodeVector = listEncoding toEncoding . VG.toList
{-# INLINE encodeVector #-}
vectorToJSON :: (VG.Vector v a, ToJSON a) => v a -> Value
vectorToJSON = Array . V.map toJSON . V.convert
{-# INLINE vectorToJSON #-}
instance (Storable a, ToJSON a) => ToJSON (VS.Vector a) where
toJSON = vectorToJSON
{-# INLINE toJSON #-}
toEncoding = encodeVector
{-# INLINE toEncoding #-}
instance (VP.Prim a, ToJSON a) => ToJSON (VP.Vector a) where
toJSON = vectorToJSON
{-# INLINE toJSON #-}
toEncoding = encodeVector
{-# INLINE toEncoding #-}
instance (VG.Vector VU.Vector a, ToJSON a) => ToJSON (VU.Vector a) where
toJSON = vectorToJSON
{-# INLINE toJSON #-}
toEncoding = encodeVector
{-# INLINE toEncoding #-}
-------------------------------------------------------------------------------
-- unordered-containers
-------------------------------------------------------------------------------
instance ToJSON1 HashSet.HashSet where
liftToJSON t _ = listValue t . HashSet.toList
{-# INLINE liftToJSON #-}
liftToEncoding t _ = listEncoding t . HashSet.toList
{-# INLINE liftToEncoding #-}
instance (ToJSON a) => ToJSON (HashSet.HashSet a) where
toJSON = toJSON1
{-# INLINE toJSON #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
instance ToJSONKey k => ToJSON1 (H.HashMap k) where
liftToJSON g _ = case toJSONKey of
ToJSONKeyText f _ -> Object . mapKeyVal f g
ToJSONKeyValue f _ -> Array . V.fromList . map (toJSONPair f g) . H.toList
{-# INLINE liftToJSON #-}
-- liftToEncoding :: forall a. (a -> Encoding) -> ([a] -> Encoding) -> H.HashMap k a -> Encoding
liftToEncoding g _ = case toJSONKey of
ToJSONKeyText _ f -> dict f g H.foldrWithKey
ToJSONKeyValue _ f -> listEncoding (pairEncoding f) . H.toList
where
pairEncoding f (a, b) = E.list id [f a, g b]
{-# INLINE liftToEncoding #-}
instance (ToJSON v, ToJSONKey k) => ToJSON (H.HashMap k v) where
toJSON = toJSON1
{-# INLINE toJSON #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
-------------------------------------------------------------------------------
-- aeson
-------------------------------------------------------------------------------
instance ToJSON Value where
toJSON a = a
{-# INLINE toJSON #-}
toEncoding = E.value
{-# INLINE toEncoding #-}
instance ToJSON DotNetTime where
toJSON = toJSON . dotNetTime
toEncoding = toEncoding . dotNetTime
dotNetTime :: DotNetTime -> String
dotNetTime (DotNetTime t) = secs ++ formatMillis t ++ ")/"
where secs = formatTime defaultTimeLocale "/Date(%s" t
formatMillis :: (FormatTime t) => t -> String
formatMillis = take 3 . formatTime defaultTimeLocale "%q"
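-- The result uses the Microsoft .NET JSON date shape: the whole seconds
-- (via %s) followed by three millisecond digits, wrapped in "/Date(...)/".
-- For example, 2011-10-31 00:00:00 UTC renders as "/Date(1320019200000)/".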
-------------------------------------------------------------------------------
-- time
-------------------------------------------------------------------------------
instance ToJSON Day where
toJSON = stringEncoding . E.day
toEncoding = E.day
instance ToJSONKey Day where
toJSONKey = toJSONKeyTextEnc E.day
instance ToJSON TimeOfDay where
toJSON = stringEncoding . E.timeOfDay
toEncoding = E.timeOfDay
instance ToJSONKey TimeOfDay where
toJSONKey = toJSONKeyTextEnc E.timeOfDay
instance ToJSON LocalTime where
toJSON = stringEncoding . E.localTime
toEncoding = E.localTime
instance ToJSONKey LocalTime where
toJSONKey = toJSONKeyTextEnc E.localTime
instance ToJSON ZonedTime where
toJSON = stringEncoding . E.zonedTime
toEncoding = E.zonedTime
instance ToJSONKey ZonedTime where
toJSONKey = toJSONKeyTextEnc E.zonedTime
instance ToJSON UTCTime where
toJSON = stringEncoding . E.utcTime
toEncoding = E.utcTime
instance ToJSONKey UTCTime where
toJSONKey = toJSONKeyTextEnc E.utcTime
-- | Encode something to a JSON string.
stringEncoding :: Encoding' Text -> Value
stringEncoding = String
. T.dropAround (== '"')
. T.decodeLatin1
. lazyToStrictByteString
. E.encodingToLazyByteString
{-# INLINE stringEncoding #-}
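-- For example, @E.day@ yields the encoding @"2016-01-01"@ (quotes
-- included); 'stringEncoding' renders it to bytes, decodes them and strips
-- the surrounding quotes, giving the 'Value' @String "2016-01-01"@.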
instance ToJSON NominalDiffTime where
toJSON = Number . realToFrac
{-# INLINE toJSON #-}
toEncoding = E.scientific . realToFrac
{-# INLINE toEncoding #-}
-------------------------------------------------------------------------------
-- base Monoid/Semigroup
-------------------------------------------------------------------------------
instance ToJSON1 Monoid.Dual where
liftToJSON t _ = t . Monoid.getDual
{-# INLINE liftToJSON #-}
liftToEncoding t _ = t . Monoid.getDual
{-# INLINE liftToEncoding #-}
instance ToJSON a => ToJSON (Monoid.Dual a) where
toJSON = toJSON1
{-# INLINE toJSON #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
instance ToJSON1 Monoid.First where
liftToJSON t to' = liftToJSON t to' . Monoid.getFirst
{-# INLINE liftToJSON #-}
liftToEncoding t to' = liftToEncoding t to' . Monoid.getFirst
{-# INLINE liftToEncoding #-}
instance ToJSON a => ToJSON (Monoid.First a) where
toJSON = toJSON1
{-# INLINE toJSON #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
instance ToJSON1 Monoid.Last where
liftToJSON t to' = liftToJSON t to' . Monoid.getLast
{-# INLINE liftToJSON #-}
liftToEncoding t to' = liftToEncoding t to' . Monoid.getLast
{-# INLINE liftToEncoding #-}
instance ToJSON a => ToJSON (Monoid.Last a) where
toJSON = toJSON1
{-# INLINE toJSON #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
instance ToJSON1 Semigroup.Min where
liftToJSON t _ (Semigroup.Min x) = t x
{-# INLINE liftToJSON #-}
liftToEncoding t _ (Semigroup.Min x) = t x
{-# INLINE liftToEncoding #-}
instance ToJSON a => ToJSON (Semigroup.Min a) where
toJSON = toJSON1
{-# INLINE toJSON #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
instance ToJSON1 Semigroup.Max where
liftToJSON t _ (Semigroup.Max x) = t x
{-# INLINE liftToJSON #-}
liftToEncoding t _ (Semigroup.Max x) = t x
{-# INLINE liftToEncoding #-}
instance ToJSON a => ToJSON (Semigroup.Max a) where
toJSON = toJSON1
{-# INLINE toJSON #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
instance ToJSON1 Semigroup.First where
liftToJSON t _ (Semigroup.First x) = t x
{-# INLINE liftToJSON #-}
liftToEncoding t _ (Semigroup.First x) = t x
{-# INLINE liftToEncoding #-}
instance ToJSON a => ToJSON (Semigroup.First a) where
toJSON = toJSON1
{-# INLINE toJSON #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
instance ToJSON1 Semigroup.Last where
liftToJSON t _ (Semigroup.Last x) = t x
{-# INLINE liftToJSON #-}
liftToEncoding t _ (Semigroup.Last x) = t x
{-# INLINE liftToEncoding #-}
instance ToJSON a => ToJSON (Semigroup.Last a) where
toJSON = toJSON1
{-# INLINE toJSON #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
instance ToJSON1 Semigroup.WrappedMonoid where
liftToJSON t _ (Semigroup.WrapMonoid x) = t x
{-# INLINE liftToJSON #-}
liftToEncoding t _ (Semigroup.WrapMonoid x) = t x
{-# INLINE liftToEncoding #-}
instance ToJSON a => ToJSON (Semigroup.WrappedMonoid a) where
toJSON = toJSON1
{-# INLINE toJSON #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
instance ToJSON1 Semigroup.Option where
liftToJSON t to' = liftToJSON t to' . Semigroup.getOption
{-# INLINE liftToJSON #-}
liftToEncoding t to' = liftToEncoding t to' . Semigroup.getOption
{-# INLINE liftToEncoding #-}
instance ToJSON a => ToJSON (Semigroup.Option a) where
toJSON = toJSON1
{-# INLINE toJSON #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
-------------------------------------------------------------------------------
-- tagged
-------------------------------------------------------------------------------
instance ToJSON (Proxy a) where
toJSON _ = Null
{-# INLINE toJSON #-}
toEncoding _ = E.null_
{-# INLINE toEncoding #-}
instance ToJSON1 (Tagged a) where
liftToJSON t _ (Tagged x) = t x
{-# INLINE liftToJSON #-}
liftToEncoding t _ (Tagged x) = t x
{-# INLINE liftToEncoding #-}
instance ToJSON b => ToJSON (Tagged a b) where
toJSON = toJSON1
{-# INLINE toJSON #-}
toEncoding = toEncoding1
{-# INLINE toEncoding #-}
instance ToJSONKey b => ToJSONKey (Tagged a b) where
toJSONKey = contramapToJSONKeyFunction unTagged toJSONKey
toJSONKeyList = contramapToJSONKeyFunction (fmap unTagged) toJSONKeyList
-------------------------------------------------------------------------------
-- Instances for converting to map keys
-------------------------------------------------------------------------------
instance (ToJSON a, ToJSON b) => ToJSONKey (a,b)
instance (ToJSON a, ToJSON b, ToJSON c) => ToJSONKey (a,b,c)
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d) => ToJSONKey (a,b,c,d)
instance ToJSONKey Char where
toJSONKey = ToJSONKeyText T.singleton (E.string . (:[]))
toJSONKeyList = toJSONKeyText T.pack
instance (ToJSONKey a, ToJSON a) => ToJSONKey [a] where
toJSONKey = toJSONKeyList
-------------------------------------------------------------------------------
-- Tuple instances
-------------------------------------------------------------------------------
instance ToJSON2 (,) where
liftToJSON2 toA _ toB _ (a, b) = Array $ V.create $ do
mv <- VM.unsafeNew 2
VM.unsafeWrite mv 0 (toA a)
VM.unsafeWrite mv 1 (toB b)
return mv
{-# INLINE liftToJSON2 #-}
liftToEncoding2 toA _ toB _ (a, b) = E.list id [toA a, toB b]
{-# INLINE liftToEncoding2 #-}
instance (ToJSON a) => ToJSON1 ((,) a) where
liftToJSON = liftToJSON2 toJSON toJSONList
{-# INLINE liftToJSON #-}
liftToEncoding = liftToEncoding2 toEncoding toEncodingList
{-# INLINE liftToEncoding #-}
instance (ToJSON a, ToJSON b) => ToJSON (a, b) where
toJSON = toJSON2
{-# INLINE toJSON #-}
toEncoding = toEncoding2
{-# INLINE toEncoding #-}
instance (ToJSON a) => ToJSON2 ((,,) a) where
liftToJSON2 toB _ toC _ (a, b, c) = Array $ V.create $ do
mv <- VM.unsafeNew 3
VM.unsafeWrite mv 0 (toJSON a)
VM.unsafeWrite mv 1 (toB b)
VM.unsafeWrite mv 2 (toC c)
return mv
{-# INLINE liftToJSON2 #-}
liftToEncoding2 toB _ toC _ (a, b, c) = E.list id
[ toEncoding a
, toB b
, toC c
]
{-# INLINE liftToEncoding2 #-}
instance (ToJSON a, ToJSON b) => ToJSON1 ((,,) a b) where
liftToJSON = liftToJSON2 toJSON toJSONList
{-# INLINE liftToJSON #-}
liftToEncoding = liftToEncoding2 toEncoding toEncodingList
{-# INLINE liftToEncoding #-}
instance (ToJSON a, ToJSON b, ToJSON c) => ToJSON (a, b, c) where
toJSON = toJSON2
{-# INLINE toJSON #-}
toEncoding = toEncoding2
{-# INLINE toEncoding #-}
instance (ToJSON a, ToJSON b) => ToJSON2 ((,,,) a b) where
liftToJSON2 toC _ toD _ (a, b, c, d) = Array $ V.create $ do
mv <- VM.unsafeNew 4
VM.unsafeWrite mv 0 (toJSON a)
VM.unsafeWrite mv 1 (toJSON b)
VM.unsafeWrite mv 2 (toC c)
VM.unsafeWrite mv 3 (toD d)
return mv
{-# INLINE liftToJSON2 #-}
liftToEncoding2 toC _ toD _ (a, b, c, d) = E.list id
[ toEncoding a
, toEncoding b
, toC c
, toD d
]
{-# INLINE liftToEncoding2 #-}
instance (ToJSON a, ToJSON b, ToJSON c) => ToJSON1 ((,,,) a b c) where
liftToJSON = liftToJSON2 toJSON toJSONList
{-# INLINE liftToJSON #-}
liftToEncoding = liftToEncoding2 toEncoding toEncodingList
{-# INLINE liftToEncoding #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d) => ToJSON (a, b, c, d) where
toJSON = toJSON2
{-# INLINE toJSON #-}
toEncoding = toEncoding2
{-# INLINE toEncoding #-}
instance (ToJSON a, ToJSON b, ToJSON c) => ToJSON2 ((,,,,) a b c) where
liftToJSON2 toD _ toE _ (a, b, c, d, e) = Array $ V.create $ do
mv <- VM.unsafeNew 5
VM.unsafeWrite mv 0 (toJSON a)
VM.unsafeWrite mv 1 (toJSON b)
VM.unsafeWrite mv 2 (toJSON c)
VM.unsafeWrite mv 3 (toD d)
VM.unsafeWrite mv 4 (toE e)
return mv
{-# INLINE liftToJSON2 #-}
liftToEncoding2 toD _ toE _ (a, b, c, d, e) = E.list id
[ toEncoding a
, toEncoding b
, toEncoding c
, toD d
, toE e
]
{-# INLINE liftToEncoding2 #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d) => ToJSON1 ((,,,,) a b c d) where
liftToJSON = liftToJSON2 toJSON toJSONList
{-# INLINE liftToJSON #-}
liftToEncoding = liftToEncoding2 toEncoding toEncodingList
{-# INLINE liftToEncoding #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e) => ToJSON (a, b, c, d, e) where
toJSON = toJSON2
{-# INLINE toJSON #-}
toEncoding = toEncoding2
{-# INLINE toEncoding #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d) => ToJSON2 ((,,,,,) a b c d) where
liftToJSON2 toE _ toF _ (a, b, c, d, e, f) = Array $ V.create $ do
mv <- VM.unsafeNew 6
VM.unsafeWrite mv 0 (toJSON a)
VM.unsafeWrite mv 1 (toJSON b)
VM.unsafeWrite mv 2 (toJSON c)
VM.unsafeWrite mv 3 (toJSON d)
VM.unsafeWrite mv 4 (toE e)
VM.unsafeWrite mv 5 (toF f)
return mv
{-# INLINE liftToJSON2 #-}
liftToEncoding2 toE _ toF _ (a, b, c, d, e, f) = E.list id
[ toEncoding a
, toEncoding b
, toEncoding c
, toEncoding d
, toE e
, toF f
]
{-# INLINE liftToEncoding2 #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e) => ToJSON1 ((,,,,,) a b c d e) where
liftToJSON = liftToJSON2 toJSON toJSONList
{-# INLINE liftToJSON #-}
liftToEncoding = liftToEncoding2 toEncoding toEncodingList
{-# INLINE liftToEncoding #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f) => ToJSON (a, b, c, d, e, f) where
toJSON = toJSON2
{-# INLINE toJSON #-}
toEncoding = toEncoding2
{-# INLINE toEncoding #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e) => ToJSON2 ((,,,,,,) a b c d e) where
liftToJSON2 toF _ toG _ (a, b, c, d, e, f, g) = Array $ V.create $ do
mv <- VM.unsafeNew 7
VM.unsafeWrite mv 0 (toJSON a)
VM.unsafeWrite mv 1 (toJSON b)
VM.unsafeWrite mv 2 (toJSON c)
VM.unsafeWrite mv 3 (toJSON d)
VM.unsafeWrite mv 4 (toJSON e)
VM.unsafeWrite mv 5 (toF f)
VM.unsafeWrite mv 6 (toG g)
return mv
{-# INLINE liftToJSON2 #-}
liftToEncoding2 toF _ toG _ (a, b, c, d, e, f, g) = E.list id
[ toEncoding a
, toEncoding b
, toEncoding c
, toEncoding d
, toEncoding e
, toF f
, toG g
]
{-# INLINE liftToEncoding2 #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f) => ToJSON1 ((,,,,,,) a b c d e f) where
liftToJSON = liftToJSON2 toJSON toJSONList
{-# INLINE liftToJSON #-}
liftToEncoding = liftToEncoding2 toEncoding toEncodingList
{-# INLINE liftToEncoding #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g) => ToJSON (a, b, c, d, e, f, g) where
toJSON = toJSON2
{-# INLINE toJSON #-}
toEncoding = toEncoding2
{-# INLINE toEncoding #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f) => ToJSON2 ((,,,,,,,) a b c d e f) where
liftToJSON2 toG _ toH _ (a, b, c, d, e, f, g, h) = Array $ V.create $ do
mv <- VM.unsafeNew 8
VM.unsafeWrite mv 0 (toJSON a)
VM.unsafeWrite mv 1 (toJSON b)
VM.unsafeWrite mv 2 (toJSON c)
VM.unsafeWrite mv 3 (toJSON d)
VM.unsafeWrite mv 4 (toJSON e)
VM.unsafeWrite mv 5 (toJSON f)
VM.unsafeWrite mv 6 (toG g)
VM.unsafeWrite mv 7 (toH h)
return mv
{-# INLINE liftToJSON2 #-}
liftToEncoding2 toG _ toH _ (a, b, c, d, e, f, g, h) = E.list id
[ toEncoding a
, toEncoding b
, toEncoding c
, toEncoding d
, toEncoding e
, toEncoding f
, toG g
, toH h
]
{-# INLINE liftToEncoding2 #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g) => ToJSON1 ((,,,,,,,) a b c d e f g) where
liftToJSON = liftToJSON2 toJSON toJSONList
{-# INLINE liftToJSON #-}
liftToEncoding = liftToEncoding2 toEncoding toEncodingList
{-# INLINE liftToEncoding #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h) => ToJSON (a, b, c, d, e, f, g, h) where
toJSON = toJSON2
{-# INLINE toJSON #-}
toEncoding = toEncoding2
{-# INLINE toEncoding #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g) => ToJSON2 ((,,,,,,,,) a b c d e f g) where
liftToJSON2 toH _ toI _ (a, b, c, d, e, f, g, h, i) = Array $ V.create $ do
mv <- VM.unsafeNew 9
VM.unsafeWrite mv 0 (toJSON a)
VM.unsafeWrite mv 1 (toJSON b)
VM.unsafeWrite mv 2 (toJSON c)
VM.unsafeWrite mv 3 (toJSON d)
VM.unsafeWrite mv 4 (toJSON e)
VM.unsafeWrite mv 5 (toJSON f)
VM.unsafeWrite mv 6 (toJSON g)
VM.unsafeWrite mv 7 (toH h)
VM.unsafeWrite mv 8 (toI i)
return mv
{-# INLINE liftToJSON2 #-}
liftToEncoding2 toH _ toI _ (a, b, c, d, e, f, g, h, i) = E.list id
[ toEncoding a
, toEncoding b
, toEncoding c
, toEncoding d
, toEncoding e
, toEncoding f
, toEncoding g
, toH h
, toI i
]
{-# INLINE liftToEncoding2 #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h) => ToJSON1 ((,,,,,,,,) a b c d e f g h) where
liftToJSON = liftToJSON2 toJSON toJSONList
{-# INLINE liftToJSON #-}
liftToEncoding = liftToEncoding2 toEncoding toEncodingList
{-# INLINE liftToEncoding #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i) => ToJSON (a, b, c, d, e, f, g, h, i) where
toJSON = toJSON2
{-# INLINE toJSON #-}
toEncoding = toEncoding2
{-# INLINE toEncoding #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h) => ToJSON2 ((,,,,,,,,,) a b c d e f g h) where
liftToJSON2 toI _ toJ _ (a, b, c, d, e, f, g, h, i, j) = Array $ V.create $ do
mv <- VM.unsafeNew 10
VM.unsafeWrite mv 0 (toJSON a)
VM.unsafeWrite mv 1 (toJSON b)
VM.unsafeWrite mv 2 (toJSON c)
VM.unsafeWrite mv 3 (toJSON d)
VM.unsafeWrite mv 4 (toJSON e)
VM.unsafeWrite mv 5 (toJSON f)
VM.unsafeWrite mv 6 (toJSON g)
VM.unsafeWrite mv 7 (toJSON h)
VM.unsafeWrite mv 8 (toI i)
VM.unsafeWrite mv 9 (toJ j)
return mv
{-# INLINE liftToJSON2 #-}
liftToEncoding2 toI _ toJ _ (a, b, c, d, e, f, g, h, i, j) = E.list id
[ toEncoding a
, toEncoding b
, toEncoding c
, toEncoding d
, toEncoding e
, toEncoding f
, toEncoding g
, toEncoding h
, toI i
, toJ j
]
{-# INLINE liftToEncoding2 #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i) => ToJSON1 ((,,,,,,,,,) a b c d e f g h i) where
liftToJSON = liftToJSON2 toJSON toJSONList
{-# INLINE liftToJSON #-}
liftToEncoding = liftToEncoding2 toEncoding toEncodingList
{-# INLINE liftToEncoding #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j) => ToJSON (a, b, c, d, e, f, g, h, i, j) where
toJSON = toJSON2
{-# INLINE toJSON #-}
toEncoding = toEncoding2
{-# INLINE toEncoding #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i) => ToJSON2 ((,,,,,,,,,,) a b c d e f g h i) where
liftToJSON2 toJ _ toK _ (a, b, c, d, e, f, g, h, i, j, k) = Array $ V.create $ do
mv <- VM.unsafeNew 11
VM.unsafeWrite mv 0 (toJSON a)
VM.unsafeWrite mv 1 (toJSON b)
VM.unsafeWrite mv 2 (toJSON c)
VM.unsafeWrite mv 3 (toJSON d)
VM.unsafeWrite mv 4 (toJSON e)
VM.unsafeWrite mv 5 (toJSON f)
VM.unsafeWrite mv 6 (toJSON g)
VM.unsafeWrite mv 7 (toJSON h)
VM.unsafeWrite mv 8 (toJSON i)
VM.unsafeWrite mv 9 (toJ j)
VM.unsafeWrite mv 10 (toK k)
return mv
{-# INLINE liftToJSON2 #-}
liftToEncoding2 toJ _ toK _ (a, b, c, d, e, f, g, h, i, j, k) = E.list id
[ toEncoding a
, toEncoding b
, toEncoding c
, toEncoding d
, toEncoding e
, toEncoding f
, toEncoding g
, toEncoding h
, toEncoding i
, toJ j
, toK k
]
{-# INLINE liftToEncoding2 #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j) => ToJSON1 ((,,,,,,,,,,) a b c d e f g h i j) where
liftToJSON = liftToJSON2 toJSON toJSONList
{-# INLINE liftToJSON #-}
liftToEncoding = liftToEncoding2 toEncoding toEncodingList
{-# INLINE liftToEncoding #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k) => ToJSON (a, b, c, d, e, f, g, h, i, j, k) where
toJSON = toJSON2
{-# INLINE toJSON #-}
toEncoding = toEncoding2
{-# INLINE toEncoding #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j) => ToJSON2 ((,,,,,,,,,,,) a b c d e f g h i j) where
liftToJSON2 toK _ toL _ (a, b, c, d, e, f, g, h, i, j, k, l) = Array $ V.create $ do
mv <- VM.unsafeNew 12
VM.unsafeWrite mv 0 (toJSON a)
VM.unsafeWrite mv 1 (toJSON b)
VM.unsafeWrite mv 2 (toJSON c)
VM.unsafeWrite mv 3 (toJSON d)
VM.unsafeWrite mv 4 (toJSON e)
VM.unsafeWrite mv 5 (toJSON f)
VM.unsafeWrite mv 6 (toJSON g)
VM.unsafeWrite mv 7 (toJSON h)
VM.unsafeWrite mv 8 (toJSON i)
VM.unsafeWrite mv 9 (toJSON j)
VM.unsafeWrite mv 10 (toK k)
VM.unsafeWrite mv 11 (toL l)
return mv
{-# INLINE liftToJSON2 #-}
liftToEncoding2 toK _ toL _ (a, b, c, d, e, f, g, h, i, j, k, l) = E.list id
[ toEncoding a
, toEncoding b
, toEncoding c
, toEncoding d
, toEncoding e
, toEncoding f
, toEncoding g
, toEncoding h
, toEncoding i
, toEncoding j
, toK k
, toL l
]
{-# INLINE liftToEncoding2 #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k) => ToJSON1 ((,,,,,,,,,,,) a b c d e f g h i j k) where
liftToJSON = liftToJSON2 toJSON toJSONList
{-# INLINE liftToJSON #-}
liftToEncoding = liftToEncoding2 toEncoding toEncodingList
{-# INLINE liftToEncoding #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l) => ToJSON (a, b, c, d, e, f, g, h, i, j, k, l) where
toJSON = toJSON2
{-# INLINE toJSON #-}
toEncoding = toEncoding2
{-# INLINE toEncoding #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k) => ToJSON2 ((,,,,,,,,,,,,) a b c d e f g h i j k) where
liftToJSON2 toL _ toM _ (a, b, c, d, e, f, g, h, i, j, k, l, m) = Array $ V.create $ do
mv <- VM.unsafeNew 13
VM.unsafeWrite mv 0 (toJSON a)
VM.unsafeWrite mv 1 (toJSON b)
VM.unsafeWrite mv 2 (toJSON c)
VM.unsafeWrite mv 3 (toJSON d)
VM.unsafeWrite mv 4 (toJSON e)
VM.unsafeWrite mv 5 (toJSON f)
VM.unsafeWrite mv 6 (toJSON g)
VM.unsafeWrite mv 7 (toJSON h)
VM.unsafeWrite mv 8 (toJSON i)
VM.unsafeWrite mv 9 (toJSON j)
VM.unsafeWrite mv 10 (toJSON k)
VM.unsafeWrite mv 11 (toL l)
VM.unsafeWrite mv 12 (toM m)
return mv
{-# INLINE liftToJSON2 #-}
liftToEncoding2 toL _ toM _ (a, b, c, d, e, f, g, h, i, j, k, l, m) = E.list id
[ toEncoding a
, toEncoding b
, toEncoding c
, toEncoding d
, toEncoding e
, toEncoding f
, toEncoding g
, toEncoding h
, toEncoding i
, toEncoding j
, toEncoding k
, toL l
, toM m
]
{-# INLINE liftToEncoding2 #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l) => ToJSON1 ((,,,,,,,,,,,,) a b c d e f g h i j k l) where
liftToJSON = liftToJSON2 toJSON toJSONList
{-# INLINE liftToJSON #-}
liftToEncoding = liftToEncoding2 toEncoding toEncodingList
{-# INLINE liftToEncoding #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l, ToJSON m) => ToJSON (a, b, c, d, e, f, g, h, i, j, k, l, m) where
toJSON = toJSON2
{-# INLINE toJSON #-}
toEncoding = toEncoding2
{-# INLINE toEncoding #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l) => ToJSON2 ((,,,,,,,,,,,,,) a b c d e f g h i j k l) where
liftToJSON2 toM _ toN _ (a, b, c, d, e, f, g, h, i, j, k, l, m, n) = Array $ V.create $ do
mv <- VM.unsafeNew 14
VM.unsafeWrite mv 0 (toJSON a)
VM.unsafeWrite mv 1 (toJSON b)
VM.unsafeWrite mv 2 (toJSON c)
VM.unsafeWrite mv 3 (toJSON d)
VM.unsafeWrite mv 4 (toJSON e)
VM.unsafeWrite mv 5 (toJSON f)
VM.unsafeWrite mv 6 (toJSON g)
VM.unsafeWrite mv 7 (toJSON h)
VM.unsafeWrite mv 8 (toJSON i)
VM.unsafeWrite mv 9 (toJSON j)
VM.unsafeWrite mv 10 (toJSON k)
VM.unsafeWrite mv 11 (toJSON l)
VM.unsafeWrite mv 12 (toM m)
VM.unsafeWrite mv 13 (toN n)
return mv
{-# INLINE liftToJSON2 #-}
liftToEncoding2 toM _ toN _ (a, b, c, d, e, f, g, h, i, j, k, l, m, n) = E.list id
[ toEncoding a
, toEncoding b
, toEncoding c
, toEncoding d
, toEncoding e
, toEncoding f
, toEncoding g
, toEncoding h
, toEncoding i
, toEncoding j
, toEncoding k
, toEncoding l
, toM m
, toN n
]
{-# INLINE liftToEncoding2 #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l, ToJSON m) => ToJSON1 ((,,,,,,,,,,,,,) a b c d e f g h i j k l m) where
liftToJSON = liftToJSON2 toJSON toJSONList
{-# INLINE liftToJSON #-}
liftToEncoding = liftToEncoding2 toEncoding toEncodingList
{-# INLINE liftToEncoding #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l, ToJSON m, ToJSON n) => ToJSON (a, b, c, d, e, f, g, h, i, j, k, l, m, n) where
toJSON = toJSON2
{-# INLINE toJSON #-}
toEncoding = toEncoding2
{-# INLINE toEncoding #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l, ToJSON m) => ToJSON2 ((,,,,,,,,,,,,,,) a b c d e f g h i j k l m) where
liftToJSON2 toN _ toO _ (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o) = Array $ V.create $ do
mv <- VM.unsafeNew 15
VM.unsafeWrite mv 0 (toJSON a)
VM.unsafeWrite mv 1 (toJSON b)
VM.unsafeWrite mv 2 (toJSON c)
VM.unsafeWrite mv 3 (toJSON d)
VM.unsafeWrite mv 4 (toJSON e)
VM.unsafeWrite mv 5 (toJSON f)
VM.unsafeWrite mv 6 (toJSON g)
VM.unsafeWrite mv 7 (toJSON h)
VM.unsafeWrite mv 8 (toJSON i)
VM.unsafeWrite mv 9 (toJSON j)
VM.unsafeWrite mv 10 (toJSON k)
VM.unsafeWrite mv 11 (toJSON l)
VM.unsafeWrite mv 12 (toJSON m)
VM.unsafeWrite mv 13 (toN n)
VM.unsafeWrite mv 14 (toO o)
return mv
{-# INLINE liftToJSON2 #-}
liftToEncoding2 toN _ toO _ (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o) = E.list id
[ toEncoding a
, toEncoding b
, toEncoding c
, toEncoding d
, toEncoding e
, toEncoding f
, toEncoding g
, toEncoding h
, toEncoding i
, toEncoding j
, toEncoding k
, toEncoding l
, toEncoding m
, toN n
, toO o
]
{-# INLINE liftToEncoding2 #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l, ToJSON m, ToJSON n) => ToJSON1 ((,,,,,,,,,,,,,,) a b c d e f g h i j k l m n) where
liftToJSON = liftToJSON2 toJSON toJSONList
{-# INLINE liftToJSON #-}
liftToEncoding = liftToEncoding2 toEncoding toEncodingList
{-# INLINE liftToEncoding #-}
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l, ToJSON m, ToJSON n, ToJSON o) => ToJSON (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o) where
toJSON = toJSON2
{-# INLINE toJSON #-}
toEncoding = toEncoding2
{-# INLINE toEncoding #-}
-------------------------------------------------------------------------------
-- pre-bytestring-0.10 compatibility
-------------------------------------------------------------------------------
{-# INLINE lazyToStrictByteString #-}
lazyToStrictByteString :: L.ByteString -> S.ByteString
#if MIN_VERSION_bytestring(0,10,0)
lazyToStrictByteString = L.toStrict
#else
lazyToStrictByteString = packChunks
-- packChunks is taken from the blaze-builder package.
-- | Pack the chunks of a lazy bytestring into a single strict bytestring.
packChunks :: L.ByteString -> S.ByteString
packChunks lbs = do
S.unsafeCreate (fromIntegral $ L.length lbs) (copyChunks lbs)
where
copyChunks !L.Empty !_pf = return ()
copyChunks !(L.Chunk (S.PS fpbuf o l) lbs') !pf = do
withForeignPtr fpbuf $ \pbuf ->
copyBytes pf (pbuf `plusPtr` o) l
copyChunks lbs' (pf `plusPtr` l)
#endif
-- Source: tolysz/prepare-ghcjs, spec-lts8/aeson/Data/Aeson/Types/ToJSON.hs (bsd-3-clause)
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
\section[IdInfo]{@IdInfos@: Non-essential information about @Ids@}
(And a pretty good illustration of quite a few things wrong with
Haskell. [WDP 94/11])
-}
module IdInfo (
-- * The IdDetails type
IdDetails(..), pprIdDetails, coVarDetails, isCoVarDetails,
RecSelParent(..),
-- * The IdInfo type
IdInfo, -- Abstract
vanillaIdInfo, noCafIdInfo,
-- ** The OneShotInfo type
OneShotInfo(..),
oneShotInfo, noOneShotInfo, hasNoOneShotInfo,
setOneShotInfo,
-- ** Zapping various forms of Info
zapLamInfo, zapFragileInfo,
zapDemandInfo, zapUsageInfo,
-- ** The ArityInfo type
ArityInfo,
unknownArity,
arityInfo, setArityInfo, ppArityInfo,
callArityInfo, setCallArityInfo,
-- ** Demand and strictness Info
strictnessInfo, setStrictnessInfo,
demandInfo, setDemandInfo, pprStrictness,
-- ** Unfolding Info
unfoldingInfo, setUnfoldingInfo, setUnfoldingInfoLazily,
-- ** The InlinePragInfo type
InlinePragInfo,
inlinePragInfo, setInlinePragInfo,
-- ** The OccInfo type
OccInfo(..),
isDeadOcc, isStrongLoopBreaker, isWeakLoopBreaker,
occInfo, setOccInfo,
InsideLam, OneBranch,
insideLam, notInsideLam, oneBranch, notOneBranch,
-- ** The RuleInfo type
RuleInfo(..),
emptyRuleInfo,
isEmptyRuleInfo, ruleInfoFreeVars,
ruleInfoRules, setRuleInfoHead,
ruleInfo, setRuleInfo,
-- ** The CAFInfo type
CafInfo(..),
ppCafInfo, mayHaveCafRefs,
cafInfo, setCafInfo,
-- ** Tick-box Info
TickBoxOp(..), TickBoxId,
) where
import CoreSyn
import Class
import {-# SOURCE #-} PrimOp (PrimOp)
import Name
import VarSet
import BasicTypes
import DataCon
import TyCon
import {-# SOURCE #-} PatSyn
import ForeignCall
import Outputable
import Module
import Demand
-- infixl so you can say (id `set` a `set` b)
infixl 1 `setRuleInfo`,
`setArityInfo`,
`setInlinePragInfo`,
`setUnfoldingInfo`,
`setOneShotInfo`,
`setOccInfo`,
`setCafInfo`,
`setStrictnessInfo`,
`setDemandInfo`
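-- For example (an illustrative sketch; the setters are defined later in
-- this module):
--
--   vanillaIdInfo `setArityInfo` 2
--                 `setCafInfo`   NoCafRefs
--
-- reads left to right thanks to the left associativity declared above.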
{-
************************************************************************
* *
IdDetails
* *
************************************************************************
-}
-- | The 'IdDetails' of an 'Id' give stable, and necessary,
-- information about the Id.
data IdDetails
= VanillaId
-- | The 'Id' for a record selector
| RecSelId
{ sel_tycon :: RecSelParent
, sel_naughty :: Bool -- True <=> a "naughty" selector which can't actually exist, for example @x@ in:
-- data T = forall a. MkT { x :: a }
} -- See Note [Naughty record selectors] in TcTyClsDecls
| DataConWorkId DataCon -- ^ The 'Id' is for a data constructor /worker/
| DataConWrapId DataCon -- ^ The 'Id' is for a data constructor /wrapper/
-- [the only reasons we need to know is so that
-- a) to support isImplicitId
-- b) when desugaring a RecordCon we can get
-- from the Id back to the data con]
| ClassOpId Class -- ^ The 'Id' is a superclass selector,
-- or class operation of a class
| PrimOpId PrimOp -- ^ The 'Id' is for a primitive operator
| FCallId ForeignCall -- ^ The 'Id' is for a foreign call
| TickBoxOpId TickBoxOp -- ^ The 'Id' is for a HPC tick box (both traditional and binary)
| DFunId Bool -- ^ A dictionary function.
-- Bool = True <=> the class has only one method, so may be
-- implemented with a newtype, so it might be bad
-- to be strict on this dictionary
| CoVarId -- ^ A coercion variable
data RecSelParent = RecSelData TyCon | RecSelPatSyn PatSyn deriving Eq
-- Either `TyCon` or `PatSyn` depending
-- on the origin of the record selector.
-- For a data type family, this is the
-- /instance/ 'TyCon' not the family 'TyCon'
instance Outputable RecSelParent where
ppr p = case p of
RecSelData ty_con -> ppr ty_con
RecSelPatSyn ps -> ppr ps
-- | Just a synonym for 'CoVarId'. Written separately so it can be
-- exported in the hs-boot file.
coVarDetails :: IdDetails
coVarDetails = CoVarId
-- | Check if an 'IdDetails' says 'CoVarId'.
isCoVarDetails :: IdDetails -> Bool
isCoVarDetails CoVarId = True
isCoVarDetails _ = False
instance Outputable IdDetails where
ppr = pprIdDetails
pprIdDetails :: IdDetails -> SDoc
pprIdDetails VanillaId = empty
pprIdDetails other = brackets (pp other)
where
pp VanillaId = panic "pprIdDetails"
pp (DataConWorkId _) = text "DataCon"
pp (DataConWrapId _) = text "DataConWrapper"
pp (ClassOpId {}) = text "ClassOp"
pp (PrimOpId _) = text "PrimOp"
pp (FCallId _) = text "ForeignCall"
pp (TickBoxOpId _) = text "TickBoxOp"
pp (DFunId nt) = text "DFunId" <> ppWhen nt (text "(nt)")
pp (RecSelId { sel_naughty = is_naughty })
= brackets $ text "RecSel"
<> ppWhen is_naughty (text "(naughty)")
pp CoVarId = text "CoVarId"
{-
************************************************************************
* *
\subsection{The main IdInfo type}
* *
************************************************************************
-}
-- | An 'IdInfo' gives /optional/ information about an 'Id'. If
-- present it never lies, but it may not be present, in which case there
-- is always a conservative assumption which can be made.
--
-- Two 'Id's may have different info even though they have the same
-- 'Unique' (and are hence the same 'Id'); for example, one might lack
-- the properties attached to the other.
--
-- Most of the 'IdInfo' gives information about the value, or definition, of
-- the 'Id', independent of its usage. Exceptions to this
-- are 'demandInfo', 'occInfo', 'oneShotInfo' and 'callArityInfo'.
data IdInfo
= IdInfo {
arityInfo :: !ArityInfo, -- ^ 'Id' arity
ruleInfo :: RuleInfo, -- ^ Specialisations of the 'Id's function which exist
-- See Note [Specialisations and RULES in IdInfo]
unfoldingInfo :: Unfolding, -- ^ The 'Id's unfolding
cafInfo :: CafInfo, -- ^ 'Id' CAF info
oneShotInfo :: OneShotInfo, -- ^ Info about a lambda-bound variable, if the 'Id' is one
inlinePragInfo :: InlinePragma, -- ^ Any inline pragma attached to the 'Id'
occInfo :: OccInfo, -- ^ How the 'Id' occurs in the program
strictnessInfo :: StrictSig, -- ^ A strictness signature
demandInfo :: Demand, -- ^ ID demand information
callArityInfo :: !ArityInfo -- ^ How this is called.
-- n <=> all calls have at least n arguments
}
-- Setters
setRuleInfo :: IdInfo -> RuleInfo -> IdInfo
setRuleInfo info sp = sp `seq` info { ruleInfo = sp }
setInlinePragInfo :: IdInfo -> InlinePragma -> IdInfo
setInlinePragInfo info pr = pr `seq` info { inlinePragInfo = pr }
setOccInfo :: IdInfo -> OccInfo -> IdInfo
setOccInfo info oc = oc `seq` info { occInfo = oc }
-- Try to avoid space leaks by seq'ing
setUnfoldingInfoLazily :: IdInfo -> Unfolding -> IdInfo
setUnfoldingInfoLazily info uf -- Lazy variant to avoid looking at the
= -- unfolding of an imported Id unless necessary
info { unfoldingInfo = uf } -- (In this case the demand-zapping is redundant.)
setUnfoldingInfo :: IdInfo -> Unfolding -> IdInfo
setUnfoldingInfo info uf
= -- We don't seq the unfolding, as we generate intermediate
-- unfoldings which are just thrown away, so evaluating them is a
-- waste of time.
-- seqUnfolding uf `seq`
info { unfoldingInfo = uf }
setArityInfo :: IdInfo -> ArityInfo -> IdInfo
setArityInfo info ar = info { arityInfo = ar }
setCallArityInfo :: IdInfo -> ArityInfo -> IdInfo
setCallArityInfo info ar = info { callArityInfo = ar }
setCafInfo :: IdInfo -> CafInfo -> IdInfo
setCafInfo info caf = info { cafInfo = caf }
setOneShotInfo :: IdInfo -> OneShotInfo -> IdInfo
setOneShotInfo info lb = {-lb `seq`-} info { oneShotInfo = lb }
setDemandInfo :: IdInfo -> Demand -> IdInfo
setDemandInfo info dd = dd `seq` info { demandInfo = dd }
setStrictnessInfo :: IdInfo -> StrictSig -> IdInfo
setStrictnessInfo info dd = dd `seq` info { strictnessInfo = dd }
-- | Basic 'IdInfo' that carries no useful information whatsoever
vanillaIdInfo :: IdInfo
vanillaIdInfo
= IdInfo {
cafInfo = vanillaCafInfo,
arityInfo = unknownArity,
ruleInfo = emptyRuleInfo,
unfoldingInfo = noUnfolding,
oneShotInfo = NoOneShotInfo,
inlinePragInfo = defaultInlinePragma,
occInfo = NoOccInfo,
demandInfo = topDmd,
strictnessInfo = nopSig,
callArityInfo = unknownArity
}
-- | More informative 'IdInfo' we can use when we know the 'Id' has no CAF references
noCafIdInfo :: IdInfo
noCafIdInfo = vanillaIdInfo `setCafInfo` NoCafRefs
-- Used for built-in type Ids in MkId.
{-
************************************************************************
* *
\subsection[arity-IdInfo]{Arity info about an @Id@}
* *
************************************************************************
For locally-defined Ids, the code generator maintains its own notion
of their arities; so it should not be asking... (but other things
besides the code-generator need arity info!)
-}
-- | An 'ArityInfo' of @n@ tells us that partial application of this
-- 'Id' to up to @n-1@ value arguments does essentially no work.
--
-- That is not necessarily the same as saying that it has @n@ leading
-- lambdas, because coercions may get in the way.
--
-- The arity might increase later in the compilation process, if
-- an extra lambda floats up to the binding site.
type ArityInfo = Arity
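-- Illustrative example (not from the original source): if @f = \x y -> x+y@
-- has an 'ArityInfo' of 2, the partial application @f 3@ merely allocates a
-- closure; real work happens only once both value arguments are supplied.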
-- | It is always safe to assume that an 'Id' has an arity of 0
unknownArity :: Arity
unknownArity = 0 :: Arity
ppArityInfo :: Int -> SDoc
ppArityInfo 0 = empty
ppArityInfo n = hsep [text "Arity", int n]
{-
************************************************************************
* *
\subsection{Inline-pragma information}
* *
************************************************************************
-}
-- | Tells when the inlining is active.
-- When it is active the thing may be inlined, depending on how
-- big it is.
--
-- If there was an @INLINE@ pragma, then as a separate matter, the
-- RHS will have been made to look small with a Core inline 'Note'
--
-- The default 'InlinePragInfo' is 'AlwaysActive', so the info serves
-- entirely as a way to inhibit inlining until we want it
type InlinePragInfo = InlinePragma
{-
************************************************************************
* *
Strictness
* *
************************************************************************
-}
pprStrictness :: StrictSig -> SDoc
pprStrictness sig = ppr sig
{-
************************************************************************
* *
RuleInfo
* *
************************************************************************
Note [Specialisations and RULES in IdInfo]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Generally speaking, a GlobalId has an *empty* RuleInfo. All its
RULES are contained in the globally-built rule-base. In principle,
one could attach to M.f the RULES for M.f that are defined in M.
But we don't do that for instance declarations and so we just treat
them all uniformly.
The EXCEPTION is PrimOpIds, which do have rules in their IdInfo. That is
just for convenience really.
However, LocalIds may have non-empty RuleInfo. We treat them
differently because:
a) they might be nested, in which case a global table won't work
b) the RULE might mention free variables, which we use to keep things alive
In TidyPgm, when the LocalId becomes a GlobalId, its RULES are stripped off
and put in the global list.
-}
-- | Records the specializations of this 'Id' that we know about
-- in the form of rewrite 'CoreRule's that target them
data RuleInfo
= RuleInfo
[CoreRule]
DVarSet -- Locally-defined free vars of *both* LHS and RHS
-- of rules. I don't think it needs to include the
-- ru_fn though.
-- Note [Rule dependency info] in OccurAnal
-- | Assume that no specializations exist: always safe
emptyRuleInfo :: RuleInfo
emptyRuleInfo = RuleInfo [] emptyDVarSet
isEmptyRuleInfo :: RuleInfo -> Bool
isEmptyRuleInfo (RuleInfo rs _) = null rs
-- | Retrieve the locally-defined free variables of both the left and
-- right hand sides of the specialization rules
ruleInfoFreeVars :: RuleInfo -> DVarSet
ruleInfoFreeVars (RuleInfo _ fvs) = fvs
ruleInfoRules :: RuleInfo -> [CoreRule]
ruleInfoRules (RuleInfo rules _) = rules
-- | Change the name of the function the rule is keyed on on all of the 'CoreRule's
setRuleInfoHead :: Name -> RuleInfo -> RuleInfo
setRuleInfoHead fn (RuleInfo rules fvs)
= RuleInfo (map (setRuleIdName fn) rules) fvs
{-
************************************************************************
* *
\subsection[CG-IdInfo]{Code generator-related information}
* *
************************************************************************
-}
-- CafInfo is used to build Static Reference Tables (see simplStg/SRT.hs).
-- | Records whether an 'Id' makes Constant Applicative Form references
data CafInfo
= MayHaveCafRefs -- ^ Indicates that the 'Id' is for either:
--
-- 1. A function or static constructor
-- that refers to one or more CAFs, or
--
-- 2. A real live CAF
| NoCafRefs -- ^ A function or static constructor
-- that refers to no CAFs.
deriving (Eq, Ord)
-- | Assumes that the 'Id' has CAF references: definitely safe
vanillaCafInfo :: CafInfo
vanillaCafInfo = MayHaveCafRefs
mayHaveCafRefs :: CafInfo -> Bool
mayHaveCafRefs MayHaveCafRefs = True
mayHaveCafRefs _ = False
instance Outputable CafInfo where
ppr = ppCafInfo
ppCafInfo :: CafInfo -> SDoc
ppCafInfo NoCafRefs = text "NoCafRefs"
ppCafInfo MayHaveCafRefs = empty
{-
************************************************************************
* *
\subsection{Bulk operations on IdInfo}
* *
************************************************************************
-}
-- | This is used to remove information on lambda binders that we have
-- set up as part of a lambda group, assuming they will be applied all at once,
-- but turn out to be part of an unsaturated lambda as in e.g:
--
-- > (\x1. \x2. e) arg1
zapLamInfo :: IdInfo -> Maybe IdInfo
zapLamInfo info@(IdInfo {occInfo = occ, demandInfo = demand})
| is_safe_occ occ && is_safe_dmd demand
= Nothing
| otherwise
= Just (info {occInfo = safe_occ, demandInfo = topDmd})
where
-- The "unsafe" occ info is the ones that say I'm not in a lambda
-- because that might not be true for an unsaturated lambda
is_safe_occ (OneOcc in_lam _ _) = in_lam
is_safe_occ _other = True
safe_occ = case occ of
OneOcc _ once int_cxt -> OneOcc insideLam once int_cxt
_other -> occ
is_safe_dmd dmd = not (isStrictDmd dmd)
-- | Remove all demand info on the 'IdInfo'
zapDemandInfo :: IdInfo -> Maybe IdInfo
zapDemandInfo info = Just (info {demandInfo = topDmd})
-- | Remove usage (but not strictness) info on the 'IdInfo'
zapUsageInfo :: IdInfo -> Maybe IdInfo
zapUsageInfo info = Just (info {demandInfo = zapUsageDemand (demandInfo info)})
zapFragileInfo :: IdInfo -> Maybe IdInfo
-- ^ Zap info that depends on free variables
zapFragileInfo info
= Just (info `setRuleInfo` emptyRuleInfo
`setUnfoldingInfo` noUnfolding
`setOccInfo` zapFragileOcc occ)
where
occ = occInfo info
{-
************************************************************************
* *
\subsection{TickBoxOp}
* *
************************************************************************
-}
type TickBoxId = Int
-- | Tick box for Hpc-style coverage
data TickBoxOp
= TickBox Module {-# UNPACK #-} !TickBoxId
instance Outputable TickBoxOp where
ppr (TickBox mod n) = text "tick" <+> ppr (mod,n)
| oldmanmike/ghc | compiler/basicTypes/IdInfo.hs | bsd-3-clause | 18,548 | 0 | 11 | 5,854 | 2,092 | 1,241 | 851 | 217 | 10 |
module IxEnvMT (HasEnv(..), MT(..), at, Z, S, Top, Under, WithEnv, withEnv, mapEnv) where
import MT
import Control_Monad_Fix
import Control.Monad(liftM,MonadPlus(..))
newtype WithEnv e m a = E { unE :: e -> m a }
withEnv :: e -> WithEnv e m a -> m a
withEnv e (E f) = f e
mapEnv :: Monad m => (e2 -> e1) -> WithEnv e1 m a -> WithEnv e2 m a
mapEnv f (E m) = E (\e -> m (f e))
--------------------------------------------------------------------------------
instance Monad m => Functor (WithEnv e m) where
fmap = liftM
instance Monad m => Monad (WithEnv e m) where
return = lift . return
E m >>= f = E (\e -> do x <- m e; unE (f x) e)
E m >> n = E (\e -> m e >> withEnv e n)
fail = lift . fail
instance MT (WithEnv e) where
lift = E . const
instance MonadPlus m => MonadPlus (WithEnv e m) where
mzero = lift mzero
E a `mplus` E b = E (\e -> a e `mplus` b e)
--------------------------------------------------------------------------------
-- Features --------------------------------------------------------------------
instance Monad m => HasEnv (WithEnv e m) Z e where
getEnv _ = E return
inModEnv _ = mapEnv
instance HasEnv m ix e => HasEnv (WithEnv e' m) (S ix) e where
getEnv (Next ix) = lift (getEnv ix)
inModEnv (Next ix) f m = E (\e -> inModEnv ix f (withEnv e m))
instance HasState m ix s => HasState (WithEnv e m) ix s where
updSt ix = lift . updSt ix
instance HasOutput m ix o => HasOutput (WithEnv e m) ix o where
outputTree ix = lift . outputTree ix
instance HasExcept m x => HasExcept (WithEnv e m) x where
raise = lift . raise
handle h (E m) = E (\e -> handle (withEnv e . h) (m e))
instance HasCont m => HasCont (WithEnv e m) where
callcc f = E (\e -> callcc (\k -> withEnv e $ f (lift . k)))
instance MonadFix m => MonadFix (WithEnv e m) where
mfix f = E (\e -> mfix (withEnv e . f))
instance HasBaseMonad m n => HasBaseMonad (WithEnv e m) n where
inBase = lift . inBase
instance HasRefs m r => HasRefs (WithEnv e m) r where
newRef = lift . newRef
readRef = lift . readRef
writeRef r = lift . writeRef r
| mpickering/HaRe | old/tools/base/lib/Monads/IxEnvMT.hs | bsd-3-clause | 2,210 | 0 | 15 | 592 | 1,011 | 513 | 498 | -1 | -1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Functor
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- Functors: uniform action over a parameterized type, generalizing the
-- 'Data.List.map' function on lists.
module Data.Functor
(
Functor(fmap),
(<$),
($>),
(<$>),
(<&>),
void,
) where
import GHC.Base ( Functor(..), flip )
-- $setup
-- Allow the use of Prelude in doctests.
-- >>> import Prelude hiding ((<$>))
infixl 4 <$>
-- | An infix synonym for 'fmap'.
--
-- The name of this operator is an allusion to '$'.
-- Note the similarities between their types:
--
-- > ($) :: (a -> b) -> a -> b
-- > (<$>) :: Functor f => (a -> b) -> f a -> f b
--
-- Whereas '$' is function application, '<$>' is function
-- application lifted over a 'Functor'.
--
-- ==== __Examples__
--
-- Convert from a @'Maybe' 'Int'@ to a @'Maybe' 'String'@ using 'show':
--
-- >>> show <$> Nothing
-- Nothing
-- >>> show <$> Just 3
-- Just "3"
--
-- Convert from an @'Either' 'Int' 'Int'@ to an @'Either' 'Int'@
-- 'String' using 'show':
--
-- >>> show <$> Left 17
-- Left 17
-- >>> show <$> Right 17
-- Right "17"
--
-- Double each element of a list:
--
-- >>> (*2) <$> [1,2,3]
-- [2,4,6]
--
-- Apply 'even' to the second element of a pair:
--
-- >>> even <$> (2,2)
-- (2,True)
--
(<$>) :: Functor f => (a -> b) -> f a -> f b
(<$>) = fmap
infixl 4 $>
-- | Flipped version of '<$>'.
--
-- @
-- ('<&>') = 'flip' 'fmap'
-- @
--
-- @since 4.11.0.0
--
-- ==== __Examples__
-- Apply @(+1)@ to a list, a 'Data.Maybe.Just' and a 'Data.Either.Right':
--
-- >>> Just 2 <&> (+1)
-- Just 3
--
-- >>> [1,2,3] <&> (+1)
-- [2,3,4]
--
-- >>> Right 3 <&> (+1)
-- Right 4
--
(<&>) :: Functor f => f a -> (a -> b) -> f b
as <&> f = f <$> as
infixl 1 <&>
-- | Flipped version of '<$'.
--
-- @since 4.7.0.0
--
-- ==== __Examples__
--
-- Replace the contents of a @'Maybe' 'Int'@ with a constant 'String':
--
-- >>> Nothing $> "foo"
-- Nothing
-- >>> Just 90210 $> "foo"
-- Just "foo"
--
-- Replace the contents of an @'Either' 'Int' 'Int'@ with a constant
-- 'String', resulting in an @'Either' 'Int' 'String'@:
--
-- >>> Left 8675309 $> "foo"
-- Left 8675309
-- >>> Right 8675309 $> "foo"
-- Right "foo"
--
-- Replace each element of a list with a constant 'String':
--
-- >>> [1,2,3] $> "foo"
-- ["foo","foo","foo"]
--
-- Replace the second element of a pair with a constant 'String':
--
-- >>> (1,2) $> "foo"
-- (1,"foo")
--
($>) :: Functor f => f a -> b -> f b
($>) = flip (<$)
-- | @'void' value@ discards or ignores the result of evaluation, such
-- as the return value of an 'System.IO.IO' action.
--
-- ==== __Examples__
--
-- Replace the contents of a @'Maybe' 'Int'@ with unit:
--
-- >>> void Nothing
-- Nothing
-- >>> void (Just 3)
-- Just ()
--
-- Replace the contents of an @'Either' 'Int' 'Int'@ with unit,
-- resulting in an @'Either' 'Int' '()'@:
--
-- >>> void (Left 8675309)
-- Left 8675309
-- >>> void (Right 8675309)
-- Right ()
--
-- Replace every element of a list with unit:
--
-- >>> void [1,2,3]
-- [(),(),()]
--
-- Replace the second element of a pair with unit:
--
-- >>> void (1,2)
-- (1,())
--
-- Discard the result of an 'System.IO.IO' action:
--
-- >>> mapM print [1,2]
-- 1
-- 2
-- [(),()]
-- >>> void $ mapM print [1,2]
-- 1
-- 2
--
void :: Functor f => f a -> f ()
void x = () <$ x
| rahulmutt/ghcvm | libraries/base/Data/Functor.hs | bsd-3-clause | 3,644 | 0 | 9 | 787 | 387 | 283 | 104 | 25 | 1 |
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies,
FlexibleInstances, UndecidableInstances, FlexibleContexts #-}
-- UndecidableInstances now needed because the Coverage Condition fails
module ShouldFail where
-- A stripped down functional-dependency
-- example that causes GHC 4.08.1 to crash with:
-- "basicTypes/Var.lhs:194: Non-exhaustive patterns in function readMutTyVar"
-- Reported by Thomas Hallgren Nov 00
-- July 07: I'm changing this from "should fail" to "should succeed"
-- See Note [Important subtlety in oclose] in FunDeps
primDup :: Int -> IO Int
primDup = undefined
dup () = call primDup
-- call :: Call c h => c -> h
--
-- call primDup :: {Call (Int -> IO Int) h} => h with
-- Using the instance decl gives
-- call primDup :: {Call (IO Int) h'} => Int -> h'
-- The functional dependency means that h must be constant
-- Hence program is rejected because it can't find an instance
-- for {Call (IO Int) h'}
class Call c h | c -> h where
call :: c -> h
instance Call c h => Call (Int->c) (Int->h) where
call f = call . f
| ezyang/ghc | testsuite/tests/typecheck/should_fail/tcfail093.hs | bsd-3-clause | 1,102 | 0 | 7 | 237 | 121 | 71 | 50 | 10 | 1 |
newtype MkT2 a = MkT2 [Maybe a] deriving Show
f :: t Int -> t Int
f x = x
f2 :: t Int -> t Int -> (t Int, t Int)
f2 x y = (x,y) | urbanslug/ghc | testsuite/tests/ghci.debugger/scripts/print029.hs | bsd-3-clause | 129 | 0 | 8 | 38 | 89 | 46 | 43 | 5 | 1 |
{-# LANGUAGE LambdaCase #-}
module WykopProfile (
indexProfile
, addedProfile
, publishedProfile
, commentedProfile
, diggedProfile
, buriedProfile
, observeProfile
, unobserveProfile
, blockProfile
, unblockProfile
, followersProfile
, followedProfile
, favoritesProfile
, module WykopTypes
) where
import WykopTypes
import WykopUtils
indexProfile :: Keys -> String -> IO (Maybe Profile)
indexProfile k login = get k [] [] res
where
res = "profile/index/" ++ login
addedProfile :: Keys -> String -> Maybe Int -> IO (Maybe [Link])
addedProfile k login page = get k [] (mPageToGet page) res
where
res = "profile/added/" ++ login
publishedProfile :: Keys -> String -> Maybe Int -> IO (Maybe [Link])
publishedProfile k login page = get k [] (mPageToGet page) res
where
res = "profile/published/" ++ login
commentedProfile :: Keys -> String -> Maybe Int -> IO (Maybe [Link])
commentedProfile k login page = get k [] (mPageToGet page) res
where
res = "profile/commented/" ++ login
diggedProfile :: Keys -> String -> Maybe Int -> IO (Maybe [Link])
diggedProfile k login page = get k [] (mPageToGet page) res
where
res = "profile/digged/" ++ login
buriedProfile :: Keys -> String -> Userkey -> Maybe Int -> IO (Maybe [Link])
buriedProfile k login userKey page = get k [] getData res
where
getData = (mPageToGet page) ++ (toGet userKey)
res = "profile/buried/" ++ login
observeProfile :: Keys -> String -> Userkey -> IO (Maybe [Bool])
observeProfile k login userKey = get k [] (toGet userKey) res
where
res = "profile/observe/" ++ login
unobserveProfile :: Keys -> String -> Userkey -> IO (Maybe [Bool])
unobserveProfile k login userKey = get k [] (toGet userKey) res
where
res = "profile/unobserve/" ++ login
blockProfile :: Keys -> String -> Userkey -> IO (Maybe [Bool])
blockProfile k login userKey = get k [] (toGet userKey) res
where
res = "profile/block/" ++ login
unblockProfile :: Keys -> String -> Userkey -> IO (Maybe [Bool])
unblockProfile k login userKey = get k [] (toGet userKey) res
where
res = "profile/unblock/" ++ login
followersProfile :: Keys -> String -> Maybe Userkey -> Maybe Int -> IO (Maybe [Profile])
followersProfile k login userKey page = get k [] getData res
where
getData = (mPageToGet page) ++ (mToGet userKey)
res = "profile/followers/" ++ login
followedProfile :: Keys -> String -> Maybe Userkey -> Maybe Int -> IO (Maybe [Profile])
followedProfile k login userKey page = get k [] getData res
where
getData = (mPageToGet page) ++ (mToGet userKey)
res = "profile/followed/" ++ login
favoritesProfile :: Keys -> String -> Maybe Int -> Maybe Int -> IO (Maybe [Link])
favoritesProfile k login id page = get k [] (mPageToGet page) res
where
res = "profile/favorites/" ++ login ++ idToURL
idToURL = case id of
Just x -> "/" ++ show x
_ -> ""
| mikusp/hwykop | WykopProfile.hs | mit | 3,055 | 0 | 12 | 753 | 1,077 | 550 | 527 | 63 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module System.PassengerCheck.Health (queuedRequests, status) where
import System.PassengerCheck.Types
import System.Nagios.Plugin (CheckStatus(..))
import Data.Text (Text)
queuedRequests :: PassengerStatus -> Integer
queuedRequests stat =
requestsInTopLevelQueue stat + sum (requestsInLocalQueues stat)
status :: PassengerStatus -> (CheckStatus, Text)
status stat
| percentFull >= (0.9 :: Double) =
(Critical, "Queue is at or above 90% full")
| percentFull >= (0.5 :: Double) =
(Warning, "Queue is at or above 50% full")
| otherwise = (OK, "Queue is less than 50% full")
where percentFull
= fromIntegral (queuedRequests stat) /
fromIntegral (maxPoolSize stat)
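-- Illustrative sketch (numbers assumed): with a max pool size of 10 and a
-- total of 9 requests across the top-level and local queues, percentFull is
-- 0.9 and 'status' reports Critical; with 5 queued it reports Warning.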
| stackbuilders/passenger-check | src/System/PassengerCheck/Health.hs | mit | 755 | 0 | 10 | 149 | 194 | 108 | 86 | 18 | 1 |
--This script orients sequences according to a PWM. The sequences must
--be in the first column of the input file
--Author: Tristan Bepler ([email protected])
import qualified Data.Map as Map
import System.Environment
import System.Exit
import Debug.Trace
data Orientation = Fwd | Rvs
main = getArgs >>= parse >>= putStr
parse ["-h"] = help >> exit
parse [x,y] = do
input <- readFile x
pwm <- readFile y
return $ assign (input, pwm)
parse xs = usage >> exit
usage = putStrLn "Usage: orientseqs [-h] Seqs_File PWM_File"
help = do
putStrLn "This script takes a file containing sequences in the first column and orients them according to the given PWM.\nThe input file should not have a header.\n"
usage
exit = exitWith ExitSuccess
die = exitWith (ExitFailure 1)
assign (inputFile, pwmFile) = unlines $ filter (not.isEmpty) $ map (process pwm) $ lines inputFile where
pwm::(Map.Map Char [Double])
pwm = readPWM pwmFile
isEmpty [] = True
isEmpty x = False
process pwm row = unwords $ (str orientation):rest where
entries = words row
rest = tail $ entries
orientation = orient pwm $ head $ entries
str Fwd = head $ entries
str Rvs = rvsCompliment $ head $ entries
selectScore pwm [] = []
selectScore pwm row = unwords [str,name,(orientAsString orientation),score] where
entries = words row
str = head entries
name = entries !! 1
score = entries !! (index orientation)
orientation = orient pwm str
index Fwd = 3
index Rvs = 4
orientAsString Fwd = "Fwd"
orientAsString Rvs = "Rvs"
orient pwm str = if score < rvsScore then Rvs else Fwd where
score = scoreCenter pwm str
rvsScore = scoreCenter pwm rvscomp
rvscomp = rvsCompliment str
scoreCenter pwm str = score pwm center where
center = take pwmLen $ drop flank str
flank = (strLen - pwmLen) `div` 2
strLen = length str
pwmLen = pwmLength pwm
maxScore pwm str = maximum $ map (score pwm) substrs where
substrs = map ((take len).flip drop str) [0..((length str)-len)]
len = pwmLength pwm
rvsCompliment str = reverse $ map (comp) str where
comp 'A' = 'T'
comp 'T' = 'A'
comp 'G' = 'C'
comp 'C' = 'G'
comp x = error ("Unknown base: " ++ [x])
pwmLength pwm = minimum $ map (length) $ Map.elems pwm
score pwm str = foldl (add') 0 $ zip [0..] str where
add' s t = (score' t) + s
score' (i,c) = score'' i $ Map.lookup c pwm where
score'' i (Just xs) = xs !! i
score'' i (Nothing) = error ("Character "++[c]++" not scorable by PWM: " ++ (show pwm))
readPWM input = foldl (insert) Map.empty rows where
rows = extractPWMRows $ lines input
insert m x = Map.insert (key row) (values row) m where
row = words x
key xs = head $ head xs
values xs = map (read) $ tail xs
extractPWMRows (x:xs) = if matches row then map (unwords.extract.words) (take 4 (x:xs)) else extractPWMRows xs where
row = words x
matches ("A:":ys) = True
matches ("C:":ys) = True
matches ("G:":ys) = True
matches ("T:":ys) = True
matches ys = False
extract ("A:":ys) = "A" : ys
extract ("C:":ys) = "C" : ys
extract ("G:":ys) = "G" : ys
extract ("T:":ys) = "T" : ys
extract ys = ys
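-- Expected PWM file layout, as inferred from extractPWMRows (the weights
-- below are made up for illustration): four consecutive rows whose first
-- whitespace-separated field is "A:", "C:", "G:" or "T:", each followed by
-- one weight per motif position, e.g.
--
-- A: 0.1 0.3 0.2
-- C: 0.4 0.1 0.3
-- G: 0.2 0.4 0.1
-- T: 0.3 0.2 0.4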
| tbepler/PBM-Analysis | orientseqs.hs | mit | 3,061 | 4 | 14 | 621 | 1,255 | 643 | 612 | 82 | 10 |
module GHCJS.DOM.DeviceProximityEvent (
) where
| manyoo/ghcjs-dom | ghcjs-dom-webkit/src/GHCJS/DOM/DeviceProximityEvent.hs | mit | 50 | 0 | 3 | 7 | 10 | 7 | 3 | 1 | 0 |
module Inputs (getInput) where
import qualified Data.Map as M
import Board (Coords, Board, getBlankCoords, getNeighbors)
getKeyMoveMapping :: Board -> M.Map Char Coords
getKeyMoveMapping board =
case getBlankCoords board of
Just coords -> buildMap coords
Nothing -> M.empty
where
buildMap (x,y) = M.fromList $ map (mapF (x,y)) (getNeighbors (x,y))
mapF (x,y) (x',y')
| x' > x = ('a', (x',y'))
| x' < x = ('d', (x',y'))
| y' > y = ('w', (x',y'))
| y' < y = ('s', (x',y'))
getInput :: Board -> IO (Maybe Coords)
getInput board = do
key <- getChar
keyMap <- return $ getKeyMoveMapping board
return $ M.lookup key keyMap
| lshemesh/Slide | src/Inputs.hs | mit | 674 | 0 | 11 | 161 | 316 | 170 | 146 | 19 | 2 |
module Network.Gazelle.Types.Artist (
Artist(..),
ArtistEntry(..),
ArtistStatistics(..),
Tag(..)
) where
import Network.Gazelle.Types.Gazelle
import Network.Gazelle.Types.Id
import Data.Aeson
import Data.Scientific
import Data.Text (Text)
import Network.API.Builder
data Tag = Tag {
tagName :: Text,
tagCount :: Integer
} deriving Show
instance FromJSON Tag where
parseJSON = withObject "Tag" $ \o -> Tag <$>
o .: "name" <*>
o .: "count"
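-- Illustrative only (not part of the API docs): a JSON object such as
-- @{"name": "rock", "count": 42}@ decodes to @Tag "rock" 42@ via this
-- instance.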
data ArtistStatistics = ArtistStatistics {
asNumGroups :: Integer,
asNumTorrents :: Integer,
asNumSeeders :: Integer,
asNumLeechers :: Integer,
asNumSnatches :: Integer
} deriving Show
instance FromJSON ArtistStatistics where
parseJSON = withObject "ArtistStatistics" $ \o -> ArtistStatistics <$>
o .: "numGroups" <*>
o .: "numTorrents" <*>
o .: "numSeeders" <*>
o .: "numLeechers" <*>
o .: "numSnatches"
data ArtistEntry = ArtistEntry {
aeId :: Integer,
aeName :: Text,
aeScore :: Integer,
aeSimilarId :: SimilarId
} deriving Show
instance FromJSON ArtistEntry where
parseJSON = withObject "ArtistEntry" $ \o -> ArtistEntry <$>
o .: "artistId" <*>
o .: "name" <*>
o .: "score" <*>
o .: "similarId"
data Artist = Artist {
aId :: ArtistID,
aName :: Text,
aNotificationsEnabled :: Bool,
aHasBookmarked :: Bool,
aImage :: Text,
aBody :: Text,
aVanityHouse :: Bool,
aTags :: [Tag],
aSimilarArtists :: [ArtistEntry],
aStatistics :: ArtistStatistics
} deriving Show
instance FromJSON Artist where
parseJSON = withObject "Artist" $ \o -> Artist <$>
o .: "id" <*>
o .: "name" <*>
o .: "notificationsEnabled" <*>
o .: "hasBookmarked" <*>
o .: "image" <*>
o .: "body" <*>
o .: "vanityHouse" <*>
o .: "tags" <*>
o .: "similarArtists" <*>
o .: "statistics"
instance Receivable Artist where
receive = useResponseFromJSON
| mr/gazelle | src/Network/Gazelle/Types/Artist.hs | mit | 2,047 | 0 | 27 | 557 | 539 | 312 | 227 | 71 | 0 |
module Glucose.Test.IR.Core where
import Control.Comonad
import Data.Text (Text)
import Glucose.Identifier (Identifier (..))
import Glucose.IR
import Glucose.Parser.Source
import Glucose.Test.Source
-- * Without source locations
constantAnywhere :: Text -> Literal -> FromSource (Definition ann)
constantAnywhere name lit = definitionAnywhere name (Literal lit)
constructorAnywhere :: Text -> Text -> Int -> FromSource (Definition ann)
constructorAnywhere ty ctor index = definitionAnywhere ctor $ Constructor (fromSource $ Identifier ty) index
definitionAnywhere :: Text -> Expression ann -> FromSource (Definition ann)
definitionAnywhere name value = definition (fromSource name) (fromSource value)
-- * With source locations
constant :: FromSource Text -> FromSource Literal -> FromSource (Definition ann)
constant name lit = definition name (Literal <$> lit)
constructor :: FromSource Text -> FromSource Text -> Int -> FromSource (Definition ann)
constructor ty ctor index = definition ctor $ ctor $> Constructor (Identifier <$> ty) index
definition :: FromSource Text -> FromSource (Expression ann) -> FromSource (Definition ann)
definition name value = Definition <$> duplicate (Identifier <$> name) <*> duplicate value
reference :: RefKind ann -> FromSource Text -> Type ann -> FromSource (Expression ann)
reference kind name ty = (\n -> Reference kind (Identifier n) ty) <$> name
| sardonicpresence/glucose | test/Glucose/Test/IR/Core.hs | mit | 1,431 | 0 | 10 | 233 | 465 | 234 | 231 | 21 | 1 |
module Main where
import System.Environment
import PropDoc.Core
import PropDoc.Input.JavaProps
-- IO
main = do
args <- getArgs
name <- return (head args)
contents <- readFile name
let defs = props nameValuePair Nothing [] (lines contents)
putStr $ foldr (\a acc -> acc ++ (show a)) "" defs | chrislewis/PropDoc | src/Main.hs | mit | 302 | 0 | 13 | 59 | 119 | 60 | 59 | 10 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Data.IP.Extra where
import Data.IP
-- Return a List of AddrRanges within a given AddrRange
-- that have the given netmask. If the given netmask is
-- smaller (i.e. less specific) than the netmask of the original
-- AddrRange, an empty list is returned.
ranges :: AddrRange IPv4 -> Int -> [AddrRange IPv4]
ranges addrRange mask
| m > mask = []
| otherwise = [makeAddrRange (intToIPv4 i) mask | i <- [first,first+step..last]]
where
(r, m) = addrRangePair addrRange
first = iPv4ToInt r
last = first+(2^(32-m))-1
step = 2^(32-mask)
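-- Illustrative example, worked out by hand from the definition above:
--
-- >>> ranges (makeAddrRange (toIPv4 [10,0,0,0]) 24) 26
-- [10.0.0.0/26,10.0.0.64/26,10.0.0.128/26,10.0.0.192/26]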
iPv4ToInt :: IPv4 -> Int
iPv4ToInt i =
let (o1:o2:o3:o4:_) = fromIPv4 i
oct n pow = n*((256::Int)^(pow::Int)) in
(oct o1 3) + (oct o2 2) + (oct o3 1) + o4
intToIPv4 :: Int -> IPv4
intToIPv4 i =
let (i', o4) = i `divMod` 256
(i'', o3) = i' `divMod` 256
(o1, o2) = i'' `divMod` 256 in
toIPv4 [o1, o2, o3, o4]
| rjosephwright/awstemplate | src/Data/IP/Extra.hs | mit | 909 | 0 | 13 | 208 | 384 | 208 | 176 | 22 | 1 |
module Main where
import Prelude
import qualified Hasql.Connection as A
import qualified Hasql.Session as B
import qualified Hasql.Transaction as C
import qualified Hasql.Transaction.Sessions as G
import qualified Main.Statements as D
import qualified Main.Transactions as E
import qualified Control.Concurrent.Async as F
main =
bracket acquire release use
where
acquire =
(,) <$> acquire <*> acquire
where
acquire =
join $
fmap (either (fail . show) return) $
A.acquire connectionSettings
where
connectionSettings =
A.settings "localhost" 5432 "postgres" "" "postgres"
release (connection1, connection2) =
do
transaction connection1 E.dropSchema
A.release connection1
A.release connection2
use (connection1, connection2) =
do
try (transaction connection1 E.dropSchema) :: IO (Either SomeException ())
transaction connection1 E.createSchema
success <- fmap and (traverse runTest tests)
if success
then exitSuccess
else exitFailure
where
runTest test =
test connection1 connection2
tests =
[readAndWriteTransactionsTest, transactionsTest, transactionAndQueryTest]
session connection session =
B.run session connection >>=
either (fail . show) return
transaction connection transaction =
session connection (G.transaction G.RepeatableRead G.Write transaction)
type Test =
A.Connection -> A.Connection -> IO Bool
transactionsTest :: Test
transactionsTest connection1 connection2 =
do
id1 <- session connection1 (B.statement 0 D.createAccount)
id2 <- session connection1 (B.statement 0 D.createAccount)
async1 <- F.async (replicateM_ 1000 (transaction connection1 (E.transfer id1 id2 1)))
async2 <- F.async (replicateM_ 1000 (transaction connection2 (E.transfer id1 id2 1)))
F.wait async1
F.wait async2
balance1 <- session connection1 (B.statement id1 D.getBalance)
balance2 <- session connection1 (B.statement id2 D.getBalance)
traceShowM balance1
traceShowM balance2
return (balance1 == Just 2000 && balance2 == Just (-2000))
readAndWriteTransactionsTest :: Test
readAndWriteTransactionsTest connection1 connection2 =
do
id1 <- session connection1 (B.statement 0 D.createAccount)
id2 <- session connection1 (B.statement 0 D.createAccount)
async1 <- F.async (replicateM_ 1000 (transaction connection1 (E.transfer id1 id2 1)))
async2 <- F.async (replicateM_ 1000 (transaction connection2 (C.statement id1 D.getBalance)))
F.wait async1
F.wait async2
balance1 <- session connection1 (B.statement id1 D.getBalance)
balance2 <- session connection1 (B.statement id2 D.getBalance)
traceShowM balance1
traceShowM balance2
return (balance1 == Just 1000 && balance2 == Just (-1000))
transactionAndQueryTest :: Test
transactionAndQueryTest connection1 connection2 =
do
id1 <- session connection1 (B.statement 0 D.createAccount)
id2 <- session connection1 (B.statement 0 D.createAccount)
async1 <- F.async (transaction connection1 (E.transferTimes 200 id1 id2 1))
async2 <- F.async (session connection2 (replicateM_ 200 (B.statement (id1, 1) D.modifyBalance)))
F.wait async1
F.wait async2
balance1 <- session connection1 (B.statement id1 D.getBalance)
balance2 <- session connection1 (B.statement id2 D.getBalance)
traceShowM balance1
traceShowM balance2
return (balance1 == Just 400 && balance2 == Just (-200))
| nikita-volkov/hasql-transaction | conflicts-test/Main.hs | mit | 3,586 | 0 | 15 | 778 | 1,147 | 558 | 589 | 85 | 2 |
module HsSearch.SearchResultTest
( getBinaryFileSearchResultTests
, getMultiLineSearchResultTests
, getSingleLineSearchResultTests
) where
import qualified Data.ByteString.Char8 as BC
import HsSearch.Config
import HsSearch.SearchResult
import HsSearch.SearchSettings
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.HUnit hiding (Test)
testFileLineNum = 10
testFileMatchStartIndex = 15
testFileMatchEndIndex = 23
testFileLine = BC.pack "\tpublic class Searcher\n"
getBinaryFileSearchResultTests :: IO [Test]
getBinaryFileSearchResultTests = do
xsearchPath <- getXsearchPath
let binaryFilePath = xsearchPath ++ "/csharp/CsSearch/CsSearch/Searcher.exe"
let binaryFileSearchResult = blankSearchResult { filePath=binaryFilePath
, lineNum=0
, matchStartIndex=0
, matchEndIndex=0
}
let settings = defaultSearchSettings
let formattedResult = formatSearchResult settings binaryFileSearchResult
let expectedFormat = binaryFilePath ++ " matches at [0:0]"
return [testCase "binaryFileSearchResult" (formattedResult @?= expectedFormat)]
getSingleLineSearchResultTests :: IO [Test]
getSingleLineSearchResultTests = do
xsearchPath <- getXsearchPath
let testFilePath = xsearchPath ++ "/csharp/CsSearch/CsSearch/Searcher.cs"
let singleLineSearchResult = blankSearchResult { filePath=testFilePath
, lineNum=testFileLineNum
, matchStartIndex=testFileMatchStartIndex
, matchEndIndex=testFileMatchEndIndex
, line=testFileLine
}
let settings = defaultSearchSettings { colorize=False }
let formattedResult = formatSearchResult settings singleLineSearchResult
let expectedFormat = testFilePath ++ ": " ++ show testFileLineNum ++ ": [" ++
show testFileMatchStartIndex ++ ":" ++
show testFileMatchEndIndex ++ "]: " ++
trimLeadingWhitespace (BC.unpack testFileLine)
return [testCase "singleLineSearchResult" (formattedResult @?= expectedFormat)]
getMultiLineSearchResultTests :: IO [Test]
getMultiLineSearchResultTests = do
xsearchPath <- getXsearchPath
let testFilePath = xsearchPath ++ "/csharp/CsSearch/CsSearch/Searcher.cs"
let lb = [ BC.pack "namespace CsSearch\n"
, BC.pack "{\n" ]
let la = [ BC.pack "\t{\n"
, BC.pack "\t\tprivate readonly FileTypes _fileTypes;\n" ]
let multiLineSearchResult = blankSearchResult { filePath=testFilePath
, lineNum=testFileLineNum
, matchStartIndex=testFileMatchStartIndex
, matchEndIndex=testFileMatchEndIndex
, line=testFileLine
, beforeLines=lb
, afterLines=la
}
let settings = defaultSearchSettings { colorize=False, linesBefore=2, linesAfter=2 }
let formattedResult = formatSearchResult settings multiLineSearchResult
let expectedFormat = replicate 80 '=' ++ "\n" ++ testFilePath ++ ": " ++
show testFileLineNum ++ ": [" ++
show testFileMatchStartIndex ++ ":" ++
show testFileMatchEndIndex ++ "]\n" ++
replicate 80 '-' ++ "\n" ++
" 8 | namespace CsSearch\n" ++
" 9 | {\n" ++
"> 10 | \tpublic class Searcher\n" ++
" 11 | \t{\n" ++
" 12 | \t\tprivate readonly FileTypes _fileTypes;\n"
return [testCase "multiLineSearchResult" (formattedResult @?= expectedFormat)]
| clarkcb/xsearch | haskell/hssearch/test/HsSearch/SearchResultTest.hs | mit | 4,148 | 0 | 26 | 1,467 | 702 | 368 | 334 | 71 | 1 |
module Shipper (
startShipper,
Event,
Input(..),
Output(..),
) where
import Shipper.Inputs
import Shipper.Outputs
import Shipper.Types
import Shipper.Event (maxPacketSize)
import Control.Concurrent.STM (atomically)
import Control.Concurrent.STM.TBQueue
import Control.Concurrent
import Control.Monad
import System.ZMQ4 (curveKeyPair)
-- How long inputs must sleep when there is no more input to read
waitTime :: Int
waitTime = 1000000 -- 1s
queueSize :: Int
queueSize = maxPacketSize
startShipper :: [ConfigSegment] -> IO ()
startShipper segments = do
-- Events read from inputs come through this 'channel'
in_ch <- atomically $ newTBQueue queueSize
when (null inputSegments) $ error "No inputs specified"
when (null outputSegments) $ error "No outputs specified"
-- Do something useful for each input segment, we hand all inputs the same
-- channel to stream events over
forM_ inputSegments $ \(InputSegment i) -> case i of
FileInput _ _ -> forkIO $ startFileInput in_ch i waitTime
ZMQ4Input _ _ -> forkIO $ startZMQ4Input in_ch i waitTime
k <- curveKeyPair
-- Output segments however, each get thier own channel. This is so that
-- inputs all block when any given output blocks. That way we don't leak
-- any memory and outputs don't get out of sync when a single output dies.
out_chs <- forM outputSegments $ \(OutputSegment o) -> do
out_chan <- atomically $ newTBQueue queueSize
case o of
Debug -> forkIO $ startDebugOutput out_chan waitTime
ZMQ4Output _ _ _ -> forkIO $ startZMQ4Output out_chan waitTime o k
Redis _ _ _ _ -> forkIO $ startRedisOutput out_chan waitTime o
return out_chan
forever $ do
-- For every event that comes in, try to send it to every output
-- channel. This way, if an output gets clogged we can block all the
-- way back to every input magically, and no output should get more
-- than one event more than another.
event <- atomically $ readTBQueue in_ch
forM_ out_chs $ \ch -> atomically $ writeTBQueue ch event
where
isInputSegment (InputSegment _) = True
isInputSegment _ = False
isOutputSegment (OutputSegment _) = True
isOutputSegment _ = False
inputSegments = filter isInputSegment segments
outputSegments = filter isOutputSegment segments
| christian-marie/pill-bug | Shipper.hs | mit | 2,463 | 0 | 16 | 628 | 505 | 259 | 246 | 43 | 6 |
module FactorTable (computeForList, toList) where
import qualified Data.Map as M
import qualified Data.List as L
import qualified Data.Set as S
import Types
toList :: FactorTable -> [(Int, [Int])]
toList = M.toList
computeForList :: [Int] -> FactorTable
computeForList = L.foldl' insert empty . L.sort . unique
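-- Illustrative example (computed by hand): each key maps to the divisors of
-- that key that also occur in the input list, e.g.
--
--   toList (computeForList [12, 3, 4, 6, 2, 8, 1])
--     == [(1,[]),(2,[1]),(3,[1]),(4,[1,2]),(6,[1,2,3]),(8,[1,2,4]),(12,[1,2,3,4,6])]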
insert :: FactorTable -> Int -> FactorTable
insert _ 0 = error "0 is not a valid divisor"
insert table n = M.insert n divisors table
where divisors = filter (`divides` n) candidates
-- Note: Because the numbers we insert are coming in
-- already sorted, we only have to check prior keys.
candidates = M.keys table
empty :: FactorTable
empty = M.empty
divides :: Int -> Int -> Bool
divides x y = y `mod` x == 0
unique :: Ord a => [a] -> [a]
unique = S.toList . S.fromList
----------
-- Note: These functions aren't used. They're simply illustrations
-- of how to invert the factoring procedure so that divisors point to
-- divisees rather than the other way around. As you can see, they're
-- very simple modifications of `computeForList` and `insert` above.
computeForList' = L.foldl' insert' empty . reverse . L.sort . unique
insert' table n = M.insert n divisees table
where divisees = filter (n `divides`) candidates
candidates = M.keys table
| mg50avant/factorizer | src/FactorTable.hs | mit | 1,305 | 0 | 9 | 260 | 334 | 190 | 144 | 24 | 1 |
module ChatCore.Util.Error where
import Control.Exception
import Control.Monad
-- | Runs the given IO action and wraps the return value in Nothing if an
-- exception is raised.
tryMaybe :: IO a -> IO (Maybe a)
tryMaybe f = catch (Just `liftM` f) (return . catchNothing)
where
catchNothing :: SomeException -> Maybe a
catchNothing _ = Nothing
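-- Illustrative examples (the file path is made up):
--
-- >>> tryMaybe (return 5)
-- Just 5
--
-- >>> tryMaybe (readFile "/definitely/not/there")
-- Nothing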
| Forkk/ChatCore | ChatCore/Util/Error.hs | mit | 355 | 0 | 8 | 69 | 91 | 50 | 41 | 7 | 1 |
module Exercise where
data OA = Add | Mul
data OB = EQu | GTh | LTh
data ExprA = Const Int
| Var Char
| OpA OA ExprA ExprA
| If ExprB ExprA ExprA
data ExprB = OpB OB ExprA ExprA
evalA :: ExprA -> Int
evalA (Const a) = a
evalA (OpA Add expr1 expr2) = (evalA expr1) + (evalA expr2)
evalA (OpA Mul expr1 expr2) = (evalA expr1) * (evalA expr2)
evalA (If exprB expr1 expr2) | (evalB exprB) = evalA expr1
| otherwise = evalA expr2
evalB :: ExprB -> Bool
evalB (OpB EQu expr1 expr2) = evalA expr1 == evalA expr2
evalB (OpB GTh expr1 expr2) = evalA expr1 > evalA expr2
evalB (OpB LTh expr1 expr2) = evalA expr1 < evalA expr2
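-- A small usage sketch (illustrative, not part of the exercise statement):
exampleTerm :: ExprA
exampleTerm = If (OpB GTh (Const 3) (Const 2)) (Const 10) (Const 20)
-- evalA exampleTerm == 10, because evalB (OpB GTh (Const 3) (Const 2)) is True.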
| tcoenraad/functioneel-programmeren | 2012/opg2a.hs | mit | 683 | 0 | 9 | 194 | 301 | 152 | 149 | 18 | 1 |
{- hpodder component
Copyright (C) 2006-2007 John Goerzen <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Commands
Copyright : Copyright (C) 2006 John Goerzen
License : GNU GPL, version 2 or above
Maintainer : John Goerzen <[email protected]>
Stability : provisional
Portability: portable
Written by John Goerzen, jgoerzen\@complete.org
-}
module Commands where
import Data.List
import Text.Printf
import Utils
import Types
import Config
import Data.ConfigFile
import Data.Either.Utils
import qualified Commands.Add
import qualified Commands.Ls
import qualified Commands.Update
import qualified Commands.Download
import qualified Commands.Setup
import qualified Commands.Catchup
import qualified Commands.ImportIpodder
import qualified Commands.Rm
import qualified Commands.SetStatus
import qualified Commands.SetTitle
import qualified Commands.EnableDisable
--allCommands :: [(String, Command)]
allCommands =
[Commands.Add.cmd,
Commands.Catchup.cmd,
Commands.EnableDisable.cmd_disable,
Commands.Download.cmd,
Commands.EnableDisable.cmd_enable,
fetch,
Commands.ImportIpodder.cmd,
Commands.Ls.lscasts,
lscommands,
Commands.Ls.lsepisodes,
Commands.Ls.lseps,
Commands.Rm.cmd,
Commands.SetStatus.cmd,
Commands.SetTitle.cmd,
Commands.Update.cmd]
lscommands =
simpleCmd "lscommands" "Display a list of all available commands" ""
[] lscommands_worker
lscommands_worker _ _ =
do putStrLn "All available commands:"
printf "%-20s %s\n" "Name" "Description"
putStrLn "-------------------- -------------------------------------------------------"
mapM_ (\(_, x) -> printf "%-20s %s\n" (cmdname x) (cmddescrip x))
allCommands
fetch =
simpleCmd "fetch" "Scan feeds, then download new episodes" fetch_help
[] fetch_worker
fetch_worker gi ([], casts) =
do cp <- loadCP
let showintro = forceEither $ get cp "general" "showintro"
if showintro
then Commands.Setup.cmd_worker gi ([], [])
else do Commands.Update.cmd_worker gi ([], casts)
Commands.Download.cmd_worker gi ([], casts)
fetch_worker _ _ =
fail $ "Invalid arguments to fetch; please see hpodder fetch --help"
fetch_help = "Usage: hpodder fetch [castid [castid...]]\n\n" ++
genericIdHelp ++
"\nThe fetch command will cause hpodder to scan all feeds (as with\n\
\\"hpodder update\") and then download all new episodes (as with\n\
\\"hpodder download\").\n"
| jgoerzen/hpodder | Commands.hs | gpl-2.0 | 3,278 | 0 | 12 | 684 | 440 | 252 | 188 | 59 | 2 |
module Main where
import System.FilePath
import System.Directory
import Rm
import System.Environment (getArgs)
main :: IO ()
main = do
args <- getArgs
cur_path <- getCurrentDirectory
home <- getHomeDirectory
mapM_ (rmObject home cur_path) args
rmObject :: FilePath -> FilePath -> FilePath -> IO Bool
rmObject home cur_path obj = do
let (path, name) = splitFileName obj
(pth, nm) = if name == "" then
(joinPath $ init $ splitPath path, last (splitPath path))
else
(path, name)
putStrLn $ "Path: " ++ pth ++ ", Name: " ++ nm
runToIO (handleObject nm) (Rms {tempdir=joinPath [home, ".rm.hs-temp"], base=joinPath [cur_path, pth],
rel_path=""})
| sebiva/rm.hs | src/Main.hs | gpl-2.0 | 762 | 0 | 14 | 220 | 260 | 137 | 123 | 20 | 2 |
solve :: [String] -> String
solve xs = map head xs
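-- e.g. solve ["ab", "cd", "ef"] == "ace" -- the first character of each word (illustrative)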
main :: IO ()
main = do
c <- getLine
putStrLn $ solve (words c)
| m00nlight/hackerrank | functional/contests/Lambda-Calculi-10/A.hs | gpl-2.0 | 120 | 0 | 10 | 31 | 64 | 31 | 33 | 6 | 1 |
-- Aufgabe 12.5
-- a)
data Term = Lit Int | Add Term Term | Mult Term Term deriving Show
-- b)
-- This is the solution given in the text book all credit to the author
eval' :: Term -> Term
eval' t = Lit (eval'' t)
eval'' :: Term -> Int
eval'' (Add t1 t2) = (eval'' t1) + (eval'' t2)
eval'' (Mult t1 t2) = (eval'' t1) * (eval'' t2)
eval'' (Lit a) = a
-- c)
transform :: [Term] -> [Term]
transform [] = []
transform (x:xs) = if (eval'' x) < 0
then (transform xs)
else ((eval' x) : (transform xs))
| KaliszAd/programmierung2012 | aufgabe12_5.hs | gpl-2.0 | 521 | 20 | 9 | 132 | 250 | 133 | 117 | 12 | 2 |
module Luhn (addends, checkDigit, checksum, create, isValid) where
import Data.Digits
checkDigit :: Integer -> Integer
checkDigit = last . digits 10
addends :: Integer -> [Integer]
addends =
reverse . dblEveryOther . digitsRev 10
where
dblEveryOther = everyOther' (clean . (*2))
clean n = if n < 10 then n else n - 9
checksum :: Integer -> Integer
checksum n = sum (addends n) `mod` 10
isValid :: Integer -> Bool
isValid = (==0) . checksum
-- this is sort of cheeky, try all the digits and grab the first one that is
-- valid. Worst case performance only requires 10 appends, digits and undigits
-- so it's fine to do it this way.
create :: Integer -> Integer
create n =
head [ x | d <- [0..9]
, let x = unDigits 10 (digits 10 n ++ [d])
, isValid x
]
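-- Illustrative examples, worked out by hand from the definitions above:
--
-- >>> addends 12121
-- [1,4,1,4,1]
--
-- >>> checksum 12121
-- 1
--
-- >>> create 491
-- 4911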
-- Just because it is nicer to do it this way
everyOther, everyOther' :: (a -> a) -> [a] -> [a]
everyOther f = zipWith ($) (cycle [f,id])
everyOther' f = zipWith ($) (cycle [id,f])
| ciderpunx/exercismo | src/Luhn.hs | gpl-3.0 | 999 | 0 | 14 | 248 | 333 | 186 | 147 | 21 | 2 |
-- Print out the nth prime, where n is the 1st argument
module Main where
import NaiveSieveLimit (primesToNth)
import System (getArgs)
printNthPrime :: Int -> IO ()
printNthPrime n = print (n, last(primesToNth n))
main = do
args <- getArgs
printNthPrime $ read $ head args
| dkensinger/haskell | haskell-primes/NaiveSieveLimitTest.hs | gpl-3.0 | 286 | 0 | 9 | 59 | 87 | 46 | 41 | 8 | 1 |
{-# LANGUAGE TypeApplications #-}
layoutPatternBindFinal alignmentToken binderDoc mPatDoc clauseDocs = do
docAlt
$ -- one-line solution
[ docCols
(ColBindingLine alignmentToken)
[ docSeq (patPartInline ++ [guardPart])
, docSeq
[ appSep $ return binderDoc
, docForceSingleline $ return body
, wherePart
]
]
| not hasComments
, [(guards, body, _bodyRaw)] <- [clauseDocs]
, let guardPart = singleLineGuardsDoc guards
, wherePart <- case mWhereDocs of
Nothing -> return @[] $ docEmpty
Just [w] -> return @[] $ docSeq
[ docSeparator
, appSep $ docLit $ Text.pack "where"
, docSetIndentLevel $ docForceSingleline $ return w
]
_ -> []
]
++ -- one-line solution + where in next line(s)
[ docLines
$ [ docCols
(ColBindingLine alignmentToken)
[ docSeq (patPartInline ++ [guardPart])
, docSeq
[appSep $ return binderDoc, docForceParSpacing $ return body]
]
]
++ wherePartMultiLine
| [(guards, body, _bodyRaw)] <- [clauseDocs]
, let guardPart = singleLineGuardsDoc guards
, Data.Maybe.isJust mWhereDocs
]
++ -- two-line solution + where in next line(s)
[ docLines
$ [ docForceSingleline
$ docSeq (patPartInline ++ [guardPart, return binderDoc])
, docEnsureIndent BrIndentRegular $ docForceSingleline $ return
body
]
++ wherePartMultiLine
| [(guards, body, _bodyRaw)] <- [clauseDocs]
, let guardPart = singleLineGuardsDoc guards
]
| lspitzner/brittany | data/Test308.hs | agpl-3.0 | 1,790 | 0 | 21 | 664 | 435 | 227 | 208 | 41 | 3 |
module Sieve where
findPrime :: [Int] -> [Int]
findPrime [] = []
findPrime (x : xs) = x : (findPrime $ filter (\a -> a `mod` x /= 0) xs)
primes :: Int -> [Int]
primes n = findPrime [2 .. n]
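-- A small usage sketch (illustrative only):
smallPrimes :: [Int]
smallPrimes = primes 10 -- == [2,3,5,7]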
| ice1000/OI-codes | codewars/101-200/sieve-of-eratosthenes.hs | agpl-3.0 | 198 | 0 | 12 | 50 | 109 | 61 | 48 | 6 | 1 |
{-
Small tool for converting old FP (FixedPrecision) files to a binary IEEE
format float files. The original file is retained: renamed with a ".dat" suffix in 'w' mode, or an "-old" suffix in 'all' mode.
Usage:
./FP2floats [all|w|rr|rbs] infile
w - renames the FP-formatted infile to infile.dat and writes the float-formatted result under the original name
all - specify directory as infile, converts all .dat files to float format
rr - prints contents of a repa (float-formatted) file to stdout
rbs - prints contents of a bytestring (FP-formatted) file to stdout
-}
import Control.Monad (zipWithM, zipWithM_)
import Data.Array.Repa hiding ( (++), map )
import Data.Array.Repa.IO.Binary (readArrayFromStorableFile)
import Data.Array.Repa.Repr.ForeignPtr
import Data.Binary.IEEE754 (putFloat32le)
import Data.Binary.Put (runPut)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as S
import System.Environment (getArgs)
import System.IO (openBinaryFile, IOMode(ReadMode), hFileSize, hClose)
import System.Posix.Files (rename)
import System.Process (readProcess)
import Text.Regex.Posix
import FixedPrecisionMath
toIEEE :: Float -> S.ByteString
toIEEE a = runPut $ putFloat32le a
-- Gets the list of *.dat files in a directory
files :: String -> IO [String]
files dir = do
ls <- readProcess ("ls") [dir] []
let files = lines ls
pat = "(\\.dat)"
return $ map ((dir++"/")++) $ filter (\x -> x =~ pat :: Bool) files
-- Renames all files in the list by adding a suffix to their names
-- and returns the list of new names
renameAll :: [String] -> String -> IO [String]
renameAll files suffix = do
let suffixes = map (++suffix) files
zipWithM_ rename files suffixes
return suffixes
fileSize :: FilePath -> IO Int
fileSize path = do
h <- openBinaryFile path ReadMode
sz <- hFileSize h
hClose h
return $ fromIntegral sz
write :: String -> String -> IO ()
write input output = do
h <- openBinaryFile input ReadMode
b <- BS.hGetContents h
let strings = map toIEEE $ bytesToFloats b
S.writeFile output $ S.concat strings
writeOne :: String -> IO ()
writeOne input = do
let oldName = input ++ ".dat"
rename input oldName
write oldName input
writeAll :: String -> IO ()
writeAll dir = do
newNames <- files dir
oldNames <- renameAll newNames "-old"
zipWithM_ write oldNames newNames
readRepa :: String -> IO ()
readRepa input = do
sz <- fileSize input
vs <- readArrayFromStorableFile
input
(Z :. ((fromIntegral sz) `div` 4)) :: IO (Array F DIM1 Float)
let floats = toList vs
mapM_ (putStrLn . show) floats
readBS :: String -> IO ()
readBS input = do
h <- openBinaryFile input ReadMode
b <- BS.hGetContents h
let floats = bytesToFloats b
mapM_ (putStrLn . show) floats
main = do
[mode, infile] <- getArgs
case mode of
"w" -> writeOne infile
"all" -> writeAll infile
"rr" -> readRepa infile
"rbs" -> readBS infile
_ -> putStrLn "wrong arguments"
| kvelicka/Fizz | FP2floats.hs | lgpl-2.1 | 2,919 | 0 | 14 | 615 | 860 | 436 | 424 | 71 | 5 |
module Main where
import E
main :: IO ()
main = lq
| haroldcarr/learn-haskell-coq-ml-etc | haskell/topic/existentials/2017-03-arnaud-bailly-understanding-existentials/app/Main.hs | unlicense | 54 | 0 | 6 | 15 | 22 | 13 | 9 | 4 | 1 |
{-# OPTIONS_GHC -Wall -}
{-
- Module about terms. Used after parsing eMOD
-
-
- Copyright 2013 -- name removed for blind review, all rights reserved! Please push a git request to receive author's name! --
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-}
module Terms(Term(..),(.+.)) where
import qualified Data.Set as Set
data Term = T_AND Term Term | T_OR Term Term | T_NOT Term | T_XOR Term Term | T_ITE Term Term Term
| T_QTRDY String | T_QIRDY String | T_QDATA String Int
| T_OTRDY String | T_IIRDY String | T_IDATA String Int
| T_FLOPV String | T_RESET
| T_UNKNOWN Int
| T_INPUT String -- same as unknown, only named
| T_VALUE Bool
deriving (Eq,Ord,Show)
type TermSet = Set.Set Term
(.+.) :: TermSet -> TermSet -> TermSet
(.+.) = Set.union | DatePaper616/code | Terms.hs | apache-2.0 | 1,324 | 32 | 8 | 299 | 273 | 166 | 107 | -1 | -1 |
{-# LANGUAGE LambdaCase, Rank2Types #-}
module Drasil.ExtractDocDesc (getDocDesc, egetDocDesc, ciGetDocDesc, sentencePlate) where
import Control.Lens((^.))
import Drasil.DocumentLanguage.Core
import Drasil.Sections.SpecificSystemDescription (inDataConstTbl, outDataConstTbl)
import Language.Drasil hiding (Manual, Vector, Verb)
import Theory.Drasil (Theory(..))
import Data.List(transpose)
import Data.Functor.Constant (Constant(Constant))
import Data.Generics.Multiplate (appendPlate, foldFor, purePlate, preorderFold)
secConPlate :: Monoid b => (forall a. HasContents a => [a] -> b) ->
([Section] -> b) -> DLPlate (Constant b)
secConPlate mCon mSec = preorderFold $ purePlate {
refSec = Constant <$> \(RefProg c _) -> mCon [c],
introSub = Constant <$> \case
(IOrgSec _ _ s _) -> mSec [s]
_ -> mempty,
--gsdSec = Constant <$> \case
-- (GSDProg _) -> mempty,
gsdSub = Constant <$> \case
(SysCntxt c) -> mCon c
(UsrChars c) -> mCon c
(SystCons c s) -> mCon c `mappend` mSec s,
pdSec = Constant <$> \(PDProg _ s _) -> mSec s,
pdSub = Constant <$> \case
(TermsAndDefs _ _) -> mempty
(PhySysDesc _ _ lc c) -> mCon [lc] `mappend` mCon c
(Goals _ _) -> mempty,
scsSub = Constant <$> \case
(Constraints _ c) -> mCon [inDataConstTbl c]
(CorrSolnPpties c cs) -> mCon [outDataConstTbl c] `mappend` mCon cs
_ -> mempty,
reqSub = Constant <$> \case
(FReqsSub' _ c) -> mCon c
(FReqsSub _ c) -> mCon c
(NonFReqsSub _) -> mempty,
offShelfSec = Constant <$> \(OffShelfSolnsProg c) -> mCon c,
appendSec = Constant <$> \(AppndxProg c) -> mCon c
}
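-- Plate collecting every Expr in a document: expressions embedded in sentences and
-- contents, plus the defining/relational expressions of theory, data-definition,
-- general-definition, and instance-model chunks.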
exprPlate :: DLPlate (Constant [Expr])
exprPlate = sentencePlate (concatMap sentToExp) `appendPlate` secConPlate (concatMap egetCon')
(concatMap egetSec) `appendPlate` (preorderFold $ purePlate {
scsSub = Constant <$> \case
(TMs _ _ t) -> let r = concatMap (\x -> x ^. invariants ++
defExp (x ^. defined_quant ++ x ^. defined_fun) ++
r (x ^. valid_context)) in r t
(DDs _ _ d _) -> map sy d ++ defExp d
(GDs _ _ g _) -> expRel g
(IMs _ _ i _) -> expRel i
_ -> [],
auxConsSec = Constant <$> \(AuxConsProg _ qdef) -> defExp qdef
})where
defExp :: DefiningExpr a => [a] -> [Expr]
defExp = map (^. defnExpr)
expRel :: ExprRelat a => [a] -> [Expr]
expRel = map (^. relat)
sentToExp :: Sentence -> [Expr]
sentToExp ((:+:) s1 s2) = sentToExp s1 ++ sentToExp s2
sentToExp (E e) = [e]
sentToExp _ = []
fmGetDocDesc :: DLPlate (Constant [a]) -> DocDesc -> [a]
fmGetDocDesc p = concatMap (foldFor docSec p)
egetDocDesc :: DocDesc -> [Expr]
egetDocDesc = fmGetDocDesc exprPlate
egetSec :: Section -> [Expr]
egetSec (Section _ sc _ ) = concatMap egetSecCon sc
egetSecCon :: SecCons -> [Expr]
egetSecCon (Sub s) = egetSec s
egetSecCon (Con c) = egetCon' c
egetCon' :: HasContents a => a -> [Expr]
egetCon' = egetCon . (^. accessContents)
egetCon :: RawContent -> [Expr]
egetCon (EqnBlock e) = [e]
egetCon (Defini _ []) = []
egetCon (Defini dt (hd:tl)) = concatMap egetCon' (snd hd) ++ egetCon (Defini dt tl)
egetCon _ = []
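-- Plate collecting every Sentence in a document (section titles, contents, definitions,
-- derivations, notes, ...), folded together with the supplied combining function.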
sentencePlate :: Monoid a => ([Sentence] -> a) -> DLPlate (Constant a)
sentencePlate f = appendPlate (secConPlate (f . concatMap getCon') $ f . concatMap getSec) $
preorderFold $ purePlate {
introSec = Constant . f <$> \(IntroProg s1 s2 _) -> [s1, s2],
introSub = Constant . f <$> \case
(IPurpose s) -> [s]
(IScope s) -> [s]
(IChar s1 s2 s3) -> concat [s1, s2, s3]
(IOrgSec s1 _ _ s2) -> [s1, s2],
stkSub = Constant . f <$> \case
(Client _ s) -> [s]
(Cstmr _) -> [],
pdSec = Constant . f <$> \(PDProg s _ _) -> [s],
pdSub = Constant . f <$> \case
(TermsAndDefs Nothing cs) -> def cs
(TermsAndDefs (Just s) cs) -> s : def cs
(PhySysDesc _ s _ _) -> s
(Goals s c) -> s ++ def c,
scsSub = Constant . f <$> \case
(Assumptions c) -> def c
(TMs s _ t) -> let r = mappend s . concatMap (\x -> def (x ^. operations) ++
def (x ^. defined_quant) ++ notes [x] ++
r (x ^. valid_context)) in r t
(DDs s _ d _) -> s ++ der d ++ notes d
(GDs s _ d _) -> def d ++ s ++ der d ++ notes d
(IMs s _ d _) -> s ++ der d ++ notes d
(Constraints s _) -> [s]
(CorrSolnPpties _ _) -> [],
reqSub = Constant . f <$> \case
(FReqsSub' c _) -> def c
(FReqsSub c _) -> def c
(NonFReqsSub c) -> def c,
lcsSec = Constant . f <$> \(LCsProg c) -> def c,
ucsSec = Constant . f <$> \(UCsProg c) -> def c,
traceSec = Constant . f <$> \(TraceabilityProg progs) ->
concatMap (\(TraceConfig _ ls s _ _) -> s : ls) progs,
auxConsSec = Constant . f <$> \(AuxConsProg _ qdef) -> def qdef
} where
def :: Definition a => [a] -> [Sentence]
def = map (^. defn)
der :: HasDerivation a => [a] -> [Sentence]
der = concatMap (getDerivSent . (^. derivations))
getDerivSent :: Maybe Derivation -> [Sentence]
getDerivSent Nothing = []
getDerivSent (Just (Derivation h s)) = h : s
notes :: HasAdditionalNotes a => [a] -> [Sentence]
notes = concatMap (^. getNotes)
getDocDesc :: DocDesc -> [Sentence]
getDocDesc = fmGetDocDesc (sentencePlate id)
getSec :: Section -> [Sentence]
getSec (Section t sc _ ) = t : concatMap getSecCon sc
getSecCon :: SecCons -> [Sentence]
getSecCon (Sub s) = getSec s
getSecCon (Con c) = getCon' c
getCon' :: HasContents a => a -> [Sentence]
getCon' = getCon . (^. accessContents)
getCon :: RawContent -> [Sentence]
getCon (Table s1 s2 t _) = isVar (s1, transpose s2) ++ [t]
getCon (Paragraph s) = [s]
getCon EqnBlock{} = []
getCon (DerivBlock h d) = h : concatMap getCon d
getCon (Enumeration lst) = getLT lst
getCon (Figure l _ _) = [l]
getCon (Bib bref) = getBib bref
getCon (Graph [(s1, s2)] _ _ l) = [s1, s2, l]
getCon Graph{} = []
getCon (Defini _ []) = []
getCon (Defini dt (hd:fs)) = concatMap getCon' (snd hd) ++ getCon (Defini dt fs)
-- Used when collecting sentences from a table: only the column whose header is
-- "Var" should be kept, so this selects that single column from the transposed table.
isVar :: ([Sentence], [[Sentence]]) -> [Sentence]
isVar (S "Var" : _, hd1 : _) = hd1
isVar (_ : tl, _ : tl1) = isVar (tl, tl1)
isVar ([], _) = []
isVar (_, []) = []
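-- Illustrative example: isVar ([S "Var", S "Unit"], [[S "x", S "y"], [S "m", S "s"]])
-- returns [S "x", S "y"] (the column under the "Var" header).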
getBib :: (HasFields c) => [c] -> [Sentence]
getBib a = map getField $ concatMap (^. getFields) a
getField :: CiteField -> Sentence
getField (Address s) = S s
getField Author{} = EmptyS
getField (BookTitle s) = S s
getField Chapter{} = EmptyS
getField Edition{} = EmptyS
getField Editor{} = EmptyS
getField HowPublished{} = EmptyS
getField (Institution s) = S s
getField (Journal s) = S s
getField Month{} = EmptyS
getField (Note s) = S s
getField Number{} = EmptyS
getField (Organization s) = S s
getField Pages{} = EmptyS
getField (Publisher s) = S s
getField (School s) = S s
getField (Series s) = S s
getField (Title s) = S s
getField (Type s) = S s
getField Volume{} = EmptyS
getField Year{} = EmptyS
getLT :: ListType -> [Sentence]
getLT (Bullet it) = concatMap (getIL . fst) it
getLT (Numeric it) = concatMap (getIL . fst) it
getLT (Simple lp) = concatMap getLP lp
getLT (Desc lp) = concatMap getLP lp
getLT (Definitions lp) = concatMap getLP lp
getLP :: ListTuple -> [Sentence]
getLP (t, it, _) = t : getIL it
getIL :: ItemType -> [Sentence]
getIL (Flat s) = [s]
getIL (Nested h lt) = h : getLT lt
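-- Plate collecting the CI (concept) chunks named by the organisation-of-document,
-- stakeholder, and auxiliary-constant folds.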
ciPlate :: DLPlate (Constant [CI])
ciPlate = preorderFold $ purePlate {
introSub = Constant <$> \case
(IOrgSec _ ci _ _) -> [ci]
_ -> [],
stkSub = Constant <$> \case
(Client ci _) -> [ci]
(Cstmr ci) -> [ci],
auxConsSec = Constant <$> \(AuxConsProg ci _) -> [ci]
}
ciGetDocDesc :: DocDesc -> [CI]
ciGetDocDesc = fmGetDocDesc ciPlate
| JacquesCarette/literate-scientific-software | code/drasil-docLang/Drasil/ExtractDocDesc.hs | bsd-2-clause | 7,905 | 0 | 27 | 1,895 | 3,626 | 1,895 | 1,731 | 190 | 17 |
module Permissions.Onping.InternalSpec (main, spec) where
import Test.Hspec
import Permissions.Onping.Internal
import Database.Persist
import Persist.Mongo.Settings
import Data.Aeson
import Control.Applicative
import qualified Data.Yaml as Y
import Data.Traversable
import Data.Maybe
main :: IO ()
main = do
hspec $ spec
spec :: Spec
spec = do
describe "getSuperUserList" $ do
it "should pull a super user list from the supplied runDB with config.yml" $ do
emdbc <- Y.decodeFileEither "config.yml"
case emdbc of
Left _ -> do
print "error decoding config.yml"
False `shouldBe` True
Right mdbc -> do
meusr <- runDBConf mdbc $ selectFirst [] [Asc UserId]
let muid = entityKey <$> meusr
rslt <- traverse getSuperUserList muid
(isJust rslt) `shouldBe` True
| smurphy8/onping-permissions | test/Permissions/Onping/InternalSpec.hs | bsd-3-clause | 920 | 0 | 22 | 275 | 236 | 121 | 115 | 27 | 2 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances, PatternGuards #-}
{- Implements a proof state, some primitive tactics for manipulating
proofs, and some high level commands for introducing new theorems,
evaluation/checking inside the proof system, etc. --}
module Idris.Core.ProofTerm(ProofTerm, Goal(..), mkProofTerm, getProofTerm,
updateSolved, updateSolvedTerm, updateSolvedTerm',
bound_in, bound_in_term, refocus,
Hole, RunTactic',
goal, atHole) where
import Idris.Core.Typecheck
import Idris.Core.Evaluate
import Idris.Core.TT
import Control.Monad.State.Strict
import Data.List
import Debug.Trace
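-- A zipper-style path from the root of a term down to a focused subterm, recording
-- whether the focus lies under an application, inside a binder, or in a binder's scope.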
data TermPath = Top
| AppL TermPath Term
| AppR Term TermPath
| InBind Name BinderPath Term
| InScope Name (Binder Term) TermPath
deriving Show
data BinderPath = Binder (Binder TermPath)
| LetT TermPath Term
| LetV Term TermPath
| GuessT TermPath Term
| GuessV Term TermPath
deriving Show
replaceTop :: TermPath -> TermPath -> TermPath
replaceTop p Top = p
replaceTop p (AppL l t) = AppL (replaceTop p l) t
replaceTop p (AppR t r) = AppR t (replaceTop p r)
replaceTop p (InBind n bp sc) = InBind n (replaceTopB p bp) sc
where
replaceTopB p (Binder b) = Binder (fmap (replaceTop p) b)
replaceTopB p (LetT t v) = LetT (replaceTop p t) v
replaceTopB p (LetV t v) = LetV t (replaceTop p v)
replaceTopB p (GuessT t v) = GuessT (replaceTop p t) v
replaceTopB p (GuessV t v) = GuessV t (replaceTop p v)
replaceTop p (InScope n b sc) = InScope n b (replaceTop p sc)
rebuildTerm :: Term -> TermPath -> Term
rebuildTerm tm Top = tm
rebuildTerm tm (AppL p a) = App (rebuildTerm tm p) a
rebuildTerm tm (AppR f p) = App f (rebuildTerm tm p)
rebuildTerm tm (InScope n b p) = Bind n b (rebuildTerm tm p)
rebuildTerm tm (InBind n bp sc) = Bind n (rebuildBinder tm bp) sc
rebuildBinder :: Term -> BinderPath -> Binder Term
rebuildBinder tm (Binder p) = fmap (rebuildTerm tm) p
rebuildBinder tm (LetT p t) = Let (rebuildTerm tm p) t
rebuildBinder tm (LetV v p) = Let v (rebuildTerm tm p)
rebuildBinder tm (GuessT p t) = Guess (rebuildTerm tm p) t
rebuildBinder tm (GuessV v p) = Guess v (rebuildTerm tm p)
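-- Locate the named hole (or guess) binding, returning the path to it, the local
-- environment at that point, and the subterm rooted at the binding.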
findHole :: Name -> Env -> Term -> Maybe (TermPath, Env, Term)
findHole n env t = fh' env Top t where
fh' env path tm@(Bind x h sc)
| hole h && n == x = Just (path, env, tm)
fh' env path (App f a)
| Just (p, env', tm) <- fh' env path a = Just (AppR f p, env', tm)
| Just (p, env', tm) <- fh' env path f = Just (AppL p a, env', tm)
fh' env path (Bind x b sc)
| Just (bp, env', tm) <- fhB env path b = Just (InBind x bp sc, env', tm)
| Just (p, env', tm) <- fh' ((x,b):env) path sc = Just (InScope x b p, env', tm)
fh' _ _ _ = Nothing
fhB env path (Let t v)
| Just (p, env', tm) <- fh' env path t = Just (LetT p v, env', tm)
| Just (p, env', tm) <- fh' env path v = Just (LetV t p, env', tm)
fhB env path (Guess t v)
| Just (p, env', tm) <- fh' env path t = Just (GuessT p v, env', tm)
| Just (p, env', tm) <- fh' env path v = Just (GuessV t p, env', tm)
fhB env path b
| Just (p, env', tm) <- fh' env path (binderTy b)
= Just (Binder (fmap (\_ -> p) b), env', tm)
fhB _ _ _ = Nothing
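-- A proof term represented as a focused subterm together with the path back to the
-- root and a list of hole solutions not yet pushed back into that path.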
data ProofTerm = PT { -- wholeterm :: Term,
path :: TermPath,
subterm_env :: Env,
subterm :: Term,
updates :: [(Name, Term)] }
deriving Show
type RunTactic' a = Context -> Env -> Term -> StateT a TC Term
type Hole = Maybe Name -- Nothing = default hole, first in list in proof state
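-- Move the focus to the given hole, looking in the current subterm first and falling
-- back to searching the whole rebuilt term.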
refocus :: Hole -> ProofTerm -> ProofTerm
refocus h t = let res = refocus' h t in res
-- trace ("OLD: " ++ show t ++ "\n" ++
-- "REFOCUSSED " ++ show h ++ ": " ++ show res) res
refocus' (Just n) pt@(PT path env tm ups)
| Just (p', env', tm') <- findHole n env tm
= PT (replaceTop p' path) env' tm' ups
| Just (p', env', tm') <- findHole n [] (rebuildTerm tm (updateSolvedPath ups path))
= PT p' env' tm' []
| otherwise = pt
refocus' _ pt = pt
data Goal = GD { premises :: Env,
goalType :: Binder Term
}
mkProofTerm :: Term -> ProofTerm
mkProofTerm tm = PT Top [] tm []
getProofTerm :: ProofTerm -> Term
getProofTerm (PT path _ sub ups) = rebuildTerm sub (updateSolvedPath ups path)
same :: Eq a => Maybe a -> a -> Bool
same Nothing n = True
same (Just x) n = x == n
hole :: Binder b -> Bool
hole (Hole _) = True
hole (Guess _ _) = True
hole _ = False
updateSolvedTerm :: [(Name, Term)] -> Term -> Term
updateSolvedTerm xs x = fst $ updateSolvedTerm' xs x
updateSolvedTerm' [] x = (x, False)
updateSolvedTerm' xs x = -- updateSolved' xs x where
-- This version below saves allocations, because it doesn't need to reallocate
-- the term if there are no updates to do.
-- The Bool is ugly, and probably 'Maybe' would be less ugly, but >>= is
-- the wrong combinator. Feel free to tidy up as long as it's still as cheap :).
updateSolved' xs x where
updateSolved' [] x = (x, False)
updateSolved' xs (Bind n (Hole ty) t)
| Just v <- lookup n xs
= case xs of
[_] -> (subst n v t, True) -- some may be Vs! Can't assume
-- explicit names
_ -> let (t', _) = updateSolved' xs t in
(subst n v t', True)
updateSolved' xs tm@(Bind n b t)
| otherwise = let (t', ut) = updateSolved' xs t
(b', ub) = updateSolvedB' xs b in
if ut || ub then (Bind n b' t', True)
else (tm, False)
updateSolved' xs t@(App f a)
= let (f', uf) = updateSolved' xs f
(a', ua) = updateSolved' xs a in
if uf || ua then (App f' a', True)
else (t, False)
updateSolved' xs t@(P _ n@(MN _ _) _)
| Just v <- lookup n xs = (v, True)
updateSolved' xs t = (t, False)
updateSolvedB' xs b@(Let t v) = let (t', ut) = updateSolved' xs t
(v', uv) = updateSolved' xs v in
if ut || uv then (Let t' v', True)
else (b, False)
updateSolvedB' xs b@(Guess t v) = let (t', ut) = updateSolved' xs t
(v', uv) = updateSolved' xs v in
if ut || uv then (Guess t' v', True)
else (b, False)
updateSolvedB' xs b = let (ty', u) = updateSolved' xs (binderTy b) in
if u then (b { binderTy = ty' }, u) else (b, False)
noneOf ns (P _ n _) | n `elem` ns = False
noneOf ns (App f a) = noneOf ns a && noneOf ns f
noneOf ns (Bind n (Hole ty) t) = n `notElem` ns && noneOf ns ty && noneOf ns t
noneOf ns (Bind n b t) = noneOf ns t && noneOfB ns b
where
noneOfB ns (Let t v) = noneOf ns t && noneOf ns v
noneOfB ns (Guess t v) = noneOf ns t && noneOf ns v
noneOfB ns b = noneOf ns (binderTy b)
noneOf ns _ = True
updateEnv [] e = e
updateEnv ns [] = []
updateEnv ns ((n, b) : env) = (n, fmap (updateSolvedTerm ns) b) : updateEnv ns env
updateSolvedPath [] t = t
updateSolvedPath ns Top = Top
updateSolvedPath ns (AppL p r) = AppL (updateSolvedPath ns p) (updateSolvedTerm ns r)
updateSolvedPath ns (AppR l p) = AppR (updateSolvedTerm ns l) (updateSolvedPath ns p)
updateSolvedPath ns (InBind n b sc)
= InBind n (updateSolvedPathB b) (updateSolvedTerm ns sc)
where
updateSolvedPathB (Binder b) = Binder (fmap (updateSolvedPath ns) b)
updateSolvedPathB (LetT p v) = LetT (updateSolvedPath ns p) (updateSolvedTerm ns v)
updateSolvedPathB (LetV v p) = LetV (updateSolvedTerm ns v) (updateSolvedPath ns p)
updateSolvedPathB (GuessT p v) = GuessT (updateSolvedPath ns p) (updateSolvedTerm ns v)
updateSolvedPathB (GuessV v p) = GuessV (updateSolvedTerm ns v) (updateSolvedPath ns p)
updateSolvedPath ns (InScope n (Hole ty) t)
| Just v <- lookup n ns = case ns of
[_] -> updateSolvedPath [(n,v)] t
_ -> updateSolvedPath ns $
updateSolvedPath [(n,v)] t
updateSolvedPath ns (InScope n b sc)
= InScope n (fmap (updateSolvedTerm ns) b) (updateSolvedPath ns sc)
updateSolved :: [(Name, Term)] -> ProofTerm -> ProofTerm
updateSolved xs pt@(PT path env sub ups)
= PT path -- (updateSolvedPath xs path)
(updateEnv xs (filter (\(n, t) -> n `notElem` map fst xs) env))
(updateSolvedTerm xs sub)
(ups ++ xs)
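-- Compute the goal (local premises and goal binder) for the given hole.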
goal :: Hole -> ProofTerm -> TC Goal
goal h pt@(PT path env sub ups)
-- | OK ginf <- g env sub = return ginf
| otherwise = g [] (rebuildTerm sub (updateSolvedPath ups path))
where
g :: Env -> Term -> TC Goal
g env (Bind n b@(Guess _ _) sc)
| same h n = return $ GD env b
| otherwise
= gb env b `mplus` g ((n, b):env) sc
g env (Bind n b sc) | hole b && same h n = return $ GD env b
| otherwise
= g ((n, b):env) sc `mplus` gb env b
g env (App f a) = g env a `mplus` g env f
g env t = fail "Can't find hole"
gb env (Let t v) = g env v `mplus` g env t
gb env (Guess t v) = g env v `mplus` g env t
gb env t = g env (binderTy t)
atHole :: Hole -> RunTactic' a -> Context -> Env -> ProofTerm ->
StateT a TC (ProofTerm, Bool)
atHole h f c e pt -- @(PT path env sub)
= do let PT path env sub ups = refocus h pt
(tm, u) <- atH f c env sub
return (PT path env tm ups, u)
-- if u then return (PT path env tm ups, u)
-- else do let PT path env sub ups = refocus h pt
-- (tm, u) <- atH f c env sub
-- return (PT path env tm ups, u)
where
updated o = do o' <- o
return (o', True)
ulift2 f c env op a b
= do (b', u) <- atH f c env b
if u then return (op a b', True)
else do (a', u) <- atH f c env a
return (op a' b', u)
-- Search the things most likely to contain the binding first!
atH :: RunTactic' a -> Context -> Env -> Term -> StateT a TC (Term, Bool)
atH f c env binder@(Bind n b@(Guess t v) sc)
| same h n = updated (f c env binder)
| otherwise
= do -- binder first
(b', u) <- ulift2 f c env Guess t v
if u then return (Bind n b' sc, True)
else do (sc', u) <- atH f c ((n, b) : env) sc
return (Bind n b' sc', u)
atH f c env binder@(Bind n b sc)
| hole b && same h n = updated (f c env binder)
| otherwise -- scope first
= do (sc', u) <- atH f c ((n, b) : env) sc
if u then return (Bind n b sc', True)
else do (b', u) <- atHb f c env b
return (Bind n b' sc', u)
atH tac c env (App f a) = ulift2 tac c env App f a
atH tac c env t = return (t, False)
atHb f c env (Let t v) = ulift2 f c env Let t v
atHb f c env (Guess t v) = ulift2 f c env Guess t v
atHb f c env t = do (ty', u) <- atH f c env (binderTy t)
return (t { binderTy = ty' }, u)
bound_in :: ProofTerm -> [Name]
bound_in (PT path _ tm ups) = bound_in_term (rebuildTerm tm (updateSolvedPath ups path))
bound_in_term :: Term -> [Name]
bound_in_term (Bind n b sc) = n : bi b ++ bound_in_term sc
where
bi (Let t v) = bound_in_term t ++ bound_in_term v
bi (Guess t v) = bound_in_term t ++ bound_in_term v
bi b = bound_in_term (binderTy b)
bound_in_term (App f a) = bound_in_term f ++ bound_in_term a
bound_in_term _ = []
| andyarvanitis/Idris-dev | src/Idris/Core/ProofTerm.hs | bsd-3-clause | 12,408 | 0 | 17 | 4,365 | 5,053 | 2,569 | 2,484 | 228 | 20 |
module Main (main) where
import Control.Arrow
import Crypto.Random
import Data.Ratio
import Crypto.Ed25519.Pure
import Text.Read
import Data.Thyme.Clock
import System.IO
import System.FilePath
import qualified Data.Yaml as Y
import qualified Data.Set as Set
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Juno.Types
configDir :: String
configDir = "run/conf"
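-- Infinite list of local node IDs, starting at tcp://127.0.0.1:10000 and
-- incrementing the port for each subsequent node.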
nodes :: [NodeID]
nodes = iterate (\n@(NodeID h p _) -> n {_port = p + 1, _fullAddr = "tcp://" ++ h ++ ":" ++ show (p+1)}) (NodeID "127.0.0.1" 10000 "tcp://127.0.0.1:10000")
makeKeys :: CryptoRandomGen g => Int -> g -> [(PrivateKey,PublicKey)]
makeKeys 0 _ = []
makeKeys n g = case generateKeyPair g of
Left err -> error $ show err
Right (p,priv,g') -> (p,priv) : makeKeys (n-1) g'
keyMaps :: [(PrivateKey,PublicKey)] -> (Map NodeID PrivateKey, Map NodeID PublicKey)
keyMaps ls = (Map.fromList $ zip nodes (fst <$> ls), Map.fromList $ zip nodes (snd <$> ls))
main :: IO ()
main = do
putStrLn "Number of cluster nodes?"
hFlush stdout
mn <- fmap readMaybe getLine
putStrLn "Number of client nodes?"
hFlush stdout
cn <- fmap readMaybe getLine
putStrLn "Enable logging for Followers (True/False)?"
hFlush stdout
debugFollower <- fmap readMaybe getLine
case (mn,cn,debugFollower) of
(Just n,Just c,Just df)-> do
g <- newGenIO :: IO SystemRandom
let keyMaps' = keyMaps $ makeKeys (n+c) g
let clientIds = take c $ drop n nodes
let isAClient nid _ = Set.member nid (Set.fromList clientIds)
let isNotAClient nid _ = not $ Set.member nid (Set.fromList clientIds)
let clusterKeyMaps = (Map.filterWithKey isNotAClient *** Map.filterWithKey isNotAClient) keyMaps'
let clientKeyMaps = (Map.filterWithKey isAClient *** Map.filterWithKey isAClient) keyMaps'
let clusterConfs = createClusterConfig df clusterKeyMaps (snd clientKeyMaps) <$> take n nodes
let clientConfs = createClientConfig df (snd clusterKeyMaps) clientKeyMaps <$> clientIds
mapM_ (\c' -> Y.encodeFile (configDir </> show (_port $ _nodeId c') ++ "-cluster.yaml") c') clusterConfs
mapM_ (\c' -> Y.encodeFile (configDir </> show (_port $ _nodeId c') ++ "-client.yaml") c') clientConfs
_ -> putStrLn "Failed to read either input into a number, please try again"
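-- Configuration for a single consensus (cluster) node: its peer set is every other
-- cluster node, and its client public keys include the cluster keys so all nodes can
-- sign API requests.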
createClusterConfig :: Bool -> (Map NodeID PrivateKey, Map NodeID PublicKey) -> Map NodeID PublicKey -> NodeID -> Config
createClusterConfig debugFollower (privMap, pubMap) clientPubMap nid = Config
{ _otherNodes = Set.delete nid $ Map.keysSet pubMap
, _nodeId = nid
, _publicKeys = pubMap
, _clientPublicKeys = Map.union pubMap clientPubMap -- NOTE: [2016 04 26] all nodes are client (support API signing)
, _myPrivateKey = privMap Map.! nid
, _myPublicKey = pubMap Map.! nid
, _electionTimeoutRange = (3000000,6000000)
, _heartbeatTimeout = 1500000 -- seems like a while...
, _batchTimeDelta = fromSeconds' (1%100) -- default to 10ms
, _enableDebug = True
, _clientTimeoutLimit = 50000
, _dontDebugFollower = not debugFollower
, _apiPort = 8000
}
createClientConfig :: Bool -> Map NodeID PublicKey -> (Map NodeID PrivateKey, Map NodeID PublicKey) -> NodeID -> Config
createClientConfig debugFollower clusterPubMap (privMap, pubMap) nid = Config
{ _otherNodes = Map.keysSet clusterPubMap
, _nodeId = nid
, _publicKeys = clusterPubMap
, _clientPublicKeys = pubMap
, _myPrivateKey = privMap Map.! nid
, _myPublicKey = pubMap Map.! nid
, _electionTimeoutRange = (3000000,6000000)
, _heartbeatTimeout = 1500000
, _batchTimeDelta = fromSeconds' (1%100) -- default to 10ms
, _enableDebug = False
, _clientTimeoutLimit = 50000
, _dontDebugFollower = not debugFollower
, _apiPort = 8000
}
| haroldcarr/juno | app/GenerateConfigFiles.hs | bsd-3-clause | 3,940 | 0 | 22 | 884 | 1,231 | 650 | 581 | 80 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
module Internal.Types
( DTree (..)
, DElemType (..)
, DAttr (..)
) where
import qualified Data.Text as T
import Data.Data
import qualified Data.Map as M
-- DOM tree: either a text node or an element with some children
data DTree = DText T.Text
| DElem DElemType DAttr [DTree]
deriving (Eq)
instance Show DTree where
show (DText t) = T.unpack t
show (DElem et attr ch) = open ++ children ++ close
where
open = "<" ++ show et ++ printedAttrs ++ ">"
printedAttrs = let pa = show attr in
if pa == "" then "" else " " ++ pa
close = "</" ++ show et ++ ">"
children = concatMap show ch
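-- Illustrative example (with OverloadedStrings for the Text literals):
--   show (DElem DDiv (DAttr (M.fromList [("class","c")])) [DText "hi"])
--   == "<div class=\"c\">hi</div>"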
-- Supported HTML elements
data DElemType = DHtml
| DBody
| DDiv
| DH1
| DP
deriving (Eq, Ord, Data, Typeable)
instance Show DElemType where
show et = case et of
DHtml -> "html"
DBody -> "body"
DDiv -> "div"
DH1 -> "h1"
DP -> "p"
-- HTML element attributes (key-value pairs)
newtype DAttr = DAttr { attributes :: M.Map T.Text T.Text }
deriving (Eq)
instance Show DAttr where
show (DAttr attr) =
let fn (k, v) = T.unpack k ++ "=\"" ++ T.unpack v ++ "\""
in unwords . map fn $ M.toList attr
| qnnguyen/howser | src/Internal/Types.hs | bsd-3-clause | 1,354 | 0 | 14 | 463 | 419 | 227 | 192 | 38 | 0 |