code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity
---|---|---|---|---|---|---|---|---|---|---|---|---
{-# LANGUAGE DataKinds, PolyKinds, RankNTypes, GADTs #-}
module T7481 where
import Data.Proxy
data D a where
D1 :: a -> D a
D2 :: (a~Int) => D a
D3 :: forall (a::k) b. Proxy a -> D b
data Foo :: D * -> *
| forked-upstream-packages-for-ghcjs/ghc | testsuite/tests/polykinds/T7481.hs | bsd-3-clause | 214 | 0 | 8 | 54 | 86 | 49 | 37 | -1 | -1 |
{-
Copyright (c) 2016 Albert Krewinkel
Permission to use, copy, modify, and/or distribute this software for any purpose
with or without fee is hereby granted, provided that the above copyright notice
and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
THIS SOFTWARE.
-}
{-# LANGUAGE OverloadedStrings #-}
{-|
Module : Text.Huskydoc.ParsingSpec
Copyright : © 2016 Albert Krewinkel
License : ISC
Maintainer : Albert Krewinkel <[email protected]>
Stability : experimental
Portability : portable
-}
module Text.Huskydoc.ParsingSpec
( main
, spec
) where
import Test.Hspec
import Test.Hspec.Megaparsec
import Text.Huskydoc.Parsing
-- | Run this spec.
main :: IO ()
main = hspec spec
-- | Specifications for Parsing functions.
spec :: Spec
spec = do
describe "spaceChar parser" $ do
it "parses tab" $ do
parseDef spaceChar "\t" `shouldParse` '\t'
it "parses space" $ do
parseDef spaceChar " " `shouldParse` ' '
it "doesn't parse newline characters" $ do
parseDef spaceChar `shouldFailOn` "\n"
describe "skipSpaces" $ do
it "parses single space char" $ do
parseDef skipSpaces `shouldSucceedOn` " "
it "parses many tabs and spaces" $ do
parseDef skipSpaces `shouldSucceedOn` " \t \t\t"
it "succeeds on empty string" $ do
parseDef skipSpaces `shouldSucceedOn` ""
describe "someSpaces" $ do
it "parses single space char" $ do
parseDef someSpaces `shouldSucceedOn` " "
it "parses many tabs and spaces" $ do
parseDef someSpaces `shouldSucceedOn` " \t \t\t"
it "fails on empty string" $ do
parseDef someSpaces `shouldFailOn` ""
describe "blankline" $ do
it "parses empty line plus final newline" $ do
parseDef blankline `shouldSucceedOn`" \n"
it "fails on non-empty line" $ do
parseDef blankline `shouldFailOn` " a \n"
| tarleb/huskydoc | test/Text/Huskydoc/ParsingSpec.hs | isc | 2,336 | 0 | 14 | 495 | 351 | 169 | 182 | 37 | 1 |
module Language.Aspell.Options (
ACOption(..),
WordListSize(..),
Encoding(..),
SuggestMode(..),
NormalizeForm(..)
) where
import Data.ByteString
data WordListSize = Tiny
| ReallySmall
| Small
| MediumSmall
| Medium
| MediumLarge
| Large
| Huge
| Insane
data Encoding = UTF8 | Latin1
data SuggestMode = Ultra
| Fast
| Normal
| Slow
| BadSpellers
data NormalizeForm = None
| NFD
| NFC
| Composed
data ACOption = Dictionary ByteString -- ^ Base name of the dictionary to use. If this option is specified then Aspell will either use this dictionary or die.
| WordListDir ByteString -- ^ Location of the main word list.
| Lang ByteString -- ^ Language to use. It follows the same format of the LANG environment variable on most systems. It consists of the two letter ISO 639 language code and an optional two letter ISO 3166 country code after a dash or underscore. The default value is based on the value of the @LC_MESSAGES@ locale.
| Size WordListSize -- ^ The preferred size of the word list.
| PersonalWordList ByteString -- ^ Personal word list file name.
| ReplacementsList ByteString -- ^ Replacements list file name.
| Encoding Encoding -- ^ The encoding the input text is in. When using the Aspell utility the default encoding is based on the current locale. Thus if your locale currently uses the @utf-8@ encoding then everything will be in UTF-8.
| Normalize Bool -- ^ Perform Unicode normalization. Enabled by default.
| NormalizeStrict Bool -- ^ Avoid lossy conversions when normalizing. Lossy conversions includes compatibility mappings such as splitting the letter @OE@ (U+152) into @O@ and @E@ (when the combined letter is not available), and mappings which will remove accents. Disabled by default except when creating dictionaries.
| NormalizeForm NormalizeForm -- ^ The normalization form the output should be in. This option primarily affects the normalization form of the suggestions when spell checking, as the actual text is unchanged unless there is an error. Valid values are 'None', 'NFD' for full decomposition (Normalization Form D), 'NFC' for Normalization Form C, or 'Composed' for fully composed. 'Composed' is like 'NFC' except that full composition is used rather than canonical composition. The normalize option must be enabled for this option to be used.
| NormalizeRequired Bool -- ^ Set to true when the current language requires Unicode normalization. This is generally the case when private use characters are used internally by Aspell or when Normalization Form C is not the same as full composition.
| Ignore Integer -- ^ Ignore words with N characters or less.
| IgnoreReplace Bool -- ^ Ignore commands to store replacement pairs.
| SaveReplace Bool -- ^ Save the replacement word list on save all.
| KeyboardDef ByteString -- ^ The base name of the keyboard definition file to use (see <http://aspell.net/man-html/Notes-on-Typo_002dAnalysis.html#Notes-on-Typo_002dAnalysis>).
| SuggestMode SuggestMode -- ^ Suggestion mode = @'Ultra' | 'Fast' | 'Normal' | 'Slow' | 'BadSpellers'@ (see <http://aspell.net/man-html/Notes-on-the-Different-Suggestion-Modes.html#Notes-on-the-Different-Suggestion-Modes>).
| IgnoreCase Bool -- ^ Ignore case when checking words.
| IgnoreAccents Bool -- ^ Ignore accents when checking words (currently ignored).
| FilterMode ByteString -- ^ Sets the filter mode. Possible values include, but not limited to, @none@, @url@, @email@, @sgml@, or @tex@.
| EmailMargin Integer -- ^ The number of characters that can appear before the quote character.
| TeXCheckComments Bool -- ^ Check TeX comments.
| ContextVisibleFirst Bool -- ^ Switches the context which should be visible to Aspell. Per default the initial context is assumed to be invisible as one would expect when spell checking source files of programs where relevant parts are contained in string constants and comments but not in the remaining code. If set to true the initial context is visible while the delimited ones are hidden.
| RunTogether Bool -- ^ Consider run-together words valid.
| RunTogetherLimit Integer -- ^ Maximum number of words that can be strung together.
| RunTogetherMin Integer -- ^ Minimal length of interior words.
| MainConfig ByteString -- ^ Main configuration file. This file overrides Aspell's global defaults.
| MainConfigDir ByteString -- ^ Location of main configuration file.
| DataDir ByteString -- ^ Location of language data files.
| LocalDataDir ByteString -- ^ Alternative location of language data files. This directory is searched before 'DataDir'. It defaults to the same directory the actual main word list is in (which is not necessarily 'Dictionary').
| HomeDir ByteString -- ^ Location for personal files.
| PersonalConfig ByteString -- ^ Personal configuration file. This file overrides options found in the global 'MainConfig' file.
| Layout ByteString -- ^ Use this keyboard layout for suggesting possible words. These spelling errors happen if a user accidentally presses a key next to the intended correct key. The default is keyboard standard. If you are creating documents, you may want to set it according to your particular type of keyboard. If spellchecking documents created elsewhere, you might want to set this to the keyboard type for that locale. If you are not sure, just leave this as standard.
| Prefix ByteString -- ^ Prefix directory.
| SetPrefix Bool -- ^ Set the prefix based on executable location (only works on WIN32 and when compiled with @--enable-win32-relocatable@).
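-- A hypothetical option list, as an illustration of how these constructors
-- combine (not part of the original module; the ByteString literal would need
-- Data.ByteString.Char8.pack or OverloadedStrings):
-- [Lang (pack "en_US"), SuggestMode Normal, IgnoreCase True, RunTogether False]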
| pikajude/haspell | Language/Aspell/Options.hs | mit | 6,227 | 0 | 6 | 1,619 | 333 | 220 | 113 | 60 | 0 |
module Week11 where
import Data.Char (isAlpha, isAlphaNum)
import Week10
import Control.Applicative
------------------------------------------------------------
-- 1. Parsing repetitions
------------------------------------------------------------
zeroOrMore :: Parser a -> Parser [a]
zeroOrMore p = oneOrMore p <|> pure []
oneOrMore :: Parser a -> Parser [a]
oneOrMore p = (:) <$> p <*> zeroOrMore p
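-- Illustrative behaviour, assuming Week10 provides satisfy and
-- runParser :: Parser a -> String -> Maybe (a, String):
-- runParser (zeroOrMore (satisfy isAlpha)) "ABCdef123" == Just ("ABCdef", "123")
-- runParser (oneOrMore (satisfy isAlpha)) "123" == Nothing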
------------------------------------------------------------
-- 2. Utilities
------------------------------------------------------------
spaces :: Parser String
spaces = zeroOrMore $ satisfy (== ' ')
ident :: Parser String
ident = (:) <$> satisfy isAlpha <*> zeroOrMore (satisfy isAlphaNum)
------------------------------------------------------------
-- 3. Parsing S-expressions
------------------------------------------------------------
-- An "identifier" is represented as just a String; however, only
-- those Strings consisting of a letter followed by any number of
-- letters and digits are valid identifiers.
type Ident = String
-- An "atom" is either an integer value or an identifier.
data Atom = N Integer | I Ident
deriving (Show, Eq)
-- An S-expression is either an atom, or a list of S-expressions.
data SExpr = A Atom
| Comb [SExpr]
deriving (Show, Eq)
parseAtom :: Parser Atom
parseAtom = N <$> posInt <|> I <$> ident
parseSExpr :: Parser SExpr
parseSExpr =
spaces *> (
A <$> parseAtom <|>
Comb <$> (char '(' *> oneOrMore parseSExpr <* char ')')
) <* spaces
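-- Illustrative run, again assuming Week10's runParser :: Parser a -> String -> Maybe (a, String):
-- runParser parseSExpr "(bar (foo) 3 5 874)"
-- == Just (Comb [A (I "bar"), Comb [A (I "foo")], A (N 3), A (N 5), A (N 874)], "")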
| taylor1791/cis-194-spring | src/Week11.hs | mit | 1,524 | 0 | 12 | 250 | 315 | 174 | 141 | 26 | 1 |
import Control.Monad
import Data.Char
import Text.Printf
-- | word-count mapper: emit each word of the input followed by a tab and the count 1
main :: IO ()
main = do
str <- getContents
forM_ (words str) $ printf "%s\t1\n"
| nushio3/UFCORIN | exe-src/hs-wordcount-mapper.hs | mit | 179 | 0 | 10 | 36 | 58 | 29 | 29 | 7 | 1 |
-- |
-- Module : Network.Wamp
-- Description : Re-exports
-- Copyright : (c) Maciej Kazulak, 2015
-- License : MIT
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- This module simply re-exports from other modules for convenience.
--
module Network.Wamp
( -- * Re-exports
module Network.Wamp.Types
, module Network.Wamp.Connection
, module Network.Wamp.Messages
, module Network.Wamp.Broker
, module Network.Wamp.Dealer
, module Network.Wamp.Router
)
where
import Network.Wamp.Types
import Network.Wamp.Messages
import Network.Wamp.Connection
import Network.Wamp.Broker
import Network.Wamp.Dealer
import Network.Wamp.Router
| mulderr/haskell-wamp | wamp/Network/Wamp.hs | mit | 705 | 0 | 5 | 118 | 98 | 71 | 27 | 14 | 0 |
{-# LANGUAGE TupleSections, OverloadedStrings #-}
module Handler.Browse where
import qualified Data.Map as M
import qualified Data.Text as T
import Import
import Barch.Adaptors
import Barch.Widgets (shortReferenceView)
pageLimit::Int
pageLimit = 30
getBrowseR::Int->Handler Html
getBrowseR page = do
citations <- runDB $ selectList [] [Desc ReferenceLastModified, LimitTo (pageLimit + 1), OffsetBy (pageLimit*page)]
let action = "Browse" :: Text
submission = Nothing :: Maybe (FileInfo, Text)
handlerName = "getBrowseR" :: Text
moreCitations = (length citations) > pageLimit
citations' = take pageLimit citations
defaultLayout $ do
aDomId <- newIdent
setTitle "Barch Homepage"
$(widgetFile "browse")
| klarh/barch | Handler/Browse.hs | mit | 771 | 0 | 13 | 160 | 213 | 115 | 98 | 21 | 1 |
module Main where
import qualified Data.ByteString.Lazy as BL
import qualified Data.Foldable as F
-- from cassava
import Data.Csv.Streaming
type BaseballStats = (BL.ByteString, Int, BL.ByteString, Int)
baseballStats :: BL.ByteString -> Records BaseballStats
baseballStats = decode NoHeader
fourth :: (a, b, c, d) -> d
fourth (_, _, _, x) = x
main :: IO ()
main = do
csvData <- BL.readFile "batting.csv"
let summed = F.foldr summer 0 (baseballStats csvData)
putStrLn $ "Total at bats was: " ++ (show summed)
where summer = (+) . fourth
| pgarrison/bassbull | src/Main.hs | mit | 548 | 0 | 12 | 98 | 194 | 111 | 83 | 15 | 1 |
before :: (Int, Int) -> (Int, Int) -> Bool
before (a, b) (c, d) = b < c
after :: (Int, Int) -> (Int, Int) -> Bool
after (a, b) (c, d) = a > d
mergeCrossing :: (Int, Int) -> (Int, Int) -> (Int, Int)
mergeCrossing (a, b) (c, d) = ((min a c), (max b d))
insertInterval :: (Int, Int) -> [(Int, Int)] -> [(Int, Int)]
insertInterval y [] = [y]
insertInterval y (x:xs)
| before y x = y:x:xs
| after y x = x:(insertInterval y xs)
| otherwise = insertInterval (mergeCrossing x y) xs
unionIntervals :: [(Int, Int)] -> [(Int, Int)]
unionIntervals = foldr insertInterval []
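-- A small usage check (illustration only, not part of the original file):
-- overlapping or touching intervals are merged, disjoint ones stay sorted.
exampleUnion :: Bool
exampleUnion = unionIntervals [(1,3),(2,5),(8,10),(4,6)] == [(1,6),(8,10)]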
| stoimenoff/functional-programming | examples/intervals.hs | mit | 579 | 0 | 8 | 127 | 353 | 197 | 156 | 14 | 1 |
--determineType.hs
{-# LANGUAGE NoMonomorphismRestriction #-}
module DetermineType where
example = 1
a = (* 9) 6
b = head [(0, "doge"), (1, "kitteh")]
c = head [(0 :: Integer, "doge"), (1, "kitteh")]
d = if False then True else False
e = length [1,2,3,4,5]
--f = (length [1,2,3,4]) > (length "TACOCAT")
x = 5
y = x + 5
w = y * 10
z = "Haskell"
--f = x ++ y ++ z -- does not compile: x and y are numeric, z is a String
| deciduously/Haskell-First-Principles-Exercises | 2-Defining and combining/5-Types/code/determineType.hs | mit | 365 | 0 | 7 | 79 | 156 | 95 | 61 | 13 | 2 |
module Primes (primes) where
import qualified Data.PQueue.Prio.Min as PQ
import Control.Monad.State.Lazy
type PrimesState = ([Integer], PQ.MinPQueue Integer Integer)
initPrimes :: PrimesState
initPrimes = ([5, 7 ..], PQ.fromList [(9, 3)])
nextPrime :: () -> State PrimesState Integer
nextPrime () = do
(nns@(n : ns), pq) <- get
let (q, p) = PQ.findMin pq
if q > n
then do put (ns, PQ.insert (3*n) n pq)
return n
else do put (if q==n then ns else nns,
PQ.insert (q + 2*p) p $ PQ.deleteMin pq)
nextPrime ()
listPrimes :: () -> State PrimesState [Integer]
listPrimes () = do
p <- nextPrime ()
ps <- listPrimes ()
return (p : ps)
primes :: [Integer]
primes = 2 : 3 : evalState (listPrimes ()) initPrimes
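-- Usage illustration (not part of the original module):
firstTenPrimes :: [Integer]
firstTenPrimes = take 10 primes -- == [2,3,5,7,11,13,17,19,23,29]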
| farrellm/euler | src/Primes-old.hs | mit | 767 | 0 | 16 | 188 | 363 | 193 | 170 | 23 | 3 |
module Server.Execute where
import Utils.Utils
import CodeGen.Metadata.Metadata
import Data.Schema
import Data.Name
import Server.QueryCompile
import Data.Types
import Server.NutleyInstance
import Utils.Include
import CodeGen.SerializeCode
import CodeGen.NutleyQuery
import System.Plugins.Make
import System.Plugins.Load
import System.Directory
import System.Process
import Data.Char
import Control.Monad.Trans.Either
{-
instantiateWithType :: (InstanceID -> tups -> IO NutleyInstance) -> tups ->
(InstanceID -> tups -> IO NutleyInstance)
instantiateWithType f _ = f
sectionWithType :: (NutleyInstance -> IO tups) -> tups ->
(NutleyInstance -> IO tups)
sectionWithType f _ = f
-}
{-
executeInstantiate :: NutleyQuery -> InstanceID -> tups -> ErrorT IO NutleyInstance
executeInstantiate q instID inData = do
modname <- compileQuery q
ls <- liftEitherT $ load (modname ++ ".o") ["."] [] (name q)
case ls of
(LoadSuccess _ f) -> (f`instantiateWithType`inData) instID inData
(LoadFailure errs) -> (mapM_ putStrLn errs) >> (right $ cim "\n" id errs)
-}
buildDirs = [".","Plancache","dist/build/JoSQL/JoSQL-tmp/"]
executeInstantiateFromStrings :: NutleyQuery -> InstanceID -> [[Maybe String]] -> ErrorT IO NutleyInstance
executeInstantiateFromStrings q instID inData = do
modname <- compileQuery q
ls <- liftEitherT $ load ("Plancache/" ++ modname ++ ".o") buildDirs [] (stringsInstantiateName q)
case ls of
(LoadSuccess _ f) -> (EitherT $ f instID inData)
(LoadFailure errs) -> (mapM_ (liftEitherT.putStrLn) errs) >> (left $ cim "\n" id errs)
executeInstantiateSelect :: NutleyQuery -> InstanceID -> NutleyInstance -> ErrorT IO NutleyInstance
executeInstantiateSelect q instID fromInstance = do
modname <- compileQuery q
ls <- liftEitherT $ load ("Plancache/" ++ modname ++ ".o") buildDirs [] (name q)
case ls of
(LoadSuccess _ f) -> f instID fromInstance
(LoadFailure errs) -> (mapM_ (liftEitherT.putStrLn) errs) >> (left $ cim "\n" id errs)
{-
executeInstantiateSerialize :: NutleyQuery -> InstanceID -> LazyByteString -> IO ByteString
executeInstantiateSerialize q instID inData = do
modname <- compileQuery q
ls <- load (modname ++ ".o") ["."] [] (serializedName q)
case ls of
(LoadSuccess _ f) -> f instID inData -- type is completely inferrable, cool!
(LoadFailure errs) -> (mapM_ putStrLn errs) >> error ""
executeSection :: tups -> NutleyQuery -> NutleyInstance -> IO tups
executeSection tps q instID = do
modname <- compileQuery q
ls <- load (modname ++ ".o") ["."] [] (name q)
case ls of
(LoadSuccess m f) -> (f`sectionWithType`tps) instID
(LoadFailure errs) -> (mapM_ putStrLn errs) >> error ""
executeSectionSerialize :: NutleyQuery -> NutleyInstance -> IO LazyByteString
executeSectionSerialize q instID = do
modname <- compileQuery q
ls <- load (modname ++ ".o") ["."] [] (serializedName q)
case ls of
(LoadSuccess m f) -> f instID
(LoadFailure errs) -> (mapM_ putStrLn errs) >> error ""
-}
executeSectionString :: NutleyQuery -> NutleyInstance -> ErrorT IO String
executeSectionString q instID = do
modname <- compileQuery q
ls <- liftEitherT $ load ("Plancache/" ++ modname ++ ".o") buildDirs [] (stringResultName q)
case ls of
(LoadSuccess m f) -> f instID
(LoadFailure errs) -> (mapM_ (liftEitherT.putStrLn) errs) >> (left $ cim "\n" id errs)
| jvictor0/JoSQL | Server/Execute.hs | mit | 3,429 | 0 | 14 | 634 | 572 | 296 | 276 | 39 | 2 |
-- Strange principal
-- http://www.codewars.com/kata/55fc061cc4f485a39900001f/
module Codewars.G964.StrangePrinc where
numOpenLockers :: Int -> Int
numOpenLockers = floor . sqrt . fromIntegral
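-- Presumably the classic locker-toggling puzzle: a locker ends up open iff its
-- number is a perfect square, so the count is floor (sqrt n);
-- e.g. numOpenLockers 100 == 10 (illustration, not part of the original file).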
| gafiatulin/codewars | src/7 kyu/StrangePrinc.hs | mit | 197 | 0 | 6 | 23 | 34 | 20 | 14 | 3 | 1 |
module Service.VerifyTaskConfig (
verify_task_config, verify_task_config_localized
) where
import Util.Task
import Util.Sign
import Util.Parse
import Util.Description
import Util.Timeout
import Types.Basic
import Types.Signed
import Types.Config
import Types.Description
import Types.TT
import Inter.Types
import Autolib.Reporter.IO.Type
import Autolib.Multilingual hiding (Make)
import Control.Monad.Error
verify_task_config
:: TT Task -> TT Config
-> IO (TT (Either Description (Signed (Task, Config))))
verify_task_config t c = verify_task_config_localized t c (TT DE)
verify_task_config_localized
:: TT Task -> TT Config -> TT Language
-> IO (TT (Either Description (Signed (Task, Config))))
verify_task_config_localized (TT task) (TT (CString config)) (TT lang)
= withTimeout . fmap TT . runErrorT $ do
Make _ _ _ verifyConf _ <- lookupTaskM task
config' <- parseHelper lang "<config>" config
let report = Autolib.Reporter.IO.Type.lift
$ verifyConf config'
rr <- liftIO $ result report
case rr of
Nothing -> liftIO (fromReport lang report) >>= throwError
_ -> return ()
return $ sign (task, CString config)
| marcellussiegburg/autotool | server-implementation/src/Service/VerifyTaskConfig.hs | gpl-2.0 | 1,244 | 0 | 15 | 277 | 398 | 204 | 194 | 34 | 2 |
{-# LANGUAGE DeriveDataTypeable, FlexibleInstances, MultiParamTypeClasses #-}
module Graph.Selfcom where
-- $Id$
import Graph.Util
import Graph.Iso
import Autolib.Graph.Ops ( complement )
import Autolib.Graph.Basic
import Inter.Types
import Autolib.ToDoc
import Autolib.Size
import Autolib.FiniteMap
import qualified Challenger as C
import Data.Typeable
data Selfcom = Selfcom deriving ( Eq, Ord, Show, Read, Typeable )
instance OrderScore Selfcom where
scoringOrder _ = Increasing
instance C.Partial Selfcom Int ( Graph Int, FiniteMap Int Int ) where
describe p i = vcat
[ text "Gesucht ist ein regulärer selbstkomplementärer Graph"
, text "mit wenigstens" <+> toDoc i <+> text "Knoten."
, text "Sie sollen auch die passende Isomorphie angeben."
]
initial p i = ( circle [ 1 .. i ]
, listToFM $ zip [ 1 .. i ] $ reverse [ 1 .. i ]
)
partial p i (g, f) = validate g
total p i (g, f) = do
inform $ vcat [ text "Der Graph ist" , nest 4 $ toDoc g ]
let h = complement g
inform $ vcat [ text "Das Komplement ist" , nest 4 $ toDoc h ]
check_iso f g h
inform $ text "Dieser Graph ist selbstkomplementär."
check_reg g
assert ( size g >= i )
$ text "Ist der Graph groß genug?"
instance Size ( Graph Int, FiniteMap Int Int ) where
size ( g, f ) = size g
make :: Make
make = direct Selfcom ( 6 :: Int )
--------------------------------------------------------------------------
| Erdwolf/autotool-bonn | src/Graph/Selfcom.hs | gpl-2.0 | 1,489 | 7 | 14 | 355 | 458 | 238 | 220 | 36 | 1 |
-- rm1x-template: make keymaps for Yamaha Rm1X
-- Copyright (C) 2017 [email protected]
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, write to the Free Software Foundation, Inc.,
-- 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
--
module Helpers
(
getDataFileName',
module System.IO,
module System.Process,
module System.IO.Temp,
module System.Environment,
module System.Directory,
module System.FilePath,
module System.Exit,
module Control.Monad,
module Data.Char,
module Data.List,
module Data.Maybe,
module Data.Version,
module Paths_rm1x_template,
) where
import System.IO
import System.Process
import System.IO.Temp
import System.Environment
import System.Directory
import System.FilePath
import System.Exit
import Control.Monad
import Data.Char
import Data.List
import Data.Maybe
import Data.Version
import Paths_rm1x_template
--------------------------------------------------------------------------------
--
-- | this lets us retrieve files without being installed
getDataFileName' :: FilePath -> IO FilePath
getDataFileName' path = do
path' <- getDataFileName path
doesFileExist path' >>= \exist ->
if exist then return path'
else return $ "data/" ++ path
| karamellpelle/rm1x-template | source/Helpers.hs | gpl-2.0 | 1,894 | 0 | 11 | 382 | 235 | 149 | 86 | 35 | 2 |
{- |
Module : $Header$
Description : A Parser for the TPTP-THF0 Syntax
Copyright : (c) A. Tsogias, DFKI Bremen 2011
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
A Parser for the TPTP-THF0 Input Syntax taken from
<http://www.ags.uni-sb.de/~chris/papers/C25.pdf> P. 15-16 and
<http://www.cs.miami.edu/~tptp/TPTP/SyntaxBNF.html>
-}
module THF.ParseTHF0 (parseTHF0) where
import THF.As
import Text.ParserCombinators.Parsec
import Common.Parsec
import Data.Char
import Data.Maybe
--------------------------------------------------------------------------------
-- Parser for the THF0 Syntax
-- Most methods match those of As.hs
--------------------------------------------------------------------------------
parseTHF0 :: CharParser st [TPTP_THF]
parseTHF0 = do
h <- optionMaybe header
thf <- many ((systemComment <|> definedComment <|> comment <|>
include <|> thfAnnotatedFormula) << skipSpaces)
return $ if isJust h then fromJust h : thf else thf
header :: CharParser st TPTP_THF
header = try (do
s <- headerSE
c <- myManyTill (try (commentLine << skipSpaces)) (try headerSE)
return $ TPTP_Header (s : c))
-- parser for the start and end of headers (defined by "%----..."
headerSE :: CharParser st Comment
headerSE = do
try (char '%' >> notFollowedBy (char '$'))
c <- many1 $ char '-' << notFollowedBy printableChar
skipSpaces
return $ Comment_Line c
commentLine :: CharParser st Comment
commentLine = do
try (char '%' >> notFollowedBy (char '$'))
c <- many printableChar
return $ Comment_Line c
comment :: CharParser st TPTP_THF
comment = fmap TPTP_Comment commentLine
<|> do
try (string "/*" >> notFollowedBy (char '$'))
c <- many (noneOf "*/")
skipMany1 (char '*'); char '/'
return $ TPTP_Comment (Comment_Block (lines c))
definedComment :: CharParser st TPTP_THF
definedComment = do
try (string "%$" >> notFollowedBy (char '$'))
c <- many printableChar
return $ TPTP_Defined_Comment (Defined_Comment_Line c)
<|> do
try (string "/*$" >> notFollowedBy (char '$'))
c <- many (noneOf "*/")
skipMany1 (char '*'); char '/'
return $ TPTP_Defined_Comment (Defined_Comment_Block (lines c))
systemComment :: CharParser st TPTP_THF
systemComment = do
tryString "%$$"
c <- many printableChar
return $ TPTP_System_Comment (System_Comment_Line c)
<|> do
tryString "/*$$"
c <- many (noneOf "*/")
skipMany1 (char '*'); char '/'
return $ TPTP_System_Comment (System_Comment_Block (lines c))
include :: CharParser st TPTP_THF
include = do
key $ tryString "include"
oParentheses
fn <- fileName
fs <- formulaSelection
cParentheses; char '.'
return $ TPTP_Include (I_Include fn fs)
thfAnnotatedFormula :: CharParser st TPTP_THF
thfAnnotatedFormula = do
key $ tryString "thf"
oParentheses
n <- name; comma
fr <- formulaRole; comma
tf <- thfFormula
a <- annotations
cParentheses; char '.'
return $ TPTP_THF_Annotated_Formula n fr tf a
annotations :: CharParser st Annotations
annotations = do
comma
s <- source
oi <- optionalInfo
return $ Annotations s oi
<|> do
notFollowedBy (char ',');
return Null
formulaRole :: CharParser st FormulaRole
formulaRole = do
r <- lowerWord
case r of
"axiom" -> return Axiom
"hypothesis" -> return Hypothesis
"definition" -> return Definition
"assumption" -> return Assumption
"lemma" -> return Lemma
"theorem" -> return Theorem
"conjecture" -> return Conjecture
"negated_conjecture" -> return Negated_Conjecture
"plain" -> return Plain
"fi_domain" -> return Fi_Domain
"fi_functors" -> return Fi_Functors
"fi_predicates" -> return Fi_Predicates
"type" -> return Type
"unknown" -> return Unknown
_ -> fail ("No such Role: " ++ r)
thfFormula :: CharParser st THFFormula
thfFormula = fmap T0F_THF_Typed_Const thfTypedConst
<|> fmap TF_THF_Logic_Formula thfLogicFormula
thfLogicFormula :: CharParser st THFLogicFormula
thfLogicFormula = fmap TLF_THF_Binary_Formula thfBinaryFormula
<|> fmap TLF_THF_Unitary_Formula thfUnitaryFormula
thfBinaryFormula :: CharParser st THFBinaryFormula
thfBinaryFormula = fmap TBF_THF_Binary_Tuple thfBinaryTuple
<|> do
(uff, pc) <- try $ do
uff1 <- thfUnitaryFormula
pc1 <- thfPairConnective
return (uff1, pc1)
ufb <- thfUnitaryFormula
return $ TBF_THF_Binary_Pair uff pc ufb
thfBinaryTuple :: CharParser st THFBinaryTuple
thfBinaryTuple = do -- or
uff <- try (thfUnitaryFormula << vLine)
ufb <- sepBy1 thfUnitaryFormula vLine
return $ TBT_THF_Or_Formula (uff : ufb)
<|> do -- and
uff <- try (thfUnitaryFormula << ampersand)
ufb <- sepBy1 thfUnitaryFormula ampersand
return $ TBT_THF_And_Formula (uff : ufb)
<|> do -- apply
uff <- try (thfUnitaryFormula << at)
ufb <- sepBy1 thfUnitaryFormula at
return $ TBT_THF_Apply_Formula (uff : ufb)
thfUnitaryFormula :: CharParser st THFUnitaryFormula
thfUnitaryFormula = fmap TUF_THF_Logic_Formula_Par (parentheses thfLogicFormula)
<|> fmap TUF_THF_Quantified_Formula thfQuantifiedFormula
<|> thfUnaryFormula
<|> fmap TUF_THF_Atom thfAtom
<|> do
keyChar '^'
vl <- brackets thfVariableList; colon
uf <- thfUnitaryFormula
return $ T0UF_THF_Abstraction vl uf
thfQuantifiedFormula :: CharParser st THFQuantifiedFormula
thfQuantifiedFormula = do
q <- quantifier
vl <- brackets thfVariableList; colon
uf <- thfUnitaryFormula
return $ T0QF_THF_Quantified_Var q vl uf
<|> do
q <- thfQuantifier
uf <- parentheses thfUnitaryFormula
return $ T0QF_THF_Quantified_Novar q uf
thfVariableList :: CharParser st THFVariableList
thfVariableList = sepBy1 thfVariable comma
thfVariable :: CharParser st THFVariable
thfVariable = do
v <- try (variable << colon)
tlt <- thfTopLevelType
return $ TV_THF_Typed_Variable v tlt
<|> fmap TV_Variable variable
thfTypedConst :: CharParser st THFTypedConst
thfTypedConst = fmap T0TC_THF_TypedConst_Par (parentheses thfTypedConst)
<|> do
c <- try (constant << colon)
tlt <- thfTopLevelType
return $ T0TC_Typed_Const c tlt
thfUnaryFormula :: CharParser st THFUnitaryFormula
thfUnaryFormula = do
uc <- thfUnaryConnective
lf <- parentheses thfLogicFormula
return $ TUF_THF_Unary_Formula uc lf
thfTopLevelType :: CharParser st THFTopLevelType
thfTopLevelType = fmap T0TLT_THF_Binary_Type thfBinaryType
<|> fmap T0TLT_Constant constant
<|> fmap T0TLT_Variable variable
<|> fmap T0TLT_Defined_Type definedType
<|> fmap T0TLT_System_Type systemType
thfUnitaryType :: CharParser st THFUnitaryType
thfUnitaryType = fmap T0UT_Constant constant
<|> fmap T0UT_Variable variable
<|> fmap T0UT_Defined_Type definedType
<|> fmap T0UT_System_Type systemType
<|> fmap T0UT_THF_Binary_Type_Par (parentheses thfBinaryType)
thfBinaryType :: CharParser st THFBinaryType
thfBinaryType = do
utf <- try (thfUnitaryType << arrow)
utb <- sepBy1 thfUnitaryType arrow
return $ TBT_THF_Mapping_Type (utf : utb)
<|> fmap T0BT_THF_Binary_Type_Par (parentheses thfBinaryType)
thfAtom :: CharParser st THFAtom
thfAtom = fmap T0A_Constant constant
<|> fmap T0A_Defined_Constant atomicDefinedWord
<|> fmap T0A_System_Constant atomicSystemWord
<|> fmap T0A_Variable variable
<|> fmap TA_THF_Conn_Term thfConnTerm
thfConnTerm :: CharParser st THFConnTerm
thfConnTerm = fmap TCT_THF_Pair_Connective thfPairConnective
<|> fmap TCT_Assoc_Connective assocConnective
<|> fmap TCT_THF_Unary_Connective thfUnaryConnective
<|> fmap T0CT_THF_Quantifier thfQuantifier
thfQuantifier :: CharParser st THFQuantifier
thfQuantifier = (key (tryString "!!") >> return T0Q_PiForAll)
<|> (key (tryString "??") >> return T0Q_SigmaExists)
<?> "thfQuantifier"
quantifier :: CharParser st Quantifier
quantifier = (keyChar '!' >> return T0Q_ForAll)
<|> (keyChar '?' >> return T0Q_Exists)
<?> "quantifier"
thfPairConnective :: CharParser st THFPairConnective
thfPairConnective = (key (tryString "!=") >> return Infix_Inequality)
<|> (key (tryString "<=>") >> return Equivalent)
<|> (key (tryString "=>") >> return Implication)
<|> (key (tryString "<=") >> return IF)
<|> (key (tryString "<~>") >> return XOR)
<|> (key (tryString "~|") >> return NOR)
<|> (key (tryString "~&") >> return NAND)
<|> (keyChar '=' >> return Infix_Equality)
<?> "pairConnective"
thfUnaryConnective :: CharParser st THFUnaryConnective
thfUnaryConnective = keyChar '~' >> return Negation
assocConnective :: CharParser st AssocConnective
assocConnective = (keyChar '|' >> return OR)
<|> (keyChar '&' >> return AND)
definedType :: CharParser st DefinedType
definedType = do
adw <- atomicDefinedWord
case adw of
"oType" -> return DT_oType
"o" -> return DT_o
"iType" -> return DT_iType
"i" -> return DT_i
"tType" -> return DT_tType
_ -> fail ("No such definedType: " ++ adw)
systemType :: CharParser st SystemType
systemType = atomicSystemWord
constant :: CharParser st Constant
constant = tptpFunctor
tptpFunctor :: CharParser st TPTPFunctor
tptpFunctor = atomicWord
variable :: CharParser st Variable
variable = do
u <- upper
an <- many alphaNum
skipAll
return (u : an)
<?> "Variable"
principalSymbol :: CharParser st PrincipalSymbol
principalSymbol = fmap PS_Functor tptpFunctor
<|> fmap PS_Variable variable
source :: CharParser st Source
source = (key (tryString "unknown") >> return S_Unknown)
<|> fmap S_Dag_Source dagSource
<|> fmap S_External_Source externalSource
<|> fmap S_Sources (sepBy1 source comma)
<|> do -- internal_source
key $ tryString "introduced"; oParentheses
it <- introType
oi <- optionalInfo; cParentheses
return $ S_Internal_Source it oi
dagSource :: CharParser st DagSource
dagSource = do
key (tryString "inference"); oParentheses
ir <- atomicWord; comma
ui <- usefulInfo; comma
pl <- brackets (sepBy1 parentInfo comma)
cParentheses
return (DS_Inference_Record ir ui pl)
<|> fmap DS_Name name
parentInfo :: CharParser st ParentInfo
parentInfo = do
s <- source
pd <- parentDetails
return $ PI_Parent_Info s pd
parentDetails :: CharParser st (Maybe GeneralList)
parentDetails = fmap Just (colon >> generalList)
<|> (notFollowedBy (char ':') >> return Nothing)
introType :: CharParser st IntroType
introType = (key (tryString "definition") >> return IT_definition)
<|> (key (tryString "axiom_of_choice") >> return IT_axiom_of_choice)
<|> (key (tryString "tautology") >> return IT_tautology)
<|> (key (tryString "assumption") >> return IT_assumption)
externalSource :: CharParser st ExternalSource
externalSource = fmap ES_File_Source fileSource
<|> do
key $ tryString "theory"; oParentheses
tn <- theoryName
oi <- optionalInfo; cParentheses
return $ ES_Theory tn oi
<|> do
key $ tryString "creator"; oParentheses
cn <- atomicWord
oi <- optionalInfo; cParentheses
return $ ES_Creator_Source cn oi
fileSource :: CharParser st FileSource
fileSource = do
key $ tryString "file"; oParentheses
fn <- fileName
fi <- fileInfo; cParentheses
return $ FS_File fn fi
fileInfo :: CharParser st (Maybe Name)
fileInfo = fmap Just (comma >> name)
<|> (notFollowedBy (char ',') >> return Nothing)
theoryName :: CharParser st TheoryName
theoryName = (key (tryString "equality") >> return Equality)
<|> (key (tryString "ac") >> return Ac)
optionalInfo :: CharParser st OptionalInfo
optionalInfo = fmap Just (comma >> usefulInfo)
<|> (notFollowedBy (char ',') >> return Nothing)
usefulInfo :: CharParser st UsefulInfo
usefulInfo = (oBracket >> cBracket >> return [])
<|> brackets (sepBy1 infoItem comma)
infoItem :: CharParser st InfoItem
infoItem = fmap II_Formula_Item formulaItem
<|> fmap II_Inference_Item inferenceItem
<|> fmap II_General_Function generalFunction
formulaItem :: CharParser st FormulaItem
formulaItem = do
key $ tryString "description"
fmap FI_Description_Item (parentheses atomicWord)
<|> do
key $ tryString "iquote"
fmap FI_Iquote_Item (parentheses atomicWord)
inferenceItem :: CharParser st InferenceItem
inferenceItem = fmap II_Inference_Status inferenceStatus
<|> do
key $ tryString "assumptions"
fmap II_Assumptions_Record (parentheses (brackets nameList))
<|> do
key $ tryString "new_symbols"; oParentheses
aw <- atomicWord; comma
nsl <- brackets (sepBy1 principalSymbol comma); cParentheses
return $ II_New_Symbol_Record aw nsl
<|> do
key $ tryString "refutation"
fmap II_Refutation (parentheses fileSource)
inferenceStatus :: CharParser st InferenceStatus
inferenceStatus = do
key $ tryString "status"
fmap IS_Status (parentheses statusValue)
<|> do
ir <- try (atomicWord << oParentheses)
aw <- atomicWord; comma
gl <- generalList; cParentheses
return $ IS_Inference_Info ir aw gl
statusValue :: CharParser st StatusValue
statusValue = choice $ map (\ r -> key (tryString $ showStatusValue r)
>> return r) allStatusValues
allStatusValues :: [StatusValue]
allStatusValues =
[Suc, Unp, Sap, Esa, Sat, Fsa, Thm, Eqv, Tac,
Wec, Eth, Tau, Wtc, Wth, Cax, Sca, Tca, Wca,
Cup, Csp, Ecs, Csa, Cth, Ceq, Unc, Wcc, Ect,
Fun, Uns, Wuc, Wct, Scc, Uca, Noc]
showStatusValue :: StatusValue -> String
showStatusValue = map toLower . show
formulaSelection :: CharParser st (Maybe NameList)
formulaSelection = fmap Just (comma >> brackets nameList)
<|> (notFollowedBy (char ',') >> return Nothing)
nameList :: CharParser st NameList
nameList = sepBy1 name comma
generalTerm :: CharParser st GeneralTerm
generalTerm = do
gd <- try (generalData << notFollowedBy (char ':'))
return $ GT_General_Data gd
<|> do
gd <- try (generalData << colon)
gt <- generalTerm
return $ GT_General_Data_Term gd gt
<|> fmap GT_General_List generalList
generalData :: CharParser st GeneralData
generalData = fmap GD_Variable variable
<|> fmap GD_Number number
<|> fmap GD_Distinct_Object distinctObject
<|> do
key $ tryString "bind"; oParentheses
v <- variable; comma
fd <- formulaData; cParentheses
return (GD_Bind v fd)
<|> fmap GD_General_Function generalFunction
<|> fmap GD_Atomic_Word atomicWord
<|> fmap GD_Formula_Data formulaData
generalFunction :: CharParser st GeneralFunction
generalFunction = do
aw <- atomicWord
gts <- parentheses generalTerms
return $ GF_General_Function aw gts
formulaData :: CharParser st FormulaData
formulaData = fmap THF_Formula thfFormula
generalList :: CharParser st GeneralList
generalList = (try (oBracket >> cBracket) >> return [])
<|> brackets generalTerms
generalTerms :: CharParser st [GeneralTerm]
generalTerms = sepBy1 generalTerm comma
name :: CharParser st Name
name = fmap T0N_Unsigned_Integer (unsignedInteger << skipAll)
<|> fmap N_Atomic_Word atomicWord
atomicWord :: CharParser st AtomicWord
atomicWord = fmap A_Lower_Word lowerWord
<|> fmap A_Single_Quoted singleQuoted
<?> "lowerWord or singleQuoted"
atomicDefinedWord :: CharParser st String
atomicDefinedWord = char '$' >> lowerWord
atomicSystemWord :: CharParser st AtomicSystemWord
atomicSystemWord = tryString "$$" >> lowerWord
number :: CharParser st Number
number = fmap Num_Real (real << skipAll)
<|> fmap Num_Rational (rational << skipAll)
<|> fmap Num_Integer (integer << skipAll)
fileName :: CharParser st FileName
fileName = singleQuoted
singleQuoted :: CharParser st SingleQuoted
singleQuoted = do
char '\''
s <- fmap concat $ many1 (tryString "\\\\" <|> tryString "\\'"
<|> tryString "\\\'"
<|> single ( satisfy (\ c -> printable c && notElem c ['\'', '\\'])))
keyChar '\''
return s
distinctObject :: CharParser st DistinctObject
distinctObject = do
char '\"'
s <- fmap concat $ many1 (tryString "\\\\" <|> tryString "\\\""
<|> single ( satisfy (\ c -> printable c && notElem c ['\'', '\\'])))
keyChar '\"'
return s
lowerWord :: CharParser st LowerWord
lowerWord = do
l <- lower
an <- many (alphaNum <|> char '_'); skipAll
return (l : an)
<?> "alphanumeric word with leading lowercase letter"
printableChar :: CharParser st Char
printableChar = satisfy printable
printable :: Char -> Bool
printable c = ord c >= 32 && ord c <= 126
-- Numbers
real :: CharParser st String
real = try (do
s <- oneOf "-+"
ur <- unsignedReal
return (s : ur))
<|> unsignedReal
<?> "(signed) real"
unsignedReal :: CharParser st String
unsignedReal = do
de <- try (do
d <- decimalFractional <|> decimal
e <- oneOf "Ee"
return (d ++ [e]))
ex <- decimal
return (de ++ ex)
<|> decimalFractional
<?> "unsigned real"
rational :: CharParser st String
rational = try (do
s <- oneOf "-+"
ur <- unsignedRational
return (s : ur))
<|> unsignedRational
<?> "(signed) rational"
unsignedRational :: CharParser st String
unsignedRational = do
d1 <- try (decimal << char '/')
d2 <- positiveDecimal
return (d1 ++ "/" ++ d2)
integer :: CharParser st String
integer = try (do
s <- oneOf "-+"
ui <- unsignedInteger
return (s : ui))
<|> unsignedInteger
<?> "(signed) integer"
unsignedInteger :: CharParser st String
unsignedInteger = try (decimal << notFollowedBy (oneOf "eE/."))
decimal :: CharParser st String
decimal = do
char '0'
notFollowedBy digit
return "0"
<|> positiveDecimal
<?> "single zero or digits"
positiveDecimal :: CharParser st String
positiveDecimal = do
nz <- satisfy (\ c -> isDigit c && c /= '0')
d <- many digit
return (nz : d)
<?> "positiv decimal"
decimalFractional :: CharParser st String
decimalFractional = do
dec <- try (decimal << char '.')
n <- many1 digit
return (dec ++ "." ++ n)
<?> "decimal fractional"
--------------------------------------------------------------------------------
-- Some helper functions
--------------------------------------------------------------------------------
skipAll :: CharParser st ()
skipAll = skipMany (skipMany1 space <|>
((comment <|> definedComment <|>
systemComment) >> return ()))
skipSpaces :: CharParser st ()
skipSpaces = skipMany space
key :: CharParser st a -> CharParser st ()
key = (>> skipAll)
keyChar :: Char -> CharParser st ()
keyChar = key . char
myManyTill :: CharParser st a -> CharParser st a -> CharParser st [a]
myManyTill p end = do
e <- end ; return [e]
<|> do
x <- p; xs <- myManyTill p end; return (x : xs)
--------------------------------------------------------------------------------
-- Different simple symbols
--------------------------------------------------------------------------------
vLine :: CharParser st ()
vLine = keyChar '|'
arrow :: CharParser st ()
arrow = keyChar '>'
comma :: CharParser st ()
comma = keyChar ','
colon :: CharParser st ()
colon = keyChar ':'
oParentheses :: CharParser st ()
oParentheses = keyChar '('
cParentheses :: CharParser st ()
cParentheses = keyChar ')'
parentheses :: CharParser st a -> CharParser st a
parentheses p = do
r <- try (oParentheses >> p)
cParentheses
return r
oBracket :: CharParser st ()
oBracket = keyChar '['
cBracket :: CharParser st ()
cBracket = keyChar ']'
brackets :: CharParser st a -> CharParser st a
brackets p = do
r <- try (oBracket >> p)
cBracket
return r
ampersand :: CharParser st ()
ampersand = keyChar '&'
at :: CharParser st ()
at = keyChar '@'
| nevrenato/Hets_Fork | THF/ParseTHF0.hs | gpl-2.0 | 20,163 | 0 | 19 | 4,334 | 6,035 | 2,880 | 3,155 | 534 | 15 |
import Control.Applicative
import qualified Data.Map as M
import Data.List
-- DP approach: dp[i][j] records whether the first i numbers can produce the
-- value j modulo 101 (0 <= j < 101), together with one operator sequence that
-- does so. The fold below performs the DP step; the approach follows @tanakh's solution.
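-- Worked illustration of one fold step (not part of the original solution):
-- folding 66 into the map {55 -> ["55"]} yields keys (55+66) `mod` 101 == 20,
-- (55-66) `mod` 101 == 90 and (55*66) `mod` 101 == 95, each paired with the
-- token list that produced it, e.g. ["66","+","55"] (reversed before printing).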
main :: IO ()
main = do
n <- readLn :: IO Int
ns <- map read . words <$> getLine
let result = foldl' f (M.fromList [(head ns, [show $ head ns])]) (tail ns)
f m n = M.fromList $
concatMap (\(val, ops) ->
[ ((val+n)`mod`101, (show n):"+":ops)
, ((val-n)`mod`101, (show n):"-":ops)
, ((val*n)`mod`101, (show n):"*":ops)
]) $
M.toList m
ans = intercalate "" $ reverse $ result M.! 0
putStrLn ans
| m00nlight/hackerrank | functional/memorization-dp/Expression/main.hs | gpl-2.0 | 824 | 0 | 20 | 288 | 298 | 161 | 137 | 17 | 1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- SJC: is it possible to move this to the prototype part of ampersand? I mean,
-- do functions like plugFields and plug-path really need to be here?
-- perhaps we can at least move the largest part?
module Database.Design.Ampersand.FSpec.Plug
(Plugable(..), PlugInfo(..)
,SqlField(..)
,SqlFieldUsage(..)
,SqlTType(..)
,showSQL
,plugpath
,fldauto
,PlugSQL(..)
)
where
import Database.Design.Ampersand.ADL1
import Database.Design.Ampersand.Classes (Relational(..))
import Database.Design.Ampersand.Basics
import Data.List
import GHC.Exts (sortWith)
import Database.Design.Ampersand.FSpec.FSpec
import Prelude hiding (Ordering(..))
fatal :: Int -> String -> a
fatal = fatalMsg "FSpec.Plug"
----------------------------------------------
--Plug
----------------------------------------------
--TODO151210 -> define what a plug is and what it should do
--Plugs are of the class Object just like Activities(??? => PHP plug isn't an instance of Object)
--An Object is an entity to do things with, like reading, updating, creating, deleting.
--An Interface is an Object using only Plugs for reading and writing data; a Plug is a data service maintaining the rules for one object:
-- + GEN Interface,Plug ISA Object
-- + cando::Operation*Object
-- + uses::Interface*Plug [TOT].
-- + maintains::Plug*Rule.
-- + signals::Interface*SignalRule.
--
--Plugs can currently be implemented in PHP or SQL.
--type Plugs = [Plug]
--data Plug = PlugSql PlugSQL | PlugPhp PlugPHP deriving (Show,Eq)
class (Named p, Eq p, Show p) => Plugable p where
makePlug :: PlugInfo -> p
instance Plugable PlugSQL where
makePlug (InternalPlug p) = p
makePlug (ExternalPlug _) = fatal 112 "external plug is not Plugable"
----------------------------------------------
--PlugSQL
----------------------------------------------
--TblSQL, BinSQL, and ScalarSQL hold different entities. See their definition FSpec.hs
-- all kernel fields can be related to an imaginary concept ID for the plug (a SqlField with type=SQLID)
-- i.e. For all kernel fields k1,k2, where concept k1=A, concept k2=B, fldexpr k1=r~, fldexpr k2=s~
-- You can imagine :
-- - a relation value::ID->A[INJ] or value::ID->A[INJ,SUR]
-- - a relation value::ID->B[INJ] or value::ID->B[INJ,SUR]
-- such that s~=value~;value;r~ and r~=value~;value;s~
-- because value is at least uni,tot,inj, all NULL in k0 imply NULL in k1 xor v.v.
-- if value also sur then all NULL in k0 imply NULL in k1 and v.v.
-- Without such an ID, the surjective or total property between any two kernel fields is required.
-- Because you can imagine an ID concept the surjective or total property between two kernel field has become a design choice.
--
-- With or without ID we choose to keep kernel = A closure of concepts A,B for which there exists a r::A->B[INJ] instead of r::A*B[UNI,INJ]
-- By making this choice:
-- - nice database table size
-- - we do not need the imaginary concept ID (and relation value::ID->A[INJ] or value::ID->A[INJ,SUR]), because:
-- with ID -> there will always be one or more kernel field k1 such that (value;(fldexpr k1)~)[UNI,INJ,TOT,SUR].
-- any of those k1 can serve as ID of the plug (a.k.a. concept p / source p)
-- without ID -> any of those k1 can still serve as ID of the plug (a.k.a. concept p / source p)
-- In other words, the imaginary concept is never needed
-- because there always is an existing one with the correct properties by definition of kernel.
-- Implementation without optional ID:
-- -> fldexpr of some kernel field k1 will be r~
-- k1 holds the target of r~
-- the source of r~ is a kernel concept too
-- r~ may be I
-- -> fldexpr of some attMor field a1 will be s
-- a1 holds the target of s
-- the source of s is a kernel concept
-- -> sqlRelFields r = (r,k1,a1) (or (r,k1,k2)) in mLkpTbl
-- is used to generate SQL code and PHP-objects without needing the ID field.
-- The ID field can be ignored and does not have to be generated because r=(fldexpr k1)~;(fldexpr a1)
-- You could generate the ID-field with autonum if you want, because it will not be used
-- -> TODO151210 -> sqlRelFields e where e is not in mLkpTbl
-- option1) Generate the ID field (see entityfield)
-- sqlRelFields e = (e, idfld;k1, idfld;a1) where e=(fldexpr k1)~;value~;value;(fldexpr a1)
-- remark: binary tables can be binary tables without kernels, but with ID field
-- (or from a different perspective: ID is the only kernel field)
-- sqlRelFields r = (r,idfld/\r;r~,idfld;m1) where r = (idfld/\r;r~)~;idfld;(fldexpr m1)
-- (sqlRelFields r~ to get the target of r)
-- (scalar tables can of course also have an ID field)
-- option2) sqlRelFields e = (e, k1;k2;..kn, a1)
-- where e=(fldexpr kn)~;..;(fldexpr k2)~;(fldexpr k1)~;(fldexpr k1)(fldexpr k2);..;(fldexpr kn);(fldexpr a1)
-- If I am right the function isTrue tries to support sqlRelFields e by ignoring the type error in kn;a1.
-- That is wrong!
--the entityfield is not implemented as part of the data type PlugSQL
--It is a constant which may or may not be used (you may always imagine it)
--TODO151210 -> generate the entityfield if options = --autoid -p
--REMARK151210 -> one would expect I[entityconcept p],
-- but any p (as instance of Object) has one always existing concept p suitable to replace entityconcept p.
-- concept p and entityconcept p are related uni,tot,inj,sur.
--the entity stored in a plug is an imaginary concept, that is uni,tot,inj,sur with (concept p)
--REMARK: there is a (concept p) because all kernel fields are related SUR with (concept p)
--Maintain rule: Object ObjectDef = Object (makeUserDefinedSqlPlug :: ObjectDef -> PlugSQL)
--TODO151210 -> Build a check which checks this rule for userdefined/showADL generated plugs(::[ObjectDef])
--TODO151210 -> The ObjectDef of a BinSQL plug for relation r is that:
-- 1) SQLPLUG mybinplug: r , or
-- 2) SQLPLUG labelforsourcem : I /\ r;r~ --(or just I if r is TOT)
-- = [labelfortargetm : r]
-- The first option has been implemented in instance ObjectPlugSQL i.e. attributes=[], ctx=ERel r _
instance Object PlugSQL where
concept p = case p of
TblSQL{mLkpTbl = []} -> fatal 263 $ "empty lookup table for plug "++name p++"."
TblSQL{} -> --TODO151210-> this implementation should better match the description below;
-- for now it is assumed that the source of the first relation in mLkpTbl is the source of the plug.
--a relation between kernel concepts r::A*B is at least [UNI,INJ]
--to be able to point out one concept to be the source we are looking for one without NULLs in its field
-- i.e. there is a concept A such that
-- for all kernel field expr (s~)::B*C[UNI,INJ]:
-- s~ is total and there exists an expr::A*B[UNI,INJ,TOT,SUR] (possibly A=B => I[A][UNI,INJ,TOT,SUR])
--If A is such a concept,
-- and A is not B,
-- and there exist an expr::A*B[UNI,INJ,TOT,SUR]
--then (concept PlugSQL{}) may be A or B
--REMARK -> (source p) used to be implemented as (source . fldexpr . head . fields) p. That is different!
head [source r |(r,_,_)<-mLkpTbl p]
BinSQL{} -> source (mLkp p) --REMARK151210 -> the concept is actually ID such that I[ID]=I[source r]/\r;r~
ScalarSQL{} -> cLkp p
-- Usually source a==concept p. Otherwise, the attribute computation is somewhat more complicated. See ADL2FSpec for explanation about kernels.
attributes p@TblSQL{}
= [ Obj (fldname tFld) -- objnm
(Origin "This object is generated by attributes (Object PlugSQL)") -- objpos
(if source a==concept p then a else f (source a) [[a]]) -- objctx
Nothing
Nothing
[] -- objats and objstrs
| (a,_,tFld)<-mLkpTbl p]
where
f c mms
= case sortWith length stop of
[] -> f c mms' -- a path from c to a is not found (yet), so add another step to the recursion
(hd:_) -> case hd of
[] -> fatal 201 "Empty head should be impossible."
_ -> case [(l,r) | (l,r)<-zip (init hd) (tail hd), target l/=source r] of
[] -> foldr1 (.:.) hd -- pick the shortest path and turn it into an expression.
lrs -> fatal 204 ("illegal compositions " ++show lrs)
where
mms' = if [] `elem` mms
then fatal 295 "null in mms."
else [a:ms | ms<-mms, (a,_,_)<-mLkpTbl p, target a==source (head ms)]
stop = if [] `elem` mms'
then fatal 298 "null in mms'."
else [ms | ms<-mms', source (head ms)==c] -- contains all found paths from c to a
attributes _ = [] --no attributes for BinSQL and ScalarSQL
contextOf p@BinSQL{} = mLkp p
contextOf p = EDcI (concept p)
fldauto::SqlField->Bool -- is the field auto increment?
fldauto f = case fldtype f of
SQLSerial -> if not (fldnull f) && flduniq f
then True
else fatal 171 "AutoIncrement is not allowed at this place." --TODO: build check in P2Aconverters
_ -> False
showSQL :: SqlTType -> String
showSQL (SQLFloat ) = "FLOAT"
showSQL (SQLVarchar n) = "VARCHAR("++show n++")"
showSQL (SQLText ) = "TEXT"
showSQL (SQLMediumText ) = "MEDIUMTEXT"
showSQL (SQLBlob ) = "BLOB"
showSQL (SQLMediumBlob ) = "MEDIUMBLOB"
showSQL (SQLLongBlob ) = "LONGBLOB"
showSQL (SQLDate ) = "DATE"
showSQL (SQLDateTime ) = "DATETIME"
showSQL (SQLBigInt ) = "BIGINT"
showSQL (SQLBool ) = "BOOLEAN"
showSQL (SQLSerial ) = "SERIAL"
-- Every kernel field is a key, kernel fields are in cLkpTbl or the column of ScalarSQL (which has one column only)
-- isPlugIndex refers to UNIQUE key -- TODO: this is wrong
--isPlugIndex may contain NULL, but their key (the entityfield of the plug) must be unique for a kernel field (isPlugIndex=True)
--the field that is isIdent and isPlugIndex (i.e. concept plug), or any similar (uni,inj,sur,tot) field is also UNIQUE key
--IdentityDefs define UNIQUE key (fld1,fld2,..,fldn)
--REMARK -> a kernel field does not have to be in cLkpTbl; in that case there is another kernel field that is
-- thus I must check whether fldexpr isUni && isInj && isSur
isPlugIndex :: PlugSQL->SqlField->Bool
isPlugIndex plug f =
case plug of
ScalarSQL{} -> sqlColumn plug==f
BinSQL{} --mLkp is not uni or inj by definition of BinSQL, if mLkp total then the (fldexpr srcfld)=I/\r;r~=I i.e. a key for this plug
| isUni(mLkp plug) || isInj(mLkp plug) -> fatal 366 "BinSQL may not store a univalent or injective rel, use TblSQL instead."
| otherwise -> False --binary does not have key, but I could do a SELECT DISTINCT iff f==fst(columns plug) && (isTot(mLkp plug))
TblSQL{} -> elem f (fields plug) && isUni(fldexpr f) && isInj(fldexpr f) && isSur(fldexpr f)
composeCheck :: Expression -> Expression -> Expression
composeCheck l r
= if target l/=source r then fatal 316 ("\nl: "++show l++" with target "++show (target l)++"\nr: "++show r++" with source "++show (source r)) else
l .:. r
--composition from srcfld to trgfld, if there is an expression for that
plugpath :: PlugSQL -> SqlField -> SqlField -> Maybe Expression
plugpath p srcfld trgfld =
case p of
BinSQL{}
| srcfld==trgfld -> let tm=mLkp p --(note: mLkp p is the relation from fst to snd column of BinSQL)
in if srcfld==fst(columns p)
then Just$ tm .:. flp tm --domain of r
else Just$ flp tm .:. tm --codomain of r
| srcfld==fst(columns p) && trgfld==snd(columns p) -> Just$ fldexpr trgfld
| trgfld==fst(columns p) && srcfld==snd(columns p) -> Just$ flp(fldexpr srcfld)
| otherwise -> fatal 444 $ "BinSQL has only two fields:"++show(fldname srcfld,fldname trgfld,name p)
ScalarSQL{}
| srcfld==trgfld -> Just$ fldexpr trgfld
| otherwise -> fatal 447 $ "scalarSQL has only one field:"++show(fldname srcfld,fldname trgfld,name p)
TblSQL{}
| srcfld==trgfld && isPlugIndex p trgfld -> Just$ EDcI (target (fldexpr trgfld))
| srcfld==trgfld && not(isPlugIndex p trgfld) -> Just$ composeCheck (flp (fldexpr srcfld)) (fldexpr trgfld) --codomain of r of morAtt
| (not . null) (paths srcfld trgfld)
-> case head (paths srcfld trgfld) of
[] -> fatal 338 ("Empty head (paths srcfld trgfld) should be impossible.")
ps -> Just$ foldr1 composeCheck ps
--bijective kernel fields, which are bijective with ID of plug have fldexpr=I[X].
--thus, path closures of these kernel fields are disjoint (path closure=set of fields reachable by paths),
-- because these kernel fields connect to themselves by r=I[X] (i.e. end of path).
--connect two paths over I[X] (I[X];srce)~;(I[X];trge) => filter I[X] => srcpath~;trgpath
| (not.null) (pathsoverIs srcfld trgfld) -> Just$ foldr1 composeCheck (head (pathsoverIs srcfld trgfld))
| (not.null) (pathsoverIs trgfld srcfld) -> Just$ flp (foldr1 composeCheck (head (pathsoverIs trgfld srcfld)))
| otherwise -> Nothing
--paths from s to t by connecting r from mLkpTbl
--the (r,srcfld,trgfld) from mLkpTbl form paths longer paths if connected: (trgfld m1==srcfld m2) => (m1;m2,srcfld m1,trgfld m2)
where
paths s t = [e |(e,es,et)<-eLkpTbl p,s==es,t==et]
--paths from I to field t
pathsfromIs t = [(e,es,et) |(e,es,et)<-eLkpTbl p,et==t,not (null e),isIdent(head e)]
--paths from s to t over I[X]
pathsoverIs s t = [flpsrce++tail trge
|(srce,srces,_)<-pathsfromIs s
,(trge,trges,_)<-pathsfromIs t
,srces==trges, let flpsrce = (map flp.reverse.tail) srce]
--the expression LkpTbl of a plug is the transitive closure of the mLkpTbl of the plug
--Warshall's transitive closure algorithm clos1 :: (Eq a) => [(a,a)] -> [(a,a)] is extended to combine paths i.e. r++r'
--[Expression] implies a 'composition' from a kernel SqlField to another SqlField
--use plugpath to get the Expression from srcfld to trgfld
--plugpath also combines expressions with head I like (I;tail1)~;(I;tail2) <=> tail1;tail2
eLkpTbl::PlugSQL -> [([Expression],SqlField,SqlField)]
eLkpTbl p = clos1 [([r],s,t)|(r,s,t)<-mLkpTbl p]
where
clos1 :: [([Expression],SqlField,SqlField)] -> [([Expression],SqlField,SqlField)] -- e.g. a list of SqlField pairs
clos1 xs
= foldl f xs (nub (map (\(_,x,_)->x) xs) `isc` nub (map (\(_,_,x)->x) xs))
where
f q x = q `uni` [( r++r' , a, b') | (r ,a, b) <- q, b == x, (r', a', b') <- q, a' == x]
| guoy34/ampersand | src/Database/Design/Ampersand/FSpec/Plug.hs | gpl-3.0 | 16,084 | 7 | 21 | 4,623 | 2,391 | 1,326 | 1,065 | 123 | 5 |
{-# LANGUAGE FlexibleInstances, OverloadedStrings #-}
module Quran.QLines (
QLines
, qLines
, qLines_
, unQLines
, fromQLines
, readQtfFiles
, readQpfFiles
, defaultBrkToText
, qtfRngToQlf
, GrpStyle (RefRangesOnly, BigBreaks, AllBreaks, ByVerse)
, applyGrpStyleToRng
, splitRngByVerses
, splitRngByPars
) where
import Prelude hiding (readFile, lines, unlines)
import qualified System.IO as S (readFile)
import qualified Data.List as S (lines)
import qualified Data.Text as T
import Data.Text (Text, lines, pack)
import Data.Text.IO
import Data.Maybe (listToMaybe)
import Quran.Internal.QRefRng
import Quran.QRefRng
-- | Type for holding QTF and QPF files.
-- It has 6236 lines (could be: originals, translations, commentaries, break styles, etc.)
newtype QLines a = MkQLines [a]
deriving (Show)
qLines :: Monad m => [a] -> m (QLines a)
qLines ls
| length ls /= 6236 = fail $ "The QTF format expects 6236 lines."
| otherwise = return $ MkQLines ls
qLines_ :: [a] -> QLines a
qLines_ ls = MkQLines ls
unQLines :: QLines a -> [a]
unQLines (MkQLines ts) = ts
class QLinesSelector s where
fromQLines :: QLines a -> s -> [a]
instance QLinesSelector Int where fromQLines (MkQLines ls) n = [ls !! (n - 1)]
instance QLinesSelector [Int] where fromQLines (MkQLines ls) ns = map ((!!) ls . subtract 1) ns
instance QLinesSelector QRefRng where fromQLines qls rr = fromQLines qls $ qRefRngToLineNrs rr
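-- A hedged usage sketch (illustrative only; 'qpf' stands for a QLines Int
-- obtained from readQpfFiles below, and 'refRng' for some QRefRng value):
--
--   fromQLines qpf (7 :: Int)        -- the break mark on line 7
--   fromQLines qpf [1, 2, 3 :: Int]  -- the break marks on the first three lines
--   fromQLines qpf refRng            -- the break marks for every verse in the range
--
-- All three go through the same class method; the QRefRng instance simply
-- reuses the [Int] instance via qRefRngToLineNrs.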
-- | Read QTF files
readQtfFiles :: [FilePath] -> IO [QLines Text] -- TODO: handle IO and parse exceptions
readQtfFiles files = mapM readFile files >>= mapM (qLines . lines)
-- | Read QPF files
readQpfFiles :: [FilePath] -> IO [QLines Int]
readQpfFiles files = mapM S.readFile files >>= mapM rawQpfToQLines
where
rawQpfToQLines :: Monad m => String -> m (QLines Int)
rawQpfToQLines = qLines . (\qpf -> map (\s -> readNumOrZero [head s]) (S.lines qpf))
readNumOrZero :: String -> Int
readNumOrZero s = case (fmap fst . listToMaybe . reads) s of Just i -> i; Nothing -> 0
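-- Note that rawQpfToQLines only looks at the first character of each line
-- ('readNumOrZero [head s]'): a line starting with "2" yields 2, a line
-- starting with a non-digit yields 0, and an empty line would make 'head'
-- fail. A hedged sketch of the intended round trip (file names are
-- hypothetical):
--
--   [qpf] <- readQpfFiles ["break-styles.qpf"]
--   [qtf] <- readQtfFiles ["translation.qtf"]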
-- | The default mapping from QPF numbers to QLF style breaks
defaultBrkToText :: Int -> Text
defaultBrkToText brk = case brk of 0 -> " "
1 -> "\\br "
2 -> "\\bbr "
_ -> "\\bbr "
-- | Enrich a QTF text with refs and QPF-specified paragraphing into a QLF (LaTeX'ish) output
qtfRngToQlf :: QLines Int -> (Int -> Text) -> QLines Text -> QRefRng -> Text
qtfRngToQlf qpf brkToText qtf refRng = T.concat $
weave3 (map (\rng -> pack $ "\\nr{" ++ show rng ++ "} ") $ splitRngByVerses refRng)
(fromQLines qtf refRng)
(map (brkToText . head . fromQLines qpf) $ (init . splitRngByVerses) refRng)
where
weave3 :: [a] -> [a] -> [a] -> [a]
weave3 [] _ _ = [] -- ys and zs are woven into xs, so empty
weave3 xs [] _ = xs -- zs needs ys to be woven into xs
weave3 (x:xs) (y:ys) [] = x:y : weave3 xs ys []
weave3 (x:xs) (y:ys) (z:zs) = x:y:z : weave3 xs ys zs
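-- A hedged illustration of the weaving above (behavioural sketch only, since
-- weave3 is local to this where-clause): with two verse prefixes, two verse
-- texts and the single break between them,
--
--   weave3 ["\\nr{1} ", "\\nr{2} "] ["text1", "text2"] ["\\br "]
--     == ["\\nr{1} ", "text1", "\\br ", "\\nr{2} ", "text2"]
--
-- i.e. every verse text is preceded by its reference and followed by the
-- break that separates it from the next verse.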
-- | Intra-text grouping style (what to align in case of side-by-side)
data GrpStyle = RefRangesOnly -- only group on ranges (least grouping)
| BigBreaks -- on ranges and big breaks
| AllBreaks -- on ranges and all breaks
| ByVerse -- on individual verses (most grouping)
deriving Show
-- | Break up a `QRefRng` into `[QRefRng]` based on a grouping style and QPF
applyGrpStyleToRng :: GrpStyle -> QLines Int -> QRefRng -> [QRefRng]
applyGrpStyleToRng grpStyle qpf refRng = case grpStyle of
RefRangesOnly -> [refRng]
BigBreaks -> splitRngByPars (1 <) qpf refRng
AllBreaks -> splitRngByPars (0 <) qpf refRng
ByVerse -> splitRngByVerses refRng
-- | Flattens a reference range into a list of single-verse references
splitRngByVerses :: QRefRng -> [QRefRng]
splitRngByVerses rr = map (\v -> MkQRefRng (chapter rr, (v, v))) $ verseList rr
-- | Splits a reference on a QPF paragraphing style using a `criterion`
splitRngByPars :: (Int -> Bool) -> QLines Int -> QRefRng -> [QRefRng]
splitRngByPars criterion qpf refRng =
let markedPairs = zip (map criterion $ fromQLines qpf refRng)
(map (\x -> [x, x + 1]) $ verseList refRng)
in map (\vs -> qRefRng_ (chapter refRng) vs)
(listToTuples ([fstVerse refRng] ++ (concat . (map snd) . (filter fst) . init $ markedPairs) ++ [lstVerse refRng]))
where
listToTuples :: [a] -> [(a, a)]
listToTuples [] = []
listToTuples [_] = error "Odd lists should never occur"
listToTuples (x:y:rs) = (x, y) : listToTuples rs
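-- A hedged behaviour sketch (illustrative only; assume 'rng' covers chapter 2,
-- verses 5-7 and 'qpf' is a QLines Int of break marks):
--
--   splitRngByVerses rng            -- three single-verse ranges: 2:5, 2:6, 2:7
--   splitRngByPars (0 <) qpf rng    -- split at every non-zero break inside the range
--   splitRngByPars (1 <) qpf rng    -- split only at the big (> 1) breaks
--
-- which is exactly how applyGrpStyleToRng maps AllBreaks and BigBreaks above.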
|
oqc/hqt
|
Quran/QLines.hs
|
gpl-3.0
| 4,751 | 0 | 19 | 1,216 | 1,455 | 780 | 675 | 88 | 4 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.AdSense.Types.Sum
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.AdSense.Types.Sum where
import Network.Google.Prelude hiding (Bytes)
-- | Output only. State of a site.
data SiteState
= StateUnspecified
-- ^ @STATE_UNSPECIFIED@
-- State unspecified.
| RequiresReview
-- ^ @REQUIRES_REVIEW@
-- The site hasn\'t been checked yet.
| GettingReady
-- ^ @GETTING_READY@
-- Running some checks on the site. This usually takes a few days, but in
-- some cases can take up to 2 weeks.
| Ready
-- ^ @READY@
-- The site is ready to show ads.
| NeedsAttention
-- ^ @NEEDS_ATTENTION@
-- Publisher needs to fix some issues before the site is ready to show ads.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable SiteState
instance FromHttpApiData SiteState where
parseQueryParam = \case
"STATE_UNSPECIFIED" -> Right StateUnspecified
"REQUIRES_REVIEW" -> Right RequiresReview
"GETTING_READY" -> Right GettingReady
"READY" -> Right Ready
"NEEDS_ATTENTION" -> Right NeedsAttention
x -> Left ("Unable to parse SiteState from: " <> x)
instance ToHttpApiData SiteState where
toQueryParam = \case
StateUnspecified -> "STATE_UNSPECIFIED"
RequiresReview -> "REQUIRES_REVIEW"
GettingReady -> "GETTING_READY"
Ready -> "READY"
NeedsAttention -> "NEEDS_ATTENTION"
instance FromJSON SiteState where
parseJSON = parseJSONText "SiteState"
instance ToJSON SiteState where
toJSON = toJSONText
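-- A hedged usage sketch (not part of the generated API; it merely exercises
-- the instances above):
--
--   toQueryParam Ready == "READY"
--   (parseQueryParam "NEEDS_ATTENTION" :: Either Text SiteState) == Right NeedsAttention
--   (parseQueryParam "bogus" :: Either Text SiteState) == Left "Unable to parse SiteState from: bogus"
--
-- Every other enumeration in this module follows the same pattern, pairing
-- each constructor with its wire name for query parameters and JSON alike.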
-- | Timezone in which to generate the report. If unspecified, this defaults
-- to the account timezone. For more information, see [changing the time
-- zone of your
-- reports](https:\/\/support.google.com\/adsense\/answer\/9830725).
data AccountsReportsSavedGenerateReportingTimeZone
= ReportingTimeZoneUnspecified
-- ^ @REPORTING_TIME_ZONE_UNSPECIFIED@
-- Unspecified timezone.
| AccountTimeZone
-- ^ @ACCOUNT_TIME_ZONE@
-- Use the account timezone in the report.
| GoogleTimeZone
-- ^ @GOOGLE_TIME_ZONE@
-- Use the Google timezone in the report (America\/Los_Angeles).
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable AccountsReportsSavedGenerateReportingTimeZone
instance FromHttpApiData AccountsReportsSavedGenerateReportingTimeZone where
parseQueryParam = \case
"REPORTING_TIME_ZONE_UNSPECIFIED" -> Right ReportingTimeZoneUnspecified
"ACCOUNT_TIME_ZONE" -> Right AccountTimeZone
"GOOGLE_TIME_ZONE" -> Right GoogleTimeZone
x -> Left ("Unable to parse AccountsReportsSavedGenerateReportingTimeZone from: " <> x)
instance ToHttpApiData AccountsReportsSavedGenerateReportingTimeZone where
toQueryParam = \case
ReportingTimeZoneUnspecified -> "REPORTING_TIME_ZONE_UNSPECIFIED"
AccountTimeZone -> "ACCOUNT_TIME_ZONE"
GoogleTimeZone -> "GOOGLE_TIME_ZONE"
instance FromJSON AccountsReportsSavedGenerateReportingTimeZone where
parseJSON = parseJSONText "AccountsReportsSavedGenerateReportingTimeZone"
instance ToJSON AccountsReportsSavedGenerateReportingTimeZone where
toJSON = toJSONText
-- | Date range of the report, if unset the range will be considered CUSTOM.
data AccountsReportsSavedGenerateCSVDateRange
= ReportingDateRangeUnspecified
-- ^ @REPORTING_DATE_RANGE_UNSPECIFIED@
-- Unspecified date range.
| Custom
-- ^ @CUSTOM@
-- A custom date range specified using the \`start_date\` and \`end_date\`
-- fields. This is the default if no ReportingDateRange is provided.
| Today
-- ^ @TODAY@
-- Current day.
| Yesterday
-- ^ @YESTERDAY@
-- Yesterday.
| MonthToDate
-- ^ @MONTH_TO_DATE@
-- From the start of the current month to the current day. e.g. if the
-- current date is 2020-03-12 then the range will be [2020-03-01,
-- 2020-03-12].
| YearToDate
-- ^ @YEAR_TO_DATE@
-- From the start of the current year to the current day. e.g. if the
-- current date is 2020-03-12 then the range will be [2020-01-01,
-- 2020-03-12].
| Last7Days
-- ^ @LAST_7_DAYS@
-- Last 7 days, excluding current day.
| Last30Days
-- ^ @LAST_30_DAYS@
-- Last 30 days, excluding current day.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable AccountsReportsSavedGenerateCSVDateRange
instance FromHttpApiData AccountsReportsSavedGenerateCSVDateRange where
parseQueryParam = \case
"REPORTING_DATE_RANGE_UNSPECIFIED" -> Right ReportingDateRangeUnspecified
"CUSTOM" -> Right Custom
"TODAY" -> Right Today
"YESTERDAY" -> Right Yesterday
"MONTH_TO_DATE" -> Right MonthToDate
"YEAR_TO_DATE" -> Right YearToDate
"LAST_7_DAYS" -> Right Last7Days
"LAST_30_DAYS" -> Right Last30Days
x -> Left ("Unable to parse AccountsReportsSavedGenerateCSVDateRange from: " <> x)
instance ToHttpApiData AccountsReportsSavedGenerateCSVDateRange where
toQueryParam = \case
ReportingDateRangeUnspecified -> "REPORTING_DATE_RANGE_UNSPECIFIED"
Custom -> "CUSTOM"
Today -> "TODAY"
Yesterday -> "YESTERDAY"
MonthToDate -> "MONTH_TO_DATE"
YearToDate -> "YEAR_TO_DATE"
Last7Days -> "LAST_7_DAYS"
Last30Days -> "LAST_30_DAYS"
instance FromJSON AccountsReportsSavedGenerateCSVDateRange where
parseJSON = parseJSONText "AccountsReportsSavedGenerateCSVDateRange"
instance ToJSON AccountsReportsSavedGenerateCSVDateRange where
toJSON = toJSONText
-- | Timezone in which to generate the report. If unspecified, this defaults
-- to the account timezone. For more information, see [changing the time
-- zone of your
-- reports](https:\/\/support.google.com\/adsense\/answer\/9830725).
data AccountsReportsGenerateCSVReportingTimeZone
= ARGCRTZReportingTimeZoneUnspecified
-- ^ @REPORTING_TIME_ZONE_UNSPECIFIED@
-- Unspecified timezone.
| ARGCRTZAccountTimeZone
-- ^ @ACCOUNT_TIME_ZONE@
-- Use the account timezone in the report.
| ARGCRTZGoogleTimeZone
-- ^ @GOOGLE_TIME_ZONE@
-- Use the Google timezone in the report (America\/Los_Angeles).
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable AccountsReportsGenerateCSVReportingTimeZone
instance FromHttpApiData AccountsReportsGenerateCSVReportingTimeZone where
parseQueryParam = \case
"REPORTING_TIME_ZONE_UNSPECIFIED" -> Right ARGCRTZReportingTimeZoneUnspecified
"ACCOUNT_TIME_ZONE" -> Right ARGCRTZAccountTimeZone
"GOOGLE_TIME_ZONE" -> Right ARGCRTZGoogleTimeZone
x -> Left ("Unable to parse AccountsReportsGenerateCSVReportingTimeZone from: " <> x)
instance ToHttpApiData AccountsReportsGenerateCSVReportingTimeZone where
toQueryParam = \case
ARGCRTZReportingTimeZoneUnspecified -> "REPORTING_TIME_ZONE_UNSPECIFIED"
ARGCRTZAccountTimeZone -> "ACCOUNT_TIME_ZONE"
ARGCRTZGoogleTimeZone -> "GOOGLE_TIME_ZONE"
instance FromJSON AccountsReportsGenerateCSVReportingTimeZone where
parseJSON = parseJSONText "AccountsReportsGenerateCSVReportingTimeZone"
instance ToJSON AccountsReportsGenerateCSVReportingTimeZone where
toJSON = toJSONText
-- | Dimensions to base the report on.
data AccountsReportsGenerateDimensions
= ARGDDimensionUnspecified
-- ^ @DIMENSION_UNSPECIFIED@
-- Unspecified dimension.
| ARGDDate
-- ^ @DATE@
-- Date dimension in YYYY-MM-DD format (e.g. \"2010-02-10\").
| ARGDWeek
-- ^ @WEEK@
-- Week dimension in YYYY-MM-DD format, representing the first day of each
-- week (e.g. \"2010-02-08\"). The first day of the week is determined by
-- the language_code specified in a report generation request (so e.g. this
-- would be a Monday for \"en-GB\" or \"es\", but a Sunday for \"en\" or
-- \"fr-CA\").
| ARGDMonth
-- ^ @MONTH@
-- Month dimension in YYYY-MM format (e.g. \"2010-02\").
| ARGDAccountName
-- ^ @ACCOUNT_NAME@
-- Account name. The members of this dimension match the values from
-- Account.display_name.
| ARGDAdClientId
-- ^ @AD_CLIENT_ID@
-- Unique ID of an ad client. The members of this dimension match the
-- values from AdClient.reporting_dimension_id.
| ARGDProductName
-- ^ @PRODUCT_NAME@
-- Localized product name (e.g. \"AdSense for Content\", \"AdSense for
-- Search\").
| ARGDProductCode
-- ^ @PRODUCT_CODE@
-- Product code (e.g. \"AFC\", \"AFS\"). The members of this dimension
-- match the values from AdClient.product_code.
| ARGDAdUnitName
-- ^ @AD_UNIT_NAME@
-- Ad unit name (within which an ad was served). The members of this
-- dimension match the values from AdUnit.display_name.
| ARGDAdUnitId
-- ^ @AD_UNIT_ID@
-- Unique ID of an ad unit (within which an ad was served). The members of
-- this dimension match the values from AdUnit.reporting_dimension_id.
| ARGDAdUnitSizeName
-- ^ @AD_UNIT_SIZE_NAME@
-- Localized size of an ad unit (e.g. \"728x90\", \"Responsive\").
| ARGDAdUnitSizeCode
-- ^ @AD_UNIT_SIZE_CODE@
-- The size code of an ad unit (e.g. \"728x90\", \"responsive\").
| ARGDCustomChannelName
-- ^ @CUSTOM_CHANNEL_NAME@
-- Custom channel name. The members of this dimension match the values from
-- CustomChannel.display_name.
| ARGDCustomChannelId
-- ^ @CUSTOM_CHANNEL_ID@
-- Unique ID of a custom channel. The members of this dimension match the
-- values from CustomChannel.reporting_dimension_id.
| ARGDOwnedSiteDomainName
-- ^ @OWNED_SITE_DOMAIN_NAME@
-- Domain name of a verified site (e.g. \"example.com\"). The members of
-- this dimension match the values from Site.domain.
| ARGDOwnedSiteId
-- ^ @OWNED_SITE_ID@
-- Unique ID of a verified site. The members of this dimension match the
-- values from Site.reporting_dimension_id.
| ARGDURLChannelName
-- ^ @URL_CHANNEL_NAME@
-- Name of a URL channel. The members of this dimension match the values
-- from UrlChannel.uri_pattern.
| ARGDURLChannelId
-- ^ @URL_CHANNEL_ID@
-- Unique ID of a URL channel. The members of this dimension match the
-- values from UrlChannel.reporting_dimension_id.
| ARGDBuyerNetworkName
-- ^ @BUYER_NETWORK_NAME@
-- Name of an ad network that returned the winning ads for an ad request
-- (e.g. \"Google AdWords\"). Note that unlike other \"NAME\" dimensions,
      -- the members of this dimension are not localized.
| ARGDBuyerNetworkId
-- ^ @BUYER_NETWORK_ID@
-- Unique (opaque) ID of an ad network that returned the winning ads for an
-- ad request.
| ARGDBidTypeName
-- ^ @BID_TYPE_NAME@
-- Localized bid type name (e.g. \"CPC bids\", \"CPM bids\") for a served
-- ad.
| ARGDBidTypeCode
-- ^ @BID_TYPE_CODE@
-- Type of a bid (e.g. \"cpc\", \"cpm\") for a served ad.
| ARGDCreativeSizeName
-- ^ @CREATIVE_SIZE_NAME@
-- Localized creative size name (e.g. \"728x90\", \"Dynamic\") of a served
-- ad.
| ARGDCreativeSizeCode
-- ^ @CREATIVE_SIZE_CODE@
-- Creative size code (e.g. \"728x90\", \"dynamic\") of a served ad.
| ARGDDomainName
-- ^ @DOMAIN_NAME@
-- Localized name of a host on which an ad was served, after IDNA decoding
-- (e.g. \"www.google.com\", \"Web caches and other\", \"bücher.example\").
| ARGDDomainCode
-- ^ @DOMAIN_CODE@
-- Name of a host on which an ad was served (e.g. \"www.google.com\",
-- \"webcaches\", \"xn--bcher-kva.example\").
| ARGDCountryName
-- ^ @COUNTRY_NAME@
-- Localized region name of a user viewing an ad (e.g. \"United States\",
-- \"France\").
| ARGDCountryCode
-- ^ @COUNTRY_CODE@
-- CLDR region code of a user viewing an ad (e.g. \"US\", \"FR\").
| ARGDPlatformTypeName
-- ^ @PLATFORM_TYPE_NAME@
-- Localized platform type name (e.g. \"High-end mobile devices\",
-- \"Desktop\").
| ARGDPlatformTypeCode
-- ^ @PLATFORM_TYPE_CODE@
-- Platform type code (e.g. \"HighEndMobile\", \"Desktop\").
| ARGDTargetingTypeName
-- ^ @TARGETING_TYPE_NAME@
-- Localized targeting type name (e.g. \"Contextual\", \"Personalized\",
-- \"Run of Network\").
| ARGDTargetingTypeCode
-- ^ @TARGETING_TYPE_CODE@
-- Targeting type code (e.g. \"Keyword\", \"UserInterest\",
-- \"RunOfNetwork\").
| ARGDContentPlatformName
-- ^ @CONTENT_PLATFORM_NAME@
-- Localized content platform name an ad request was made from (e.g.
-- \"AMP\", \"Web\").
| ARGDContentPlatformCode
-- ^ @CONTENT_PLATFORM_CODE@
-- Content platform code an ad request was made from (e.g. \"AMP\",
-- \"HTML\").
| ARGDAdPlacementName
-- ^ @AD_PLACEMENT_NAME@
-- Localized ad placement name (e.g. \"Ad unit\", \"Global settings\",
-- \"Manual\").
| ARGDAdPlacementCode
-- ^ @AD_PLACEMENT_CODE@
-- Ad placement code (e.g. \"AD_UNIT\", \"ca-pub-123456:78910\",
-- \"OTHER\").
| ARGDRequestedAdTypeName
-- ^ @REQUESTED_AD_TYPE_NAME@
-- Localized requested ad type name (e.g. \"Display\", \"Link unit\",
-- \"Other\").
| ARGDRequestedAdTypeCode
-- ^ @REQUESTED_AD_TYPE_CODE@
-- Requested ad type code (e.g. \"IMAGE\", \"RADLINK\", \"OTHER\").
| ARGDServedAdTypeName
-- ^ @SERVED_AD_TYPE_NAME@
-- Localized served ad type name (e.g. \"Display\", \"Link unit\",
-- \"Other\").
| ARGDServedAdTypeCode
-- ^ @SERVED_AD_TYPE_CODE@
-- Served ad type code (e.g. \"IMAGE\", \"RADLINK\", \"OTHER\").
| ARGDAdFormatName
-- ^ @AD_FORMAT_NAME@
-- Localized ad format name indicating the way an ad is shown to the users
-- on your site (e.g. \"In-page\", \"Anchor\", \"Vignette\").
| ARGDAdFormatCode
-- ^ @AD_FORMAT_CODE@
-- Ad format code indicating the way an ad is shown to the users on your
-- site (e.g. \"ON_PAGE\", \"ANCHOR\", \"INTERSTITIAL\").
| ARGDCustomSearchStyleName
-- ^ @CUSTOM_SEARCH_STYLE_NAME@
-- Custom search style name.
| ARGDCustomSearchStyleId
-- ^ @CUSTOM_SEARCH_STYLE_ID@
-- Custom search style id.
| ARGDDomainRegistrant
-- ^ @DOMAIN_REGISTRANT@
-- Domain registrants.
| ARGDWebsearchQueryString
-- ^ @WEBSEARCH_QUERY_STRING@
-- Query strings for web searches.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable AccountsReportsGenerateDimensions
instance FromHttpApiData AccountsReportsGenerateDimensions where
parseQueryParam = \case
"DIMENSION_UNSPECIFIED" -> Right ARGDDimensionUnspecified
"DATE" -> Right ARGDDate
"WEEK" -> Right ARGDWeek
"MONTH" -> Right ARGDMonth
"ACCOUNT_NAME" -> Right ARGDAccountName
"AD_CLIENT_ID" -> Right ARGDAdClientId
"PRODUCT_NAME" -> Right ARGDProductName
"PRODUCT_CODE" -> Right ARGDProductCode
"AD_UNIT_NAME" -> Right ARGDAdUnitName
"AD_UNIT_ID" -> Right ARGDAdUnitId
"AD_UNIT_SIZE_NAME" -> Right ARGDAdUnitSizeName
"AD_UNIT_SIZE_CODE" -> Right ARGDAdUnitSizeCode
"CUSTOM_CHANNEL_NAME" -> Right ARGDCustomChannelName
"CUSTOM_CHANNEL_ID" -> Right ARGDCustomChannelId
"OWNED_SITE_DOMAIN_NAME" -> Right ARGDOwnedSiteDomainName
"OWNED_SITE_ID" -> Right ARGDOwnedSiteId
"URL_CHANNEL_NAME" -> Right ARGDURLChannelName
"URL_CHANNEL_ID" -> Right ARGDURLChannelId
"BUYER_NETWORK_NAME" -> Right ARGDBuyerNetworkName
"BUYER_NETWORK_ID" -> Right ARGDBuyerNetworkId
"BID_TYPE_NAME" -> Right ARGDBidTypeName
"BID_TYPE_CODE" -> Right ARGDBidTypeCode
"CREATIVE_SIZE_NAME" -> Right ARGDCreativeSizeName
"CREATIVE_SIZE_CODE" -> Right ARGDCreativeSizeCode
"DOMAIN_NAME" -> Right ARGDDomainName
"DOMAIN_CODE" -> Right ARGDDomainCode
"COUNTRY_NAME" -> Right ARGDCountryName
"COUNTRY_CODE" -> Right ARGDCountryCode
"PLATFORM_TYPE_NAME" -> Right ARGDPlatformTypeName
"PLATFORM_TYPE_CODE" -> Right ARGDPlatformTypeCode
"TARGETING_TYPE_NAME" -> Right ARGDTargetingTypeName
"TARGETING_TYPE_CODE" -> Right ARGDTargetingTypeCode
"CONTENT_PLATFORM_NAME" -> Right ARGDContentPlatformName
"CONTENT_PLATFORM_CODE" -> Right ARGDContentPlatformCode
"AD_PLACEMENT_NAME" -> Right ARGDAdPlacementName
"AD_PLACEMENT_CODE" -> Right ARGDAdPlacementCode
"REQUESTED_AD_TYPE_NAME" -> Right ARGDRequestedAdTypeName
"REQUESTED_AD_TYPE_CODE" -> Right ARGDRequestedAdTypeCode
"SERVED_AD_TYPE_NAME" -> Right ARGDServedAdTypeName
"SERVED_AD_TYPE_CODE" -> Right ARGDServedAdTypeCode
"AD_FORMAT_NAME" -> Right ARGDAdFormatName
"AD_FORMAT_CODE" -> Right ARGDAdFormatCode
"CUSTOM_SEARCH_STYLE_NAME" -> Right ARGDCustomSearchStyleName
"CUSTOM_SEARCH_STYLE_ID" -> Right ARGDCustomSearchStyleId
"DOMAIN_REGISTRANT" -> Right ARGDDomainRegistrant
"WEBSEARCH_QUERY_STRING" -> Right ARGDWebsearchQueryString
x -> Left ("Unable to parse AccountsReportsGenerateDimensions from: " <> x)
instance ToHttpApiData AccountsReportsGenerateDimensions where
toQueryParam = \case
ARGDDimensionUnspecified -> "DIMENSION_UNSPECIFIED"
ARGDDate -> "DATE"
ARGDWeek -> "WEEK"
ARGDMonth -> "MONTH"
ARGDAccountName -> "ACCOUNT_NAME"
ARGDAdClientId -> "AD_CLIENT_ID"
ARGDProductName -> "PRODUCT_NAME"
ARGDProductCode -> "PRODUCT_CODE"
ARGDAdUnitName -> "AD_UNIT_NAME"
ARGDAdUnitId -> "AD_UNIT_ID"
ARGDAdUnitSizeName -> "AD_UNIT_SIZE_NAME"
ARGDAdUnitSizeCode -> "AD_UNIT_SIZE_CODE"
ARGDCustomChannelName -> "CUSTOM_CHANNEL_NAME"
ARGDCustomChannelId -> "CUSTOM_CHANNEL_ID"
ARGDOwnedSiteDomainName -> "OWNED_SITE_DOMAIN_NAME"
ARGDOwnedSiteId -> "OWNED_SITE_ID"
ARGDURLChannelName -> "URL_CHANNEL_NAME"
ARGDURLChannelId -> "URL_CHANNEL_ID"
ARGDBuyerNetworkName -> "BUYER_NETWORK_NAME"
ARGDBuyerNetworkId -> "BUYER_NETWORK_ID"
ARGDBidTypeName -> "BID_TYPE_NAME"
ARGDBidTypeCode -> "BID_TYPE_CODE"
ARGDCreativeSizeName -> "CREATIVE_SIZE_NAME"
ARGDCreativeSizeCode -> "CREATIVE_SIZE_CODE"
ARGDDomainName -> "DOMAIN_NAME"
ARGDDomainCode -> "DOMAIN_CODE"
ARGDCountryName -> "COUNTRY_NAME"
ARGDCountryCode -> "COUNTRY_CODE"
ARGDPlatformTypeName -> "PLATFORM_TYPE_NAME"
ARGDPlatformTypeCode -> "PLATFORM_TYPE_CODE"
ARGDTargetingTypeName -> "TARGETING_TYPE_NAME"
ARGDTargetingTypeCode -> "TARGETING_TYPE_CODE"
ARGDContentPlatformName -> "CONTENT_PLATFORM_NAME"
ARGDContentPlatformCode -> "CONTENT_PLATFORM_CODE"
ARGDAdPlacementName -> "AD_PLACEMENT_NAME"
ARGDAdPlacementCode -> "AD_PLACEMENT_CODE"
ARGDRequestedAdTypeName -> "REQUESTED_AD_TYPE_NAME"
ARGDRequestedAdTypeCode -> "REQUESTED_AD_TYPE_CODE"
ARGDServedAdTypeName -> "SERVED_AD_TYPE_NAME"
ARGDServedAdTypeCode -> "SERVED_AD_TYPE_CODE"
ARGDAdFormatName -> "AD_FORMAT_NAME"
ARGDAdFormatCode -> "AD_FORMAT_CODE"
ARGDCustomSearchStyleName -> "CUSTOM_SEARCH_STYLE_NAME"
ARGDCustomSearchStyleId -> "CUSTOM_SEARCH_STYLE_ID"
ARGDDomainRegistrant -> "DOMAIN_REGISTRANT"
ARGDWebsearchQueryString -> "WEBSEARCH_QUERY_STRING"
instance FromJSON AccountsReportsGenerateDimensions where
parseJSON = parseJSONText "AccountsReportsGenerateDimensions"
instance ToJSON AccountsReportsGenerateDimensions where
toJSON = toJSONText
-- | Required. Type of the header.
data HeaderType
= HeaderTypeUnspecified
-- ^ @HEADER_TYPE_UNSPECIFIED@
-- Unspecified header.
| Dimension
-- ^ @DIMENSION@
-- Dimension header type.
| MetricTally
-- ^ @METRIC_TALLY@
-- Tally header type.
| MetricRatio
-- ^ @METRIC_RATIO@
-- Ratio header type.
| MetricCurrency
-- ^ @METRIC_CURRENCY@
-- Currency header type.
| MetricMilliseconds
-- ^ @METRIC_MILLISECONDS@
-- Milliseconds header type.
| MetricDecimal
-- ^ @METRIC_DECIMAL@
-- Decimal header type.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable HeaderType
instance FromHttpApiData HeaderType where
parseQueryParam = \case
"HEADER_TYPE_UNSPECIFIED" -> Right HeaderTypeUnspecified
"DIMENSION" -> Right Dimension
"METRIC_TALLY" -> Right MetricTally
"METRIC_RATIO" -> Right MetricRatio
"METRIC_CURRENCY" -> Right MetricCurrency
"METRIC_MILLISECONDS" -> Right MetricMilliseconds
"METRIC_DECIMAL" -> Right MetricDecimal
x -> Left ("Unable to parse HeaderType from: " <> x)
instance ToHttpApiData HeaderType where
toQueryParam = \case
HeaderTypeUnspecified -> "HEADER_TYPE_UNSPECIFIED"
Dimension -> "DIMENSION"
MetricTally -> "METRIC_TALLY"
MetricRatio -> "METRIC_RATIO"
MetricCurrency -> "METRIC_CURRENCY"
MetricMilliseconds -> "METRIC_MILLISECONDS"
MetricDecimal -> "METRIC_DECIMAL"
instance FromJSON HeaderType where
parseJSON = parseJSONText "HeaderType"
instance ToJSON HeaderType where
toJSON = toJSONText
-- | Timezone in which to generate the report. If unspecified, this defaults
-- to the account timezone. For more information, see [changing the time
-- zone of your
-- reports](https:\/\/support.google.com\/adsense\/answer\/9830725).
data AccountsReportsSavedGenerateCSVReportingTimeZone
= ARSGCRTZReportingTimeZoneUnspecified
-- ^ @REPORTING_TIME_ZONE_UNSPECIFIED@
-- Unspecified timezone.
| ARSGCRTZAccountTimeZone
-- ^ @ACCOUNT_TIME_ZONE@
-- Use the account timezone in the report.
| ARSGCRTZGoogleTimeZone
-- ^ @GOOGLE_TIME_ZONE@
-- Use the Google timezone in the report (America\/Los_Angeles).
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable AccountsReportsSavedGenerateCSVReportingTimeZone
instance FromHttpApiData AccountsReportsSavedGenerateCSVReportingTimeZone where
parseQueryParam = \case
"REPORTING_TIME_ZONE_UNSPECIFIED" -> Right ARSGCRTZReportingTimeZoneUnspecified
"ACCOUNT_TIME_ZONE" -> Right ARSGCRTZAccountTimeZone
"GOOGLE_TIME_ZONE" -> Right ARSGCRTZGoogleTimeZone
x -> Left ("Unable to parse AccountsReportsSavedGenerateCSVReportingTimeZone from: " <> x)
instance ToHttpApiData AccountsReportsSavedGenerateCSVReportingTimeZone where
toQueryParam = \case
ARSGCRTZReportingTimeZoneUnspecified -> "REPORTING_TIME_ZONE_UNSPECIFIED"
ARSGCRTZAccountTimeZone -> "ACCOUNT_TIME_ZONE"
ARSGCRTZGoogleTimeZone -> "GOOGLE_TIME_ZONE"
instance FromJSON AccountsReportsSavedGenerateCSVReportingTimeZone where
parseJSON = parseJSONText "AccountsReportsSavedGenerateCSVReportingTimeZone"
instance ToJSON AccountsReportsSavedGenerateCSVReportingTimeZone where
toJSON = toJSONText
-- | Type of the ad unit.
data ContentAdsSettingsType
= TypeUnspecified
-- ^ @TYPE_UNSPECIFIED@
-- Unspecified ad unit type.
| Display
-- ^ @DISPLAY@
-- Display ad unit.
| Feed
-- ^ @FEED@
-- In-feed ad unit.
| Article
-- ^ @ARTICLE@
-- In-article ad unit.
| MatchedContent
-- ^ @MATCHED_CONTENT@
-- Matched content unit.
| Link
-- ^ @LINK@
-- Link ad unit. Note that link ad units have now been retired, see
-- https:\/\/support.google.com\/adsense\/answer\/9987221.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable ContentAdsSettingsType
instance FromHttpApiData ContentAdsSettingsType where
parseQueryParam = \case
"TYPE_UNSPECIFIED" -> Right TypeUnspecified
"DISPLAY" -> Right Display
"FEED" -> Right Feed
"ARTICLE" -> Right Article
"MATCHED_CONTENT" -> Right MatchedContent
"LINK" -> Right Link
x -> Left ("Unable to parse ContentAdsSettingsType from: " <> x)
instance ToHttpApiData ContentAdsSettingsType where
toQueryParam = \case
TypeUnspecified -> "TYPE_UNSPECIFIED"
Display -> "DISPLAY"
Feed -> "FEED"
Article -> "ARTICLE"
MatchedContent -> "MATCHED_CONTENT"
Link -> "LINK"
instance FromJSON ContentAdsSettingsType where
parseJSON = parseJSONText "ContentAdsSettingsType"
instance ToJSON ContentAdsSettingsType where
toJSON = toJSONText
-- | Date range of the report, if unset the range will be considered CUSTOM.
data AccountsReportsSavedGenerateDateRange
= ARSGDRReportingDateRangeUnspecified
-- ^ @REPORTING_DATE_RANGE_UNSPECIFIED@
-- Unspecified date range.
| ARSGDRCustom
-- ^ @CUSTOM@
-- A custom date range specified using the \`start_date\` and \`end_date\`
-- fields. This is the default if no ReportingDateRange is provided.
| ARSGDRToday
-- ^ @TODAY@
-- Current day.
| ARSGDRYesterday
-- ^ @YESTERDAY@
-- Yesterday.
| ARSGDRMonthToDate
-- ^ @MONTH_TO_DATE@
-- From the start of the current month to the current day. e.g. if the
-- current date is 2020-03-12 then the range will be [2020-03-01,
-- 2020-03-12].
| ARSGDRYearToDate
-- ^ @YEAR_TO_DATE@
-- From the start of the current year to the current day. e.g. if the
-- current date is 2020-03-12 then the range will be [2020-01-01,
-- 2020-03-12].
| ARSGDRLast7Days
-- ^ @LAST_7_DAYS@
-- Last 7 days, excluding current day.
| ARSGDRLast30Days
-- ^ @LAST_30_DAYS@
-- Last 30 days, excluding current day.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable AccountsReportsSavedGenerateDateRange
instance FromHttpApiData AccountsReportsSavedGenerateDateRange where
parseQueryParam = \case
"REPORTING_DATE_RANGE_UNSPECIFIED" -> Right ARSGDRReportingDateRangeUnspecified
"CUSTOM" -> Right ARSGDRCustom
"TODAY" -> Right ARSGDRToday
"YESTERDAY" -> Right ARSGDRYesterday
"MONTH_TO_DATE" -> Right ARSGDRMonthToDate
"YEAR_TO_DATE" -> Right ARSGDRYearToDate
"LAST_7_DAYS" -> Right ARSGDRLast7Days
"LAST_30_DAYS" -> Right ARSGDRLast30Days
x -> Left ("Unable to parse AccountsReportsSavedGenerateDateRange from: " <> x)
instance ToHttpApiData AccountsReportsSavedGenerateDateRange where
toQueryParam = \case
ARSGDRReportingDateRangeUnspecified -> "REPORTING_DATE_RANGE_UNSPECIFIED"
ARSGDRCustom -> "CUSTOM"
ARSGDRToday -> "TODAY"
ARSGDRYesterday -> "YESTERDAY"
ARSGDRMonthToDate -> "MONTH_TO_DATE"
ARSGDRYearToDate -> "YEAR_TO_DATE"
ARSGDRLast7Days -> "LAST_7_DAYS"
ARSGDRLast30Days -> "LAST_30_DAYS"
instance FromJSON AccountsReportsSavedGenerateDateRange where
parseJSON = parseJSONText "AccountsReportsSavedGenerateDateRange"
instance ToJSON AccountsReportsSavedGenerateDateRange where
toJSON = toJSONText
-- | Required. Reporting metrics.
data AccountsReportsGenerateCSVMetrics
= MetricUnspecified
-- ^ @METRIC_UNSPECIFIED@
-- Unspecified metric.
| PageViews
-- ^ @PAGE_VIEWS@
-- Number of page views.
| AdRequests
-- ^ @AD_REQUESTS@
-- Number of ad units that requested ads (for content ads) or search
-- queries (for search ads). An ad request may result in zero, one, or
-- multiple individual ad impressions depending on the size of the ad unit
-- and whether any ads were available.
| MatchedAdRequests
-- ^ @MATCHED_AD_REQUESTS@
-- Requests that returned at least one ad.
| TotalImpressions
-- ^ @TOTAL_IMPRESSIONS@
-- Impressions. An impression is counted for each ad request where at least
-- one ad has been downloaded to the user’s device and has begun to load.
-- It is the number of ad units (for content ads) or search queries (for
-- search ads) that showed ads.
| Impressions
-- ^ @IMPRESSIONS@
-- Impressions. An impression is counted for each ad request where at least
-- one ad has been downloaded to the user’s device and has begun to load.
-- It is the number of ad units (for content ads) or search queries (for
-- search ads) that showed ads.
| IndividualAdImpressions
-- ^ @INDIVIDUAL_AD_IMPRESSIONS@
-- Ads shown. Different ad formats will display varying numbers of ads. For
-- example, a vertical banner may consist of 2 or more ads. Also, the
-- number of ads in an ad unit may vary depending on whether the ad unit is
-- displaying standard text ads, expanded text ads or image ads.
| Clicks
-- ^ @CLICKS@
-- Number of times a user clicked on a standard content ad.
| PageViewsSpamRatio
-- ^ @PAGE_VIEWS_SPAM_RATIO@
-- Fraction of page views considered to be spam. Only available to premium
-- accounts.
| AdRequestsSpamRatio
-- ^ @AD_REQUESTS_SPAM_RATIO@
-- Fraction of ad requests considered to be spam. Only available to premium
-- accounts.
| MatchedAdRequestsSpamRatio
-- ^ @MATCHED_AD_REQUESTS_SPAM_RATIO@
-- Fraction of ad requests that returned ads considered to be spam. Only
-- available to premium accounts.
| ImpressionsSpamRatio
-- ^ @IMPRESSIONS_SPAM_RATIO@
-- Fraction of impressions considered to be spam. Only available to premium
-- accounts.
| IndividualAdImpressionsSpamRatio
-- ^ @INDIVIDUAL_AD_IMPRESSIONS_SPAM_RATIO@
-- Fraction of ad impressions considered to be spam. Only available to
-- premium accounts.
| ClicksSpamRatio
-- ^ @CLICKS_SPAM_RATIO@
-- Fraction of clicks considered to be spam. Only available to premium
-- accounts.
| AdRequestsCoverage
-- ^ @AD_REQUESTS_COVERAGE@
-- Ratio of requested ad units or queries to the number returned to the
-- site.
| PageViewsCtr
-- ^ @PAGE_VIEWS_CTR@
-- Ratio of individual page views that resulted in a click.
| AdRequestsCtr
-- ^ @AD_REQUESTS_CTR@
-- Ratio of ad requests that resulted in a click.
| MatchedAdRequestsCtr
-- ^ @MATCHED_AD_REQUESTS_CTR@
-- Ratio of clicks to matched requests.
| ImpressionsCtr
-- ^ @IMPRESSIONS_CTR@
-- Ratio of IMPRESSIONS that resulted in a click.
| IndividualAdImpressionsCtr
-- ^ @INDIVIDUAL_AD_IMPRESSIONS_CTR@
-- Ratio of individual ad impressions that resulted in a click.
| ActiveViewMeasurability
-- ^ @ACTIVE_VIEW_MEASURABILITY@
-- Ratio of requests that were measurable for viewability.
| ActiveViewViewability
-- ^ @ACTIVE_VIEW_VIEWABILITY@
-- Ratio of requests that were viewable.
| ActiveViewTime
-- ^ @ACTIVE_VIEW_TIME@
-- Mean time an ad was displayed on screen.
| EstimatedEarnings
-- ^ @ESTIMATED_EARNINGS@
-- Estimated earnings of the publisher. Note that earnings up to yesterday
-- are accurate, more recent earnings are estimated due to the possibility
-- of spam, or exchange rate fluctuations.
| PageViewsRpm
-- ^ @PAGE_VIEWS_RPM@
-- Revenue per thousand page views. This is calculated by dividing the
-- estimated revenue by the number of page views multiplied by 1000.
| AdRequestsRpm
-- ^ @AD_REQUESTS_RPM@
-- Revenue per thousand ad requests. This is calculated by dividing
-- estimated revenue by the number of ad requests multiplied by 1000.
| MatchedAdRequestsRpm
-- ^ @MATCHED_AD_REQUESTS_RPM@
-- Revenue per thousand matched ad requests. This is calculated by dividing
-- estimated revenue by the number of matched ad requests multiplied by
-- 1000.
| ImpressionsRpm
-- ^ @IMPRESSIONS_RPM@
-- Revenue per thousand ad impressions. This is calculated by dividing
-- estimated revenue by the number of ad impressions multiplied by 1000.
| IndividualAdImpressionsRpm
-- ^ @INDIVIDUAL_AD_IMPRESSIONS_RPM@
-- Revenue per thousand individual ad impressions. This is calculated by
-- dividing estimated revenue by the number of individual ad impressions
-- multiplied by 1000.
| CostPerClick
-- ^ @COST_PER_CLICK@
-- Amount the publisher earns each time a user clicks on an ad. CPC is
-- calculated by dividing the estimated revenue by the number of clicks
-- received.
| AdsPerImpression
-- ^ @ADS_PER_IMPRESSION@
-- Number of ad views per impression.
| TotalEarnings
-- ^ @TOTAL_EARNINGS@
-- Total earnings.
| WebsearchResultPages
-- ^ @WEBSEARCH_RESULT_PAGES@
-- Number of results pages.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable AccountsReportsGenerateCSVMetrics
instance FromHttpApiData AccountsReportsGenerateCSVMetrics where
parseQueryParam = \case
"METRIC_UNSPECIFIED" -> Right MetricUnspecified
"PAGE_VIEWS" -> Right PageViews
"AD_REQUESTS" -> Right AdRequests
"MATCHED_AD_REQUESTS" -> Right MatchedAdRequests
"TOTAL_IMPRESSIONS" -> Right TotalImpressions
"IMPRESSIONS" -> Right Impressions
"INDIVIDUAL_AD_IMPRESSIONS" -> Right IndividualAdImpressions
"CLICKS" -> Right Clicks
"PAGE_VIEWS_SPAM_RATIO" -> Right PageViewsSpamRatio
"AD_REQUESTS_SPAM_RATIO" -> Right AdRequestsSpamRatio
"MATCHED_AD_REQUESTS_SPAM_RATIO" -> Right MatchedAdRequestsSpamRatio
"IMPRESSIONS_SPAM_RATIO" -> Right ImpressionsSpamRatio
"INDIVIDUAL_AD_IMPRESSIONS_SPAM_RATIO" -> Right IndividualAdImpressionsSpamRatio
"CLICKS_SPAM_RATIO" -> Right ClicksSpamRatio
"AD_REQUESTS_COVERAGE" -> Right AdRequestsCoverage
"PAGE_VIEWS_CTR" -> Right PageViewsCtr
"AD_REQUESTS_CTR" -> Right AdRequestsCtr
"MATCHED_AD_REQUESTS_CTR" -> Right MatchedAdRequestsCtr
"IMPRESSIONS_CTR" -> Right ImpressionsCtr
"INDIVIDUAL_AD_IMPRESSIONS_CTR" -> Right IndividualAdImpressionsCtr
"ACTIVE_VIEW_MEASURABILITY" -> Right ActiveViewMeasurability
"ACTIVE_VIEW_VIEWABILITY" -> Right ActiveViewViewability
"ACTIVE_VIEW_TIME" -> Right ActiveViewTime
"ESTIMATED_EARNINGS" -> Right EstimatedEarnings
"PAGE_VIEWS_RPM" -> Right PageViewsRpm
"AD_REQUESTS_RPM" -> Right AdRequestsRpm
"MATCHED_AD_REQUESTS_RPM" -> Right MatchedAdRequestsRpm
"IMPRESSIONS_RPM" -> Right ImpressionsRpm
"INDIVIDUAL_AD_IMPRESSIONS_RPM" -> Right IndividualAdImpressionsRpm
"COST_PER_CLICK" -> Right CostPerClick
"ADS_PER_IMPRESSION" -> Right AdsPerImpression
"TOTAL_EARNINGS" -> Right TotalEarnings
"WEBSEARCH_RESULT_PAGES" -> Right WebsearchResultPages
x -> Left ("Unable to parse AccountsReportsGenerateCSVMetrics from: " <> x)
instance ToHttpApiData AccountsReportsGenerateCSVMetrics where
toQueryParam = \case
MetricUnspecified -> "METRIC_UNSPECIFIED"
PageViews -> "PAGE_VIEWS"
AdRequests -> "AD_REQUESTS"
MatchedAdRequests -> "MATCHED_AD_REQUESTS"
TotalImpressions -> "TOTAL_IMPRESSIONS"
Impressions -> "IMPRESSIONS"
IndividualAdImpressions -> "INDIVIDUAL_AD_IMPRESSIONS"
Clicks -> "CLICKS"
PageViewsSpamRatio -> "PAGE_VIEWS_SPAM_RATIO"
AdRequestsSpamRatio -> "AD_REQUESTS_SPAM_RATIO"
MatchedAdRequestsSpamRatio -> "MATCHED_AD_REQUESTS_SPAM_RATIO"
ImpressionsSpamRatio -> "IMPRESSIONS_SPAM_RATIO"
IndividualAdImpressionsSpamRatio -> "INDIVIDUAL_AD_IMPRESSIONS_SPAM_RATIO"
ClicksSpamRatio -> "CLICKS_SPAM_RATIO"
AdRequestsCoverage -> "AD_REQUESTS_COVERAGE"
PageViewsCtr -> "PAGE_VIEWS_CTR"
AdRequestsCtr -> "AD_REQUESTS_CTR"
MatchedAdRequestsCtr -> "MATCHED_AD_REQUESTS_CTR"
ImpressionsCtr -> "IMPRESSIONS_CTR"
IndividualAdImpressionsCtr -> "INDIVIDUAL_AD_IMPRESSIONS_CTR"
ActiveViewMeasurability -> "ACTIVE_VIEW_MEASURABILITY"
ActiveViewViewability -> "ACTIVE_VIEW_VIEWABILITY"
ActiveViewTime -> "ACTIVE_VIEW_TIME"
EstimatedEarnings -> "ESTIMATED_EARNINGS"
PageViewsRpm -> "PAGE_VIEWS_RPM"
AdRequestsRpm -> "AD_REQUESTS_RPM"
MatchedAdRequestsRpm -> "MATCHED_AD_REQUESTS_RPM"
ImpressionsRpm -> "IMPRESSIONS_RPM"
IndividualAdImpressionsRpm -> "INDIVIDUAL_AD_IMPRESSIONS_RPM"
CostPerClick -> "COST_PER_CLICK"
AdsPerImpression -> "ADS_PER_IMPRESSION"
TotalEarnings -> "TOTAL_EARNINGS"
WebsearchResultPages -> "WEBSEARCH_RESULT_PAGES"
instance FromJSON AccountsReportsGenerateCSVMetrics where
parseJSON = parseJSONText "AccountsReportsGenerateCSVMetrics"
instance ToJSON AccountsReportsGenerateCSVMetrics where
toJSON = toJSONText
-- | Date range of the report, if unset the range will be considered CUSTOM.
data AccountsReportsGenerateCSVDateRange
= ARGCDRReportingDateRangeUnspecified
-- ^ @REPORTING_DATE_RANGE_UNSPECIFIED@
-- Unspecified date range.
| ARGCDRCustom
-- ^ @CUSTOM@
-- A custom date range specified using the \`start_date\` and \`end_date\`
-- fields. This is the default if no ReportingDateRange is provided.
| ARGCDRToday
-- ^ @TODAY@
-- Current day.
| ARGCDRYesterday
-- ^ @YESTERDAY@
-- Yesterday.
| ARGCDRMonthToDate
-- ^ @MONTH_TO_DATE@
-- From the start of the current month to the current day. e.g. if the
-- current date is 2020-03-12 then the range will be [2020-03-01,
-- 2020-03-12].
| ARGCDRYearToDate
-- ^ @YEAR_TO_DATE@
-- From the start of the current year to the current day. e.g. if the
-- current date is 2020-03-12 then the range will be [2020-01-01,
-- 2020-03-12].
| ARGCDRLast7Days
-- ^ @LAST_7_DAYS@
-- Last 7 days, excluding current day.
| ARGCDRLast30Days
-- ^ @LAST_30_DAYS@
-- Last 30 days, excluding current day.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable AccountsReportsGenerateCSVDateRange
instance FromHttpApiData AccountsReportsGenerateCSVDateRange where
parseQueryParam = \case
"REPORTING_DATE_RANGE_UNSPECIFIED" -> Right ARGCDRReportingDateRangeUnspecified
"CUSTOM" -> Right ARGCDRCustom
"TODAY" -> Right ARGCDRToday
"YESTERDAY" -> Right ARGCDRYesterday
"MONTH_TO_DATE" -> Right ARGCDRMonthToDate
"YEAR_TO_DATE" -> Right ARGCDRYearToDate
"LAST_7_DAYS" -> Right ARGCDRLast7Days
"LAST_30_DAYS" -> Right ARGCDRLast30Days
x -> Left ("Unable to parse AccountsReportsGenerateCSVDateRange from: " <> x)
instance ToHttpApiData AccountsReportsGenerateCSVDateRange where
toQueryParam = \case
ARGCDRReportingDateRangeUnspecified -> "REPORTING_DATE_RANGE_UNSPECIFIED"
ARGCDRCustom -> "CUSTOM"
ARGCDRToday -> "TODAY"
ARGCDRYesterday -> "YESTERDAY"
ARGCDRMonthToDate -> "MONTH_TO_DATE"
ARGCDRYearToDate -> "YEAR_TO_DATE"
ARGCDRLast7Days -> "LAST_7_DAYS"
ARGCDRLast30Days -> "LAST_30_DAYS"
instance FromJSON AccountsReportsGenerateCSVDateRange where
parseJSON = parseJSONText "AccountsReportsGenerateCSVDateRange"
instance ToJSON AccountsReportsGenerateCSVDateRange where
toJSON = toJSONText
-- | State of the ad unit.
data AdUnitState
= AUSStateUnspecified
-- ^ @STATE_UNSPECIFIED@
-- State unspecified.
| AUSActive
-- ^ @ACTIVE@
-- Ad unit has been activated by the user and can serve ads.
| AUSArchived
-- ^ @ARCHIVED@
-- Ad unit has been archived by the user and can no longer serve ads.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable AdUnitState
instance FromHttpApiData AdUnitState where
parseQueryParam = \case
"STATE_UNSPECIFIED" -> Right AUSStateUnspecified
"ACTIVE" -> Right AUSActive
"ARCHIVED" -> Right AUSArchived
x -> Left ("Unable to parse AdUnitState from: " <> x)
instance ToHttpApiData AdUnitState where
toQueryParam = \case
AUSStateUnspecified -> "STATE_UNSPECIFIED"
AUSActive -> "ACTIVE"
AUSArchived -> "ARCHIVED"
instance FromJSON AdUnitState where
parseJSON = parseJSONText "AdUnitState"
instance ToJSON AdUnitState where
toJSON = toJSONText
-- | Date range of the report, if unset the range will be considered CUSTOM.
data AccountsReportsGenerateDateRange
= ARGDRReportingDateRangeUnspecified
-- ^ @REPORTING_DATE_RANGE_UNSPECIFIED@
-- Unspecified date range.
| ARGDRCustom
-- ^ @CUSTOM@
-- A custom date range specified using the \`start_date\` and \`end_date\`
-- fields. This is the default if no ReportingDateRange is provided.
| ARGDRToday
-- ^ @TODAY@
-- Current day.
| ARGDRYesterday
-- ^ @YESTERDAY@
-- Yesterday.
| ARGDRMonthToDate
-- ^ @MONTH_TO_DATE@
-- From the start of the current month to the current day. e.g. if the
-- current date is 2020-03-12 then the range will be [2020-03-01,
-- 2020-03-12].
| ARGDRYearToDate
-- ^ @YEAR_TO_DATE@
-- From the start of the current year to the current day. e.g. if the
-- current date is 2020-03-12 then the range will be [2020-01-01,
-- 2020-03-12].
| ARGDRLast7Days
-- ^ @LAST_7_DAYS@
-- Last 7 days, excluding current day.
| ARGDRLast30Days
-- ^ @LAST_30_DAYS@
-- Last 30 days, excluding current day.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable AccountsReportsGenerateDateRange
instance FromHttpApiData AccountsReportsGenerateDateRange where
parseQueryParam = \case
"REPORTING_DATE_RANGE_UNSPECIFIED" -> Right ARGDRReportingDateRangeUnspecified
"CUSTOM" -> Right ARGDRCustom
"TODAY" -> Right ARGDRToday
"YESTERDAY" -> Right ARGDRYesterday
"MONTH_TO_DATE" -> Right ARGDRMonthToDate
"YEAR_TO_DATE" -> Right ARGDRYearToDate
"LAST_7_DAYS" -> Right ARGDRLast7Days
"LAST_30_DAYS" -> Right ARGDRLast30Days
x -> Left ("Unable to parse AccountsReportsGenerateDateRange from: " <> x)
instance ToHttpApiData AccountsReportsGenerateDateRange where
toQueryParam = \case
ARGDRReportingDateRangeUnspecified -> "REPORTING_DATE_RANGE_UNSPECIFIED"
ARGDRCustom -> "CUSTOM"
ARGDRToday -> "TODAY"
ARGDRYesterday -> "YESTERDAY"
ARGDRMonthToDate -> "MONTH_TO_DATE"
ARGDRYearToDate -> "YEAR_TO_DATE"
ARGDRLast7Days -> "LAST_7_DAYS"
ARGDRLast30Days -> "LAST_30_DAYS"
instance FromJSON AccountsReportsGenerateDateRange where
parseJSON = parseJSONText "AccountsReportsGenerateDateRange"
instance ToJSON AccountsReportsGenerateDateRange where
toJSON = toJSONText
-- | V1 error format.
data Xgafv
= X1
-- ^ @1@
-- v1 error format
| X2
-- ^ @2@
-- v2 error format
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable Xgafv
instance FromHttpApiData Xgafv where
parseQueryParam = \case
"1" -> Right X1
"2" -> Right X2
x -> Left ("Unable to parse Xgafv from: " <> x)
instance ToHttpApiData Xgafv where
toQueryParam = \case
X1 -> "1"
X2 -> "2"
instance FromJSON Xgafv where
parseJSON = parseJSONText "Xgafv"
instance ToJSON Xgafv where
toJSON = toJSONText
-- | Output only. Severity of this alert.
data AlertSeverity
= SeverityUnspecified
-- ^ @SEVERITY_UNSPECIFIED@
-- Unspecified severity.
| Info
-- ^ @INFO@
-- Info.
| Warning
-- ^ @WARNING@
-- Warning.
| Severe
-- ^ @SEVERE@
-- Severe.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable AlertSeverity
instance FromHttpApiData AlertSeverity where
parseQueryParam = \case
"SEVERITY_UNSPECIFIED" -> Right SeverityUnspecified
"INFO" -> Right Info
"WARNING" -> Right Warning
"SEVERE" -> Right Severe
x -> Left ("Unable to parse AlertSeverity from: " <> x)
instance ToHttpApiData AlertSeverity where
toQueryParam = \case
SeverityUnspecified -> "SEVERITY_UNSPECIFIED"
Info -> "INFO"
Warning -> "WARNING"
Severe -> "SEVERE"
instance FromJSON AlertSeverity where
parseJSON = parseJSONText "AlertSeverity"
instance ToJSON AlertSeverity where
toJSON = toJSONText
-- | Required. Reporting metrics.
data AccountsReportsGenerateMetrics
= ARGMMetricUnspecified
-- ^ @METRIC_UNSPECIFIED@
-- Unspecified metric.
| ARGMPageViews
-- ^ @PAGE_VIEWS@
-- Number of page views.
| ARGMAdRequests
-- ^ @AD_REQUESTS@
-- Number of ad units that requested ads (for content ads) or search
-- queries (for search ads). An ad request may result in zero, one, or
-- multiple individual ad impressions depending on the size of the ad unit
-- and whether any ads were available.
| ARGMMatchedAdRequests
-- ^ @MATCHED_AD_REQUESTS@
-- Requests that returned at least one ad.
| ARGMTotalImpressions
-- ^ @TOTAL_IMPRESSIONS@
-- Impressions. An impression is counted for each ad request where at least
-- one ad has been downloaded to the user’s device and has begun to load.
-- It is the number of ad units (for content ads) or search queries (for
-- search ads) that showed ads.
| ARGMImpressions
-- ^ @IMPRESSIONS@
-- Impressions. An impression is counted for each ad request where at least
-- one ad has been downloaded to the user’s device and has begun to load.
-- It is the number of ad units (for content ads) or search queries (for
-- search ads) that showed ads.
| ARGMIndividualAdImpressions
-- ^ @INDIVIDUAL_AD_IMPRESSIONS@
-- Ads shown. Different ad formats will display varying numbers of ads. For
-- example, a vertical banner may consist of 2 or more ads. Also, the
-- number of ads in an ad unit may vary depending on whether the ad unit is
-- displaying standard text ads, expanded text ads or image ads.
| ARGMClicks
-- ^ @CLICKS@
-- Number of times a user clicked on a standard content ad.
| ARGMPageViewsSpamRatio
-- ^ @PAGE_VIEWS_SPAM_RATIO@
-- Fraction of page views considered to be spam. Only available to premium
-- accounts.
| ARGMAdRequestsSpamRatio
-- ^ @AD_REQUESTS_SPAM_RATIO@
-- Fraction of ad requests considered to be spam. Only available to premium
-- accounts.
| ARGMMatchedAdRequestsSpamRatio
-- ^ @MATCHED_AD_REQUESTS_SPAM_RATIO@
-- Fraction of ad requests that returned ads considered to be spam. Only
-- available to premium accounts.
| ARGMImpressionsSpamRatio
-- ^ @IMPRESSIONS_SPAM_RATIO@
-- Fraction of impressions considered to be spam. Only available to premium
-- accounts.
| ARGMIndividualAdImpressionsSpamRatio
-- ^ @INDIVIDUAL_AD_IMPRESSIONS_SPAM_RATIO@
-- Fraction of ad impressions considered to be spam. Only available to
-- premium accounts.
| ARGMClicksSpamRatio
-- ^ @CLICKS_SPAM_RATIO@
-- Fraction of clicks considered to be spam. Only available to premium
-- accounts.
| ARGMAdRequestsCoverage
-- ^ @AD_REQUESTS_COVERAGE@
-- Ratio of requested ad units or queries to the number returned to the
-- site.
| ARGMPageViewsCtr
-- ^ @PAGE_VIEWS_CTR@
-- Ratio of individual page views that resulted in a click.
| ARGMAdRequestsCtr
-- ^ @AD_REQUESTS_CTR@
-- Ratio of ad requests that resulted in a click.
| ARGMMatchedAdRequestsCtr
-- ^ @MATCHED_AD_REQUESTS_CTR@
-- Ratio of clicks to matched requests.
| ARGMImpressionsCtr
-- ^ @IMPRESSIONS_CTR@
-- Ratio of IMPRESSIONS that resulted in a click.
| ARGMIndividualAdImpressionsCtr
-- ^ @INDIVIDUAL_AD_IMPRESSIONS_CTR@
-- Ratio of individual ad impressions that resulted in a click.
| ARGMActiveViewMeasurability
-- ^ @ACTIVE_VIEW_MEASURABILITY@
-- Ratio of requests that were measurable for viewability.
| ARGMActiveViewViewability
-- ^ @ACTIVE_VIEW_VIEWABILITY@
-- Ratio of requests that were viewable.
| ARGMActiveViewTime
-- ^ @ACTIVE_VIEW_TIME@
-- Mean time an ad was displayed on screen.
| ARGMEstimatedEarnings
-- ^ @ESTIMATED_EARNINGS@
-- Estimated earnings of the publisher. Note that earnings up to yesterday
-- are accurate, more recent earnings are estimated due to the possibility
-- of spam, or exchange rate fluctuations.
| ARGMPageViewsRpm
-- ^ @PAGE_VIEWS_RPM@
-- Revenue per thousand page views. This is calculated by dividing the
-- estimated revenue by the number of page views multiplied by 1000.
| ARGMAdRequestsRpm
-- ^ @AD_REQUESTS_RPM@
-- Revenue per thousand ad requests. This is calculated by dividing
-- estimated revenue by the number of ad requests multiplied by 1000.
| ARGMMatchedAdRequestsRpm
-- ^ @MATCHED_AD_REQUESTS_RPM@
-- Revenue per thousand matched ad requests. This is calculated by dividing
-- estimated revenue by the number of matched ad requests multiplied by
-- 1000.
| ARGMImpressionsRpm
-- ^ @IMPRESSIONS_RPM@
-- Revenue per thousand ad impressions. This is calculated by dividing
-- estimated revenue by the number of ad impressions multiplied by 1000.
| ARGMIndividualAdImpressionsRpm
-- ^ @INDIVIDUAL_AD_IMPRESSIONS_RPM@
-- Revenue per thousand individual ad impressions. This is calculated by
-- dividing estimated revenue by the number of individual ad impressions
-- multiplied by 1000.
| ARGMCostPerClick
-- ^ @COST_PER_CLICK@
-- Amount the publisher earns each time a user clicks on an ad. CPC is
-- calculated by dividing the estimated revenue by the number of clicks
-- received.
| ARGMAdsPerImpression
-- ^ @ADS_PER_IMPRESSION@
-- Number of ad views per impression.
| ARGMTotalEarnings
-- ^ @TOTAL_EARNINGS@
-- Total earnings.
| ARGMWebsearchResultPages
-- ^ @WEBSEARCH_RESULT_PAGES@
-- Number of results pages.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable AccountsReportsGenerateMetrics
instance FromHttpApiData AccountsReportsGenerateMetrics where
parseQueryParam = \case
"METRIC_UNSPECIFIED" -> Right ARGMMetricUnspecified
"PAGE_VIEWS" -> Right ARGMPageViews
"AD_REQUESTS" -> Right ARGMAdRequests
"MATCHED_AD_REQUESTS" -> Right ARGMMatchedAdRequests
"TOTAL_IMPRESSIONS" -> Right ARGMTotalImpressions
"IMPRESSIONS" -> Right ARGMImpressions
"INDIVIDUAL_AD_IMPRESSIONS" -> Right ARGMIndividualAdImpressions
"CLICKS" -> Right ARGMClicks
"PAGE_VIEWS_SPAM_RATIO" -> Right ARGMPageViewsSpamRatio
"AD_REQUESTS_SPAM_RATIO" -> Right ARGMAdRequestsSpamRatio
"MATCHED_AD_REQUESTS_SPAM_RATIO" -> Right ARGMMatchedAdRequestsSpamRatio
"IMPRESSIONS_SPAM_RATIO" -> Right ARGMImpressionsSpamRatio
"INDIVIDUAL_AD_IMPRESSIONS_SPAM_RATIO" -> Right ARGMIndividualAdImpressionsSpamRatio
"CLICKS_SPAM_RATIO" -> Right ARGMClicksSpamRatio
"AD_REQUESTS_COVERAGE" -> Right ARGMAdRequestsCoverage
"PAGE_VIEWS_CTR" -> Right ARGMPageViewsCtr
"AD_REQUESTS_CTR" -> Right ARGMAdRequestsCtr
"MATCHED_AD_REQUESTS_CTR" -> Right ARGMMatchedAdRequestsCtr
"IMPRESSIONS_CTR" -> Right ARGMImpressionsCtr
"INDIVIDUAL_AD_IMPRESSIONS_CTR" -> Right ARGMIndividualAdImpressionsCtr
"ACTIVE_VIEW_MEASURABILITY" -> Right ARGMActiveViewMeasurability
"ACTIVE_VIEW_VIEWABILITY" -> Right ARGMActiveViewViewability
"ACTIVE_VIEW_TIME" -> Right ARGMActiveViewTime
"ESTIMATED_EARNINGS" -> Right ARGMEstimatedEarnings
"PAGE_VIEWS_RPM" -> Right ARGMPageViewsRpm
"AD_REQUESTS_RPM" -> Right ARGMAdRequestsRpm
"MATCHED_AD_REQUESTS_RPM" -> Right ARGMMatchedAdRequestsRpm
"IMPRESSIONS_RPM" -> Right ARGMImpressionsRpm
"INDIVIDUAL_AD_IMPRESSIONS_RPM" -> Right ARGMIndividualAdImpressionsRpm
"COST_PER_CLICK" -> Right ARGMCostPerClick
"ADS_PER_IMPRESSION" -> Right ARGMAdsPerImpression
"TOTAL_EARNINGS" -> Right ARGMTotalEarnings
"WEBSEARCH_RESULT_PAGES" -> Right ARGMWebsearchResultPages
x -> Left ("Unable to parse AccountsReportsGenerateMetrics from: " <> x)
instance ToHttpApiData AccountsReportsGenerateMetrics where
toQueryParam = \case
ARGMMetricUnspecified -> "METRIC_UNSPECIFIED"
ARGMPageViews -> "PAGE_VIEWS"
ARGMAdRequests -> "AD_REQUESTS"
ARGMMatchedAdRequests -> "MATCHED_AD_REQUESTS"
ARGMTotalImpressions -> "TOTAL_IMPRESSIONS"
ARGMImpressions -> "IMPRESSIONS"
ARGMIndividualAdImpressions -> "INDIVIDUAL_AD_IMPRESSIONS"
ARGMClicks -> "CLICKS"
ARGMPageViewsSpamRatio -> "PAGE_VIEWS_SPAM_RATIO"
ARGMAdRequestsSpamRatio -> "AD_REQUESTS_SPAM_RATIO"
ARGMMatchedAdRequestsSpamRatio -> "MATCHED_AD_REQUESTS_SPAM_RATIO"
ARGMImpressionsSpamRatio -> "IMPRESSIONS_SPAM_RATIO"
ARGMIndividualAdImpressionsSpamRatio -> "INDIVIDUAL_AD_IMPRESSIONS_SPAM_RATIO"
ARGMClicksSpamRatio -> "CLICKS_SPAM_RATIO"
ARGMAdRequestsCoverage -> "AD_REQUESTS_COVERAGE"
ARGMPageViewsCtr -> "PAGE_VIEWS_CTR"
ARGMAdRequestsCtr -> "AD_REQUESTS_CTR"
ARGMMatchedAdRequestsCtr -> "MATCHED_AD_REQUESTS_CTR"
ARGMImpressionsCtr -> "IMPRESSIONS_CTR"
ARGMIndividualAdImpressionsCtr -> "INDIVIDUAL_AD_IMPRESSIONS_CTR"
ARGMActiveViewMeasurability -> "ACTIVE_VIEW_MEASURABILITY"
ARGMActiveViewViewability -> "ACTIVE_VIEW_VIEWABILITY"
ARGMActiveViewTime -> "ACTIVE_VIEW_TIME"
ARGMEstimatedEarnings -> "ESTIMATED_EARNINGS"
ARGMPageViewsRpm -> "PAGE_VIEWS_RPM"
ARGMAdRequestsRpm -> "AD_REQUESTS_RPM"
ARGMMatchedAdRequestsRpm -> "MATCHED_AD_REQUESTS_RPM"
ARGMImpressionsRpm -> "IMPRESSIONS_RPM"
ARGMIndividualAdImpressionsRpm -> "INDIVIDUAL_AD_IMPRESSIONS_RPM"
ARGMCostPerClick -> "COST_PER_CLICK"
ARGMAdsPerImpression -> "ADS_PER_IMPRESSION"
ARGMTotalEarnings -> "TOTAL_EARNINGS"
ARGMWebsearchResultPages -> "WEBSEARCH_RESULT_PAGES"
instance FromJSON AccountsReportsGenerateMetrics where
parseJSON = parseJSONText "AccountsReportsGenerateMetrics"
instance ToJSON AccountsReportsGenerateMetrics where
toJSON = toJSONText
-- | Dimensions to base the report on.
data AccountsReportsGenerateCSVDimensions
= ARGCDDimensionUnspecified
-- ^ @DIMENSION_UNSPECIFIED@
-- Unspecified dimension.
| ARGCDDate
-- ^ @DATE@
-- Date dimension in YYYY-MM-DD format (e.g. \"2010-02-10\").
| ARGCDWeek
-- ^ @WEEK@
-- Week dimension in YYYY-MM-DD format, representing the first day of each
-- week (e.g. \"2010-02-08\"). The first day of the week is determined by
-- the language_code specified in a report generation request (so e.g. this
-- would be a Monday for \"en-GB\" or \"es\", but a Sunday for \"en\" or
-- \"fr-CA\").
| ARGCDMonth
-- ^ @MONTH@
-- Month dimension in YYYY-MM format (e.g. \"2010-02\").
| ARGCDAccountName
-- ^ @ACCOUNT_NAME@
-- Account name. The members of this dimension match the values from
-- Account.display_name.
| ARGCDAdClientId
-- ^ @AD_CLIENT_ID@
-- Unique ID of an ad client. The members of this dimension match the
-- values from AdClient.reporting_dimension_id.
| ARGCDProductName
-- ^ @PRODUCT_NAME@
-- Localized product name (e.g. \"AdSense for Content\", \"AdSense for
-- Search\").
| ARGCDProductCode
-- ^ @PRODUCT_CODE@
-- Product code (e.g. \"AFC\", \"AFS\"). The members of this dimension
-- match the values from AdClient.product_code.
| ARGCDAdUnitName
-- ^ @AD_UNIT_NAME@
-- Ad unit name (within which an ad was served). The members of this
-- dimension match the values from AdUnit.display_name.
| ARGCDAdUnitId
-- ^ @AD_UNIT_ID@
-- Unique ID of an ad unit (within which an ad was served). The members of
-- this dimension match the values from AdUnit.reporting_dimension_id.
| ARGCDAdUnitSizeName
-- ^ @AD_UNIT_SIZE_NAME@
-- Localized size of an ad unit (e.g. \"728x90\", \"Responsive\").
| ARGCDAdUnitSizeCode
-- ^ @AD_UNIT_SIZE_CODE@
-- The size code of an ad unit (e.g. \"728x90\", \"responsive\").
| ARGCDCustomChannelName
-- ^ @CUSTOM_CHANNEL_NAME@
-- Custom channel name. The members of this dimension match the values from
-- CustomChannel.display_name.
| ARGCDCustomChannelId
-- ^ @CUSTOM_CHANNEL_ID@
-- Unique ID of a custom channel. The members of this dimension match the
-- values from CustomChannel.reporting_dimension_id.
| ARGCDOwnedSiteDomainName
-- ^ @OWNED_SITE_DOMAIN_NAME@
-- Domain name of a verified site (e.g. \"example.com\"). The members of
-- this dimension match the values from Site.domain.
| ARGCDOwnedSiteId
-- ^ @OWNED_SITE_ID@
-- Unique ID of a verified site. The members of this dimension match the
-- values from Site.reporting_dimension_id.
| ARGCDURLChannelName
-- ^ @URL_CHANNEL_NAME@
-- Name of a URL channel. The members of this dimension match the values
-- from UrlChannel.uri_pattern.
| ARGCDURLChannelId
-- ^ @URL_CHANNEL_ID@
-- Unique ID of a URL channel. The members of this dimension match the
-- values from UrlChannel.reporting_dimension_id.
| ARGCDBuyerNetworkName
-- ^ @BUYER_NETWORK_NAME@
-- Name of an ad network that returned the winning ads for an ad request
-- (e.g. \"Google AdWords\"). Note that unlike other \"NAME\" dimensions,
      -- the members of this dimension are not localized.
| ARGCDBuyerNetworkId
-- ^ @BUYER_NETWORK_ID@
-- Unique (opaque) ID of an ad network that returned the winning ads for an
-- ad request.
| ARGCDBidTypeName
-- ^ @BID_TYPE_NAME@
-- Localized bid type name (e.g. \"CPC bids\", \"CPM bids\") for a served
-- ad.
| ARGCDBidTypeCode
-- ^ @BID_TYPE_CODE@
-- Type of a bid (e.g. \"cpc\", \"cpm\") for a served ad.
| ARGCDCreativeSizeName
-- ^ @CREATIVE_SIZE_NAME@
-- Localized creative size name (e.g. \"728x90\", \"Dynamic\") of a served
-- ad.
| ARGCDCreativeSizeCode
-- ^ @CREATIVE_SIZE_CODE@
-- Creative size code (e.g. \"728x90\", \"dynamic\") of a served ad.
| ARGCDDomainName
-- ^ @DOMAIN_NAME@
-- Localized name of a host on which an ad was served, after IDNA decoding
-- (e.g. \"www.google.com\", \"Web caches and other\", \"bücher.example\").
| ARGCDDomainCode
-- ^ @DOMAIN_CODE@
-- Name of a host on which an ad was served (e.g. \"www.google.com\",
-- \"webcaches\", \"xn--bcher-kva.example\").
| ARGCDCountryName
-- ^ @COUNTRY_NAME@
-- Localized region name of a user viewing an ad (e.g. \"United States\",
-- \"France\").
| ARGCDCountryCode
-- ^ @COUNTRY_CODE@
-- CLDR region code of a user viewing an ad (e.g. \"US\", \"FR\").
| ARGCDPlatformTypeName
-- ^ @PLATFORM_TYPE_NAME@
-- Localized platform type name (e.g. \"High-end mobile devices\",
-- \"Desktop\").
| ARGCDPlatformTypeCode
-- ^ @PLATFORM_TYPE_CODE@
-- Platform type code (e.g. \"HighEndMobile\", \"Desktop\").
| ARGCDTargetingTypeName
-- ^ @TARGETING_TYPE_NAME@
-- Localized targeting type name (e.g. \"Contextual\", \"Personalized\",
-- \"Run of Network\").
| ARGCDTargetingTypeCode
-- ^ @TARGETING_TYPE_CODE@
-- Targeting type code (e.g. \"Keyword\", \"UserInterest\",
-- \"RunOfNetwork\").
| ARGCDContentPlatformName
-- ^ @CONTENT_PLATFORM_NAME@
-- Localized content platform name an ad request was made from (e.g.
-- \"AMP\", \"Web\").
| ARGCDContentPlatformCode
-- ^ @CONTENT_PLATFORM_CODE@
-- Content platform code an ad request was made from (e.g. \"AMP\",
-- \"HTML\").
| ARGCDAdPlacementName
-- ^ @AD_PLACEMENT_NAME@
-- Localized ad placement name (e.g. \"Ad unit\", \"Global settings\",
-- \"Manual\").
| ARGCDAdPlacementCode
-- ^ @AD_PLACEMENT_CODE@
-- Ad placement code (e.g. \"AD_UNIT\", \"ca-pub-123456:78910\",
-- \"OTHER\").
| ARGCDRequestedAdTypeName
-- ^ @REQUESTED_AD_TYPE_NAME@
-- Localized requested ad type name (e.g. \"Display\", \"Link unit\",
-- \"Other\").
| ARGCDRequestedAdTypeCode
-- ^ @REQUESTED_AD_TYPE_CODE@
-- Requested ad type code (e.g. \"IMAGE\", \"RADLINK\", \"OTHER\").
| ARGCDServedAdTypeName
-- ^ @SERVED_AD_TYPE_NAME@
-- Localized served ad type name (e.g. \"Display\", \"Link unit\",
-- \"Other\").
| ARGCDServedAdTypeCode
-- ^ @SERVED_AD_TYPE_CODE@
-- Served ad type code (e.g. \"IMAGE\", \"RADLINK\", \"OTHER\").
| ARGCDAdFormatName
-- ^ @AD_FORMAT_NAME@
-- Localized ad format name indicating the way an ad is shown to the users
-- on your site (e.g. \"In-page\", \"Anchor\", \"Vignette\").
| ARGCDAdFormatCode
-- ^ @AD_FORMAT_CODE@
-- Ad format code indicating the way an ad is shown to the users on your
-- site (e.g. \"ON_PAGE\", \"ANCHOR\", \"INTERSTITIAL\").
| ARGCDCustomSearchStyleName
-- ^ @CUSTOM_SEARCH_STYLE_NAME@
-- Custom search style name.
| ARGCDCustomSearchStyleId
-- ^ @CUSTOM_SEARCH_STYLE_ID@
-- Custom search style id.
| ARGCDDomainRegistrant
-- ^ @DOMAIN_REGISTRANT@
-- Domain registrants.
| ARGCDWebsearchQueryString
-- ^ @WEBSEARCH_QUERY_STRING@
-- Query strings for web searches.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable AccountsReportsGenerateCSVDimensions
instance FromHttpApiData AccountsReportsGenerateCSVDimensions where
parseQueryParam = \case
"DIMENSION_UNSPECIFIED" -> Right ARGCDDimensionUnspecified
"DATE" -> Right ARGCDDate
"WEEK" -> Right ARGCDWeek
"MONTH" -> Right ARGCDMonth
"ACCOUNT_NAME" -> Right ARGCDAccountName
"AD_CLIENT_ID" -> Right ARGCDAdClientId
"PRODUCT_NAME" -> Right ARGCDProductName
"PRODUCT_CODE" -> Right ARGCDProductCode
"AD_UNIT_NAME" -> Right ARGCDAdUnitName
"AD_UNIT_ID" -> Right ARGCDAdUnitId
"AD_UNIT_SIZE_NAME" -> Right ARGCDAdUnitSizeName
"AD_UNIT_SIZE_CODE" -> Right ARGCDAdUnitSizeCode
"CUSTOM_CHANNEL_NAME" -> Right ARGCDCustomChannelName
"CUSTOM_CHANNEL_ID" -> Right ARGCDCustomChannelId
"OWNED_SITE_DOMAIN_NAME" -> Right ARGCDOwnedSiteDomainName
"OWNED_SITE_ID" -> Right ARGCDOwnedSiteId
"URL_CHANNEL_NAME" -> Right ARGCDURLChannelName
"URL_CHANNEL_ID" -> Right ARGCDURLChannelId
"BUYER_NETWORK_NAME" -> Right ARGCDBuyerNetworkName
"BUYER_NETWORK_ID" -> Right ARGCDBuyerNetworkId
"BID_TYPE_NAME" -> Right ARGCDBidTypeName
"BID_TYPE_CODE" -> Right ARGCDBidTypeCode
"CREATIVE_SIZE_NAME" -> Right ARGCDCreativeSizeName
"CREATIVE_SIZE_CODE" -> Right ARGCDCreativeSizeCode
"DOMAIN_NAME" -> Right ARGCDDomainName
"DOMAIN_CODE" -> Right ARGCDDomainCode
"COUNTRY_NAME" -> Right ARGCDCountryName
"COUNTRY_CODE" -> Right ARGCDCountryCode
"PLATFORM_TYPE_NAME" -> Right ARGCDPlatformTypeName
"PLATFORM_TYPE_CODE" -> Right ARGCDPlatformTypeCode
"TARGETING_TYPE_NAME" -> Right ARGCDTargetingTypeName
"TARGETING_TYPE_CODE" -> Right ARGCDTargetingTypeCode
"CONTENT_PLATFORM_NAME" -> Right ARGCDContentPlatformName
"CONTENT_PLATFORM_CODE" -> Right ARGCDContentPlatformCode
"AD_PLACEMENT_NAME" -> Right ARGCDAdPlacementName
"AD_PLACEMENT_CODE" -> Right ARGCDAdPlacementCode
"REQUESTED_AD_TYPE_NAME" -> Right ARGCDRequestedAdTypeName
"REQUESTED_AD_TYPE_CODE" -> Right ARGCDRequestedAdTypeCode
"SERVED_AD_TYPE_NAME" -> Right ARGCDServedAdTypeName
"SERVED_AD_TYPE_CODE" -> Right ARGCDServedAdTypeCode
"AD_FORMAT_NAME" -> Right ARGCDAdFormatName
"AD_FORMAT_CODE" -> Right ARGCDAdFormatCode
"CUSTOM_SEARCH_STYLE_NAME" -> Right ARGCDCustomSearchStyleName
"CUSTOM_SEARCH_STYLE_ID" -> Right ARGCDCustomSearchStyleId
"DOMAIN_REGISTRANT" -> Right ARGCDDomainRegistrant
"WEBSEARCH_QUERY_STRING" -> Right ARGCDWebsearchQueryString
x -> Left ("Unable to parse AccountsReportsGenerateCSVDimensions from: " <> x)
instance ToHttpApiData AccountsReportsGenerateCSVDimensions where
toQueryParam = \case
ARGCDDimensionUnspecified -> "DIMENSION_UNSPECIFIED"
ARGCDDate -> "DATE"
ARGCDWeek -> "WEEK"
ARGCDMonth -> "MONTH"
ARGCDAccountName -> "ACCOUNT_NAME"
ARGCDAdClientId -> "AD_CLIENT_ID"
ARGCDProductName -> "PRODUCT_NAME"
ARGCDProductCode -> "PRODUCT_CODE"
ARGCDAdUnitName -> "AD_UNIT_NAME"
ARGCDAdUnitId -> "AD_UNIT_ID"
ARGCDAdUnitSizeName -> "AD_UNIT_SIZE_NAME"
ARGCDAdUnitSizeCode -> "AD_UNIT_SIZE_CODE"
ARGCDCustomChannelName -> "CUSTOM_CHANNEL_NAME"
ARGCDCustomChannelId -> "CUSTOM_CHANNEL_ID"
ARGCDOwnedSiteDomainName -> "OWNED_SITE_DOMAIN_NAME"
ARGCDOwnedSiteId -> "OWNED_SITE_ID"
ARGCDURLChannelName -> "URL_CHANNEL_NAME"
ARGCDURLChannelId -> "URL_CHANNEL_ID"
ARGCDBuyerNetworkName -> "BUYER_NETWORK_NAME"
ARGCDBuyerNetworkId -> "BUYER_NETWORK_ID"
ARGCDBidTypeName -> "BID_TYPE_NAME"
ARGCDBidTypeCode -> "BID_TYPE_CODE"
ARGCDCreativeSizeName -> "CREATIVE_SIZE_NAME"
ARGCDCreativeSizeCode -> "CREATIVE_SIZE_CODE"
ARGCDDomainName -> "DOMAIN_NAME"
ARGCDDomainCode -> "DOMAIN_CODE"
ARGCDCountryName -> "COUNTRY_NAME"
ARGCDCountryCode -> "COUNTRY_CODE"
ARGCDPlatformTypeName -> "PLATFORM_TYPE_NAME"
ARGCDPlatformTypeCode -> "PLATFORM_TYPE_CODE"
ARGCDTargetingTypeName -> "TARGETING_TYPE_NAME"
ARGCDTargetingTypeCode -> "TARGETING_TYPE_CODE"
ARGCDContentPlatformName -> "CONTENT_PLATFORM_NAME"
ARGCDContentPlatformCode -> "CONTENT_PLATFORM_CODE"
ARGCDAdPlacementName -> "AD_PLACEMENT_NAME"
ARGCDAdPlacementCode -> "AD_PLACEMENT_CODE"
ARGCDRequestedAdTypeName -> "REQUESTED_AD_TYPE_NAME"
ARGCDRequestedAdTypeCode -> "REQUESTED_AD_TYPE_CODE"
ARGCDServedAdTypeName -> "SERVED_AD_TYPE_NAME"
ARGCDServedAdTypeCode -> "SERVED_AD_TYPE_CODE"
ARGCDAdFormatName -> "AD_FORMAT_NAME"
ARGCDAdFormatCode -> "AD_FORMAT_CODE"
ARGCDCustomSearchStyleName -> "CUSTOM_SEARCH_STYLE_NAME"
ARGCDCustomSearchStyleId -> "CUSTOM_SEARCH_STYLE_ID"
ARGCDDomainRegistrant -> "DOMAIN_REGISTRANT"
ARGCDWebsearchQueryString -> "WEBSEARCH_QUERY_STRING"
instance FromJSON AccountsReportsGenerateCSVDimensions where
parseJSON = parseJSONText "AccountsReportsGenerateCSVDimensions"
instance ToJSON AccountsReportsGenerateCSVDimensions where
toJSON = toJSONText
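-- A round-trip sketch (added for illustration; not part of the generated
-- bindings): 'toQueryParam' and 'parseQueryParam' map between constructors
-- and the wire strings shown in the haddocks above, e.g.
--
--   toQueryParam ARGCDCountryCode     == "COUNTRY_CODE"
--   parseQueryParam "COUNTRY_CODE"    == Right ARGCDCountryCode
--   parseQueryParam "NOT_A_DIMENSION" == Left "Unable to parse ..." (error text truncated)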
-- | Timezone in which to generate the report. If unspecified, this defaults
-- to the account timezone. For more information, see [changing the time
-- zone of your
-- reports](https:\/\/support.google.com\/adsense\/answer\/9830725).
data AccountsReportsGenerateReportingTimeZone
= ARGRTZReportingTimeZoneUnspecified
-- ^ @REPORTING_TIME_ZONE_UNSPECIFIED@
-- Unspecified timezone.
| ARGRTZAccountTimeZone
-- ^ @ACCOUNT_TIME_ZONE@
-- Use the account timezone in the report.
| ARGRTZGoogleTimeZone
-- ^ @GOOGLE_TIME_ZONE@
-- Use the Google timezone in the report (America\/Los_Angeles).
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable AccountsReportsGenerateReportingTimeZone
instance FromHttpApiData AccountsReportsGenerateReportingTimeZone where
parseQueryParam = \case
"REPORTING_TIME_ZONE_UNSPECIFIED" -> Right ARGRTZReportingTimeZoneUnspecified
"ACCOUNT_TIME_ZONE" -> Right ARGRTZAccountTimeZone
"GOOGLE_TIME_ZONE" -> Right ARGRTZGoogleTimeZone
x -> Left ("Unable to parse AccountsReportsGenerateReportingTimeZone from: " <> x)
instance ToHttpApiData AccountsReportsGenerateReportingTimeZone where
toQueryParam = \case
ARGRTZReportingTimeZoneUnspecified -> "REPORTING_TIME_ZONE_UNSPECIFIED"
ARGRTZAccountTimeZone -> "ACCOUNT_TIME_ZONE"
ARGRTZGoogleTimeZone -> "GOOGLE_TIME_ZONE"
instance FromJSON AccountsReportsGenerateReportingTimeZone where
parseJSON = parseJSONText "AccountsReportsGenerateReportingTimeZone"
instance ToJSON AccountsReportsGenerateReportingTimeZone where
toJSON = toJSONText
|
brendanhay/gogol
|
gogol-adsense/gen/Network/Google/AdSense/Types/Sum.hs
|
mpl-2.0
| 70,725 | 0 | 11 | 16,166 | 6,975 | 3,846 | 3,129 | 911 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Webmasters.Sitemaps.Submit
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Submits a sitemap for a site.
--
-- /See:/ <https://developers.google.com/webmaster-tools/ Search Console API Reference> for @webmasters.sitemaps.submit@.
module Network.Google.Resource.Webmasters.Sitemaps.Submit
(
-- * REST Resource
SitemapsSubmitResource
-- * Creating a Request
, sitemapsSubmit
, SitemapsSubmit
-- * Request Lenses
, ssFeedpath
, ssSiteURL
) where
import Network.Google.Prelude
import Network.Google.WebmasterTools.Types
-- | A resource alias for @webmasters.sitemaps.submit@ method which the
-- 'SitemapsSubmit' request conforms to.
type SitemapsSubmitResource =
"webmasters" :>
"v3" :>
"sites" :>
Capture "siteUrl" Text :>
"sitemaps" :>
Capture "feedpath" Text :>
QueryParam "alt" AltJSON :> Put '[JSON] ()
-- | Submits a sitemap for a site.
--
-- /See:/ 'sitemapsSubmit' smart constructor.
data SitemapsSubmit =
SitemapsSubmit'
{ _ssFeedpath :: !Text
, _ssSiteURL :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'SitemapsSubmit' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ssFeedpath'
--
-- * 'ssSiteURL'
sitemapsSubmit
:: Text -- ^ 'ssFeedpath'
-> Text -- ^ 'ssSiteURL'
-> SitemapsSubmit
sitemapsSubmit pSsFeedpath_ pSsSiteURL_ =
SitemapsSubmit' {_ssFeedpath = pSsFeedpath_, _ssSiteURL = pSsSiteURL_}
-- | The URL of the sitemap to add. For example:
-- http:\/\/www.example.com\/sitemap.xml
ssFeedpath :: Lens' SitemapsSubmit Text
ssFeedpath
= lens _ssFeedpath (\ s a -> s{_ssFeedpath = a})
-- | The site\'s URL, including protocol. For example:
-- http:\/\/www.example.com\/
ssSiteURL :: Lens' SitemapsSubmit Text
ssSiteURL
= lens _ssSiteURL (\ s a -> s{_ssSiteURL = a})
instance GoogleRequest SitemapsSubmit where
type Rs SitemapsSubmit = ()
type Scopes SitemapsSubmit =
'["https://www.googleapis.com/auth/webmasters"]
requestClient SitemapsSubmit'{..}
= go _ssSiteURL _ssFeedpath (Just AltJSON)
webmasterToolsService
where go
= buildClient (Proxy :: Proxy SitemapsSubmitResource)
mempty
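-- A usage sketch (illustrative only; 'env' is an assumed, already-initialised
-- 'Network.Google.Env' carrying webmasters-scoped credentials):
--
--   submitExampleSitemap env =
--     runResourceT . runGoogle env . send $
--       sitemapsSubmit "http://www.example.com/sitemap.xml"
--                      "http://www.example.com/"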
|
brendanhay/gogol
|
gogol-webmaster-tools/gen/Network/Google/Resource/Webmasters/Sitemaps/Submit.hs
|
mpl-2.0
| 3,086 | 0 | 14 | 688 | 387 | 233 | 154 | 60 | 1 |
-- Implicit CAD. Copyright (C) 2011, Christopher Olah ([email protected])
-- Copyright 2014 2015 2016, Julia Longtin ([email protected])
-- Released under the GNU AGPLV3+, see LICENSE
module Graphics.Implicit.Export.Render.GetLoops (getLoops) where
-- Explicitly include what we want from Prelude.
import Prelude (head, last, tail, (==), Bool(False), (.), null, error, (++))
-- We're working with 3D points here.
import Graphics.Implicit.Definitions (ℝ3)
import Data.List (partition)
-- | The goal of getLoops is to extract loops from a list of segments.
-- The input is a list of segments.
-- The output is a list of loops, where each loop is a list of
-- segments, with each piece representing a "side".
-- For example:
-- Given points [[1,2],[5,1],[3,4,5], ... ]
-- notice that there is a loop 1,2,3,4,5... <repeat>
-- But we give the output [ [ [1,2], [3,4,5], [5,1] ], ... ]
-- so that we have the loop, and also knowledge of how
-- the list is built (the "sides" of it).
getLoops :: [[ℝ3]] -> [[[ℝ3]]]
getLoops a = getLoops' a []
-- We will be actually doing the loop extraction with
-- getLoops'
-- getLoops' has a first argument of the segments as before,
-- but a *second argument* which is the loop presently being
-- built.
-- so we begin with the "building loop" being empty.
getLoops' :: [[ℝ3]] -> [[ℝ3]] -> [[[ℝ3]]]
-- | If there aren't any segments, and the "building loop" is empty, produce no loops.
getLoops' [] [] = []
-- | If the building loop is empty, stick the first segment we have onto it to give us something to build on.
getLoops' (x:xs) [] = getLoops' xs [x]
-- | A loop is finished if its start and end are the same.
-- Return it and start searching for another loop.
getLoops' segs workingLoop | head (head workingLoop) == last (last workingLoop) =
workingLoop : getLoops' segs []
-- Finally, we search for pieces that can continue the working loop,
-- and stick one on if we find it.
-- Otherwise... something is really screwed up.
getLoops' segs workingLoop =
let
presEnd :: [[ℝ3]] -> ℝ3
presEnd = last . last
connects (x:_) = x == presEnd workingLoop
connects [] = False -- Handle the empty case.
-- divide our set into sequences that connect, and sequences that don't.
(possibleConts, nonConts) = partition connects segs
(next, unused) = if null possibleConts
then error "unclosed loop in paths given"
else (head possibleConts, tail possibleConts ++ nonConts)
in
if null next
then workingLoop : getLoops' segs []
else getLoops' unused (workingLoop ++ [next])
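-- A worked example (added for illustration; the triples are arbitrary ℝ3
-- points forming one closed loop of three segments):
--
--   getLoops [ [(0,0,0),(1,0,0)], [(1,0,0),(1,1,0)], [(1,1,0),(0,0,0)] ]
--     == [ [ [(0,0,0),(1,0,0)], [(1,0,0),(1,1,0)], [(1,1,0),(0,0,0)] ] ]
--
-- Each segment ends where the next one begins, so all three are returned as
-- a single loop, in input order.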
|
krakrjak/ImplicitCAD
|
Graphics/Implicit/Export/Render/GetLoops.hs
|
agpl-3.0
| 2,647 | 0 | 12 | 566 | 440 | 257 | 183 | 24 | 4 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Kubernetes.V1.NodeDaemonEndpoints where
import GHC.Generics
import Kubernetes.V1.DaemonEndpoint
import qualified Data.Aeson
-- | NodeDaemonEndpoints lists ports opened by daemons running on the Node.
data NodeDaemonEndpoints = NodeDaemonEndpoints
{ kubeletEndpoint :: Maybe DaemonEndpoint -- ^ Endpoint on which Kubelet is listening.
} deriving (Show, Eq, Generic)
instance Data.Aeson.FromJSON NodeDaemonEndpoints
instance Data.Aeson.ToJSON NodeDaemonEndpoints
|
minhdoboi/deprecated-openshift-haskell-api
|
kubernetes/lib/Kubernetes/V1/NodeDaemonEndpoints.hs
|
apache-2.0
| 649 | 0 | 9 | 83 | 84 | 51 | 33 | 14 | 0 |
module Staircase.A282427Spec (main, spec) where
import Test.Hspec
import Staircase.A282427 (a282427)
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "A282427" $
it "correctly computes the first 5 elements" $
take 5 (map a282427 [1..]) `shouldBe` expectedValue where
expectedValue = [1,3,7,39,47]
|
peterokagey/haskellOEIS
|
test/Staircase/A282427Spec.hs
|
apache-2.0
| 323 | 0 | 10 | 59 | 115 | 65 | 50 | 10 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-} -- needed for the associated 'Ctx' instances below
module Tetris.Block.Coords where
import Tetris.Block.Dir (DirTo)
data Zero = Zero
data One = One
data Two = Two
data Three = Three
class CanMove t e where move :: t -> e
instance CanMove Zero One where move Zero = One
instance CanMove One Two where move One = Two
instance CanMove Two Three where move Two = Three
class CanMoveUp t e where moveUp :: t -> e
instance CanMoveUp One Zero where moveUp One = Zero
instance CanMoveUp Two One where moveUp Two = One
instance CanMoveUp Three Two where moveUp Three = Two
instance DirTo Right (x,y) (x,y') where
type Ctx Right (x,y) (x,y') = CanMove y y'
toF Right (x,y) = (x,move y)
instance DirTo Down (x,y) (x',y) where
type Ctx Down (x,y) (x',y) = CanMove x x'
toF Down (x,y) = (move x,y)
instance DirTo RightUp (x,y) (x',y') where
type Ctx RightUp (x,y) (x',y') = (CanMoveUp x x',CanMove y y')
toF RightUp (x,y) = (moveUp x,move y)
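-- Illustrative behaviour (assuming the Right/Down/RightUp direction values
-- used above are exported by Tetris.Block.Dir):
--
--   toF Right (Zero, One) == (Zero, Two)  -- the column index moves right
--   toF Down  (One, Two)  == (Two, Two)   -- the row index moves down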
|
melrief/tetris
|
src/Tetris/Block/Coords.hs
|
apache-2.0
| 966 | 0 | 7 | 215 | 446 | 245 | 201 | -1 | -1 |
{- Copyright 2013 Matthew Gordon.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
module Test.Module (tests) where
import Test.HUnit
import Test.Framework (testGroup)
import qualified Test.Framework
import Test.Framework.Providers.HUnit
import Funk.Module
import Funk.AST (Decl(..),Def(..),Expr(..),Type(TypeName))
import Funk.Names
import Control.Monad (forM_)
tests :: Test.Framework.Test
tests =
testGroup "Module Tests" [
testGroup "getDecl returns..." [
testCase "...Nothing for empty Module"
testGetDeclEmptyModule,
testCase "...matching Decl in Module with one Decl"
testGetDeclWithOneDecl,
testCase "...Nothing if name doesn't match"
testGetDeclWithWrongName,
testCase "...matching Decls in Module with two Decls"
testGetDeclWithTwoDecls,
testCase "...Nothing for Module with only defs"
testGetDeclWithOnlyDefs ],
testGroup "getDecls returns..." [
testCase "...empty list for empty Module"
testGetDeclsEmptyModule,
testCase "...one Decl for Module with one Decl"
testGetDeclsCount,
testCase "...matching Decl for Module with one Decl"
testGetDeclsValue,
          testCase "...two Decls for Module with two Decls"
            testGetDeclsCount2,
          testCase "...matching Decls for Module with two Decls"
testGetDeclsValue2,
testCase "...empty list for Module with only defs"
testGetDeclsWithOnlyDefs ],
testGroup "getDefs returns..." [
testCase "...empty list for empty Module"
testGetDefsEmptyModule,
testCase "...one Def for Module with one Def"
testGetDefsCount,
testCase "...matching Def for Module with one Def"
testGetDefsValue,
testCase "...two Defs for Module with two Defs"
testGetDefsCount2,
testCase "...matching Defs for Module with two Defs"
testGetDefsValue2,
testCase "...empty list for Module with only Decls"
testGetDefsWithOnlyDecls ]]
testGetDeclEmptyModule = (getDecl empty "foo") @?= Nothing
testGetDeclWithOneDecl = (getDecl m "foo") @?= Just d
where m = addDecl d empty
d = Decl "foo" (TypeName "Foo")
testGetDeclWithWrongName = (getDecl m "food") @?= Nothing
where m = addDecl d empty
d = Decl "foo" (TypeName "Foo")
testGetDeclWithTwoDecls = do
(getDecl m "foo") @?= Just d1
(getDecl m "bar") @?= Just d2
where m = foldr addDecl empty [d1,d2]
d1 = Decl "foo" (TypeName "Foo")
d2 = Decl "bar" (TypeName "Bar")
testGetDeclWithOnlyDefs = (getDecl m "foo") @?= Nothing
where
m = addDef d empty
d = Def "foo" [] (FloatLiteral 1)
testGetDeclsEmptyModule = length (getDecls empty) @?= 0
testGetDeclsCount = length (getDecls m) @?= 1
where m = addDecl d empty
d = Decl "foo" (TypeName "Foo")
testGetDeclsValue = assert $ d `elem` getDecls m
where m = addDecl d empty
d = Decl "foo" (TypeName "Foo")
testGetDeclsCount2 = length (getDecls m) @?= 2
where m = foldr addDecl empty [d1,d2]
d1 = Decl "foo" (TypeName "Foo")
d2 = Decl "bar" (TypeName "Bar")
testGetDeclsValue2 = forM_ [d1,d2] $ \d ->
assert $ d `elem` (getDecls m)
where m = foldr addDecl empty [d1,d2]
d1 = Decl "foo" (TypeName "Foo")
d2 = Decl "bar" (TypeName "Bar")
testGetDeclsWithOnlyDefs = length (getDecls m) @?= 0
where
m = addDef d empty
d = Def "foo" [] (FloatLiteral 1)
testGetDefsEmptyModule =
getDefs empty @=? ([] :: [Def ResolvedName])
testGetDefsCount = length (getDefs m) @?= 1
where
m = addDef d empty
d = Def "bar" [] (FloatLiteral 1)
testGetDefsValue = assert $ d `elem` getDefs m
where
m = addDef d empty
d = Def "foo" [] (FloatLiteral 1)
testGetDefsCount2 = length (getDefs m) @?= 2
where
m = foldr addDef empty [d1,d2]
d1 = Def "foo" [] (FloatLiteral 1)
d2 = Def "bar" [] (FloatLiteral 2.3)
testGetDefsValue2 = forM_ [d1,d2] $ \d ->
assert $ d `elem` (getDefs m)
where
m = foldr addDef empty [d1,d2]
d1 = Def "foo" [] (FloatLiteral 1)
d2 = Def "bar" [] (FloatLiteral 2.3)
testGetDefsWithOnlyDecls = length (getDefs m) @?= 0
where m = addDecl d empty
d = Decl "foo" (TypeName "Foo")
|
matthewscottgordon/funk
|
test/Test/Module.hs
|
apache-2.0
| 5,051 | 0 | 9 | 1,446 | 1,197 | 626 | 571 | 104 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-| The WConfd functions for direct configuration manipulation
This module contains the client functions exported by WConfD for
specific configuration manipulation.
-}
{-
Copyright (C) 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.WConfd.ConfigModifications where
import Control.Lens.Setter ((.~))
import Control.Lens.Traversal (mapMOf)
import Control.Monad (unless)
import Control.Monad.IO.Class (liftIO)
import Data.Maybe (isJust, maybeToList)
import Language.Haskell.TH (Name)
import System.Time (getClockTime)
import Text.Printf (printf)
import qualified Data.Map as M
import qualified Data.Set as S
import Ganeti.BasicTypes (GenericResult(..), toError)
import Ganeti.Errors (GanetiException(..))
import Ganeti.JSON (GenericContainer(..), alterContainerL)
import Ganeti.Locking.Locks (ClientId, ciIdentifier)
import Ganeti.Logging.Lifted (logDebug)
import Ganeti.Objects
import Ganeti.Objects.Lens
import Ganeti.WConfd.ConfigState (ConfigState, csConfigData, csConfigDataL)
import Ganeti.WConfd.Monad (WConfdMonad, modifyConfigWithLock)
import qualified Ganeti.WConfd.TempRes as T
-- * getters
-- | Gets all logical volumes in the cluster
getAllLVs :: ConfigState -> S.Set String
getAllLVs = S.fromList . concatMap getLVsOfDisk . M.elems
. fromContainer . configDisks . csConfigData
where convert (LogicalVolume lvG lvV) = lvG ++ "/" ++ lvV
getDiskLV :: Disk -> Maybe String
getDiskLV disk = case diskLogicalId disk of
Just (LIDPlain lv) -> Just (convert lv)
_ -> Nothing
getLVsOfDisk :: Disk -> [String]
getLVsOfDisk disk = maybeToList (getDiskLV disk)
++ concatMap getLVsOfDisk (diskChildren disk)
-- | Gets the IDs of nodes, instances, node groups,
-- networks, disks, nics, and the cluster itself.
getAllIDs :: ConfigState -> S.Set String
getAllIDs cs =
let lvs = getAllLVs cs
keysFromC :: GenericContainer a b -> [a]
keysFromC = M.keys . fromContainer
valuesFromC :: GenericContainer a b -> [b]
valuesFromC = M.elems . fromContainer
instKeys = keysFromC . configInstances . csConfigData $ cs
nodeKeys = keysFromC . configNodes . csConfigData $ cs
instValues = map uuidOf . valuesFromC
. configInstances . csConfigData $ cs
nodeValues = map uuidOf . valuesFromC . configNodes . csConfigData $ cs
nodeGroupValues = map uuidOf . valuesFromC
. configNodegroups . csConfigData $ cs
networkValues = map uuidOf . valuesFromC
. configNetworks . csConfigData $ cs
disksValues = map uuidOf . valuesFromC . configDisks . csConfigData $ cs
nics = map nicUuid . concatMap instNics
. valuesFromC . configInstances . csConfigData $ cs
cluster = uuidOf . configCluster . csConfigData $ cs
in S.union lvs . S.fromList $ instKeys ++ nodeKeys ++ instValues ++ nodeValues
++ nodeGroupValues ++ networkValues ++ disksValues ++ nics ++ [cluster]
getAllMACs :: ConfigState -> S.Set String
getAllMACs = S.fromList . map nicMac . concatMap instNics . M.elems
. fromContainer . configInstances . csConfigData
-- * UUID config checks
-- | Checks if the config has the given UUID
checkUUIDpresent :: UuidObject a
=> ConfigState
-> a
-> Bool
checkUUIDpresent cs a = uuidOf a `S.member` getAllIDs cs
-- | Checks if the given UUID is new (i.e., not in the config)
checkUniqueUUID :: UuidObject a
=> ConfigState
-> a
-> Bool
checkUniqueUUID cs a = not $ checkUUIDpresent cs a
-- * RPC checks
-- | Verifications done before adding an instance.
-- Currently confirms that the instance's MACs are not
-- in use, and that the instance's UUID is
-- present (or not present) in the config depending on
-- whether the instance is being replaced (or not).
--
-- TODO: add more verifications to this call;
-- the client should have a lock on the name of the instance.
addInstanceChecks :: Instance
-> Bool
-> ConfigState
-> GenericResult GanetiException ()
addInstanceChecks inst replace cs = do
let macsInUse = S.fromList (map nicMac (instNics inst))
`S.intersection` getAllMACs cs
unless (S.null macsInUse) . Bad . ConfigurationError $ printf
"Cannot add instance %s; MAC addresses %s already in use"
(show $ instName inst) (show macsInUse)
  if replace
    then do
      let check = checkUUIDpresent cs inst
      unless check . Bad . ConfigurationError $ printf
        "Cannot replace %s: UUID %s not present"
        (show $ instName inst) (instUuid inst)
    else do
      let check = checkUniqueUUID cs inst
      unless check . Bad . ConfigurationError $ printf
        "Cannot add %s: UUID %s already in use"
        (show $ instName inst) (instUuid inst)
-- * RPCs
-- | Add a new instance to the configuration, release DRBD minors,
-- and commit temporary IPs, all while temporarily holding the config
-- lock. Return True upon success and False if the config lock was not
-- available and the client should retry.
addInstance :: Instance -> ClientId -> Bool -> WConfdMonad Bool
addInstance inst cid replace = do
ct <- liftIO getClockTime
logDebug $ "AddInstance: client " ++ show (ciIdentifier cid)
++ " adding instance " ++ uuidOf inst
++ " with name " ++ show (instName inst)
let setCtime = instCtimeL .~ ct
setMtime = instMtimeL .~ ct
addInst i = csConfigDataL . configInstancesL . alterContainerL (uuidOf i)
.~ Just i
commitRes tr = mapMOf csConfigDataL $ T.commitReservedIps cid tr
r <- modifyConfigWithLock
(\tr cs -> do
toError $ addInstanceChecks inst replace cs
commitRes tr $ addInst (setMtime . setCtime $ inst) cs)
. T.releaseDRBDMinors $ uuidOf inst
logDebug $ "AddInstance: result of config modification is " ++ show r
return $ isJust r
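-- A call-site sketch (illustrative; 'inst', 'cid' and 'retryLater' are assumed
-- to be provided by the caller, not by this module). The Bool result
-- distinguishes "config lock unavailable, retry later" (False) from
-- "instance committed" (True):
--
--   ok <- addInstance inst cid False  -- False: fail if the UUID already exists
--   unless ok retryLater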
-- * The list of functions exported to RPC.
exportedFunctions :: [Name]
exportedFunctions = [ 'addInstance
]
|
dimara/ganeti
|
src/Ganeti/WConfd/ConfigModifications.hs
|
bsd-2-clause
| 7,452 | 0 | 19 | 1,726 | 1,440 | 755 | 685 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE UnicodeSyntax #-}
{-|
Module : Main
Copyright : 2009–2012 Roel van Dijk
License : BSD3 (see the file LICENSE)
Maintainer : Roel van Dijk <[email protected]>
-}
module Main where
--------------------------------------------------------------------------------
-- Imports
--------------------------------------------------------------------------------
import "ansi-wl-pprint" Text.PrettyPrint.ANSI.Leijen ( putDoc, plain )
import "base" Control.Applicative ( (<$>), (<*>) )
import "base" Control.Monad ( (=<<) )
import "base" Data.Bool ( Bool(False, True), otherwise )
import "base" Data.Function ( ($), const, id )
import "base" Data.Functor ( fmap )
import "base" Data.Int ( Int )
import "base" Data.List ( foldr, map )
import "base" Data.Word ( Word8 )
import "base" Data.Version ( showVersion )
import "base" Prelude ( fromIntegral )
import "base" System.IO ( IO, putStrLn )
#if __GLASGOW_HASKELL__ < 700
import "base" Control.Monad ( (>>=), (>>), fail )
#endif
import "base-unicode-symbols" Data.Function.Unicode ( (∘) )
import "base-unicode-symbols" Data.Bool.Unicode ( (∧), (∨) )
import "base-unicode-symbols" Data.Eq.Unicode ( (≡) )
import "cmdtheline" System.Console.CmdTheLine
( Term, TermInfo
, defTI, flag, opt, optAll, optDoc, optInfo
, optName, run, termName, termDoc, value, version
)
import "this" PrettyDevList ( DescribedDevice(..), ppDevices, brightStyle, darkStyle )
import qualified "this" Paths_ls_usb as This ( version )
import "usb" System.USB.Initialization
( Verbosity(PrintNothing), newCtx, setDebug )
import "usb" System.USB.Enumeration
( getDevices, busNumber, deviceAddress )
import "usb" System.USB.Descriptors
( VendorId, ProductId, getDeviceDesc, deviceVendorId, deviceProductId )
import "usb-id-database" System.USB.IDDB.LinuxUsbIdRepo ( staticDb )
import qualified "vector" Data.Vector as V
--------------------------------------------------------------------------------
-- Main
--------------------------------------------------------------------------------
termInfo ∷ TermInfo
termInfo = defTI { termName = "ls-usb"
, version = showVersion This.version
, termDoc = "Lists connected USB devices."
}
verboseT ∷ Term Bool
verboseT = value $ flag (optInfo ["verbose", "V"])
{ optDoc = "Be verbose." }
colourT ∷ Term Bool
colourT = value $ opt True (optInfo ["colour", "c"])
{ optDoc = "Colour the output." }
darkerT ∷ Term Bool
darkerT = value $ flag (optInfo ["darker"])
{ optDoc = "Use darker colours (for bright backgrounds)." }
vidT ∷ Term [Int]
vidT = value $ optAll [] (optInfo ["vid", "v"])
{ optName = "VID"
, optDoc = "List devices with this VID."
}
pidT ∷ Term [Int]
pidT = value $ optAll [] (optInfo ["pid", "p"])
{ optName = "PID"
, optDoc = "List devices with this PID."
}
busT ∷ Term [Int]
busT = value $ optAll [] (optInfo ["bus", "b"])
{ optName = "BUS"
, optDoc = "List devices on this BUS."
}
addressT ∷ Term [Int]
addressT = value $ optAll [] (optInfo ["address", "a"])
{ optName = "ADDRESS"
, optDoc = "List devices with this ADDRESS."
}
main ∷ IO ()
main = run (term, termInfo)
term ∷ Term (IO ())
term = listUSB <$> verboseT
<*> colourT
<*> darkerT
<*> vidT
<*> pidT
<*> busT
<*> addressT
listUSB ∷ Bool → Bool → Bool → [Int] → [Int] → [Int] → [Int] → IO ()
listUSB verbose colour darker vs ps bs as = do
db ← staticDb
ctx ← newCtx
setDebug ctx PrintNothing
devs ← fmap (V.toList ∘ V.filter filter) ∘
V.mapM (\dev -> DD dev <$> getDeviceDesc dev) =<<
getDevices ctx
let style | darker = darkStyle
| otherwise = brightStyle
(putDoc ∘ if colour then id else plain) =<< ppDevices style db verbose devs
putStrLn ""
where
filter ∷ F DescribedDevice
filter = andF $ map filterNonEmpty
[ map (matchVID ∘ fromIntegral) vs
, map (matchPID ∘ fromIntegral) ps
, map (matchBus ∘ fromIntegral) bs
, map (matchDevAddr ∘ fromIntegral) as
]
--------------------------------------------------------------------------------
-- Filters
--------------------------------------------------------------------------------
type F α = α → Bool
-- Construct a filter combinator from a binary boolean operator.
binBoolOpToFComb ∷ (Bool → Bool → Bool) → F α → F α → F α
binBoolOpToFComb (⊗) f g = \x → f x ⊗ g x
(<∨>) ∷ F α → F α → F α
(<∨>) = binBoolOpToFComb (∨)
(<∧>) ∷ F α → F α → F α
(<∧>) = binBoolOpToFComb (∧)
andF ∷ [F α] → F α
andF = foldr (<∧>) (const True)
orF ∷ [F α] → F α
orF = foldr (<∨>) (const False)
filterNonEmpty ∷ [F α] → F α
filterNonEmpty [] = const True
filterNonEmpty xs = foldr (<∨>) (const False) xs
--------------------------------------------------------------------------------
-- Specific Device filters
--------------------------------------------------------------------------------
matchVID ∷ VendorId → F DescribedDevice
matchVID vid' = (vid' ≡) ∘ deviceVendorId ∘ deviceDesc
matchPID ∷ ProductId → F DescribedDevice
matchPID pid' = (pid' ≡) ∘ deviceProductId ∘ deviceDesc
matchBus ∷ Word8 → F DescribedDevice
matchBus bus' = (bus' ≡) ∘ busNumber ∘ device
matchDevAddr ∷ Word8 → F DescribedDevice
matchDevAddr addr = (addr ≡) ∘ deviceAddress ∘ device
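-- Example composite filter (illustrative only; the vendor id 0x1d6b is an
-- arbitrary value chosen for the example):
--
--   busOneFilter ∷ F DescribedDevice
--   busOneFilter = matchBus 1 <∧> matchVID 0x1d6b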
|
roelvandijk/ls-usb
|
ls-usb.hs
|
bsd-3-clause
| 5,925 | 0 | 14 | 1,382 | 1,567 | 903 | 664 | 114 | 2 |
{-# LANGUAGE DataKinds, GADTs, KindSignatures, TypeOperators #-}
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Data.Fin.Unary (
Fin (..)
) where
import Data.Nat
import Data.Fin
data Fin :: PNat -> * where
FZ :: Fin (S n)
FS :: Fin n -> Fin (S n)
finToInteger :: Fin n -> Integer
finToInteger x = case x of
FZ -> 1
FS n -> let i = finToInteger n in i `seq` 1 + i
instance Show (Fin n) where
show = show . finToInteger
instance Finite Fin where
zero = FZ
succ = FS
elimFin z s n = case n of
FZ -> z
FS n' -> s n'
--times :: pnat m -> pnat n -> pnat (m * n + m + n)
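-- Illustrative values (informal note, added for clarity): 'show' renders
-- elements 1-based, since
--   finToInteger FZ      == 1
--   finToInteger (FS FZ) == 2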
|
bmsherman/haskell-vect
|
Data/Fin/Unary.hs
|
bsd-3-clause
| 634 | 0 | 12 | 163 | 214 | 114 | 100 | 22 | 2 |
module IptAdmin.DelChainPage where
import Control.Monad.Error
import Happstack.Server.SimpleHTTP
import IptAdmin.DelChainPage.Render
import IptAdmin.Render
import IptAdmin.System
import IptAdmin.Template
import IptAdmin.Types
import IptAdmin.Utils
import Iptables
import Iptables.Types
import Text.Blaze.Renderer.Pretty (renderHtml)
pageHandlers :: IptAdmin Response
pageHandlers = msum [ methodSP GET pageHandlerGet
, methodSP POST pageHandlerPost
]
pageHandlerGet :: IptAdmin Response
pageHandlerGet = do
tableName <- getInputNonEmptyString "table"
chainName <- getInputNonEmptyString "chain"
table <- getTable tableName
let chainMay = getChainByName chainName table
case chainMay of
Nothing -> throwError $ "Invalid chain name: " ++ chainName
Just chain ->
if not $ null $ cRules chain
then
return $ buildResponse $ renderHtml $ htmlWrapper $
header tableName $ "'" ++ chainName
++ "' chain is not empty. Please, remove all rules before deleting a chain"
else
let linkedChains = scanTableForLink (cName chain) table
in
if not $ null linkedChains
then return $ buildResponse $ renderHtml $ htmlWrapper $
header tableName $ "There are links to the '" ++ chainName
++ "' chain from chains : " ++ show linkedChains
++ ". Please, remove all links before deleting a chain"
else
return $ buildResponse $ renderHtml $ htmlWrapper $ do
header tableName $ "Delete '" ++ chainName
++ "' user defined chain from '"
++ tableName ++ "' table"
delChainForm tableName chainName
pageHandlerPost :: IptAdmin Response
pageHandlerPost = do
tableName <- getInputNonEmptyString "table"
chainName <- getInputNonEmptyString "chain"
table <- getTable tableName
let chainMay = getChainByName chainName table
case chainMay of
        Nothing -> throwError $ "Invalid chain name: " ++ chainName
Just chain ->
if not $ null $ cRules chain
then throwError $ "Chain " ++ chainName
++ " is not empty. Please, remove all rules before deleting a chain"
else
let linkedChains = scanTableForLink (cName chain) table
in
if not $ null linkedChains
then throwError $ "There are links to the '"
++ chainName ++ "' chain from chains : "
++ show linkedChains
else do
tryChange $ deleteChain tableName chainName
-- redir $ "/show?table=" ++ tableName
return $ buildResponse "ok"
|
etarasov/iptadmin
|
src/IptAdmin/DelChainPage.hs
|
bsd-3-clause
| 3,242 | 2 | 24 | 1,337 | 514 | 263 | 251 | 59 | 4 |
-- Copyright (c) 2014-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is distributed under the terms of a BSD license,
-- found in the LICENSE file. An additional grant of patent rights can
-- be found in the PATENTS file.
{-# LANGUAGE DeriveDataTypeable, GADTs, OverloadedStrings,
StandaloneDeriving #-}
-- | Most users should import "Haxl.Core" instead of importing this
-- module directly.
module Haxl.Core.Memo (memo, memoFingerprint, MemoFingerprintKey(..)) where
import Data.Text (Text)
import Data.Typeable
import Data.Hashable
import Data.Word
import Haxl.Core.Monad (GenHaxl, cachedComputation)
-- -----------------------------------------------------------------------------
-- A key type that can be used for memoizing computations by a Text key
-- | Memoize a computation using an arbitrary key. The result will be
-- calculated once; the second and subsequent time it will be returned
-- immediately. It is the caller's responsibility to ensure that for
-- every two calls @memo key haxl@, if they have the same @key@ then
-- they compute the same result.
memo
:: (Typeable a, Typeable k, Hashable k, Eq k)
=> k -> GenHaxl u a -> GenHaxl u a
memo key = cachedComputation (MemoKey key)
{-# RULES "memo/Text"
memo = memoText :: (Typeable a) => Text -> GenHaxl u a -> GenHaxl u a
#-}
{-# NOINLINE memo #-}
data MemoKey k a where
MemoKey :: (Typeable k, Hashable k, Eq k) => k -> MemoKey k a
deriving Typeable
deriving instance Eq (MemoKey k a)
instance Hashable (MemoKey k a) where
hashWithSalt s (MemoKey t) = hashWithSalt s t
-- An optimised memo key for Text keys. This is used automatically
-- when the key is Text, due to the RULES pragma above.
data MemoTextKey a where
MemoText :: Text -> MemoTextKey a
deriving Typeable
deriving instance Eq (MemoTextKey a)
deriving instance Show (MemoTextKey a)
instance Hashable (MemoTextKey a) where
hashWithSalt s (MemoText t) = hashWithSalt s t
memoText :: (Typeable a) => Text -> GenHaxl u a -> GenHaxl u a
memoText key = cachedComputation (MemoText key)
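-- A usage sketch (illustrative; 'UserId' and 'fetchUserName' are assumed
-- application-level definitions, not part of this module). The fetch for a
-- given id runs at most once per Haxl computation; later calls reuse the
-- cached result:
--
--   userName :: UserId -> GenHaxl u Text
--   userName uid = memo ("userName", uid) (fetchUserName uid)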
-- | A memo key derived from a 128-bit MD5 hash. Do not use this directly,
-- it is for use by automatically-generated memoization.
data MemoFingerprintKey a where
MemoFingerprintKey
:: {-# UNPACK #-} !Word64
-> {-# UNPACK #-} !Word64 -> MemoFingerprintKey a
deriving Typeable
deriving instance Eq (MemoFingerprintKey a)
deriving instance Show (MemoFingerprintKey a)
instance Hashable (MemoFingerprintKey a) where
hashWithSalt s (MemoFingerprintKey x _) =
hashWithSalt s (fromIntegral x :: Int)
-- This is optimised for cheap call sites: when we have a call
--
-- memoFingerprint (MemoFingerprintKey 1234 5678) e
--
-- then the MemoFingerprintKey constructor will be statically
-- allocated (with two 64-bit fields), and shared by all calls to
-- memo. So the memo call will not allocate, unlike memoText.
--
{-# NOINLINE memoFingerprint #-}
memoFingerprint
:: (Show a, Typeable a) => MemoFingerprintKey a -> GenHaxl u a -> GenHaxl u a
memoFingerprint key = cachedComputation key
|
tolysz/Haxl
|
Haxl/Core/Memo.hs
|
bsd-3-clause
| 3,088 | 0 | 8 | 550 | 542 | 299 | 243 | 44 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Arity and eta expansion
-}
{-# LANGUAGE CPP #-}
-- | Arity and eta expansion
module CoreArity (
manifestArity, exprArity, typeArity, exprBotStrictness_maybe,
exprEtaExpandArity, findRhsArity, CheapFun, etaExpand
) where
#include "HsVersions.h"
import CoreSyn
import CoreFVs
import CoreUtils
import CoreSubst
import Demand
import Var
import VarEnv
import Id
import Type
import TyCon ( initRecTc, checkRecTc )
import Coercion
import BasicTypes
import Unique
import DynFlags ( DynFlags, GeneralFlag(..), gopt )
import Outputable
import FastString
import Pair
import Util ( debugIsOn )
{-
************************************************************************
* *
manifestArity and exprArity
* *
************************************************************************
exprArity is a cheap-and-cheerful version of exprEtaExpandArity.
It tells how many things the expression can be applied to before doing
any work. It doesn't look inside cases, lets, etc. The idea is that
exprEtaExpandArity will do the hard work, leaving something that's easy
for exprArity to grapple with. In particular, Simplify uses exprArity to
compute the ArityInfo for the Id.
Originally I thought that it was enough just to look for top-level lambdas, but
it isn't. I've seen this
foo = PrelBase.timesInt
We want foo to get arity 2 even though the eta-expander will leave it
unchanged, in the expectation that it'll be inlined. But occasionally it
isn't, because foo is blacklisted (used in a rule).
Similarly, see the ok_note check in exprEtaExpandArity. So
f = __inline_me (\x -> e)
won't be eta-expanded.
And in any case it seems more robust to have exprArity be a bit more intelligent.
But note that (\x y z -> f x y z)
should have arity 3, regardless of f's arity.
-}
manifestArity :: CoreExpr -> Arity
-- ^ manifestArity sees how many leading value lambdas there are,
-- after looking through casts
manifestArity (Lam v e) | isId v = 1 + manifestArity e
| otherwise = manifestArity e
manifestArity (Tick t e) | not (tickishIsCode t) = manifestArity e
manifestArity (Cast e _) = manifestArity e
manifestArity _ = 0
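-- Informal examples (added for illustration):
--   manifestArity (\x y -> e)   == 2   -- two leading value lambdas
--   manifestArity (/\a. \x. e)  == 1   -- the type lambda is not counted
--   manifestArity (f x)         == 0   -- applications are not looked into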
---------------
exprArity :: CoreExpr -> Arity
-- ^ An approximate, fast, version of 'exprEtaExpandArity'
exprArity e = go e
where
go (Var v) = idArity v
go (Lam x e) | isId x = go e + 1
| otherwise = go e
go (Tick t e) | not (tickishIsCode t) = go e
go (Cast e co) = trim_arity (go e) (pSnd (coercionKind co))
-- Note [exprArity invariant]
go (App e (Type _)) = go e
go (App f a) | exprIsTrivial a = (go f - 1) `max` 0
-- See Note [exprArity for applications]
-- NB: coercions count as a value argument
go _ = 0
trim_arity :: Arity -> Type -> Arity
trim_arity arity ty = arity `min` length (typeArity ty)
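-- Informal examples (added for illustration; 'f' is an Id with idArity 1):
--   exprArity (Var f)                       == 1
--   exprArity (App (Var f) (Var x))         == 0  -- one value arg consumed
--   exprArity (Lam y (App (Var f) (Var y))) == 1  -- the lambda adds it back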
---------------
typeArity :: Type -> [OneShotInfo]
-- How many value arrows are visible in the type?
-- We look through foralls, and newtypes
-- See Note [exprArity invariant]
typeArity ty
= go initRecTc ty
where
go rec_nts ty
| Just (_, ty') <- splitForAllTy_maybe ty
= go rec_nts ty'
| Just (arg,res) <- splitFunTy_maybe ty
= typeOneShot arg : go rec_nts res
| Just (tc,tys) <- splitTyConApp_maybe ty
, Just (ty', _) <- instNewTyCon_maybe tc tys
, Just rec_nts' <- checkRecTc rec_nts tc -- See Note [Expanding newtypes]
-- in TyCon
-- , not (isClassTyCon tc) -- Do not eta-expand through newtype classes
-- -- See Note [Newtype classes and eta expansion]
-- (no longer required)
= go rec_nts' ty'
-- Important to look through non-recursive newtypes, so that, eg
-- (f x) where f has arity 2, f :: Int -> IO ()
-- Here we want to get arity 1 for the result!
--
-- AND through a layer of recursive newtypes
-- e.g. newtype Stream m a b = Stream (m (Either b (a, Stream m a b)))
| otherwise
= []
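-- Informal example (added for illustration): IO is a newtype that typeArity
-- looks through, so typeArity (Int -> IO ()) has length 2: one entry for the
-- Int argument and one revealed by unwrapping IO. That is what gives "(f x)"
-- the result arity 1 mentioned in the comment above.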
---------------
exprBotStrictness_maybe :: CoreExpr -> Maybe (Arity, StrictSig)
-- A cheap and cheerful function that identifies bottoming functions
-- and gives them a suitable strictness signatures. It's used during
-- float-out
exprBotStrictness_maybe e
= case getBotArity (arityType env e) of
Nothing -> Nothing
Just ar -> Just (ar, sig ar)
where
env = AE { ae_ped_bot = True, ae_cheap_fn = \ _ _ -> False }
sig ar = mkClosedStrictSig (replicate ar topDmd) botRes
-- For this purpose we can be very simple
{-
Note [exprArity invariant]
~~~~~~~~~~~~~~~~~~~~~~~~~~
exprArity has the following invariant:
(1) If typeArity (exprType e) = n,
then manifestArity (etaExpand e n) = n
That is, etaExpand can always expand as much as typeArity says
So the case analysis in etaExpand and in typeArity must match
(2) exprArity e <= typeArity (exprType e)
(3) Hence if (exprArity e) = n, then manifestArity (etaExpand e n) = n
That is, if exprArity says "the arity is n" then etaExpand really
can get "n" manifest lambdas to the top.
Why is this important? Because
- In TidyPgm we use exprArity to fix the *final arity* of
each top-level Id, and in
- In CorePrep we use etaExpand on each rhs, so that the visible lambdas
actually match that arity, which in turn means
that the StgRhs has the right number of lambdas
An alternative would be to do the eta-expansion in TidyPgm, at least
for top-level bindings, in which case we would not need the trim_arity
in exprArity. That is a less local change, so I'm going to leave it for today!
Note [Newtype classes and eta expansion]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
NB: this nasty special case is no longer required, because
for newtype classes we don't use the class-op rule mechanism
at all. See Note [Single-method classes] in TcInstDcls. SLPJ May 2013
-------- Old out of date comments, just for interest -----------
We have to be careful when eta-expanding through newtypes. In general
it's a good idea, but annoyingly it interacts badly with the class-op
rule mechanism. Consider
class C a where { op :: a -> a }
instance C b => C [b] where
op x = ...
These translate to
co :: forall a. (a->a) ~ C a
$copList :: C b -> [b] -> [b]
$copList d x = ...
$dfList :: C b -> C [b]
{-# DFunUnfolding = [$copList] #-}
$dfList d = $copList d |> co@[b]
Now suppose we have:
dCInt :: C Int
blah :: [Int] -> [Int]
blah = op ($dfList dCInt)
Now we want the built-in op/$dfList rule will fire to give
blah = $copList dCInt
But with eta-expansion 'blah' might (and in Trac #3772, which is
slightly more complicated, does) turn into
blah = op (\eta. ($dfList dCInt |> sym co) eta)
and now it is *much* harder for the op/$dfList rule to fire, because
exprIsConApp_maybe won't hold of the argument to op. I considered
trying to *make* it hold, but it's tricky and I gave up.
The test simplCore/should_compile/T3722 is an excellent example.
-------- End of old out of date comments, just for interest -----------
Note [exprArity for applications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we come to an application we check that the arg is trivial.
eg f (fac x) does not have arity 2,
even if f has arity 3!
* We require that is trivial rather merely cheap. Suppose f has arity 2.
Then f (Just y)
has arity 0, because if we gave it arity 1 and then inlined f we'd get
let v = Just y in \w. <f-body>
which has arity 0. And we try to maintain the invariant that we don't
have arity decreases.
* The `max 0` is important! (\x y -> f x) has arity 2, even if f is
unknown, hence arity 0
************************************************************************
* *
Computing the "arity" of an expression
* *
************************************************************************
Note [Definition of arity]
~~~~~~~~~~~~~~~~~~~~~~~~~~
The "arity" of an expression 'e' is n if
applying 'e' to *fewer* than n *value* arguments
converges rapidly
Or, to put it another way
there is no work lost in duplicating the partial
application (e x1 .. x(n-1))
In the divergent case, no work is lost by duplicating because if the thing
is evaluated once, that's the end of the program.
Or, to put it another way, in any context C
C[ (\x1 .. xn. e x1 .. xn) ]
is as efficient as
C[ e ]
It's all a bit more subtle than it looks:
Note [One-shot lambdas]
~~~~~~~~~~~~~~~~~~~~~~~
Consider one-shot lambdas
let x = expensive in \y z -> E
We want this to have arity 1 if the \y-abstraction is a 1-shot lambda.
Note [Dealing with bottom]
~~~~~~~~~~~~~~~~~~~~~~~~~~
A Big Deal with computing arities is expressions like
f = \x -> case x of
True -> \s -> e1
False -> \s -> e2
This happens all the time when f :: Bool -> IO ()
In this case we do eta-expand, in order to get that \s to the
top, and give f arity 2.
This isn't really right in the presence of seq. Consider
(f bot) `seq` 1
This should diverge! But if we eta-expand, it won't. We ignore this
"problem" (unless -fpedantic-bottoms is on), because being scrupulous
would lose an important transformation for many programs. (See
Trac #5587 for an example.)
Consider also
f = \x -> error "foo"
Here, arity 1 is fine. But if it is
f = \x -> case x of
True -> error "foo"
False -> \y -> x+y
then we want to get arity 2. Technically, this isn't quite right, because
(f True) `seq` 1
should diverge, but it'll converge if we eta-expand f. Nevertheless, we
do so; it improves some programs significantly, and increasing convergence
isn't a bad thing. Hence the ABot/ATop in ArityType.
So these two transformations aren't always the Right Thing, and we
have several tickets reporting unexpected behaviour resulting from
this transformation. So we try to limit it as much as possible:
(1) Do NOT move a lambda outside a known-bottom case expression
case undefined of { (a,b) -> \y -> e }
This showed up in Trac #5557
(2) Do NOT move a lambda outside a case if all the branches of
the case are known to return bottom.
case x of { (a,b) -> \y -> error "urk" }
This case is less important, but the idea is that if the fn is
going to diverge eventually anyway then getting the best arity
isn't an issue, so we might as well play safe
(3) Do NOT move a lambda outside a case unless
(a) The scrutinee is ok-for-speculation, or
(b) more liberally: the scrutinee is cheap (e.g. a variable), and
-fpedantic-bottoms is not enforced (see Trac #2915 for an example)
Of course both (1) and (2) are readily defeated by disguising the bottoms.
4. Note [Newtype arity]
~~~~~~~~~~~~~~~~~~~~~~~~
Non-recursive newtypes are transparent, and should not get in the way.
We do (currently) eta-expand recursive newtypes too. So if we have, say
newtype T = MkT ([T] -> Int)
Suppose we have
e = coerce T f
where f has arity 1. Then: etaExpandArity e = 1;
that is, etaExpandArity looks through the coerce.
When we eta-expand e to arity 1: eta_expand 1 e T
we want to get: coerce T (\x::[T] -> (coerce ([T]->Int) e) x)
HOWEVER, note that if you use coerce bogusly you can get
coerce Int negate
And since negate has arity 2, you might try to eta expand. But you can't
decompose Int to a function type. Hence the final case in eta_expand.
Note [The state-transformer hack]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have
f = e
where e has arity n. Then, if we know from the context that f has
a usage type like
t1 -> ... -> tn -1-> t(n+1) -1-> ... -1-> tm -> ...
then we can expand the arity to m. This usage type says that
any application (x e1 .. en) will be applied to uniquely to (m-n) more args
Consider f = \x. let y = <expensive>
in case x of
True -> foo
False -> \(s:RealWorld) -> e
where foo has arity 1. Then we want the state hack to
apply to foo too, so we can eta expand the case.
Then we expect that if f is applied to one arg, it'll be applied to two
(that's the hack -- we don't really know, and sometimes it's false)
See also Id.isOneShotBndr.
Note [State hack and bottoming functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's a terrible idea to use the state hack on a bottoming function.
Here's what happens (Trac #2861):
f :: String -> IO T
f = \p. error "..."
Eta-expand, using the state hack:
f = \p. (\s. ((error "...") |> g1) s) |> g2
g1 :: IO T ~ (S -> (S,T))
g2 :: (S -> (S,T)) ~ IO T
Extrude the g2
f' = \p. \s. ((error "...") |> g1) s
f = f' |> (String -> g2)
Discard args for bottomming function
f' = \p. \s. ((error "...") |> g1 |> g3
g3 :: (S -> (S,T)) ~ (S,T)
Extrude g1.g3
f'' = \p. \s. (error "...")
f' = f'' |> (String -> S -> g1.g3)
And now we can repeat the whole loop. Aargh! The bug is in applying the
state hack to a function which then swallows the argument.
This arose in another guise in Trac #3959. Here we had
catch# (throw exn >> return ())
Note that (throw :: forall a e. Exn e => e -> a) is called with [a = IO ()].
After inlining (>>) we get
catch# (\_. throw {IO ()} exn)
We must *not* eta-expand to
catch# (\_ _. throw {...} exn)
because 'catch#' expects to get a (# _,_ #) after applying its argument to
a State#, not another function!
In short, we use the state hack to allow us to push let inside a lambda,
but not to introduce a new lambda.
Note [ArityType]
~~~~~~~~~~~~~~~~
ArityType is the result of a compositional analysis on expressions,
from which we can decide the real arity of the expression (extracted
with function exprEtaExpandArity).
Here is what the fields mean. If an arbitrary expression 'f' has
ArityType 'at', then
* If at = ABot n, then (f x1..xn) definitely diverges. Partial
applications to fewer than n args may *or may not* diverge.
We allow ourselves to eta-expand bottoming functions, even
if doing so may lose some `seq` sharing,
let x = <expensive> in \y. error (g x y)
==> \y. let x = <expensive> in error (g x y)
* If at = ATop as, and n=length as,
then expanding 'f' to (\x1..xn. f x1 .. xn) loses no sharing,
assuming the calls of f respect the one-shot-ness of
its definition.
NB 'f' is an arbitrary expression, eg (f = g e1 e2). This 'f'
can have ArityType as ATop, with length as > 0, only if e1 e2 are
themselves cheap.
* In both cases, f, (f x1), ... (f x1 ... f(n-1)) are definitely
really functions, or bottom, but *not* casts from a data type, in
at least one case branch. (If it's a function in one case branch but
an unsafe cast from a data type in another, the program is bogus.)
So eta expansion is dynamically ok; see Note [State hack and
bottoming functions], the part about catch#
Example:
f = \x\y. let v = <expensive> in
\s(one-shot) \t(one-shot). blah
'f' has ArityType [ManyShot,ManyShot,OneShot,OneShot]
The one-shot-ness means we can, in effect, push that
'let' inside the \st.
Suppose f = \xy. x+y
Then f :: AT [False,False] ATop
f v :: AT [False] ATop
f <expensive> :: AT [] ATop
-------------------- Main arity code ----------------------------
-}
-- See Note [ArityType]
data ArityType = ATop [OneShotInfo] | ABot Arity
-- There is always an explicit lambda
-- to justify the [OneShot], or the Arity
vanillaArityType :: ArityType
vanillaArityType = ATop [] -- Totally uninformative
-- ^ The Arity returned is the number of value args the
-- expression can be applied to without doing much work
exprEtaExpandArity :: DynFlags -> CoreExpr -> Arity
-- exprEtaExpandArity is used when eta expanding
-- e ==> \xy -> e x y
exprEtaExpandArity dflags e
= case (arityType env e) of
ATop oss -> length oss
ABot n -> n
where
env = AE { ae_cheap_fn = mk_cheap_fn dflags isCheapApp
, ae_ped_bot = gopt Opt_PedanticBottoms dflags }
getBotArity :: ArityType -> Maybe Arity
-- Arity of a divergent function
getBotArity (ABot n) = Just n
getBotArity _ = Nothing
mk_cheap_fn :: DynFlags -> CheapAppFun -> CheapFun
mk_cheap_fn dflags cheap_app
| not (gopt Opt_DictsCheap dflags)
= \e _ -> exprIsCheap' cheap_app e
| otherwise
= \e mb_ty -> exprIsCheap' cheap_app e
|| case mb_ty of
Nothing -> False
Just ty -> isDictLikeTy ty
----------------------
findRhsArity :: DynFlags -> Id -> CoreExpr -> Arity -> Arity
-- This implements the fixpoint loop for arity analysis
-- See Note [Arity analysis]
findRhsArity dflags bndr rhs old_arity
= go (rhsEtaExpandArity dflags init_cheap_app rhs)
-- We always call exprEtaExpandArity once, but usually
-- that produces a result equal to old_arity, and then
-- we stop right away (since arities should not decrease)
-- Result: the common case is that there is just one iteration
where
init_cheap_app :: CheapAppFun
init_cheap_app fn n_val_args
| fn == bndr = True -- On the first pass, this binder gets infinite arity
| otherwise = isCheapApp fn n_val_args
go :: Arity -> Arity
go cur_arity
| cur_arity <= old_arity = cur_arity
| new_arity == cur_arity = cur_arity
| otherwise = ASSERT( new_arity < cur_arity )
#ifdef DEBUG
pprTrace "Exciting arity"
(vcat [ ppr bndr <+> ppr cur_arity <+> ppr new_arity
, ppr rhs])
#endif
go new_arity
where
new_arity = rhsEtaExpandArity dflags cheap_app rhs
cheap_app :: CheapAppFun
cheap_app fn n_val_args
| fn == bndr = n_val_args < cur_arity
| otherwise = isCheapApp fn n_val_args
-- ^ The Arity returned is the number of value args the
-- expression can be applied to without doing much work
rhsEtaExpandArity :: DynFlags -> CheapAppFun -> CoreExpr -> Arity
-- exprEtaExpandArity is used when eta expanding
-- e ==> \xy -> e x y
rhsEtaExpandArity dflags cheap_app e
= case (arityType env e) of
ATop (os:oss)
| isOneShotInfo os || has_lam e -> 1 + length oss
-- Don't expand PAPs/thunks
-- Note [Eta expanding thunks]
| otherwise -> 0
ATop [] -> 0
ABot n -> n
where
env = AE { ae_cheap_fn = mk_cheap_fn dflags cheap_app
, ae_ped_bot = gopt Opt_PedanticBottoms dflags }
has_lam (Tick _ e) = has_lam e
has_lam (Lam b e) = isId b || has_lam e
has_lam _ = False
{-
Note [Arity analysis]
~~~~~~~~~~~~~~~~~~~~~
The motivating example for arity analysis is this:
f = \x. let g = f (x+1)
in \y. ...g...
What arity does f have? Really it should have arity 2, but a naive
look at the RHS won't see that. You need a fixpoint analysis which
says it has arity "infinity" the first time round.
This example happens a lot; it first showed up in Andy Gill's thesis,
fifteen years ago! It also shows up in the code for 'rnf' on lists
in Trac #4138.
The analysis is easy to achieve because exprEtaExpandArity takes an
argument
type CheapFun = CoreExpr -> Maybe Type -> Bool
used to decide if an expression is cheap enough to push inside a
lambda. And exprIsCheap' in turn takes an argument
type CheapAppFun = Id -> Int -> Bool
which tells when an application is cheap. This makes it easy to
write the analysis loop.
The analysis is cheap-and-cheerful because it doesn't deal with
mutual recursion. But the self-recursive case is the important one.
Note [Eta expanding through dictionaries]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the experimental -fdicts-cheap flag is on, we eta-expand through
dictionary bindings. This improves arities. It also means that
full laziness is less prone to floating out the application of a
function to its dictionary arguments; such floating can lose
opportunities for fusion. Example:
foo :: Ord a => a -> ...
foo = /\a \(d:Ord a). let d' = ...d... in \(x:a). ....
-- So foo has arity 1
f = \x. foo dInt $ bar x
The (foo dInt) is floated out, and makes ineffective a RULE
foo (bar x) = ...
One could go further and make exprIsCheap reply True to any
dictionary-typed expression, but that's more work.
See Note [Dictionary-like types] in TcType.hs for why we use
isDictLikeTy here rather than isDictTy
Note [Eta expanding thunks]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
We don't eta-expand
* Trivial RHSs x = y
* PAPs x = map g
* Thunks f = case y of p -> \x -> blah
When we see
f = case y of p -> \x -> blah
should we eta-expand it? Well, if 'x' is a one-shot state token
then 'yes' because 'f' will only be applied once. But otherwise
we (conservatively) say no. My main reason is to avoid expanding
PAPs
f = g d ==> f = \x. g d x
because that might in turn make g inline (if it has an inline pragma),
which we might not want. After all, INLINE pragmas say "inline only
when saturated" so we don't want to be too gung-ho about saturating!
-}
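-- A hedged trace (not in the GHC sources) of how findRhsArity above handles
-- the motivating example from Note [Arity analysis]:
--
--   f = \x. let g = f (x+1) in \y. ...g...
--
-- * First pass: init_cheap_app treats every application of 'f' itself as
--   cheap, so the binding for 'g' does not block the analysis and the RHS
--   gets arity 2 (the \x and the \y).
-- * Second pass: cheap_app now only accepts applications of 'f' to fewer
--   than 2 value args; 'f (x+1)' has 1, so it is still cheap and the
--   analysis again returns 2.  The new arity equals the current guess, so
--   the fixpoint loop stops and 'f' gets arity 2.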
arityLam :: Id -> ArityType -> ArityType
arityLam id (ATop as) = ATop (idOneShotInfo id : as)
arityLam _ (ABot n) = ABot (n+1)
floatIn :: Bool -> ArityType -> ArityType
-- We have something like (let x = E in b),
-- where b has the given arity type.
floatIn _ (ABot n) = ABot n
floatIn True (ATop as) = ATop as
floatIn False (ATop as) = ATop (takeWhile isOneShotInfo as)
-- If E is not cheap, keep arity only for one-shots
arityApp :: ArityType -> Bool -> ArityType
-- Processing (fun arg) where at is the ArityType of fun,
-- Knock off an argument and behave like 'let'
arityApp (ABot 0) _ = ABot 0
arityApp (ABot n) _ = ABot (n-1)
arityApp (ATop []) _ = ATop []
arityApp (ATop (_:as)) cheap = floatIn cheap (ATop as)
andArityType :: ArityType -> ArityType -> ArityType -- Used for branches of a 'case'
andArityType (ABot n1) (ABot n2)
= ABot (n1 `min` n2)
andArityType (ATop as) (ABot _) = ATop as
andArityType (ABot _) (ATop bs) = ATop bs
andArityType (ATop as) (ATop bs) = ATop (as `combine` bs)
where -- See Note [Combining case branches]
combine (a:as) (b:bs) = (a `bestOneShot` b) : combine as bs
combine [] bs = takeWhile isOneShotInfo bs
combine as [] = takeWhile isOneShotInfo as
{-
Note [Combining case branches]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
go = \x. let z = go e0
go2 = \x. case x of
True -> z
False -> \s(one-shot). e1
in go2 x
We *really* want to eta-expand go and go2.
When combining the branches of the case we have
ATop [] `andAT` ATop [OneShotLam]
and we want to get ATop [OneShotLam]. But if the inner
lambda wasn't one-shot we don't want to do this.
(We need a proper arity analysis to justify that.)
So we combine the best of the two branches, on the (slightly dodgy)
basis that if we know one branch is one-shot, then they all must be.
-}
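-- A worked illustration (not from the GHC sources) of the combination above,
-- expressed with the 'andArityType'/'combine' definitions earlier in this file:
--
--   ATop [] `andArityType` ATop [OneShotLam]
--     = ATop (combine [] [OneShotLam])
--     = ATop (takeWhile isOneShotInfo [OneShotLam])
--     = ATop [OneShotLam]
--
-- whereas a non-one-shot branch, e.g. ATop [NoOneShotInfo], would be cut
-- back to ATop [] by the takeWhile.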
---------------------------
type CheapFun = CoreExpr -> Maybe Type -> Bool
-- How to decide if an expression is cheap
-- If the Maybe is Just, the type is the type
-- of the expression; Nothing means "don't know"
data ArityEnv
= AE { ae_cheap_fn :: CheapFun
, ae_ped_bot :: Bool -- True <=> be pedantic about bottoms
}
arityType :: ArityEnv -> CoreExpr -> ArityType
arityType env (Cast e co)
= case arityType env e of
ATop os -> ATop (take co_arity os)
ABot n -> ABot (n `min` co_arity)
where
co_arity = length (typeArity (pSnd (coercionKind co)))
-- See Note [exprArity invariant] (2); must be true of
-- arityType too, since that is how we compute the arity
-- of variables, and they in turn affect result of exprArity
-- Trac #5441 is a nice demo
-- However, do make sure that ATop -> ATop and ABot -> ABot!
-- Casts don't affect that part. Getting this wrong provoked #5475
arityType _ (Var v)
| strict_sig <- idStrictness v
, not $ isNopSig strict_sig
, (ds, res) <- splitStrictSig strict_sig
, let arity = length ds
= if isBotRes res then ABot arity
else ATop (take arity one_shots)
| otherwise
= ATop (take (idArity v) one_shots)
where
one_shots :: [OneShotInfo] -- One-shot-ness derived from the type
one_shots = typeArity (idType v)
-- Lambdas; increase arity
arityType env (Lam x e)
| isId x = arityLam x (arityType env e)
| otherwise = arityType env e
-- Applications; decrease arity, except for types
arityType env (App fun (Type _))
= arityType env fun
arityType env (App fun arg )
= arityApp (arityType env fun) (ae_cheap_fn env arg Nothing)
-- Case/Let; keep arity if either the expression is cheap
-- or it's a 1-shot lambda
-- The former is not really right for Haskell
-- f x = case x of { (a,b) -> \y. e }
-- ===>
-- f x y = case x of { (a,b) -> e }
-- The difference is observable using 'seq'
--
arityType env (Case scrut _ _ alts)
| exprIsBottom scrut || null alts
= ABot 0 -- Do not eta expand
-- See Note [Dealing with bottom (1)]
| otherwise
= case alts_type of
ABot n | n>0 -> ATop [] -- Don't eta expand
| otherwise -> ABot 0 -- if RHS is bottomming
-- See Note [Dealing with bottom (2)]
ATop as | not (ae_ped_bot env) -- See Note [Dealing with bottom (3)]
, ae_cheap_fn env scrut Nothing -> ATop as
| exprOkForSpeculation scrut -> ATop as
| otherwise -> ATop (takeWhile isOneShotInfo as)
where
alts_type = foldr1 andArityType [arityType env rhs | (_,_,rhs) <- alts]
arityType env (Let b e)
= floatIn (cheap_bind b) (arityType env e)
where
cheap_bind (NonRec b e) = is_cheap (b,e)
cheap_bind (Rec prs) = all is_cheap prs
is_cheap (b,e) = ae_cheap_fn env e (Just (idType b))
arityType env (Tick t e)
| not (tickishIsCode t) = arityType env e
arityType _ _ = vanillaArityType
{-
************************************************************************
* *
The main eta-expander
* *
************************************************************************
We go for:
f = \x1..xn -> N ==> f = \x1..xn y1..ym -> N y1..ym
(n >= 0)
where (in both cases)
* The xi can include type variables
* The yi are all value variables
* N is a NORMAL FORM (i.e. no redexes anywhere)
wanting a suitable number of extra args.
The biggest reason for doing this is for cases like
f = \x -> case x of
True -> \y -> e1
False -> \y -> e2
Here we want to get the lambdas together. A good example is the nofib
program fibheaps, which gets 25% more allocation if you don't do this
eta-expansion.
We may have to sandwich some coerces between the lambdas
to make the types work. exprEtaExpandArity looks through coerces
when computing arity; and etaExpand adds the coerces as necessary when
actually computing the expansion.
Note [No crap in eta-expanded code]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The eta expander is careful not to introduce "crap". In particular,
given a CoreExpr satisfying the 'CpeRhs' invariant (in CorePrep), it
returns a CoreExpr satisfying the same invariant. See Note [Eta
expansion and the CorePrep invariants] in CorePrep.
This means the eta-expander has to do a bit of on-the-fly
simplification but it's not too hard. The alternative, of relying on
a subsequent clean-up phase of the Simplifier to de-crapify the result,
means you can't really use it in CorePrep, which is painful.
Note [Eta expansion and SCCs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Note that SCCs are not treated specially by etaExpand. If we have
etaExpand 2 (\x -> scc "foo" e)
= (\x y -> (scc "foo" e) y)
So the costs of evaluating 'e' (not 'e y') are attributed to "foo"
Note [Eta expansion and source notes]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
CorePrep puts floatable ticks outside of value applications, but not
type applications. As a result we might be trying to eta-expand an
expression like
(src<...> v) @a
which we want to lead to code like
\x -> src<...> v @a x
This means that we need to look through type applications and be ready
to re-add floats on the top.
-}
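-- A toy illustration (not GHC's implementation) of the transformation the
-- notes above describe, on a minimal untyped lambda calculus.  The real
-- 'etaExpand' below must additionally thread casts, ticks and an in-scope
-- set; this sketch only wraps the expression in fresh lambdas and applies
-- it to the new variables.
data ToyExpr = ToyVar String
             | ToyLam String ToyExpr
             | ToyApp ToyExpr ToyExpr
             deriving Show
toyEtaExpand :: Int -> ToyExpr -> ToyExpr
toyEtaExpand n e = foldr ToyLam body vs
  where
    vs   = [ "eta" ++ show i | i <- [1 .. n] ]         -- fresh-ish binder names
    body = foldl (\f v -> ToyApp f (ToyVar v)) e vs    -- e eta1 ... etaN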
-- | @etaExpand n us e ty@ returns an expression with
-- the same meaning as @e@, but with arity @n@.
--
-- Given:
--
-- > e' = etaExpand n us e ty
--
-- We should have that:
--
-- > ty = exprType e = exprType e'
etaExpand :: Arity -- ^ Result should have this number of value args
-> CoreExpr -- ^ Expression to expand
-> CoreExpr
-- etaExpand deals with for-alls. For example:
-- etaExpand 1 E
-- where E :: forall a. a -> a
-- would return
-- (/\b. \y::a -> E b y)
--
-- It deals with coerces too, though they are now rare
-- so perhaps the extra code isn't worth it
etaExpand n orig_expr
= go n orig_expr
where
-- Strip off existing lambdas and casts
-- Note [Eta expansion and SCCs]
go 0 expr = expr
go n (Lam v body) | isTyVar v = Lam v (go n body)
| otherwise = Lam v (go (n-1) body)
go n (Cast expr co) = Cast (go n expr) co
go n expr
= -- pprTrace "ee" (vcat [ppr orig_expr, ppr expr, ppr etas]) $
retick $ etaInfoAbs etas (etaInfoApp subst' sexpr etas)
where
in_scope = mkInScopeSet (exprFreeVars expr)
(in_scope', etas) = mkEtaWW n orig_expr in_scope (exprType expr)
subst' = mkEmptySubst in_scope'
-- Find ticks behind type apps.
-- See Note [Eta expansion and source notes]
(expr', args) = collectArgs expr
(ticks, expr'') = stripTicksTop tickishFloatable expr'
sexpr = foldl App expr'' args
retick expr = foldr mkTick expr ticks
-- Wrapper Unwrapper
--------------
data EtaInfo = EtaVar Var -- /\a. [], [] a
-- \x. [], [] x
| EtaCo Coercion -- [] |> co, [] |> (sym co)
instance Outputable EtaInfo where
ppr (EtaVar v) = ptext (sLit "EtaVar") <+> ppr v
ppr (EtaCo co) = ptext (sLit "EtaCo") <+> ppr co
pushCoercion :: Coercion -> [EtaInfo] -> [EtaInfo]
pushCoercion co1 (EtaCo co2 : eis)
| isReflCo co = eis
| otherwise = EtaCo co : eis
where
co = co1 `mkTransCo` co2
pushCoercion co eis = EtaCo co : eis
--------------
etaInfoAbs :: [EtaInfo] -> CoreExpr -> CoreExpr
etaInfoAbs [] expr = expr
etaInfoAbs (EtaVar v : eis) expr = Lam v (etaInfoAbs eis expr)
etaInfoAbs (EtaCo co : eis) expr = Cast (etaInfoAbs eis expr) (mkSymCo co)
--------------
etaInfoApp :: Subst -> CoreExpr -> [EtaInfo] -> CoreExpr
-- (etaInfoApp s e eis) returns something equivalent to
-- ((substExpr s e) `appliedto` eis)
etaInfoApp subst (Lam v1 e) (EtaVar v2 : eis)
= etaInfoApp (CoreSubst.extendSubstWithVar subst v1 v2) e eis
etaInfoApp subst (Cast e co1) eis
= etaInfoApp subst e (pushCoercion co' eis)
where
co' = CoreSubst.substCo subst co1
etaInfoApp subst (Case e b ty alts) eis
= Case (subst_expr subst e) b1 (mk_alts_ty (CoreSubst.substTy subst ty) eis) alts'
where
(subst1, b1) = substBndr subst b
alts' = map subst_alt alts
subst_alt (con, bs, rhs) = (con, bs', etaInfoApp subst2 rhs eis)
where
(subst2,bs') = substBndrs subst1 bs
mk_alts_ty ty [] = ty
mk_alts_ty ty (EtaVar v : eis) = mk_alts_ty (applyTypeToArg ty (varToCoreExpr v)) eis
mk_alts_ty _ (EtaCo co : eis) = mk_alts_ty (pSnd (coercionKind co)) eis
etaInfoApp subst (Let b e) eis
= Let b' (etaInfoApp subst' e eis)
where
(subst', b') = subst_bind subst b
etaInfoApp subst (Tick t e) eis
= Tick (substTickish subst t) (etaInfoApp subst e eis)
etaInfoApp subst e eis
= go (subst_expr subst e) eis
where
go e [] = e
go e (EtaVar v : eis) = go (App e (varToCoreExpr v)) eis
go e (EtaCo co : eis) = go (Cast e co) eis
--------------
mkEtaWW :: Arity -> CoreExpr -> InScopeSet -> Type
-> (InScopeSet, [EtaInfo])
-- EtaInfo contains fresh variables,
-- not free in the incoming CoreExpr
-- Outgoing InScopeSet includes the EtaInfo vars
-- and the original free vars
mkEtaWW orig_n orig_expr in_scope orig_ty
= go orig_n empty_subst orig_ty []
where
empty_subst = TvSubst in_scope emptyTvSubstEnv
go n subst ty eis -- See Note [exprArity invariant]
| n == 0
= (getTvInScope subst, reverse eis)
| Just (tv,ty') <- splitForAllTy_maybe ty
, let (subst', tv') = Type.substTyVarBndr subst tv
-- Avoid free vars of the original expression
= go n subst' ty' (EtaVar tv' : eis)
| Just (arg_ty, res_ty) <- splitFunTy_maybe ty
, let (subst', eta_id') = freshEtaId n subst arg_ty
-- Avoid free vars of the original expression
= go (n-1) subst' res_ty (EtaVar eta_id' : eis)
| Just (co, ty') <- topNormaliseNewType_maybe ty
= -- Given this:
-- newtype T = MkT ([T] -> Int)
-- Consider eta-expanding this
-- eta_expand 1 e T
-- We want to get
-- coerce T (\x::[T] -> (coerce ([T]->Int) e) x)
go n subst ty' (EtaCo co : eis)
| otherwise -- We have an expression of arity > 0,
-- but its type isn't a function.
= WARN( True, (ppr orig_n <+> ppr orig_ty) $$ ppr orig_expr )
(getTvInScope subst, reverse eis)
-- This *can* legitimately happen:
-- e.g. coerce Int (\x. x) Essentially the programmer is
-- playing fast and loose with types (Happy does this a lot).
-- So we simply decline to eta-expand. Otherwise we'd end up
-- with an explicit lambda having a non-function type
--------------
-- Avoiding unnecessary substitution; use short-cutting versions
subst_expr :: Subst -> CoreExpr -> CoreExpr
subst_expr = substExprSC (text "CoreArity:substExpr")
subst_bind :: Subst -> CoreBind -> (Subst, CoreBind)
subst_bind = substBindSC
--------------
freshEtaId :: Int -> TvSubst -> Type -> (TvSubst, Id)
-- Make a fresh Id, with specified type (after applying substitution)
-- It should be "fresh" in the sense that it's not in the in-scope set
-- of the TvSubstEnv; and it should itself then be added to the in-scope
-- set of the TvSubstEnv
--
-- The Int is just a reasonable starting point for generating a unique;
-- it does not necessarily have to be unique itself.
freshEtaId n subst ty
= (subst', eta_id')
where
ty' = Type.substTy subst ty
eta_id' = uniqAway (getTvInScope subst) $
mkSysLocal (fsLit "eta") (mkBuiltinUnique n) ty'
subst' = extendTvInScope subst eta_id'
|
gcampax/ghc
|
compiler/coreSyn/CoreArity.hs
|
bsd-3-clause
| 36,420 | 0 | 14 | 10,071 | 4,501 | 2,309 | 2,192 | 272 | 7 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
-- | New-style @.travis.yml@ script generator using cabal 1.24's nix-style
-- tech-preview facilities.
--
-- See also <https://github.com/haskell-CI/haskell-ci>
--
-- NB: This code deliberately avoids relying on non-standard packages and
-- is expected to compile/work with at least GHC 7.0 through GHC 8.0
module HaskellCI (
main,
-- * for tests
parseOptions,
Options (..), defaultOptions,
Config (..), GitConfig (..),
InputType (..),
runDiagnosticsT,
-- ** Variants
bashFromConfigFile,
travisFromConfigFile,
githubFromConfigFile,
) where
import HaskellCI.Prelude
import Control.Exception (try)
import Data.List (nubBy, sort, sortBy, (\\))
import System.Directory (createDirectoryIfMissing, doesFileExist, setCurrentDirectory)
import System.Environment (getArgs)
import System.Exit (ExitCode (..), exitFailure)
import System.FilePath.Posix (takeDirectory)
import System.IO (hClose, hFlush, hPutStrLn, stderr)
import System.IO.Temp (withSystemTempFile)
import System.Process (readProcessWithExitCode)
import Distribution.PackageDescription (GenericPackageDescription, package, packageDescription, testedWith)
import Distribution.Simple.Utils (fromUTF8BS, toUTF8BS)
import Distribution.Text
import Distribution.Version
import qualified Data.ByteString as BS
import qualified Data.List.NonEmpty as NE
import qualified Data.Map as Map
import qualified Data.Set as S
import qualified Data.Traversable as T
import qualified Distribution.Compiler as Compiler
import qualified Distribution.Package as Pkg
import qualified Options.Applicative as O
import Cabal.Parse
import Cabal.Project
import HaskellCI.Bash
import HaskellCI.Cli
import HaskellCI.Compiler
import HaskellCI.Config
import HaskellCI.Config.Dump
import HaskellCI.Diagnostics
import HaskellCI.GitConfig
import HaskellCI.GitHub
import HaskellCI.HeadHackage
import HaskellCI.Jobs
import HaskellCI.Package
import HaskellCI.TestedWith
import HaskellCI.Travis
import HaskellCI.VersionInfo
import HaskellCI.YamlSyntax
import qualified HaskellCI.Bash.Template as Bash
-------------------------------------------------------------------------------
-- Main
-------------------------------------------------------------------------------
main :: IO ()
main = do
argv0 <- getArgs
(cmd, opts) <- O.customExecParser (O.prefs O.subparserInline) cliParserInfo
case cmd of
CommandListGHC -> do
putStrLn $ "Supported GHC versions:"
for_ groupedVersions $ \(v, vs) -> do
putStr $ prettyMajVersion v ++ ": "
putStrLn $ intercalate ", " (map display $ toList vs)
CommandDumpConfig -> do
putStr $ unlines $ runDG configGrammar
CommandRegenerate -> do
regenerateBash opts
regenerateGitHub opts
regenerateTravis opts
CommandBash f -> doBash argv0 f opts
CommandGitHub f -> doGitHub argv0 f opts
CommandTravis f -> doTravis argv0 f opts
CommandVersionInfo -> do
putStrLn $ "haskell-ci " ++ haskellCIVerStr ++ " with dependencies"
ifor_ dependencies $ \p v -> do
putStrLn $ " " ++ p ++ "-" ++ v
where
groupedVersions :: [(Version, NonEmpty Version)]
groupedVersions = map ((\vs -> (head vs, vs)) . NE.sortBy (flip compare))
. groupBy ((==) `on` ghcMajVer)
$ sort knownGhcVersions
prettyMajVersion :: Version -> String
prettyMajVersion v = case ghcMajVer v of
(x, y) -> show x ++ "." ++ show y
ifor_ :: Map.Map k v -> (k -> v -> IO a) -> IO ()
ifor_ xs f = Map.foldlWithKey' (\m k a -> m >> void (f k a)) (return ()) xs
-------------------------------------------------------------------------------
-- Travis
-------------------------------------------------------------------------------
defaultTravisPath :: FilePath
defaultTravisPath = ".travis.yml"
doTravis :: [String] -> FilePath -> Options -> IO ()
doTravis args path opts = do
contents <- travisFromConfigFile args opts path
case optOutput opts of
Nothing -> BS.writeFile defaultTravisPath contents
Just OutputStdout -> BS.putStr contents
Just (OutputFile fp) -> BS.writeFile fp contents
travisFromConfigFile
:: forall m. (MonadIO m, MonadDiagnostics m, MonadMask m)
=> [String]
-> Options
-> FilePath
-> m ByteString
travisFromConfigFile args opts path = do
gitconfig <- liftIO readGitConfig
cabalFiles <- getCabalFiles (optInputType' opts path) path
config' <- findConfigFile (optConfig opts)
let config = optConfigMorphism opts config'
pkgs <- T.mapM (configFromCabalFile config) cabalFiles
(ghcs, prj) <- case checkVersions (cfgTestedWith config) pkgs of
Right x -> return x
Left [] -> putStrLnErr "panic: checkVersions failed without errors"
Left (e:es) -> putStrLnErrs (e :| es)
let prj' | cfgGhcHead config = over (mapped . field @"pkgJobs") (S.insert GHCHead) prj
| otherwise = prj
ls <- genTravisFromConfigs args config gitconfig prj' ghcs
patchTravis config ls
genTravisFromConfigs
:: (Monad m, MonadDiagnostics m)
=> [String]
-> Config
-> GitConfig
-> Project URI Void Package
-> Set CompilerVersion
-> m ByteString
genTravisFromConfigs argv config _gitconfig prj vs = do
let jobVersions = makeJobVersions config vs
case makeTravis argv config prj jobVersions of
Left err -> putStrLnErr $ displayException err
Right travis -> do
describeJobs "Travis-CI config" (cfgTestedWith config) jobVersions (prjPackages prj)
return $ toUTF8BS $
prettyYaml id (reann (travisHeader (cfgInsertVersion config) argv ++) $ toYaml travis)
++ unlines
[ ""
, "# REGENDATA " ++ if cfgInsertVersion config then show (haskellCIVerStr, argv) else show argv
, "# EOF"
]
regenerateTravis :: Options -> IO ()
regenerateTravis opts = do
let fp = defaultTravisPath
-- change the directory
for_ (optCwd opts) setCurrentDirectory
-- read, and then change to the directory
withContents fp noTravisYml $ \contents -> case findRegendataArgv contents of
Nothing -> do
hPutStrLn stderr $ "Error: expected REGENDATA line in " ++ fp
exitFailure
Just (mversion, argv) -> do
-- warn if we regenerate using older haskell-ci
for_ mversion $ \version -> for_ (simpleParsec haskellCIVerStr) $ \haskellCIVer ->
when (haskellCIVer < version) $ do
hPutStrLn stderr $ "Regenerating using older haskell-ci-" ++ haskellCIVerStr
hPutStrLn stderr $ "File generated using haskell-ci-" ++ prettyShow version
(f, opts') <- parseOptions argv
doTravis argv f ( optionsWithOutputFile fp <> opts' <> opts)
where
noTravisYml :: IO ()
noTravisYml = putStrLn "No .travis.yml, skipping travis regeneration"
-------------------------------------------------------------------------------
-- Bash
-------------------------------------------------------------------------------
defaultBashPath :: FilePath
defaultBashPath = "haskell-ci.sh"
doBash :: [String] -> FilePath -> Options -> IO ()
doBash args path opts = do
contents <- bashFromConfigFile args opts path
case optOutput opts of
Nothing -> BS.writeFile defaultBashPath contents
Just OutputStdout -> BS.putStr contents
Just (OutputFile fp) -> BS.writeFile fp contents
bashFromConfigFile
:: forall m. (MonadIO m, MonadDiagnostics m, MonadMask m)
=> [String]
-> Options
-> FilePath
-> m ByteString
bashFromConfigFile args opts path = do
gitconfig <- liftIO readGitConfig
cabalFiles <- getCabalFiles (optInputType' opts path) path
config' <- findConfigFile (optConfig opts)
let config = optConfigMorphism opts config'
pkgs <- T.mapM (configFromCabalFile config) cabalFiles
(ghcs, prj) <- case checkVersions (cfgTestedWith config) pkgs of
Right x -> return x
Left [] -> putStrLnErr "panic: checkVersions failed without errors"
Left (e:es) -> putStrLnErrs (e :| es)
let prj' | cfgGhcHead config = over (mapped . field @"pkgJobs") (S.insert GHCHead) prj
| otherwise = prj
genBashFromConfigs args config gitconfig prj' ghcs
genBashFromConfigs
:: (Monad m, MonadIO m, MonadDiagnostics m)
=> [String]
-> Config
-> GitConfig
-> Project URI Void Package
-> Set CompilerVersion
-> m ByteString
genBashFromConfigs argv config _gitconfig prj vs = do
let jobVersions = makeJobVersions config vs
case makeBash argv config prj jobVersions of
Left err -> putStrLnErr $ displayException err
Right bashZ -> do
describeJobs "Bash script" (cfgTestedWith config) jobVersions (prjPackages prj)
fmap toUTF8BS $ liftIO $ Bash.renderIO bashZ
{ Bash.zRegendata = if cfgInsertVersion config then show (haskellCIVerStr, argv) else show argv
}
regenerateBash :: Options -> IO ()
regenerateBash opts = do
let fp = defaultBashPath
-- change the directory
for_ (optCwd opts) setCurrentDirectory
-- read, and then change to the directory
withContents fp noBashScript $ \contents -> case findRegendataArgv contents of
Nothing -> do
hPutStrLn stderr $ "Error: expected REGENDATA line in " ++ fp
exitFailure
Just (mversion, argv) -> do
-- warn if we regenerate using older haskell-ci
for_ mversion $ \version -> for_ (simpleParsec haskellCIVerStr) $ \haskellCIVer ->
when (haskellCIVer < version) $ do
hPutStrLn stderr $ "Regenerating using older haskell-ci-" ++ haskellCIVerStr
hPutStrLn stderr $ "File generated using haskell-ci-" ++ prettyShow version
(f, opts') <- parseOptions argv
doBash argv f ( optionsWithOutputFile fp <> opts' <> opts)
where
noBashScript :: IO ()
noBashScript = putStrLn "No haskell-ci.sh, skipping bash regeneration"
-------------------------------------------------------------------------------
-- GitHub actions
-------------------------------------------------------------------------------
defaultGitHubPath :: FilePath
defaultGitHubPath = ".github/workflows/haskell-ci.yml"
doGitHub :: [String] -> FilePath -> Options -> IO ()
doGitHub args path opts = do
contents <- githubFromConfigFile args opts path
case optOutput opts of
Nothing -> do
createDir defaultGitHubPath
BS.writeFile defaultGitHubPath contents
Just OutputStdout -> BS.putStr contents
Just (OutputFile fp) -> do
createDir fp
BS.writeFile fp contents
where
createDir p = createDirectoryIfMissing True (takeDirectory p)
githubFromConfigFile
:: forall m. (MonadIO m, MonadDiagnostics m, MonadMask m)
=> [String]
-> Options
-> FilePath
-> m ByteString
githubFromConfigFile args opts path = do
gitconfig <- liftIO readGitConfig
cabalFiles <- getCabalFiles (optInputType' opts path) path
config' <- findConfigFile (optConfig opts)
let config = optConfigMorphism opts config'
pkgs <- T.mapM (configFromCabalFile config) cabalFiles
(ghcs, prj) <- case checkVersions (cfgTestedWith config) pkgs of
Right x -> return x
Left [] -> putStrLnErr "panic: checkVersions failed without errors"
Left (e:es) -> putStrLnErrs (e :| es)
let prj' | cfgGhcHead config = over (mapped . field @"pkgJobs") (S.insert GHCHead) prj
| otherwise = prj
ls <- genGitHubFromConfigs args config gitconfig prj' ghcs
patchGitHub config ls
genGitHubFromConfigs
:: (Monad m, MonadIO m, MonadDiagnostics m)
=> [String]
-> Config
-> GitConfig
-> Project URI Void Package
-> Set CompilerVersion
-> m ByteString
genGitHubFromConfigs argv config gitconfig prj vs = do
let jobVersions = makeJobVersions config vs
case makeGitHub argv config gitconfig prj jobVersions of
Left err -> putStrLnErr $ displayException err
Right github -> do
describeJobs "GitHub config" (cfgTestedWith config) jobVersions (prjPackages prj)
return $ toUTF8BS $ prettyYaml id $ reann (githubHeader (cfgInsertVersion config) argv ++) $ toYaml github
regenerateGitHub :: Options -> IO ()
regenerateGitHub opts = do
-- change the directory
for_ (optCwd opts) setCurrentDirectory
-- read the existing file and regenerate from its REGENDATA line
withContents fp noGitHubScript $ \contents -> case findRegendataArgv contents of
Nothing -> do
hPutStrLn stderr $ "Error: expected REGENDATA line in " ++ fp
exitFailure
Just (mversion, argv) -> do
-- warn if we regenerate using older haskell-ci
for_ mversion $ \version -> for_ (simpleParsec haskellCIVerStr) $ \haskellCIVer ->
when (haskellCIVer < version) $ do
hPutStrLn stderr $ "Regenerating using older haskell-ci-" ++ haskellCIVerStr
hPutStrLn stderr $ "File generated using haskell-ci-" ++ prettyShow version
(f, opts') <- parseOptions argv
doGitHub argv f ( optionsWithOutputFile fp <> opts' <> opts)
where
fp = defaultGitHubPath
noGitHubScript :: IO ()
noGitHubScript = putStrLn $ "No " ++ fp ++ ", skipping GitHub config regeneration"
-------------------------------------------------------------------------------
-- Config file
-------------------------------------------------------------------------------
findConfigFile :: MonadIO m => ConfigOpt -> m Config
findConfigFile ConfigOptNo = return emptyConfig
findConfigFile (ConfigOpt fp) = readConfigFile fp
findConfigFile ConfigOptAuto = do
let defaultPath = "cabal.haskell-ci"
exists <- liftIO (doesFileExist defaultPath)
if exists
then readConfigFile defaultPath
else return emptyConfig
-------------------------------------------------------------------------------
-- Patches
-------------------------------------------------------------------------------
patchTravis
:: (MonadIO m, MonadMask m)
=> Config -> ByteString -> m ByteString
patchTravis = patchYAML . cfgTravisPatches
patchGitHub
:: (MonadIO m, MonadMask m)
=> Config -> ByteString -> m ByteString
patchGitHub = patchYAML . cfgGitHubPatches
-- | Adjust the generated YAML output with patch files, if specified.
-- We do this in a temporary file in case the user did not pass --output (as
-- it would be awkward to patch the generated output otherwise).
patchYAML
:: (MonadIO m, MonadMask m)
=> [FilePath] -> ByteString -> m ByteString
patchYAML patches input
| null patches = pure input
| otherwise =
withSystemTempFile "yml.tmp" $ \fp h -> liftIO $ do
BS.hPutStr h input
hFlush h
for_ patches $ applyPatch fp
hClose h
BS.readFile fp
where
applyPatch :: FilePath -- ^ The temporary file path to patch
-> FilePath -- ^ The path of the .patch file
-> IO ()
applyPatch temp patch = do
exists <- doesFileExist patch
unless exists $ putStrLnErr $ "Cannot find " ++ patch
(ec, stdOut, stdErr) <- readProcessWithExitCode
"patch" [ "--input", patch
, "--silent"
, temp
] ""
case ec of
ExitSuccess -> pure ()
ExitFailure n -> putStrLnErr $ unlines
[ "patch returned exit code " ++ show n
, "Stdout: " ++ stdOut
, "Stderr: " ++ stdErr
]
-------------------------------------------------------------------------------
-- Utilities
-------------------------------------------------------------------------------
withContents
:: FilePath -- ^ filepath
-> IO r -- ^ what to do when the file doesn't exist
-> (String -> IO r) -- ^ continuation
-> IO r
withContents path no kont = do
e <- try (BS.readFile path) :: IO (Either IOError BS.ByteString)
case e of
Left _ -> no
Right contents -> kont (fromUTF8BS contents)
-- | Find @REGENDATA@ in a string
findRegendataArgv :: String -> Maybe (Maybe Version, [String])
findRegendataArgv contents = do
l <- findMaybe (afterInfix "REGENDATA") (lines contents)
first simpleParsec <$> (readMaybe l :: Maybe (String, [String]))
<|> (,) Nothing <$> (readMaybe l :: Maybe [String])
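-- A hedged illustration (not from the package documentation) of the two line
-- shapes accepted above, matching what the gen*FromConfigs functions write:
--
--   # REGENDATA ("0.10",["github","example.cabal"])   -- versioned form
--   # REGENDATA ["github","example.cabal"]            -- older form, no version
--
-- (the version string and arguments here are made up). When a version is
-- present, the regenerate* commands compare it against haskellCIVerStr and
-- warn if the installed haskell-ci is older.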
-- | Read project file and associated .cabal files.
getCabalFiles
:: (MonadDiagnostics m, MonadIO m)
=> InputType
-> FilePath
-> m (Project URI Void (FilePath, GenericPackageDescription))
getCabalFiles InputTypeProject path = do
contents <- liftIO $ BS.readFile path
prj0 <- either (putStrLnErr . renderParseError) return $ parseProject path contents
prj1 <- either (putStrLnErr . renderResolveError) return =<< liftIO (resolveProject path prj0)
either (putStrLnErr . renderParseError) return =<< liftIO (readPackagesOfProject prj1)
getCabalFiles InputTypePackage path = do
e <- liftIO $ readPackagesOfProject (emptyProject & field @"prjPackages" .~ [path])
either (putStrLnErr . renderParseError) return e
-------------------------------------------------------------------------------
-- Config
-------------------------------------------------------------------------------
configFromCabalFile
:: (MonadIO m, MonadDiagnostics m)
=> Config -> (FilePath, GenericPackageDescription) -> m Package
configFromCabalFile cfg (cabalFile, gpd) = do
let compilers = testedWith $ packageDescription gpd
pkgNameStr = display $ Pkg.pkgName $ package $ packageDescription gpd
let unknownComps = nub [ c | (c,_) <- compilers, c /= Compiler.GHC, c /= Compiler.GHCJS ]
ghcVerConstrs = [ vc | (Compiler.GHC,vc) <- compilers ]
ghcVerConstrs' = simplifyVersionRange $ foldr unionVersionRanges noVersion ghcVerConstrs
specificGhcVers = nub $ mapMaybe isSpecificVersion ghcVerConstrs
ghcjsVerConstrs = [ vc | (Compiler.GHCJS,vc) <- compilers ]
ghcjsVerConstrs' = simplifyVersionRange $ foldr unionVersionRanges noVersion ghcjsVerConstrs
specificGhcjsVers = nub $ mapMaybe isSpecificVersion ghcjsVerConstrs
twoDigitGhcVerConstrs = mapMaybe isTwoDigitGhcVersion ghcVerConstrs :: [Version]
unless (null twoDigitGhcVerConstrs) $ do
putStrLnWarn $ "'tested-with:' uses two digit GHC versions (which don't match any existing GHC version): " ++ intercalate ", " (map display twoDigitGhcVerConstrs)
putStrLnInfo $ "Either use wild-card format, for example 'tested-with: GHC ==7.10.*' or a specific existing version 'tested-with: GHC ==7.10.3'"
when (null compilers) $ do
putStrLnErr (unlines $
[ "empty or missing top-level 'tested-with:' definition in " ++ cabalFile ++ " file; example definition:"
, ""
, "tested-with: " ++ intercalate ", " [ "GHC==" ++ display v | v <- lastStableGhcVers ]
])
unless (null unknownComps) $ do
putStrLnWarn $ "ignoring unsupported compilers mentioned in tested-with: " ++ show unknownComps
when (null ghcVerConstrs) $ do
putStrLnErr "'tested-with:' doesn't mention any 'GHC' version"
when (isNoVersion ghcVerConstrs') $ do
putStrLnErr "'tested-with:' describes an empty version range for 'GHC'"
when (isAnyVersion ghcVerConstrs') $ do
putStrLnErr "'tested-with:' allows /any/ 'GHC' version"
let unknownGhcVers = sort $ specificGhcVers \\ knownGhcVersions
unless (null unknownGhcVers) $ do
putStrLnErr ("'tested-with:' specifically refers to unknown 'GHC' versions: "
++ intercalate ", " (map display unknownGhcVers) ++ "\n"
++ "Known GHC versions: " ++ intercalate ", " (map display knownGhcVersions))
let unknownGhcjsVers = sort $ specificGhcjsVers \\ knownGhcjsVersions
unless (null unknownGhcjsVers) $ do
putStrLnErr ("'tested-with:' specifically refers to unknown 'GHCJS' versions: "
++ intercalate ", " (map display unknownGhcjsVers) ++ "\n"
++ "Known GHCJS versions: " ++ intercalate ", " (map display knownGhcjsVersions))
let knownGhcVersions'
| cfgLastInSeries cfg = filterLastMajor knownGhcVersions
| otherwise = knownGhcVersions
let testedGhcVersions = filter (`withinRange` ghcVerConstrs') knownGhcVersions'
let testedGhcjsVersions = filter (`withinRange` ghcjsVerConstrs') knownGhcjsVersions
when (null testedGhcVersions) $ do
putStrLnErr "no known GHC version is allowed by the 'tested-with' specification"
let compilerRange :: Set CompilerVersion
compilerRange = S.fromList $
[ GHC v
| v <- testedGhcVersions
] ++
[ GHCJS v
| v <- testedGhcjsVersions
]
let pkg = Pkg pkgNameStr compilerRange (takeDirectory cabalFile) gpd
return pkg
where
lastStableGhcVers
= nubBy ((==) `on` ghcMajVer)
$ sortBy (flip compare)
$ filter (not . previewGHC defaultHeadHackage . GHC)
$ knownGhcVersions
isTwoDigitGhcVersion :: VersionRange -> Maybe Version
isTwoDigitGhcVersion vr = isSpecificVersion vr >>= t
where
t v | [_,_] <- versionNumbers v = Just v
t _ = Nothing
filterLastMajor = map maximum . groupBy ((==) `on` ghcMajVer)
|
hvr/multi-ghc-travis
|
src/HaskellCI.hs
|
bsd-3-clause
| 22,256 | 0 | 23 | 5,583 | 5,519 | 2,735 | 2,784 | 420 | 7 |
module Servant.Server.Auth.Token.LevelDB(
LevelDBBackendT
, runLevelDBBackendT
, LevelDBEnv
, newLevelDBEnv
) where
import Control.Monad.Catch
import Control.Monad.Except
import Control.Monad.IO.Unlift
import Control.Monad.Reader
import Control.Monad.Trans.Resource
import Servant.Server
import Servant.Server.Auth.Token.Config
import Servant.Server.Auth.Token.LevelDB.Schema (LevelDBEnv, newLevelDBEnv)
import Servant.Server.Auth.Token.Model
import qualified Servant.Server.Auth.Token.LevelDB.Schema as S
-- | Monad transformer that implements storage backend
newtype LevelDBBackendT m a = LevelDBBackendT { unLevelDBBackendT :: ReaderT (AuthConfig, LevelDBEnv) (ResourceT m) a }
deriving (Functor, Applicative, Monad, MonadIO, MonadReader (AuthConfig, LevelDBEnv), MonadThrow, MonadCatch)
deriving instance (MonadThrow m, MonadIO m) => MonadResource (LevelDBBackendT m)
instance MonadCatch m => MonadError ServantErr (LevelDBBackendT m) where
throwError = throwM
catchError = catch
instance Monad m => HasAuthConfig (LevelDBBackendT m) where
getAuthConfig = fst <$> LevelDBBackendT ask
instance MonadUnliftIO m => MonadUnliftIO (LevelDBBackendT m) where
askUnliftIO = LevelDBBackendT $ withUnliftIO $ \u -> pure (UnliftIO (unliftIO u . unLevelDBBackendT))
-- newtype StMLevelDBBackendT m a = StMLevelDBBackendT { unStMLevelDBBackendT :: StM (ReaderT (AuthConfig, LevelDBEnv) (ExceptT ServantErr m)) a }
--
-- instance MonadBaseControl IO m => MonadBaseControl IO (LevelDBBackendT m) where
-- type StM (LevelDBBackendT m) a = StMLevelDBBackendT m a
-- liftBaseWith f = LevelDBBackendT $ liftBaseWith $ \q -> f (fmap StMLevelDBBackendT . q . unLevelDBBackendT)
-- restoreM = LevelDBBackendT . restoreM . unStMLevelDBBackendT
-- | Execute backend action with the given LevelDB environment.
runLevelDBBackendT :: (MonadUnliftIO m, MonadCatch m) => AuthConfig -> LevelDBEnv -> LevelDBBackendT m a -> m (Either ServantErr a)
runLevelDBBackendT cfg db ma = do
let ma' = runResourceT $ runReaderT (unLevelDBBackendT ma) (cfg, db)
catch (Right <$> ma') $ \e -> pure $ Left e
-- | Helper to extract LevelDB reference
getEnv :: Monad m => LevelDBBackendT m LevelDBEnv
getEnv = snd <$> LevelDBBackendT ask
-- | Helper to lift low-level LevelDB queries to backend monad
liftEnv :: Monad m => (LevelDBEnv -> ResourceT m a) -> LevelDBBackendT m a
liftEnv f = LevelDBBackendT . ReaderT $ f . snd
instance (MonadIO m, MonadThrow m, MonadMask m) => HasStorage (LevelDBBackendT m) where
getUserImpl = liftEnv . flip S.load
getUserImplByLogin = liftEnv . S.getUserImplByLogin
listUsersPaged page size = liftEnv $ S.listUsersPaged page size
getUserImplPermissions = liftEnv . S.getUserImplPermissions
deleteUserPermissions = liftEnv . S.deleteUserPermissions
insertUserPerm = liftEnv . S.insertUserPerm
insertUserImpl = liftEnv . S.insertUserImpl
replaceUserImpl i v = liftEnv $ S.replaceUserImpl i v
deleteUserImpl = liftEnv . S.deleteUserImpl
hasPerm i p = liftEnv $ S.hasPerm i p
getFirstUserByPerm = liftEnv . S.getFirstUserByPerm
selectUserImplGroups = liftEnv . S.selectUserImplGroups
clearUserImplGroups = liftEnv . S.clearUserImplGroups
insertAuthUserGroup = liftEnv . S.insertAuthUserGroup
insertAuthUserGroupUsers = liftEnv . S.insertAuthUserGroupUsers
insertAuthUserGroupPerms = liftEnv . S.insertAuthUserGroupPerms
getAuthUserGroup = liftEnv . flip S.load
listAuthUserGroupPermissions = liftEnv . S.listAuthUserGroupPermissions
listAuthUserGroupUsers = liftEnv . S.listAuthUserGroupUsers
replaceAuthUserGroup i v = liftEnv $ S.replaceAuthUserGroup i v
clearAuthUserGroupUsers = liftEnv . S.clearAuthUserGroupUsers
clearAuthUserGroupPerms = liftEnv . S.clearAuthUserGroupPerms
deleteAuthUserGroup = liftEnv . S.deleteAuthUserGroup
listGroupsPaged page size = liftEnv $ S.listGroupsPaged page size
setAuthUserGroupName i n = liftEnv $ S.setAuthUserGroupName i n
setAuthUserGroupParent i mp = liftEnv $ S.setAuthUserGroupParent i mp
insertSingleUseCode = liftEnv . S.insertSingleUseCode
setSingleUseCodeUsed i mt = liftEnv $ S.setSingleUseCodeUsed i mt
getUnusedCode c i t = liftEnv $ S.getUnusedCode c i t
invalidatePermanentCodes i t = liftEnv $ S.invalidatePermanentCodes i t
selectLastRestoreCode i t = liftEnv $ S.selectLastRestoreCode i t
insertUserRestore = liftEnv . S.insertUserRestore
findRestoreCode i rc t = liftEnv $ S.findRestoreCode i rc t
replaceRestoreCode i v = liftEnv $ S.replaceRestoreCode i v
findAuthToken i t = liftEnv $ S.findAuthToken i t
findAuthTokenByValue t = liftEnv $ S.findAuthTokenByValue t
insertAuthToken = liftEnv . S.insertAuthToken
replaceAuthToken i v = liftEnv $ S.replaceAuthToken i v
{-# INLINE getUserImpl #-}
{-# INLINE getUserImplByLogin #-}
{-# INLINE listUsersPaged #-}
{-# INLINE getUserImplPermissions #-}
{-# INLINE deleteUserPermissions #-}
{-# INLINE insertUserPerm #-}
{-# INLINE insertUserImpl #-}
{-# INLINE replaceUserImpl #-}
{-# INLINE deleteUserImpl #-}
{-# INLINE hasPerm #-}
{-# INLINE getFirstUserByPerm #-}
{-# INLINE selectUserImplGroups #-}
{-# INLINE clearUserImplGroups #-}
{-# INLINE insertAuthUserGroup #-}
{-# INLINE insertAuthUserGroupUsers #-}
{-# INLINE insertAuthUserGroupPerms #-}
{-# INLINE getAuthUserGroup #-}
{-# INLINE listAuthUserGroupPermissions #-}
{-# INLINE listAuthUserGroupUsers #-}
{-# INLINE replaceAuthUserGroup #-}
{-# INLINE clearAuthUserGroupUsers #-}
{-# INLINE clearAuthUserGroupPerms #-}
{-# INLINE deleteAuthUserGroup #-}
{-# INLINE listGroupsPaged #-}
{-# INLINE setAuthUserGroupName #-}
{-# INLINE setAuthUserGroupParent #-}
{-# INLINE insertSingleUseCode #-}
{-# INLINE setSingleUseCodeUsed #-}
{-# INLINE getUnusedCode #-}
{-# INLINE invalidatePermanentCodes #-}
{-# INLINE selectLastRestoreCode #-}
{-# INLINE insertUserRestore #-}
{-# INLINE findRestoreCode #-}
{-# INLINE replaceRestoreCode #-}
{-# INLINE findAuthToken #-}
{-# INLINE findAuthTokenByValue #-}
{-# INLINE insertAuthToken #-}
{-# INLINE replaceAuthToken #-}
|
NCrashed/servant-auth-token
|
servant-auth-token-leveldb/src/Servant/Server/Auth/Token/LevelDB.hs
|
bsd-3-clause
| 6,113 | 0 | 13 | 933 | 1,203 | 655 | 548 | -1 | -1 |
{-
TexGen.hs (adapted from texgen.c which is (c) Silicon Graphics, Inc)
Copyright (c) Sven Panne 2002-2005 <[email protected]>
This file is part of HOpenGL and distributed under a BSD-style license
See the file libraries/GLUT/LICENSE
This program draws a texture mapped teapot with automatically generated
texture coordinates. The texture is rendered as stripes on the teapot.
Initially, the object is drawn with texture coordinates based upon the
object coordinates of the vertex and distance from the plane x = 0.
Pressing the 'e' key changes the coordinate generation to eye coordinates
of the vertex. Pressing the 'o' key switches it back to the object
coordinates. Pressing the 's' key changes the plane to a slanted one
(x + y + z = 0). Pressing the 'x' key switches it back to x = 0.
-}
import Control.Monad ( when )
import Data.Char ( toLower )
import Data.Maybe ( isJust, listToMaybe )
import Foreign ( withArray )
import System.Exit ( exitWith, ExitCode(ExitSuccess) )
import Graphics.UI.GLUT
stripeImageWidth :: TextureSize1D
stripeImageWidth = TextureSize1D 32
xEqualZero, slanted :: Plane GLdouble
xEqualZero = Plane 1 0 0 0
slanted = Plane 1 1 1 0
withStripeImage :: (PixelData (Color4 GLubyte) -> IO a) -> IO a
withStripeImage act =
withArray [ Color4 (if j <= 4 then 255 else 0)
(if j > 4 then 255 else 0)
0
255
| j <- [ 0 .. w - 1 ] ] $
act . PixelData RGBA UnsignedByte
where TextureSize1D w = stripeImageWidth
myInit :: IO (Maybe TextureObject)
myInit = do
clearColor $= Color4 0 0 0 0
depthFunc $= Just Less
shadeModel $= Smooth
rowAlignment Unpack $= 1
exts <- get glExtensions
mbTexName <- if "GL_EXT_texture_object" `elem` exts
then fmap listToMaybe $ genObjectNames 1
else return Nothing
when (isJust mbTexName) $ textureBinding Texture1D $= mbTexName
textureWrapMode Texture1D S $= (Repeated, Repeat)
textureFilter Texture1D $= ((Linear', Nothing), Linear')
withStripeImage $ texImage1D NoProxy 0 RGBA' stripeImageWidth 0
textureFunction $= Modulate
textureGenMode S $= Just (ObjectLinear xEqualZero)
texture Texture1D $= Enabled
lighting $= Enabled
light (Light 0) $= Enabled
autoNormal $= Enabled
normalize $= Enabled
frontFace $= CW
cullFace $= Just Back
materialShininess Front $= 64
return mbTexName
display :: Maybe TextureObject -> DisplayCallback
display mbTexName = do
clear [ ColorBuffer, DepthBuffer ]
preservingMatrix $ do
rotate (45 :: GLfloat) (Vector3 0 0 1)
when (isJust mbTexName) $ textureBinding Texture1D $= mbTexName
renderObject Solid (Teapot 2)
flush
reshape :: ReshapeCallback
reshape size@(Size w h) = do
viewport $= (Position 0 0, size)
matrixMode $= Projection
loadIdentity
let wf = fromIntegral w
hf = fromIntegral h
if w <= h
then ortho (-3.5) 3.5 (-3.5*hf/wf) (3.5*hf/wf) (-3.5) 3.5
else ortho (-3.5*wf/hf) (3.5*wf/hf) (-3.5) 3.5 (-3.5) 3.5
matrixMode $= Modelview 0
loadIdentity
keyboard :: KeyboardMouseCallback
keyboard (Char c) Down _ _ = case toLower c of
'e' -> setGenMode EyeLinear
'o' -> setGenMode ObjectLinear
's' -> setPlane slanted
'x' -> setPlane xEqualZero
'\27' -> exitWith ExitSuccess
_ -> return ()
keyboard _ _ _ _ = return ()
setGenMode :: (Plane GLdouble -> TextureGenMode) -> IO ()
setGenMode mode = do
currentGenMode <- get (textureGenMode S)
case currentGenMode of
Just (EyeLinear plane) -> textureGenMode S $= Just (mode plane)
Just (ObjectLinear plane) -> textureGenMode S $= Just (mode plane)
_ -> error "setGenMode: should never happen..."
postRedisplay Nothing
setPlane :: Plane GLdouble -> IO ()
setPlane plane = do
currentGenMode <- get (textureGenMode S)
case currentGenMode of
Just (EyeLinear _) -> textureGenMode S $= Just (EyeLinear plane)
Just (ObjectLinear _) -> textureGenMode S $= Just (ObjectLinear plane)
_ -> error "setPlane: should never happen..."
postRedisplay Nothing
main :: IO ()
main = do
(progName, _args) <- getArgsAndInitialize
initialDisplayMode $= [ SingleBuffered, RGBMode, WithDepthBuffer ]
initialWindowSize $= Size 256 256
initialWindowPosition $= Position 100 100
createWindow progName
mbTexName <- myInit
displayCallback $= display mbTexName
reshapeCallback $= Just reshape
keyboardMouseCallback $= Just keyboard
mainLoop
|
FranklinChen/hugs98-plus-Sep2006
|
packages/GLUT/examples/RedBook/TexGen.hs
|
bsd-3-clause
| 4,580 | 0 | 14 | 1,081 | 1,311 | 629 | 682 | 102 | 6 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.Metrology.Parser
-- Copyright : (C) 2014 Richard Eisenberg
-- License : BSD-style (see LICENSE)
-- Maintainer : Richard Eisenberg ([email protected])
-- Stability : experimental
-- Portability : non-portable
--
-- This module exports functions allowing users to create their own unit
-- quasiquoters to make for compact unit expressions.
--
-- A typical use case is this:
--
-- > $(makeQuasiQuoter "unit" [''Kilo, ''Milli] [''Meter, ''Second])
--
-- and then, /in a separate module/ (due to GHC's staging constraints)
--
-- > x = 3 % [unit| m/s^2 ]
--
-- The unit expressions can refer to the prefixes and units specified in
-- the call to 'makeQuasiQuoter'. The spellings of the prefixes and units
-- are taken from their @Show@ instances.
--
-- The syntax for these expressions is like
-- F#'s. There are four arithmetic operators (@*@, @/@, @^@, and juxtaposition).
-- Exponentiation binds the tightest, and it allows an integer to its right
-- (possibly with minus signs and parentheses). Next tightest is juxtaposition,
-- which indicates multiplication. Because juxtaposition binds tighter than division,
-- the expressions @m/s^2@ and @m/s s@ are equivalent. Multiplication and
-- division bind the loosest and are left-associative, meaning that @m/s*s@
-- is equivalent to @(m/s)*s@, probably not what you meant. Parentheses in
-- unit expressions are allowed, of course.
--
-- Within a unit string (that is, a unit with an optional prefix), there may
-- be ambiguity. If a unit string can be interpreted as a unit without a
-- prefix, that parsing is preferred. Thus, @min@ would be minutes, not
-- milli-inches (assuming appropriate prefixes and units are available). There still
-- may be ambiguity between unit strings, even interpreting the string as a prefix
-- and a base unit. If a unit string is ambiguous in this way, it is rejected.
-- For example, if we have prefixes @da@ and @d@ and units @m@ and @am@, then
-- @dam@ is ambiguous like this.
-----------------------------------------------------------------------------
{-# LANGUAGE TemplateHaskell, CPP #-}
{-# OPTIONS_HADDOCK prune #-}
module Data.Metrology.Parser (
-- * Quasiquoting interface
makeQuasiQuoter, allUnits, allPrefixes,
-- * Direct interface
-- | The definitions below allow users to access the unit parser directly.
-- The parser produces 'UnitExp's which can then be further processed as
-- necessary.
parseUnit,
UnitExp(..), SymbolTable,
mkSymbolTable,
-- for internal use only
parseUnitExp, parseUnitType
) where
import Prelude hiding ( exp )
import Language.Haskell.TH hiding ( Pred )
import Language.Haskell.TH.Quote
import Language.Haskell.TH.Desugar.Lift () -- get the Lift Name instance
import Data.Maybe
import Control.Monad
import Data.Metrology.Parser.Internal
import Data.Metrology
import Data.Metrology.TH
----------------------------------------------------------------------
-- TH conversions
----------------------------------------------------------------------
parseUnitExp :: SymbolTable Name Name -> String -> Either String Exp
parseUnitExp tab s = to_exp `liftM` parseUnit tab s -- the Either monad
where
to_exp Unity = ConE 'Number
to_exp (Unit (Just pre) unit) = ConE '(:@) `AppE` of_type pre `AppE` of_type unit
to_exp (Unit Nothing unit) = of_type unit
to_exp (Mult e1 e2) = ConE '(:*) `AppE` to_exp e1 `AppE` to_exp e2
to_exp (Div e1 e2) = ConE '(:/) `AppE` to_exp e1 `AppE` to_exp e2
to_exp (Pow e i) = ConE '(:^) `AppE` to_exp e `AppE` mk_sing i
of_type :: Name -> Exp
of_type n = (VarE 'undefined) `SigE` (ConT n)
mk_sing :: Integer -> Exp
mk_sing n
| n < 0 = VarE 'sPred `AppE` mk_sing (n + 1)
| n > 0 = VarE 'sSucc `AppE` mk_sing (n - 1)
| otherwise = VarE 'sZero
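-- A hedged worked example (not from the package documentation): given a
-- symbol table that maps "m" to ''Meter and "s" to ''Second (hypothetical
-- unit types), the string "m/s^2" parses to
--
--   Div (Unit Nothing ''Meter) (Pow (Unit Nothing ''Second) 2)
--
-- which 'to_exp' above renders, roughly, as the expression
--
--   (undefined :: Meter) :/ ((undefined :: Second) :^ sSucc (sSucc sZero))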
parseUnitType :: SymbolTable Name Name -> String -> Either String Type
parseUnitType tab s = to_type `liftM` parseUnit tab s -- the Either monad
where
to_type Unity = ConT ''Number
to_type (Unit (Just pre) unit) = ConT ''(:@) `AppT` ConT pre `AppT` ConT unit
to_type (Unit Nothing unit) = ConT unit
to_type (Mult e1 e2) = ConT ''(:*) `AppT` to_type e1 `AppT` to_type e2
to_type (Div e1 e2) = ConT ''(:/) `AppT` to_type e1 `AppT` to_type e2
to_type (Pow e i) = ConT ''(:^) `AppT` to_type e `AppT` mk_z i
mk_z :: Integer -> Type
mk_z n
| n < 0 = ConT ''Pred `AppT` mk_z (n + 1)
| n > 0 = ConT ''Succ `AppT` mk_z (n - 1)
| otherwise = ConT 'Zero -- single quote as it's a data constructor!
----------------------------------------------------------------------
-- QuasiQuoters
----------------------------------------------------------------------
emptyQQ :: QuasiQuoter
emptyQQ = QuasiQuoter { quoteExp = \_ -> fail "No quasi-quoter for expressions"
, quotePat = \_ -> fail "No quasi-quoter for patterns"
, quoteType = \_ -> fail "No quasi-quoter for types"
, quoteDec = \_ -> fail "No quasi-quoter for declarations" }
errorQQ :: String -> QuasiQuoter
errorQQ msg = QuasiQuoter { quoteExp = \_ -> fail msg
, quotePat = \_ -> fail msg
, quoteType = \_ -> fail msg
, quoteDec = \_ -> fail msg }
-- | @makeQuasiQuoter "qq" prefixes units@ makes a quasi-quoter named @qq@
-- that considers the prefixes and units provided. These are provided via
-- names of the /type/ constructors, /not/ the data constructors. See the
-- module documentation for more info and an example.
makeQuasiQuoter :: String -> [Name] -> [Name] -> Q [Dec]
makeQuasiQuoter qq_name_str prefix_names unit_names = do
mapM_ checkIsType prefix_names
mapM_ checkIsType unit_names
qq <- [| case $sym_tab of
Left err -> errorQQ err
Right computed_sym_tab ->
emptyQQ { quoteExp = \unit_exp ->
case parseUnitExp computed_sym_tab unit_exp of
Left err2 -> fail err2
Right exp -> return exp
, quoteType = \unit_exp ->
case parseUnitType computed_sym_tab unit_exp of
Left err2 -> fail err2
Right typ -> return typ
} |]
return [ SigD qq_name (ConT ''QuasiQuoter)
, ValD (VarP qq_name) (NormalB qq) []]
where
qq_name = mkName qq_name_str
mk_pair :: Name -> Q Exp -- Exp is of type (String, Name)
mk_pair n = [| (show (undefined :: $( return $ ConT n )), n) |]
sym_tab :: Q Exp -- Exp is of type (Either String SymbolTable)
sym_tab = do
prefix_pairs <- mapM mk_pair prefix_names
unit_pairs <- mapM mk_pair unit_names
[| mkSymbolTable $( return $ ListE prefix_pairs ) $( return $ ListE unit_pairs ) |]
----------------------------------------------------------------------
-- Getting instances
----------------------------------------------------------------------
getInstanceNames :: Name -> Q [Name]
getInstanceNames class_name = do
ClassI _ insts <- reify class_name
m_names <- forM insts $ \inst ->
case inst of
InstanceD _ ((ConT class_name') `AppT` (ConT unit_name)) []
| class_name == class_name'
-> do show_insts <- reifyInstances ''Show [ConT unit_name]
case show_insts of
[_show_inst] -> return $ Just unit_name
_ -> return Nothing
_ -> return Nothing
return $ catMaybes m_names
#if __GLASGOW_HASKELL__ < 709
{-# WARNING allUnits, allPrefixes "Retrieving the list of all units and prefixes in scope does not work under GHC 7.8.*. Please upgrade GHC to use these functions." #-}
#endif
-- | Gets a list of the names of all units with @Show@ instances in scope.
-- Example usage:
--
-- > $( do units <- allUnits
-- > makeQuasiQuoter "unit" [] units )
--
allUnits :: Q [Name]
allUnits = getInstanceNames ''Unit
-- | Gets a list of the names of all unit prefixes with @Show@ instances in
-- scope. Example usage:
--
-- > $( do units <- allUnits
-- > prefixes <- allPrefixes
-- > makeQuasiQuoter "unit" prefixes units )
--
allPrefixes :: Q [Name]
allPrefixes = getInstanceNames ''UnitPrefix
|
hesiod/units
|
Data/Metrology/Parser.hs
|
bsd-3-clause
| 8,516 | 0 | 20 | 2,091 | 1,460 | 812 | 648 | 98 | 6 |
{-# LANGUAGE BangPatterns, CPP, NondecreasingIndentation, ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-}
-- NB: we specifically ignore deprecations. GHC 7.6 marks the .QSem module as
-- deprecated, although it became un-deprecated later. As a result, using 7.6
-- as your bootstrap compiler throws annoying warnings.
-- -----------------------------------------------------------------------------
--
-- (c) The University of Glasgow, 2011
--
-- This module implements multi-module compilation, and is used
-- by --make and GHCi.
--
-- -----------------------------------------------------------------------------
module GhcMake(
depanal,
load, LoadHowMuch(..),
topSortModuleGraph,
ms_home_srcimps, ms_home_imps,
noModError, cyclicModuleErr
) where
#include "HsVersions.h"
#ifdef GHCI
import qualified Linker ( unload )
#endif
import DriverPhases
import DriverPipeline
import DynFlags
import ErrUtils
import Finder
import GhcMonad
import HeaderInfo
import HscTypes
import Module
import TcIface ( typecheckIface )
import TcRnMonad ( initIfaceCheck )
import Bag ( listToBag )
import BasicTypes
import Digraph
import Exception ( tryIO, gbracket, gfinally )
import FastString
import Maybes ( expectJust )
import Name
import MonadUtils ( allM, MonadIO )
import Outputable
import Panic
import SrcLoc
import StringBuffer
import SysTools
import UniqFM
import Util
import qualified GHC.LanguageExtensions as LangExt
import Data.Either ( rights, partitionEithers )
import qualified Data.Map as Map
import Data.Map (Map)
import qualified Data.Set as Set
import qualified FiniteMap as Map ( insertListWith )
import Control.Concurrent ( forkIOWithUnmask, killThread )
import qualified GHC.Conc as CC
import Control.Concurrent.MVar
import Control.Concurrent.QSem
import Control.Exception
import Control.Monad
import Data.IORef
import Data.List
import qualified Data.List as List
import Data.Maybe
import Data.Ord ( comparing )
import Data.Time
import System.Directory
import System.FilePath
import System.IO ( fixIO )
import System.IO.Error ( isDoesNotExistError )
import GHC.Conc ( getNumProcessors, getNumCapabilities, setNumCapabilities )
label_self :: String -> IO ()
label_self thread_name = do
self_tid <- CC.myThreadId
CC.labelThread self_tid thread_name
-- -----------------------------------------------------------------------------
-- Loading the program
-- | Perform a dependency analysis starting from the current targets
-- and update the session with the new module graph.
--
-- Dependency analysis entails parsing the @import@ directives and may
-- therefore require running certain preprocessors.
--
-- Note that each 'ModSummary' in the module graph caches its 'DynFlags'.
-- These 'DynFlags' are determined by the /current/ session 'DynFlags' and the
-- @OPTIONS@ and @LANGUAGE@ pragmas of the parsed module. Thus if you want
-- changes to the 'DynFlags' to take effect you need to call this function
-- again.
--
depanal :: GhcMonad m =>
[ModuleName] -- ^ excluded modules
-> Bool -- ^ allow duplicate roots
-> m ModuleGraph
depanal excluded_mods allow_dup_roots = do
hsc_env <- getSession
let
dflags = hsc_dflags hsc_env
targets = hsc_targets hsc_env
old_graph = hsc_mod_graph hsc_env
liftIO $ showPass dflags "Chasing dependencies"
liftIO $ debugTraceMsg dflags 2 (hcat [
text "Chasing modules from: ",
hcat (punctuate comma (map pprTarget targets))])
mod_graphE <- liftIO $ downsweep hsc_env old_graph excluded_mods allow_dup_roots
mod_graph <- reportImportErrors mod_graphE
modifySession $ \_ -> hsc_env { hsc_mod_graph = mod_graph }
return mod_graph
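-- A hedged usage sketch (not part of this module): how a GHC API client
-- typically drives 'depanal' and 'load'; 'libdir' stands for GHC's library
-- directory (often obtained from the ghc-paths package).
--
--   runGhc (Just libdir) $ do
--     dflags <- getSessionDynFlags
--     _ <- setSessionDynFlags dflags
--     t <- guessTarget "Main.hs" Nothing
--     setTargets [t]
--     _ <- depanal [] False            -- optional; 'load' performs it too
--     ok <- load LoadAllTargets
--     liftIO $ print (succeeded ok)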
-- | Describes which modules of the module graph need to be loaded.
data LoadHowMuch
= LoadAllTargets
-- ^ Load all targets and its dependencies.
| LoadUpTo ModuleName
-- ^ Load only the given module and its dependencies.
| LoadDependenciesOf ModuleName
-- ^ Load only the dependencies of the given module, but not the module
-- itself.
-- | Try to load the program. See 'LoadHowMuch' for the different modes.
--
-- This function implements the core of GHC's @--make@ mode. It preprocesses,
-- compiles and loads the specified modules, avoiding re-compilation wherever
-- possible. Depending on the target (see 'DynFlags.hscTarget') compiling
-- and loading may result in files being created on disk.
--
-- Calls the 'defaultWarnErrLogger' after each compiling each module, whether
-- successful or not.
--
-- Throw a 'SourceError' if errors are encountered before the actual
-- compilation starts (e.g., during dependency analysis). All other errors
-- are reported using the 'defaultWarnErrLogger'.
--
load :: GhcMonad m => LoadHowMuch -> m SuccessFlag
load how_much = do
mod_graph <- depanal [] False
guessOutputFile
hsc_env <- getSession
let hpt1 = hsc_HPT hsc_env
let dflags = hsc_dflags hsc_env
-- The "bad" boot modules are the ones for which we have
-- B.hs-boot in the module graph, but no B.hs
-- The downsweep should have ensured this does not happen
-- (see msDeps)
let all_home_mods = [ms_mod_name s
| s <- mod_graph, not (isBootSummary s)]
-- TODO: Figure out what the correct form of this assert is. It's violated
-- when you have HsBootMerge nodes in the graph: then you'll have hs-boot
-- files without corresponding hs files.
-- bad_boot_mods = [s | s <- mod_graph, isBootSummary s,
-- not (ms_mod_name s `elem` all_home_mods)]
-- ASSERT( null bad_boot_mods ) return ()
-- check that the module given in HowMuch actually exists, otherwise
-- topSortModuleGraph will bomb later.
let checkHowMuch (LoadUpTo m) = checkMod m
checkHowMuch (LoadDependenciesOf m) = checkMod m
checkHowMuch _ = id
checkMod m and_then
| m `elem` all_home_mods = and_then
| otherwise = do
liftIO $ errorMsg dflags (text "no such module:" <+>
quotes (ppr m))
return Failed
checkHowMuch how_much $ do
-- mg2_with_srcimps drops the hi-boot nodes, returning a
-- graph with cycles. Among other things, it is used for
-- backing out partially complete cycles following a failed
-- upsweep, and for removing from hpt all the modules
-- not in strict downwards closure, during calls to compile.
let mg2_with_srcimps :: [SCC ModSummary]
mg2_with_srcimps = topSortModuleGraph True mod_graph Nothing
-- If we can determine that any of the {-# SOURCE #-} imports
-- are definitely unnecessary, then emit a warning.
warnUnnecessarySourceImports mg2_with_srcimps
let
-- check the stability property for each module.
stable_mods@(stable_obj,stable_bco)
= checkStability hpt1 mg2_with_srcimps all_home_mods
-- prune bits of the HPT which are definitely redundant now,
-- to save space.
pruned_hpt = pruneHomePackageTable hpt1
(flattenSCCs mg2_with_srcimps)
stable_mods
_ <- liftIO $ evaluate pruned_hpt
-- before we unload anything, make sure we don't leave an old
-- interactive context around pointing to dead bindings. Also,
-- write the pruned HPT to allow the old HPT to be GC'd.
setSession $ discardIC $ hsc_env { hsc_HPT = pruned_hpt }
liftIO $ debugTraceMsg dflags 2 (text "Stable obj:" <+> ppr stable_obj $$
text "Stable BCO:" <+> ppr stable_bco)
-- Unload any modules which are going to be re-linked this time around.
let stable_linkables = [ linkable
| m <- stable_obj++stable_bco,
Just hmi <- [lookupUFM pruned_hpt m],
Just linkable <- [hm_linkable hmi] ]
liftIO $ unload hsc_env stable_linkables
-- We could at this point detect cycles which aren't broken by
-- a source-import, and complain immediately, but it seems better
-- to let upsweep_mods do this, so at least some useful work gets
-- done before the upsweep is abandoned.
--hPutStrLn stderr "after tsort:\n"
--hPutStrLn stderr (showSDoc (vcat (map ppr mg2)))
-- Now do the upsweep, calling compile for each module in
-- turn. Final result is version 3 of everything.
-- Topologically sort the module graph, this time including hi-boot
-- nodes, and possibly just including the portion of the graph
-- reachable from the module specified in the 2nd argument to load.
-- This graph should be cycle-free.
-- If we're restricting the upsweep to a portion of the graph, we
-- also want to retain everything that is still stable.
let full_mg :: [SCC ModSummary]
full_mg = topSortModuleGraph False mod_graph Nothing
maybe_top_mod = case how_much of
LoadUpTo m -> Just m
LoadDependenciesOf m -> Just m
_ -> Nothing
partial_mg0 :: [SCC ModSummary]
partial_mg0 = topSortModuleGraph False mod_graph maybe_top_mod
-- LoadDependenciesOf m: we want the upsweep to stop just
-- short of the specified module (unless the specified module
-- is stable).
partial_mg
| LoadDependenciesOf _mod <- how_much
= ASSERT( case last partial_mg0 of
AcyclicSCC ms -> ms_mod_name ms == _mod; _ -> False )
List.init partial_mg0
| otherwise
= partial_mg0
stable_mg =
[ AcyclicSCC ms
| AcyclicSCC ms <- full_mg,
ms_mod_name ms `elem` stable_obj++stable_bco ]
-- the modules from partial_mg that are not also stable
-- NB. also keep cycles, we need to emit an error message later
unstable_mg = filter not_stable partial_mg
where not_stable (CyclicSCC _) = True
not_stable (AcyclicSCC ms)
= ms_mod_name ms `notElem` stable_obj++stable_bco
-- Load all the stable modules first, before attempting to load
-- an unstable module (#7231).
mg = stable_mg ++ unstable_mg
-- clean up between compilations
let cleanup hsc_env = intermediateCleanTempFiles (hsc_dflags hsc_env)
(flattenSCCs mg2_with_srcimps)
hsc_env
liftIO $ debugTraceMsg dflags 2 (hang (text "Ready for upsweep")
2 (ppr mg))
n_jobs <- case parMakeCount dflags of
Nothing -> liftIO getNumProcessors
Just n -> return n
let upsweep_fn | n_jobs > 1 = parUpsweep n_jobs
| otherwise = upsweep
setSession hsc_env{ hsc_HPT = emptyHomePackageTable }
(upsweep_ok, modsUpswept)
<- upsweep_fn pruned_hpt stable_mods cleanup mg
-- Make modsDone be the summaries for each home module now
-- available; this should equal the domain of hpt3.
-- Get it in roughly top .. bottom order (hence reverse).
let modsDone = reverse modsUpswept
-- Try and do linking in some form, depending on whether the
-- upsweep was completely or only partially successful.
if succeeded upsweep_ok
then
-- Easy; just relink it all.
do liftIO $ debugTraceMsg dflags 2 (text "Upsweep completely successful.")
-- Clean up after ourselves
hsc_env1 <- getSession
liftIO $ intermediateCleanTempFiles dflags modsDone hsc_env1
-- Issue a warning for the confusing case where the user
-- said '-o foo' but we're not going to do any linking.
-- We attempt linking if either (a) one of the modules is
-- called Main, or (b) the user said -no-hs-main, indicating
-- that main() is going to come from somewhere else.
--
let ofile = outputFile dflags
let no_hs_main = gopt Opt_NoHsMain dflags
let
main_mod = mainModIs dflags
a_root_is_Main = any ((==main_mod).ms_mod) mod_graph
do_linking = a_root_is_Main || no_hs_main || ghcLink dflags == LinkDynLib || ghcLink dflags == LinkStaticLib
-- link everything together
linkresult <- liftIO $ link (ghcLink dflags) dflags do_linking (hsc_HPT hsc_env1)
if ghcLink dflags == LinkBinary && isJust ofile && not do_linking
then do
liftIO $ errorMsg dflags $ text
("output was redirected with -o, " ++
"but no output will be generated\n" ++
"because there is no " ++
moduleNameString (moduleName main_mod) ++ " module.")
-- This should be an error, not a warning (#10895).
loadFinish Failed linkresult
else
loadFinish Succeeded linkresult
else
-- Tricky. We need to back out the effects of compiling any
-- half-done cycles, both so as to clean up the top level envs
-- and to avoid telling the interactive linker to link them.
do liftIO $ debugTraceMsg dflags 2 (text "Upsweep partially successful.")
let modsDone_names
= map ms_mod modsDone
let mods_to_zap_names
= findPartiallyCompletedCycles modsDone_names
mg2_with_srcimps
let mods_to_keep
= filter ((`notElem` mods_to_zap_names).ms_mod)
modsDone
hsc_env1 <- getSession
let hpt4 = retainInTopLevelEnvs (map ms_mod_name mods_to_keep)
(hsc_HPT hsc_env1)
-- Clean up after ourselves
liftIO $ intermediateCleanTempFiles dflags mods_to_keep hsc_env1
-- there should be no Nothings where linkables should be, now
ASSERT(all (isJust.hm_linkable) (eltsUFM (hsc_HPT hsc_env))) do
-- Link everything together
linkresult <- liftIO $ link (ghcLink dflags) dflags False hpt4
modifySession $ \hsc_env -> hsc_env{ hsc_HPT = hpt4 }
loadFinish Failed linkresult
-- | Finish up after a load.
loadFinish :: GhcMonad m => SuccessFlag -> SuccessFlag -> m SuccessFlag
-- If the link failed, unload everything and return.
loadFinish _all_ok Failed
= do hsc_env <- getSession
liftIO $ unload hsc_env []
modifySession discardProg
return Failed
-- Empty the interactive context and set the module context to the topmost
-- newly loaded module, or the Prelude if none were loaded.
loadFinish all_ok Succeeded
= do modifySession discardIC
return all_ok
-- | Forget the current program, but retain the persistent info in HscEnv
discardProg :: HscEnv -> HscEnv
discardProg hsc_env
= discardIC $ hsc_env { hsc_mod_graph = emptyMG
, hsc_HPT = emptyHomePackageTable }
-- | Discard the contents of the InteractiveContext, but keep the DynFlags.
-- It will also keep ic_int_print and ic_monad if their names are from
-- external packages.
discardIC :: HscEnv -> HscEnv
discardIC hsc_env
= hsc_env { hsc_IC = new_ic { ic_int_print = keep_external_name ic_int_print
, ic_monad = keep_external_name ic_monad } }
where
dflags = ic_dflags old_ic
old_ic = hsc_IC hsc_env
new_ic = emptyInteractiveContext dflags
keep_external_name ic_name
| nameIsFromExternalPackage this_pkg old_name = old_name
| otherwise = ic_name new_ic
where
this_pkg = thisPackage dflags
old_name = ic_name old_ic
intermediateCleanTempFiles :: DynFlags -> [ModSummary] -> HscEnv -> IO ()
intermediateCleanTempFiles dflags summaries hsc_env
= do notIntermediate <- readIORef (filesToNotIntermediateClean dflags)
cleanTempFilesExcept dflags (notIntermediate ++ except)
where
except =
-- Save preprocessed files. The preprocessed file *might* be
-- the same as the source file, but that doesn't do any
-- harm.
map ms_hspp_file summaries ++
-- Save object files for loaded modules. The point of this
-- is that we might have generated and compiled a stub C
-- file, and in the case of GHCi the object file will be a
-- temporary file which we must not remove because we need
-- to load/link it later.
hptObjs (hsc_HPT hsc_env)
-- | If there is no -o option, guess the name of target executable
-- by using the top-level source file name as a base.
guessOutputFile :: GhcMonad m => m ()
guessOutputFile = modifySession $ \env ->
let dflags = hsc_dflags env
mod_graph = hsc_mod_graph env
mainModuleSrcPath :: Maybe String
mainModuleSrcPath = do
let isMain = (== mainModIs dflags) . ms_mod
[ms] <- return (filter isMain mod_graph)
ml_hs_file (ms_location ms)
name = fmap dropExtension mainModuleSrcPath
name_exe = do
#if defined(mingw32_HOST_OS)
-- we must add the .exe extension unconditionally here, otherwise
-- when name has an extension of its own, the .exe extension will
-- not be added by DriverPipeline.exeFileName. See #2248
name' <- fmap (<.> "exe") name
#else
name' <- name
#endif
mainModuleSrcPath' <- mainModuleSrcPath
-- #9930: don't clobber input files (unless they ask for it)
if name' == mainModuleSrcPath'
then throwGhcException . UsageError $
"default output name would overwrite the input file; " ++
"must specify -o explicitly"
else Just name'
in
case outputFile dflags of
Just _ -> env
Nothing -> env { hsc_dflags = dflags { outputFile = name_exe } }
-- -----------------------------------------------------------------------------
--
-- | Prune the HomePackageTable
--
-- Before doing an upsweep, we can throw away:
--
-- - For non-stable modules:
-- - all ModDetails, all linked code
-- - all unlinked code that is out of date with respect to
-- the source file
--
-- This is VERY IMPORTANT otherwise we'll end up requiring 2x the
-- space at the end of the upsweep, because the topmost ModDetails of the
-- old HPT holds on to the entire type environment from the previous
-- compilation.
pruneHomePackageTable :: HomePackageTable
-> [ModSummary]
-> ([ModuleName],[ModuleName])
-> HomePackageTable
pruneHomePackageTable hpt summ (stable_obj, stable_bco)
= mapUFM prune hpt
where prune hmi
| is_stable modl = hmi'
| otherwise = hmi'{ hm_details = emptyModDetails }
where
modl = moduleName (mi_module (hm_iface hmi))
hmi' | Just l <- hm_linkable hmi, linkableTime l < ms_hs_date ms
= hmi{ hm_linkable = Nothing }
| otherwise
= hmi
where ms = expectJust "prune" (lookupUFM ms_map modl)
ms_map = listToUFM [(ms_mod_name ms, ms) | ms <- summ]
is_stable m = m `elem` stable_obj || m `elem` stable_bco
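-- For instance (illustrative only): if module M is not stable and its object
-- file is older than M.hs, pruning empties M's ModDetails and drops its
-- out-of-date linkable, so the upsweep can recompile M without the old HPT
-- entry pinning the previous type environment in memory.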
-- -----------------------------------------------------------------------------
--
-- | Return (names of) all those in modsDone who are part of a cycle as defined
-- by theGraph.
findPartiallyCompletedCycles :: [Module] -> [SCC ModSummary] -> [Module]
findPartiallyCompletedCycles modsDone theGraph
= chew theGraph
where
chew [] = []
chew ((AcyclicSCC _):rest) = chew rest -- acyclic? not interesting.
chew ((CyclicSCC vs):rest)
= let names_in_this_cycle = nub (map ms_mod vs)
mods_in_this_cycle
= nub ([done | done <- modsDone,
done `elem` names_in_this_cycle])
chewed_rest = chew rest
in
if notNull mods_in_this_cycle
&& length mods_in_this_cycle < length names_in_this_cycle
then mods_in_this_cycle ++ chewed_rest
else chewed_rest
-- ---------------------------------------------------------------------------
--
-- | Unloading
unload :: HscEnv -> [Linkable] -> IO ()
unload hsc_env stable_linkables -- Unload everything *except* 'stable_linkables'
= case ghcLink (hsc_dflags hsc_env) of
#ifdef GHCI
LinkInMemory -> Linker.unload hsc_env stable_linkables
#else
LinkInMemory -> panic "unload: no interpreter"
-- urgh. avoid warnings:
hsc_env stable_linkables
#endif
_other -> return ()
-- -----------------------------------------------------------------------------
{- |
Stability tells us which modules definitely do not need to be recompiled.
There are two main reasons for having stability:
- avoid doing a complete upsweep of the module graph in GHCi when
modules near the bottom of the tree have not changed.
- to tell GHCi when it can load object code: we can only load object code
for a module when we also load object code for all of the imports of the
module. So we need to know that we will definitely not be recompiling
any of these modules, and we can use the object code.
The stability check is as follows. Both stableObject and
stableBCO are used during the upsweep phase later.
@
stable m = stableObject m || stableBCO m
stableObject m =
all stableObject (imports m)
&& old linkable does not exist, or is == on-disk .o
&& date(on-disk .o) > date(.hs)
stableBCO m =
all stable (imports m)
&& date(BCO) > date(.hs)
@
These properties embody the following ideas:
- if a module is stable, then:
- if it has been compiled in a previous pass (present in HPT)
then it does not need to be compiled or re-linked.
- if it has not been compiled in a previous pass,
then we only need to read its .hi file from disk and
link it to produce a 'ModDetails'.
- if a module is not stable, we will definitely be at least
re-linking, and possibly re-compiling it during the 'upsweep'.
All non-stable modules can (and should) therefore be unlinked
before the 'upsweep'.
- Note that objects are only considered stable if they only depend
on other objects. We can't link object code against byte code.
-}
checkStability
:: HomePackageTable -- HPT from last compilation
-> [SCC ModSummary] -- current module graph (cyclic)
-> [ModuleName] -- all home modules
-> ([ModuleName], -- stableObject
[ModuleName]) -- stableBCO
checkStability hpt sccs all_home_mods = foldl checkSCC ([],[]) sccs
where
checkSCC (stable_obj, stable_bco) scc0
| stableObjects = (scc_mods ++ stable_obj, stable_bco)
| stableBCOs = (stable_obj, scc_mods ++ stable_bco)
| otherwise = (stable_obj, stable_bco)
where
scc = flattenSCC scc0
scc_mods = map ms_mod_name scc
home_module m = m `elem` all_home_mods && m `notElem` scc_mods
scc_allimps = nub (filter home_module (concatMap ms_home_allimps scc))
-- all imports outside the current SCC, but in the home pkg
stable_obj_imps = map (`elem` stable_obj) scc_allimps
stable_bco_imps = map (`elem` stable_bco) scc_allimps
stableObjects =
and stable_obj_imps
&& all object_ok scc
stableBCOs =
and (zipWith (||) stable_obj_imps stable_bco_imps)
&& all bco_ok scc
object_ok ms
| gopt Opt_ForceRecomp (ms_hspp_opts ms) = False
| Just t <- ms_obj_date ms = t >= ms_hs_date ms
&& same_as_prev t
| otherwise = False
where
same_as_prev t = case lookupUFM hpt (ms_mod_name ms) of
Just hmi | Just l <- hm_linkable hmi
-> isObjectLinkable l && t == linkableTime l
_other -> True
-- why '>=' rather than '>' above? If the filesystem stores
-- times to the nearest second, we may occasionally find that
-- the object & source have the same modification time,
-- especially if the source was automatically generated
-- and compiled. Using >= is slightly unsafe, but it matches
-- make's behaviour.
--
-- But see #5527, where someone ran into this and it caused
-- a problem.
bco_ok ms
| gopt Opt_ForceRecomp (ms_hspp_opts ms) = False
| otherwise = case lookupUFM hpt (ms_mod_name ms) of
Just hmi | Just l <- hm_linkable hmi ->
not (isObjectLinkable l) &&
linkableTime l >= ms_hs_date ms
_other -> False
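-- A small worked example (module names invented): suppose the home package is
--
-- @
--   A.hs imports B
--   B.hs imports nothing
-- @
--
-- and both A.o and B.o are newer than their sources and agree with the
-- linkables recorded in the old HPT. Then B is in stableObject (no home
-- imports, object up to date), hence A is in stableObject too, and neither
-- module is recompiled or re-linked. If instead only B.o were up to date,
-- A would be in neither set and would be re-linked (and possibly recompiled)
-- during the upsweep.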
{- Parallel Upsweep
-
- The parallel upsweep attempts to concurrently compile the modules in the
- compilation graph using multiple Haskell threads.
-
- The Algorithm
-
- A Haskell thread is spawned for each module in the module graph, waiting for
- its direct dependencies to finish building before it itself begins to build.
-
- Each module is associated with an initially empty MVar that stores the
- result of that particular module's compile. If the compile succeeded, then
- the HscEnv (synchronized by an MVar) is updated with the fresh HMI of that
- module, and the module's HMI is deleted from the old HPT (synchronized by an
- IORef) to save space.
-
- Instead of immediately outputting messages to the standard handles, all
- compilation output is deferred to a per-module 'LogQueue'. A QSem is used to
- limit the number of workers that are compiling simultaneously.
-
- Meanwhile, the main thread sequentially loops over all the modules in the
- module graph, outputting the messages stored in each module's TQueue.
-}
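{- A minimal standalone sketch (not GHC code) of the per-module message-queue
   pattern described above: writers append to an IORef and signal an MVar,
   and a single reader drains messages in order until it sees the
   end-of-stream marker. All names below are invented for the illustration;
   compare 'writeLogQueue' and 'printLogs' further down.

@
data Queue a = Queue (IORef [Maybe a]) (MVar ())

push :: Queue a -> Maybe a -> IO ()
push (Queue ref sem) x = do
  atomicModifyIORef' ref (\xs -> (x:xs, ()))
  _ <- tryPutMVar sem ()
  return ()

drain :: Queue a -> (a -> IO ()) -> IO ()
drain q@(Queue ref sem) act = do
  takeMVar sem
  xs <- atomicModifyIORef' ref (\xs -> ([], reverse xs))
  let go []              = drain q act
      go (Just x : rest) = act x >> go rest
      go (Nothing : _)   = return ()   -- end-of-stream marker
  go xs
@
-}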
-- | Each module is given a unique 'LogQueue' to redirect compilation messages
-- to. A 'Nothing' value contains the result of compilation, and denotes the
-- end of the message queue.
data LogQueue = LogQueue !(IORef [Maybe (WarnReason, Severity, SrcSpan, PprStyle, MsgDoc)])
!(MVar ())
-- | The graph of modules to compile and their corresponding result 'MVar' and
-- 'LogQueue'.
type CompilationGraph = [(ModSummary, MVar SuccessFlag, LogQueue)]
-- | Build a 'CompilationGraph' out of a list of strongly-connected modules,
-- also returning the first, if any, encountered module cycle.
buildCompGraph :: [SCC ModSummary] -> IO (CompilationGraph, Maybe [ModSummary])
buildCompGraph [] = return ([], Nothing)
buildCompGraph (scc:sccs) = case scc of
AcyclicSCC ms -> do
mvar <- newEmptyMVar
log_queue <- do
ref <- newIORef []
sem <- newEmptyMVar
return (LogQueue ref sem)
(rest,cycle) <- buildCompGraph sccs
return ((ms,mvar,log_queue):rest, cycle)
CyclicSCC mss -> return ([], Just mss)
-- A Module and whether it is a boot module.
type BuildModule = (Module, IsBoot)
-- | Flag indicating whether a module is a boot module or not. We need to treat
-- boot modules specially when building compilation graphs, since they break
-- cycles. Regular source files and signature files are treated equivalently.
data IsBoot = IsBoot | NotBoot
deriving (Ord, Eq, Show, Read)
-- | Tests if an 'HscSource' is a boot file, primarily for constructing
-- elements of 'BuildModule'.
hscSourceToIsBoot :: HscSource -> IsBoot
hscSourceToIsBoot HsBootFile = IsBoot
hscSourceToIsBoot _ = NotBoot
mkBuildModule :: ModSummary -> BuildModule
mkBuildModule ms = (ms_mod ms, if isBootSummary ms then IsBoot else NotBoot)
-- | The entry point to the parallel upsweep.
--
-- See also the simpler, sequential 'upsweep'.
parUpsweep
:: GhcMonad m
=> Int
-- ^ The number of workers we wish to run in parallel
-> HomePackageTable
-> ([ModuleName],[ModuleName])
-> (HscEnv -> IO ())
-> [SCC ModSummary]
-> m (SuccessFlag,
[ModSummary])
parUpsweep n_jobs old_hpt stable_mods cleanup sccs = do
hsc_env <- getSession
let dflags = hsc_dflags hsc_env
-- The bits of shared state we'll be using:
-- The global HscEnv is updated with the module's HMI when a module
-- successfully compiles.
hsc_env_var <- liftIO $ newMVar hsc_env
-- The old HPT is used for recompilation checking in upsweep_mod. When a
-- module successfully gets compiled, its HMI is pruned from the old HPT.
old_hpt_var <- liftIO $ newIORef old_hpt
-- What we use to limit parallelism with.
par_sem <- liftIO $ newQSem n_jobs
let updNumCapabilities = liftIO $ do
n_capabilities <- getNumCapabilities
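-- Only override the capability count when it is still the default of 1,
-- i.e. the user has not chosen a value themselves with +RTS -N.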
unless (n_capabilities /= 1) $ setNumCapabilities n_jobs
return n_capabilities
-- Reset the number of capabilities once the upsweep ends.
let resetNumCapabilities orig_n = liftIO $ setNumCapabilities orig_n
gbracket updNumCapabilities resetNumCapabilities $ \_ -> do
-- Sync the global session with the latest HscEnv once the upsweep ends.
let finallySyncSession io = io `gfinally` do
hsc_env <- liftIO $ readMVar hsc_env_var
setSession hsc_env
finallySyncSession $ do
-- Build the compilation graph out of the list of SCCs. Module cycles are
-- handled at the very end, after some useful work gets done. Note that
-- this list is topologically sorted (by virtue of 'sccs' being sorted so).
(comp_graph,cycle) <- liftIO $ buildCompGraph sccs
let comp_graph_w_idx = zip comp_graph [1..]
-- The list of all loops in the compilation graph.
-- NB: For convenience, the last module of each loop (aka the module that
-- finishes the loop) is prepended to the beginning of the loop.
let comp_graph_loops = go (map fstOf3 (reverse comp_graph))
where
go [] = []
go (ms:mss) | Just loop <- getModLoop ms (ms:mss)
= map mkBuildModule (ms:loop) : go mss
| otherwise
= go mss
-- Build a Map out of the compilation graph with which we can efficiently
-- look up the result MVar associated with a particular home module.
let home_mod_map :: Map BuildModule (MVar SuccessFlag, Int)
home_mod_map =
Map.fromList [ (mkBuildModule ms, (mvar, idx))
| ((ms,mvar,_),idx) <- comp_graph_w_idx ]
liftIO $ label_self "main --make thread"
-- For each module in the module graph, spawn a worker thread that will
-- compile this module.
let { spawnWorkers = forM comp_graph_w_idx $ \((mod,!mvar,!log_queue),!mod_idx) ->
forkIOWithUnmask $ \unmask -> do
liftIO $ label_self $ unwords
[ "worker --make thread"
, "for module"
, show (moduleNameString (ms_mod_name mod))
, "number"
, show mod_idx
]
-- Replace the default log_action with one that writes each
-- message to the module's log_queue. The main thread will
-- deal with synchronously printing these messages.
--
-- Use a local filesToClean var so that we can clean up
-- intermediate files in a timely fashion (as soon as
-- compilation for that module is finished) without having to
-- worry about accidentally deleting a simultaneous compile's
-- important files.
lcl_files_to_clean <- newIORef []
let lcl_dflags = dflags { log_action = parLogAction log_queue
, filesToClean = lcl_files_to_clean }
-- Unmask asynchronous exceptions and perform the thread-local
-- work to compile the module (see parUpsweep_one).
m_res <- try $ unmask $ prettyPrintGhcErrors lcl_dflags $
parUpsweep_one mod home_mod_map comp_graph_loops
lcl_dflags cleanup
par_sem hsc_env_var old_hpt_var
stable_mods mod_idx (length sccs)
res <- case m_res of
Right flag -> return flag
Left exc -> do
-- Don't print ThreadKilled exceptions: they are used
-- to kill the worker thread in the event of a user
-- interrupt, and the user doesn't have to be informed
-- about that.
when (fromException exc /= Just ThreadKilled)
(errorMsg lcl_dflags (text (show exc)))
return Failed
-- Populate the result MVar.
putMVar mvar res
-- Write the end marker to the message queue, telling the main
-- thread that it can stop waiting for messages from this
-- particular compile.
writeLogQueue log_queue Nothing
-- Add the remaining files that weren't cleaned up to the
-- global filesToClean ref, for cleanup later.
files_kept <- readIORef (filesToClean lcl_dflags)
addFilesToClean dflags files_kept
-- Kill all the workers, masking interrupts (since killThread is
-- interruptible). XXX: This is not ideal.
; killWorkers = uninterruptibleMask_ . mapM_ killThread }
-- Spawn the workers, making sure to kill them later. Collect the results
-- of each compile.
results <- liftIO $ bracket spawnWorkers killWorkers $ \_ ->
-- Loop over each module in the compilation graph in order, printing
-- each message from its log_queue.
forM comp_graph $ \(mod,mvar,log_queue) -> do
printLogs dflags log_queue
result <- readMVar mvar
if succeeded result then return (Just mod) else return Nothing
-- Collect and return the ModSummaries of all the successful compiles.
-- NB: Reverse this list to maintain output parity with the sequential upsweep.
let ok_results = reverse (catMaybes results)
-- Handle any cycle in the original compilation graph and return the result
-- of the upsweep.
case cycle of
Just mss -> do
liftIO $ fatalErrorMsg dflags (cyclicModuleErr mss)
return (Failed,ok_results)
Nothing -> do
let success_flag = successIf (all isJust results)
return (success_flag,ok_results)
where
writeLogQueue :: LogQueue -> Maybe (WarnReason,Severity,SrcSpan,PprStyle,MsgDoc) -> IO ()
writeLogQueue (LogQueue ref sem) msg = do
atomicModifyIORef' ref $ \msgs -> (msg:msgs,())
_ <- tryPutMVar sem ()
return ()
-- The log_action callback that is used to synchronize messages from a
-- worker thread.
parLogAction :: LogQueue -> LogAction
parLogAction log_queue _dflags !reason !severity !srcSpan !style !msg = do
writeLogQueue log_queue (Just (reason,severity,srcSpan,style,msg))
-- Print each message from the log_queue using the log_action from the
-- session's DynFlags.
printLogs :: DynFlags -> LogQueue -> IO ()
printLogs !dflags (LogQueue ref sem) = read_msgs
where read_msgs = do
takeMVar sem
msgs <- atomicModifyIORef' ref $ \xs -> ([], reverse xs)
print_loop msgs
print_loop [] = read_msgs
print_loop (x:xs) = case x of
Just (reason,severity,srcSpan,style,msg) -> do
log_action dflags dflags reason severity srcSpan style msg
print_loop xs
-- Exit the loop once we encounter the end marker.
Nothing -> return ()
-- The interruptible subset of the worker threads' work.
parUpsweep_one
:: ModSummary
-- ^ The module we wish to compile
-> Map BuildModule (MVar SuccessFlag, Int)
-- ^ The map of home modules and their result MVar
-> [[BuildModule]]
-- ^ The list of all module loops within the compilation graph.
-> DynFlags
-- ^ The thread-local DynFlags
-> (HscEnv -> IO ())
-- ^ The callback for cleaning up intermediate files
-> QSem
-- ^ The semaphore for limiting the number of simultaneous compiles
-> MVar HscEnv
-- ^ The MVar that synchronizes updates to the global HscEnv
-> IORef HomePackageTable
-- ^ The old HPT
-> ([ModuleName],[ModuleName])
-- ^ Lists of stable objects and BCOs
-> Int
-- ^ The index of this module
-> Int
-- ^ The total number of modules
-> IO SuccessFlag
-- ^ The result of this compile
parUpsweep_one mod home_mod_map comp_graph_loops lcl_dflags cleanup par_sem
hsc_env_var old_hpt_var stable_mods mod_index num_mods = do
let this_build_mod = mkBuildModule mod
let home_imps = map unLoc $ ms_home_imps mod
let home_src_imps = map unLoc $ ms_home_srcimps mod
-- All the textual imports of this module.
let textual_deps = Set.fromList $ mapFst (mkModule (thisPackage lcl_dflags)) $
zip home_imps (repeat NotBoot) ++
zip home_src_imps (repeat IsBoot)
-- Dealing with module loops
-- ~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- Not only do we have to deal with explicit textual dependencies, we also
-- have to deal with implicit dependencies introduced by import cycles that
-- are broken by an hs-boot file. We have to ensure that:
--
-- 1. A module that breaks a loop must depend on all the modules in the
-- loop (transitively or otherwise). This is normally always fulfilled
-- by the module's textual dependencies except in degenerate loops,
-- e.g.:
--
-- A.hs imports B.hs-boot
-- B.hs doesn't import A.hs
-- C.hs imports A.hs, B.hs
--
-- In this scenario, getModLoop will detect the module loop [A,B] but
-- the loop finisher B doesn't depend on A. So we have to explicitly add
-- A in as a dependency of B when we are compiling B.
--
-- 2. A module that depends on a module in an external loop can't proceed
-- until the entire loop is re-typechecked.
--
-- These two invariants have to be maintained to correctly build a
-- compilation graph with one or more loops.
-- The loop that this module will finish. After this module successfully
-- compiles, this loop is going to get re-typechecked.
let finish_loop = listToMaybe
[ tail loop | loop <- comp_graph_loops
, head loop == this_build_mod ]
-- If this module finishes a loop then it must depend on all the other
-- modules in that loop because the entire module loop is going to be
-- re-typechecked once this module gets compiled. These extra dependencies
-- are this module's "internal" loop dependencies, because this module is
-- inside the loop in question.
let int_loop_deps = Set.fromList $
case finish_loop of
Nothing -> []
Just loop -> filter (/= this_build_mod) loop
-- If this module depends on a module within a loop then it must wait for
-- that loop to get re-typechecked, i.e. it must wait on the module that
-- finishes that loop. These extra dependencies are this module's
-- "external" loop dependencies, because this module is outside of the
-- loop(s) in question.
let ext_loop_deps = Set.fromList
[ head loop | loop <- comp_graph_loops
, any (`Set.member` textual_deps) loop
, this_build_mod `notElem` loop ]
let all_deps = foldl1 Set.union [textual_deps, int_loop_deps, ext_loop_deps]
-- All of the module's home-module dependencies.
let home_deps_with_idx =
[ home_dep | dep <- Set.toList all_deps
, Just home_dep <- [Map.lookup dep home_mod_map] ]
-- Sort the list of dependencies in reverse-topological order. This way, by
-- the time we get woken up by the result of an earlier dependency,
-- subsequent dependencies are more likely to have finished. This step
-- effectively reduces the number of MVars that each thread blocks on.
let home_deps = map fst $ sortBy (flip (comparing snd)) home_deps_with_idx
-- Wait for all of the module's dependencies to finish building.
deps_ok <- allM (fmap succeeded . readMVar) home_deps
-- We can't build this module if any of its dependencies failed to build.
if not deps_ok
then return Failed
else do
-- Any hsc_env at this point is OK to use since we only really require
-- that the HPT contains the HMIs of our dependencies.
hsc_env <- readMVar hsc_env_var
old_hpt <- readIORef old_hpt_var
let logger err = printBagOfErrors lcl_dflags (srcErrorMessages err)
-- Limit the number of parallel compiles.
let withSem sem = bracket_ (waitQSem sem) (signalQSem sem)
mb_mod_info <- withSem par_sem $
handleSourceError (\err -> do logger err; return Nothing) $ do
-- Have the ModSummary and HscEnv point to our local log_action
-- and filesToClean var.
let lcl_mod = localize_mod mod
let lcl_hsc_env = localize_hsc_env hsc_env
-- Compile the module.
mod_info <- upsweep_mod lcl_hsc_env old_hpt stable_mods lcl_mod
mod_index num_mods
return (Just mod_info)
case mb_mod_info of
Nothing -> return Failed
Just mod_info -> do
let this_mod = ms_mod_name mod
-- Prune the old HPT unless this is an hs-boot module.
unless (isBootSummary mod) $
atomicModifyIORef' old_hpt_var $ \old_hpt ->
(delFromUFM old_hpt this_mod, ())
-- Update and fetch the global HscEnv.
lcl_hsc_env' <- modifyMVar hsc_env_var $ \hsc_env -> do
let hsc_env' = hsc_env { hsc_HPT = addToUFM (hsc_HPT hsc_env)
this_mod mod_info }
-- If this module is a loop finisher, now is the time to
-- re-typecheck the loop.
hsc_env'' <- case finish_loop of
Nothing -> return hsc_env'
Just loop -> typecheckLoop lcl_dflags hsc_env' $
map (moduleName . fst) loop
return (hsc_env'', localize_hsc_env hsc_env'')
-- Clean up any intermediate files.
cleanup lcl_hsc_env'
return Succeeded
where
localize_mod mod
= mod { ms_hspp_opts = (ms_hspp_opts mod)
{ log_action = log_action lcl_dflags
, filesToClean = filesToClean lcl_dflags } }
localize_hsc_env hsc_env
= hsc_env { hsc_dflags = (hsc_dflags hsc_env)
{ log_action = log_action lcl_dflags
, filesToClean = filesToClean lcl_dflags } }
-- -----------------------------------------------------------------------------
--
-- | The upsweep
--
-- This is where we compile each module in the module graph, in a pass
-- from the bottom to the top of the graph.
--
-- There had better not be any cyclic groups here -- we check for them.
upsweep
:: GhcMonad m
=> HomePackageTable -- ^ HPT from last time round (pruned)
-> ([ModuleName],[ModuleName]) -- ^ stable modules (see checkStability)
-> (HscEnv -> IO ()) -- ^ How to clean up unwanted tmp files
-> [SCC ModSummary] -- ^ Mods to do (the worklist)
-> m (SuccessFlag,
[ModSummary])
-- ^ Returns:
--
-- 1. A flag whether the complete upsweep was successful.
-- 2. The 'HscEnv' in the monad has an updated HPT
-- 3. A list of modules which succeeded loading.
upsweep old_hpt stable_mods cleanup sccs = do
(res, done) <- upsweep' old_hpt [] sccs 1 (length sccs)
return (res, reverse done)
where
upsweep' _old_hpt done
[] _ _
= return (Succeeded, done)
upsweep' _old_hpt done
(CyclicSCC ms:_) _ _
= do dflags <- getSessionDynFlags
liftIO $ fatalErrorMsg dflags (cyclicModuleErr ms)
return (Failed, done)
upsweep' old_hpt done
(AcyclicSCC mod:mods) mod_index nmods
= do -- putStrLn ("UPSWEEP_MOD: hpt = " ++
-- show (map (moduleUserString.moduleName.mi_module.hm_iface)
-- (moduleEnvElts (hsc_HPT hsc_env)))
let logger _mod = defaultWarnErrLogger
hsc_env <- getSession
-- Remove unwanted tmp files between compilations
liftIO (cleanup hsc_env)
mb_mod_info
<- handleSourceError
(\err -> do logger mod (Just err); return Nothing) $ do
mod_info <- liftIO $ upsweep_mod hsc_env old_hpt stable_mods
mod mod_index nmods
logger mod Nothing -- log warnings
return (Just mod_info)
case mb_mod_info of
Nothing -> return (Failed, done)
Just mod_info -> do
let this_mod = ms_mod_name mod
-- Add new info to hsc_env
hpt1 = addToUFM (hsc_HPT hsc_env) this_mod mod_info
hsc_env1 = hsc_env { hsc_HPT = hpt1 }
-- Space-saving: delete the old HPT entry
-- for mod BUT if mod is a hs-boot
-- node, don't delete it. For the
-- interface, the HPT entry is probably for the
-- main Haskell source file. Deleting it
-- would force the real module to be recompiled
-- every time.
old_hpt1 | isBootSummary mod = old_hpt
| otherwise = delFromUFM old_hpt this_mod
done' = mod:done
-- fixup our HomePackageTable after we've finished compiling
-- a mutually-recursive loop. See reTypecheckLoop, below.
hsc_env2 <- liftIO $ reTypecheckLoop hsc_env1 mod done'
setSession hsc_env2
upsweep' old_hpt1 done' mods (mod_index+1) nmods
maybeGetIfaceDate :: DynFlags -> ModLocation -> IO (Maybe UTCTime)
maybeGetIfaceDate dflags location
| writeInterfaceOnlyMode dflags
-- Minor optimization: it should be harmless to check the hi file location
-- always, but it's better to avoid hitting the filesystem if possible.
= modificationTimeIfExists (ml_hi_file location)
| otherwise
= return Nothing
-- | Compile a single module. Always produce a Linkable for it if
-- successful. If no compilation happened, return the old Linkable.
upsweep_mod :: HscEnv
-> HomePackageTable
-> ([ModuleName],[ModuleName])
-> ModSummary
-> Int -- index of module
-> Int -- total number of modules
-> IO HomeModInfo
upsweep_mod hsc_env old_hpt (stable_obj, stable_bco) summary mod_index nmods
= let
this_mod_name = ms_mod_name summary
this_mod = ms_mod summary
mb_obj_date = ms_obj_date summary
mb_if_date = ms_iface_date summary
obj_fn = ml_obj_file (ms_location summary)
hs_date = ms_hs_date summary
is_stable_obj = this_mod_name `elem` stable_obj
is_stable_bco = this_mod_name `elem` stable_bco
old_hmi = lookupUFM old_hpt this_mod_name
-- We're using the dflags for this module now, obtained by
-- applying any options in its LANGUAGE & OPTIONS_GHC pragmas.
dflags = ms_hspp_opts summary
prevailing_target = hscTarget (hsc_dflags hsc_env)
local_target = hscTarget dflags
-- If OPTIONS_GHC contains -fasm or -fllvm, be careful that
-- we don't do anything dodgy: these should only work to change
-- from -fllvm to -fasm and vice-versa, otherwise we could
-- end up trying to link object code to byte code.
target = if prevailing_target /= local_target
&& (not (isObjectTarget prevailing_target)
|| not (isObjectTarget local_target))
then prevailing_target
else local_target
-- store the corrected hscTarget into the summary
summary' = summary{ ms_hspp_opts = dflags { hscTarget = target } }
-- The old interface is ok if
-- a) we're compiling a source file, and the old HPT
-- entry is for a source file
-- b) we're compiling an hs-boot file
-- Case (b) allows an hs-boot file to get the interface of its
-- real source file on the second iteration of the compilation
-- manager, but that does no harm. Otherwise the hs-boot file
-- will always be recompiled
mb_old_iface
= case old_hmi of
Nothing -> Nothing
Just hm_info | isBootSummary summary -> Just iface
| not (mi_boot iface) -> Just iface
| otherwise -> Nothing
where
iface = hm_iface hm_info
compile_it :: Maybe Linkable -> SourceModified -> IO HomeModInfo
compile_it mb_linkable src_modified =
compileOne hsc_env summary' mod_index nmods
mb_old_iface mb_linkable src_modified
compile_it_discard_iface :: Maybe Linkable -> SourceModified
-> IO HomeModInfo
compile_it_discard_iface mb_linkable src_modified =
compileOne hsc_env summary' mod_index nmods
Nothing mb_linkable src_modified
-- With the HscNothing target we create empty linkables to avoid
-- recompilation. We have to detect these to recompile anyway if
-- the target changed since the last compile.
is_fake_linkable
| Just hmi <- old_hmi, Just l <- hm_linkable hmi =
null (linkableUnlinked l)
| otherwise =
-- we have no linkable, so it cannot be fake
False
implies False _ = True
implies True x = x
in
case () of
_
-- Regardless of whether we're generating object code or
-- byte code, we can always use an existing object file
-- if it is *stable* (see checkStability).
| is_stable_obj, Just hmi <- old_hmi -> do
liftIO $ debugTraceMsg (hsc_dflags hsc_env) 5
(text "skipping stable obj mod:" <+> ppr this_mod_name)
return hmi
-- object is stable, and we have an entry in the
-- old HPT: nothing to do
| is_stable_obj, isNothing old_hmi -> do
liftIO $ debugTraceMsg (hsc_dflags hsc_env) 5
(text "compiling stable on-disk mod:" <+> ppr this_mod_name)
linkable <- liftIO $ findObjectLinkable this_mod obj_fn
(expectJust "upsweep1" mb_obj_date)
compile_it (Just linkable) SourceUnmodifiedAndStable
-- object is stable, but we need to load the interface
-- off disk to make a HMI.
| not (isObjectTarget target), is_stable_bco,
(target /= HscNothing) `implies` not is_fake_linkable ->
ASSERT(isJust old_hmi) -- must be in the old_hpt
let Just hmi = old_hmi in do
liftIO $ debugTraceMsg (hsc_dflags hsc_env) 5
(text "skipping stable BCO mod:" <+> ppr this_mod_name)
return hmi
-- BCO is stable: nothing to do
| not (isObjectTarget target),
Just hmi <- old_hmi,
Just l <- hm_linkable hmi,
not (isObjectLinkable l),
(target /= HscNothing) `implies` not is_fake_linkable,
linkableTime l >= ms_hs_date summary -> do
liftIO $ debugTraceMsg (hsc_dflags hsc_env) 5
(text "compiling non-stable BCO mod:" <+> ppr this_mod_name)
compile_it (Just l) SourceUnmodified
-- we have an old BCO that is up to date with respect
-- to the source: do a recompilation check as normal.
-- When generating object code, if there's an up-to-date
-- object file on the disk, then we can use it.
-- However, if the object file is new (compared to any
-- linkable we had from a previous compilation), then we
-- must discard any in-memory interface, because this
-- means the user has compiled the source file
-- separately and generated a new interface, that we must
-- read from the disk.
--
| isObjectTarget target,
Just obj_date <- mb_obj_date,
obj_date >= hs_date -> do
case old_hmi of
Just hmi
| Just l <- hm_linkable hmi,
isObjectLinkable l && linkableTime l == obj_date -> do
liftIO $ debugTraceMsg (hsc_dflags hsc_env) 5
(text "compiling mod with new on-disk obj:" <+> ppr this_mod_name)
compile_it (Just l) SourceUnmodified
_otherwise -> do
liftIO $ debugTraceMsg (hsc_dflags hsc_env) 5
(text "compiling mod with new on-disk obj2:" <+> ppr this_mod_name)
linkable <- liftIO $ findObjectLinkable this_mod obj_fn obj_date
compile_it_discard_iface (Just linkable) SourceUnmodified
-- See Note [Recompilation checking when typechecking only]
| writeInterfaceOnlyMode dflags,
Just if_date <- mb_if_date,
if_date >= hs_date -> do
liftIO $ debugTraceMsg (hsc_dflags hsc_env) 5
(text "skipping tc'd mod:" <+> ppr this_mod_name)
compile_it Nothing SourceUnmodified
_otherwise -> do
liftIO $ debugTraceMsg (hsc_dflags hsc_env) 5
(text "compiling mod:" <+> ppr this_mod_name)
compile_it Nothing SourceModified
-- Note [Recompilation checking when typechecking only]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- If we are compiling with -fno-code -fwrite-interface, there won't
-- be any object code that we can compare against, nor should there
-- be: we're *just* generating interface files. In this case, we
-- want to check if the interface file is new, in lieu of the object
-- file. See also Trac #9243.
-- Filter modules in the HPT
retainInTopLevelEnvs :: [ModuleName] -> HomePackageTable -> HomePackageTable
retainInTopLevelEnvs keep_these hpt
= listToUFM [ (mod, expectJust "retain" mb_mod_info)
| mod <- keep_these
, let mb_mod_info = lookupUFM hpt mod
, isJust mb_mod_info ]
-- ---------------------------------------------------------------------------
-- Typecheck module loops
{-
See bug #930. This code fixes a long-standing bug in --make. The
problem is that when compiling the modules *inside* a loop, a data
type that is only defined at the top of the loop looks opaque; but
after the loop is done, the structure of the data type becomes
apparent.
The difficulty is then that two different bits of code have
different notions of what the data type looks like.
The idea is that after we compile a module which also has an .hs-boot
file, we re-generate the ModDetails for each of the modules that
depends on the .hs-boot file, so that everyone points to the proper
TyCons, Ids etc. defined by the real module, not the boot module.
Fortunately re-generating a ModDetails from a ModIface is easy: the
function TcIface.typecheckIface does exactly that.
Picking the modules to re-typecheck is slightly tricky. Starting from
the module graph consisting of the modules that have already been
compiled, we reverse the edges (so they point from the imported module
to the importing module), and depth-first-search from the .hs-boot
node. This gives us all the modules that depend transitively on the
.hs-boot module, and those are exactly the modules that we need to
re-typecheck.
Following this fix, GHC can compile itself with --make -O2.
-}
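-- A small illustrative example (module names invented): given
--
-- @
--   A.hs-boot
--   B.hs      imports {-# SOURCE #-} A    (i.e. A.hs-boot)
--   A.hs      imports B
-- @
--
-- compiling A.hs finishes the loop. Up to that point B was typechecked
-- against the abstract declarations in A.hs-boot, so 'typecheckLoop' is run
-- over the non-boot modules of the loop (B and A) to re-generate their
-- ModDetails from their interfaces, now pointing at the real TyCons and Ids
-- defined by A.hs.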
reTypecheckLoop :: HscEnv -> ModSummary -> ModuleGraph -> IO HscEnv
reTypecheckLoop hsc_env ms graph
| Just loop <- getModLoop ms graph
, let non_boot = filter (not.isBootSummary) loop
= typecheckLoop (hsc_dflags hsc_env) hsc_env (map ms_mod_name non_boot)
| otherwise
= return hsc_env
getModLoop :: ModSummary -> ModuleGraph -> Maybe [ModSummary]
getModLoop ms graph
| not (isBootSummary ms)
, any (\m -> ms_mod m == this_mod && isBootSummary m) graph
, let mss = reachableBackwards (ms_mod_name ms) graph
= Just mss
| otherwise
= Nothing
where
this_mod = ms_mod ms
typecheckLoop :: DynFlags -> HscEnv -> [ModuleName] -> IO HscEnv
typecheckLoop dflags hsc_env mods = do
debugTraceMsg dflags 2 $
text "Re-typechecking loop: " <> ppr mods
new_hpt <-
fixIO $ \new_hpt -> do
let new_hsc_env = hsc_env{ hsc_HPT = new_hpt }
mds <- initIfaceCheck new_hsc_env $
mapM (typecheckIface . hm_iface) hmis
let new_hpt = addListToUFM old_hpt
(zip mods [ hmi{ hm_details = details }
| (hmi,details) <- zip hmis mds ])
return new_hpt
return hsc_env{ hsc_HPT = new_hpt }
where
old_hpt = hsc_HPT hsc_env
hmis = map (expectJust "typecheckLoop" . lookupUFM old_hpt) mods
reachableBackwards :: ModuleName -> [ModSummary] -> [ModSummary]
reachableBackwards mod summaries
= [ ms | (ms,_,_) <- reachableG (transposeG graph) root ]
where -- the rest just sets up the graph:
(graph, lookup_node) = moduleGraphNodes False summaries
root = expectJust "reachableBackwards" (lookup_node HsBootFile mod)
-- ---------------------------------------------------------------------------
--
-- | Topological sort of the module graph
topSortModuleGraph
:: Bool
-- ^ Drop hi-boot nodes? (see below)
-> [ModSummary]
-> Maybe ModuleName
-- ^ Root module name. If @Nothing@, use the full graph.
-> [SCC ModSummary]
-- ^ Calculate SCCs of the module graph, possibly dropping the hi-boot nodes
-- The resulting list of strongly-connected-components is in topologically
-- sorted order, starting with the module(s) at the bottom of the
-- dependency graph (ie compile them first) and ending with the ones at
-- the top.
--
-- Drop hi-boot nodes (first boolean arg)?
--
-- - @False@: treat the hi-boot summaries as nodes of the graph,
-- so the graph must be acyclic
--
-- - @True@: eliminate the hi-boot nodes, and instead pretend
-- that a source-import of Foo is an import of Foo
-- The resulting graph has no hi-boot nodes, but can be cyclic
topSortModuleGraph drop_hs_boot_nodes summaries mb_root_mod
= map (fmap summaryNodeSummary) $ stronglyConnCompG initial_graph
where
(graph, lookup_node) = moduleGraphNodes drop_hs_boot_nodes summaries
initial_graph = case mb_root_mod of
Nothing -> graph
Just root_mod ->
-- restrict the graph to just those modules reachable from
-- the specified module. We do this by building a graph with
-- the full set of nodes, and determining the reachable set from
-- the specified node.
let root | Just node <- lookup_node HsSrcFile root_mod, graph `hasVertexG` node = node
| otherwise = throwGhcException (ProgramError "module does not exist")
in graphFromEdgedVertices (seq root (reachableG graph root))
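-- Illustrative only (invented modules): for summaries where C imports A and B,
-- and A imports B, @topSortModuleGraph False summaries Nothing@ yields
-- @[AcyclicSCC B, AcyclicSCC A, AcyclicSCC C]@, i.e. B is compiled first;
-- passing @Just (mkModuleName "A")@ as the root restricts the result to the
-- part of the graph reachable from A, namely B and A.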
type SummaryNode = (ModSummary, Int, [Int])
summaryNodeKey :: SummaryNode -> Int
summaryNodeKey (_, k, _) = k
summaryNodeSummary :: SummaryNode -> ModSummary
summaryNodeSummary (s, _, _) = s
moduleGraphNodes :: Bool -> [ModSummary]
-> (Graph SummaryNode, HscSource -> ModuleName -> Maybe SummaryNode)
moduleGraphNodes drop_hs_boot_nodes summaries = (graphFromEdgedVertices nodes, lookup_node)
where
numbered_summaries = zip summaries [1..]
lookup_node :: HscSource -> ModuleName -> Maybe SummaryNode
lookup_node hs_src mod = Map.lookup (mod, hscSourceToIsBoot hs_src) node_map
lookup_key :: HscSource -> ModuleName -> Maybe Int
lookup_key hs_src mod = fmap summaryNodeKey (lookup_node hs_src mod)
node_map :: NodeMap SummaryNode
node_map = Map.fromList [ ((moduleName (ms_mod s),
hscSourceToIsBoot (ms_hsc_src s)), node)
| node@(s, _, _) <- nodes ]
-- We use integers as the keys for the SCC algorithm
nodes :: [SummaryNode]
nodes = [ (s, key, out_keys)
| (s, key) <- numbered_summaries
-- Drop the hi-boot ones if told to do so
, not (isBootSummary s && drop_hs_boot_nodes)
, let out_keys = out_edge_keys hs_boot_key (map unLoc (ms_home_srcimps s)) ++
out_edge_keys HsSrcFile (map unLoc (ms_home_imps s)) ++
(-- see [boot-edges] below
if drop_hs_boot_nodes || ms_hsc_src s == HsBootFile
then []
else case lookup_key HsBootFile (ms_mod_name s) of
Nothing -> []
Just k -> [k]) ]
-- [boot-edges] if this is a .hs and there is an equivalent
-- .hs-boot, add a link from the former to the latter. This
-- has the effect of detecting bogus cases where the .hs-boot
-- depends on the .hs, by introducing a cycle. Additionally,
-- it ensures that we will always process the .hs-boot before
-- the .hs, and so the HomePackageTable will always have the
-- most up to date information.
-- Drop hs-boot nodes by using HsSrcFile as the key
hs_boot_key | drop_hs_boot_nodes = HsSrcFile
| otherwise = HsBootFile
out_edge_keys :: HscSource -> [ModuleName] -> [Int]
out_edge_keys hi_boot ms = mapMaybe (lookup_key hi_boot) ms
-- If we want keep_hi_boot_nodes, then we do lookup_key with
-- IsBoot; else NotBoot
-- The nodes of the graph are keyed by (mod, is boot?) pairs
-- NB: hsig files show up as *normal* nodes (not boot!), since they don't
-- participate in cycles (for now)
type NodeKey = (ModuleName, IsBoot)
type NodeMap a = Map.Map NodeKey a
msKey :: ModSummary -> NodeKey
msKey (ModSummary { ms_mod = mod, ms_hsc_src = boot })
= (moduleName mod, hscSourceToIsBoot boot)
mkNodeMap :: [ModSummary] -> NodeMap ModSummary
mkNodeMap summaries = Map.fromList [ (msKey s, s) | s <- summaries]
nodeMapElts :: NodeMap a -> [a]
nodeMapElts = Map.elems
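-- For example, the summaries for A.hs and A.hs-boot (an invented module A)
-- map to the distinct keys @(A, NotBoot)@ and @(A, IsBoot)@ respectively, so
-- both can coexist in a 'NodeMap'.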
-- | If there are {-# SOURCE #-} imports between strongly connected
-- components in the topological sort, then those imports can
-- definitely be replaced by ordinary non-SOURCE imports: if SOURCE
-- were necessary, then the edge would be part of a cycle.
warnUnnecessarySourceImports :: GhcMonad m => [SCC ModSummary] -> m ()
warnUnnecessarySourceImports sccs = do
dflags <- getDynFlags
when (wopt Opt_WarnUnusedImports dflags)
(logWarnings (listToBag (concatMap (check dflags . flattenSCC) sccs)))
where check dflags ms =
let mods_in_this_cycle = map ms_mod_name ms in
[ warn dflags i | m <- ms, i <- ms_home_srcimps m,
unLoc i `notElem` mods_in_this_cycle ]
warn :: DynFlags -> Located ModuleName -> WarnMsg
warn dflags (L loc mod) =
mkPlainErrMsg dflags loc
(text "Warning: {-# SOURCE #-} unnecessary in import of "
<+> quotes (ppr mod))
reportImportErrors :: MonadIO m => [Either ErrMsg b] -> m [b]
reportImportErrors xs | null errs = return oks
| otherwise = throwManyErrors errs
where (errs, oks) = partitionEithers xs
throwManyErrors :: MonadIO m => [ErrMsg] -> m ab
throwManyErrors errs = liftIO $ throwIO $ mkSrcErr $ listToBag errs
-----------------------------------------------------------------------------
--
-- | Downsweep (dependency analysis)
--
-- Chase downwards from the specified root set, returning summaries
-- for all home modules encountered. Only follow source-import
-- links.
--
-- We pass in the previous collection of summaries, which is used as a
-- cache to avoid recalculating a module summary if the source is
-- unchanged.
--
-- The returned list of [ModSummary] nodes has one node for each home-package
-- module, plus one for any hs-boot files. The imports of these nodes
-- are all there, including the imports of non-home-package modules.
downsweep :: HscEnv
-> [ModSummary] -- Old summaries
-> [ModuleName] -- Ignore dependencies on these; treat
-- them as if they were package modules
-> Bool -- True <=> allow multiple targets to have
-- the same module name; this is
-- very useful for ghc -M
-> IO [Either ErrMsg ModSummary]
-- The elts of [ModSummary] all have distinct
-- (Modules, IsBoot) identifiers, unless the Bool is true
-- in which case there can be repeats
downsweep hsc_env old_summaries excl_mods allow_dup_roots
= do
rootSummaries <- mapM getRootSummary roots
rootSummariesOk <- reportImportErrors rootSummaries
let root_map = mkRootMap rootSummariesOk
checkDuplicates root_map
summs <- loop (concatMap calcDeps rootSummariesOk) root_map
return summs
where
-- When we're compiling a signature file, we have an implicit
-- dependency on whatever the signature's implementation is.
-- (But not when we're type checking!)
calcDeps summ
| HsigFile <- ms_hsc_src summ
, Just m <- getSigOf (hsc_dflags hsc_env) (moduleName (ms_mod summ))
, moduleUnitId m == thisPackage (hsc_dflags hsc_env)
= (noLoc (moduleName m), NotBoot) : msDeps summ
| otherwise = msDeps summ
dflags = hsc_dflags hsc_env
roots = hsc_targets hsc_env
old_summary_map :: NodeMap ModSummary
old_summary_map = mkNodeMap old_summaries
getRootSummary :: Target -> IO (Either ErrMsg ModSummary)
getRootSummary (Target (TargetFile file mb_phase) obj_allowed maybe_buf)
= do exists <- liftIO $ doesFileExist file
if exists
then Right `fmap` summariseFile hsc_env old_summaries file mb_phase
obj_allowed maybe_buf
else return $ Left $ mkPlainErrMsg dflags noSrcSpan $
text "can't find file:" <+> text file
getRootSummary (Target (TargetModule modl) obj_allowed maybe_buf)
= do maybe_summary <- summariseModule hsc_env old_summary_map NotBoot
(L rootLoc modl) obj_allowed
maybe_buf excl_mods
case maybe_summary of
Nothing -> return $ Left $ packageModErr dflags modl
Just s -> return s
rootLoc = mkGeneralSrcSpan (fsLit "<command line>")
-- In a root module, the filename is allowed to diverge from the module
-- name, so we have to check that there aren't multiple root files
-- defining the same module (otherwise the duplicates will be silently
-- ignored, leading to confusing behaviour).
checkDuplicates :: NodeMap [Either ErrMsg ModSummary] -> IO ()
checkDuplicates root_map
| allow_dup_roots = return ()
| null dup_roots = return ()
| otherwise = liftIO $ multiRootsErr dflags (head dup_roots)
where
dup_roots :: [[ModSummary]] -- Each at least of length 2
dup_roots = filterOut isSingleton $ map rights $ nodeMapElts root_map
loop :: [(Located ModuleName,IsBoot)]
-- Work list: process these modules
-> NodeMap [Either ErrMsg ModSummary]
-- Visited set; the range is a list because
-- the roots can have the same module names
-- if allow_dup_roots is True
-> IO [Either ErrMsg ModSummary]
-- The result includes the worklist, except
-- for those mentioned in the visited set
loop [] done = return (concat (nodeMapElts done))
loop ((wanted_mod, is_boot) : ss) done
| Just summs <- Map.lookup key done
= if isSingleton summs then
loop ss done
else
do { multiRootsErr dflags (rights summs); return [] }
| otherwise
= do mb_s <- summariseModule hsc_env old_summary_map
is_boot wanted_mod True
Nothing excl_mods
case mb_s of
Nothing -> loop ss done
Just (Left e) -> loop ss (Map.insert key [Left e] done)
Just (Right s)-> loop (calcDeps s ++ ss)
(Map.insert key [Right s] done)
where
key = (unLoc wanted_mod, is_boot)
mkRootMap :: [ModSummary] -> NodeMap [Either ErrMsg ModSummary]
mkRootMap summaries = Map.insertListWith (flip (++))
[ (msKey s, [Right s]) | s <- summaries ]
Map.empty
-- | Returns the dependencies of the ModSummary s.
-- A wrinkle is that for a {-# SOURCE #-} import we return
-- *both* the hs-boot file
-- *and* the source file
-- as "dependencies". That ensures that the list of all relevant
-- modules always contains B.hs if it contains B.hs-boot.
-- Remember, this pass isn't doing the topological sort. It's
-- just gathering the list of all relevant ModSummaries
msDeps :: ModSummary -> [(Located ModuleName, IsBoot)]
msDeps s =
concat [ [(m,IsBoot), (m,NotBoot)] | m <- ms_home_srcimps s ]
++ [ (m,NotBoot) | m <- ms_home_imps s ]
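-- For example (illustrative), a module containing
--
-- @
--   import {-# SOURCE #-} A
--   import B
-- @
--
-- yields @[(A, IsBoot), (A, NotBoot), (B, NotBoot)]@ (modulo 'Located'
-- wrappers), so A.hs-boot and A.hs both end up in the module graph.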
home_imps :: [(Maybe FastString, Located ModuleName)] -> [Located ModuleName]
home_imps imps = [ lmodname | (mb_pkg, lmodname) <- imps,
isLocal mb_pkg ]
where isLocal Nothing = True
isLocal (Just pkg) | pkg == fsLit "this" = True -- "this" is special
isLocal _ = False
ms_home_allimps :: ModSummary -> [ModuleName]
ms_home_allimps ms = map unLoc (ms_home_srcimps ms ++ ms_home_imps ms)
-- | Like 'ms_home_imps', but for SOURCE imports.
ms_home_srcimps :: ModSummary -> [Located ModuleName]
ms_home_srcimps = home_imps . ms_srcimps
-- | All of the (possibly) home module imports from a
-- 'ModSummary'; that is to say, each of these module names
-- could be a home import if an appropriately named file
-- existed. (This is in contrast to package qualified
-- imports, which are guaranteed not to be home imports.)
ms_home_imps :: ModSummary -> [Located ModuleName]
ms_home_imps = home_imps . ms_imps
-----------------------------------------------------------------------------
-- Summarising modules
-- We have two types of summarisation:
--
-- * Summarise a file. This is used for the root module(s) passed to
-- cmLoadModules. The file is read, and used to determine the root
-- module name. The module name may differ from the filename.
--
-- * Summarise a module. We are given a module name, and must provide
-- a summary. The finder is used to locate the file in which the module
-- resides.
summariseFile
:: HscEnv
-> [ModSummary] -- old summaries
-> FilePath -- source file name
-> Maybe Phase -- start phase
-> Bool -- object code allowed?
-> Maybe (StringBuffer,UTCTime)
-> IO ModSummary
summariseFile hsc_env old_summaries file mb_phase obj_allowed maybe_buf
-- we can use a cached summary if one is available and the
-- source file hasn't changed, but we have to look up the summary
-- by source file, rather than module name as we do in summarise.
| Just old_summary <- findSummaryBySourceFile old_summaries file
= do
let location = ms_location old_summary
dflags = hsc_dflags hsc_env
src_timestamp <- get_src_timestamp
-- The file exists; we checked in getRootSummary above.
-- If it gets removed subsequently, then this
-- getModificationUTCTime may fail, but that's the right
-- behaviour.
-- return the cached summary if the source didn't change
if ms_hs_date old_summary == src_timestamp &&
not (gopt Opt_ForceRecomp (hsc_dflags hsc_env))
then do -- update the object-file timestamp
obj_timestamp <-
if isObjectTarget (hscTarget (hsc_dflags hsc_env))
|| obj_allowed -- bug #1205
then liftIO $ getObjTimestamp location NotBoot
else return Nothing
hi_timestamp <- maybeGetIfaceDate dflags location
return old_summary{ ms_obj_date = obj_timestamp
, ms_iface_date = hi_timestamp }
else
new_summary src_timestamp
| otherwise
= do src_timestamp <- get_src_timestamp
new_summary src_timestamp
where
get_src_timestamp = case maybe_buf of
Just (_,t) -> return t
Nothing -> liftIO $ getModificationUTCTime file
-- getModificationUTCTime may fail
new_summary src_timestamp = do
let dflags = hsc_dflags hsc_env
let hsc_src = if isHaskellSigFilename file then HsigFile else HsSrcFile
(dflags', hspp_fn, buf)
<- preprocessFile hsc_env file mb_phase maybe_buf
(srcimps,the_imps, L _ mod_name) <- getImports dflags' buf hspp_fn file
-- Make a ModLocation for this file
location <- liftIO $ mkHomeModLocation dflags mod_name file
-- Tell the Finder cache where it is, so that subsequent calls
-- to findModule will find it, even if it's not on any search path
mod <- liftIO $ addHomeModuleToFinder hsc_env mod_name location
-- when the user asks to load a source file by name, we only
-- use an object file if -fobject-code is on. See #1205.
obj_timestamp <-
if isObjectTarget (hscTarget (hsc_dflags hsc_env))
|| obj_allowed -- bug #1205
then liftIO $ modificationTimeIfExists (ml_obj_file location)
else return Nothing
hi_timestamp <- maybeGetIfaceDate dflags location
return (ModSummary { ms_mod = mod, ms_hsc_src = hsc_src,
ms_location = location,
ms_hspp_file = hspp_fn,
ms_hspp_opts = dflags',
ms_hspp_buf = Just buf,
ms_srcimps = srcimps, ms_textual_imps = the_imps,
ms_hs_date = src_timestamp,
ms_iface_date = hi_timestamp,
ms_obj_date = obj_timestamp })
findSummaryBySourceFile :: [ModSummary] -> FilePath -> Maybe ModSummary
findSummaryBySourceFile summaries file
= case [ ms | ms <- summaries, HsSrcFile <- [ms_hsc_src ms],
expectJust "findSummaryBySourceFile" (ml_hs_file (ms_location ms)) == file ] of
[] -> Nothing
(x:_) -> Just x
-- Summarise a module, and pick up source and timestamp.
summariseModule
:: HscEnv
-> NodeMap ModSummary -- Map of old summaries
-> IsBoot -- IsBoot <=> a {-# SOURCE #-} import
-> Located ModuleName -- Imported module to be summarised
-> Bool -- object code allowed?
-> Maybe (StringBuffer, UTCTime)
-> [ModuleName] -- Modules to exclude
-> IO (Maybe (Either ErrMsg ModSummary)) -- Its new summary
summariseModule hsc_env old_summary_map is_boot (L loc wanted_mod)
obj_allowed maybe_buf excl_mods
| wanted_mod `elem` excl_mods
= return Nothing
| Just old_summary <- Map.lookup (wanted_mod, is_boot) old_summary_map
= do -- Find its new timestamp; all the
-- ModSummaries in the old map have valid ml_hs_files
let location = ms_location old_summary
src_fn = expectJust "summariseModule" (ml_hs_file location)
-- check the modification time on the source file, and
-- return the cached summary if it hasn't changed. If the
-- file has disappeared, we need to call the Finder again.
case maybe_buf of
Just (_,t) -> check_timestamp old_summary location src_fn t
Nothing -> do
m <- tryIO (getModificationUTCTime src_fn)
case m of
Right t -> check_timestamp old_summary location src_fn t
Left e | isDoesNotExistError e -> find_it
| otherwise -> ioError e
| otherwise = find_it
where
dflags = hsc_dflags hsc_env
check_timestamp old_summary location src_fn src_timestamp
| ms_hs_date old_summary == src_timestamp &&
not (gopt Opt_ForceRecomp dflags) = do
-- update the object-file timestamp
obj_timestamp <-
if isObjectTarget (hscTarget (hsc_dflags hsc_env))
|| obj_allowed -- bug #1205
then getObjTimestamp location is_boot
else return Nothing
hi_timestamp <- maybeGetIfaceDate dflags location
return (Just (Right old_summary{ ms_obj_date = obj_timestamp
, ms_iface_date = hi_timestamp}))
| otherwise =
-- source changed: re-summarise.
new_summary location (ms_mod old_summary) src_fn src_timestamp
find_it = do
-- Don't use the Finder's cache this time. If the module was
-- previously a package module, it may have now appeared on the
-- search path, so we want to consider it to be a home module. If
-- the module was previously a home module, it may have moved.
uncacheModule hsc_env wanted_mod
found <- findImportedModule hsc_env wanted_mod Nothing
case found of
Found location mod
| isJust (ml_hs_file location) ->
-- Home package
just_found location mod
_ -> return Nothing
-- Not found
-- (If it is TRULY not found at all, we'll
-- error when we actually try to compile)
just_found location mod = do
-- Adjust location to point to the hs-boot source file,
-- hi file, object file, when is_boot says so
let location' | IsBoot <- is_boot = addBootSuffixLocn location
| otherwise = location
src_fn = expectJust "summarise2" (ml_hs_file location')
-- Check that it exists
-- It might have been deleted since the Finder last found it
maybe_t <- modificationTimeIfExists src_fn
case maybe_t of
Nothing -> return $ Just $ Left $ noHsFileErr dflags loc src_fn
Just t -> new_summary location' mod src_fn t
new_summary location mod src_fn src_timestamp
= do
-- Preprocess the source file and get its imports
-- The dflags' contains the OPTIONS pragmas
(dflags', hspp_fn, buf) <- preprocessFile hsc_env src_fn Nothing maybe_buf
(srcimps, the_imps, L mod_loc mod_name) <- getImports dflags' buf hspp_fn src_fn
-- NB: Despite the fact that is_boot is a top-level parameter, we
-- don't actually know coming into this function what the HscSource
-- of the module in question is. This is because we may be processing
-- this module because another module in the graph imported it: in this
-- case, we know if it's a boot or not because of the {-# SOURCE #-}
-- annotation, but we don't know if it's a signature or a regular
-- module until we actually look it up on the filesystem.
let hsc_src = case is_boot of
IsBoot -> HsBootFile
_ | isHaskellSigFilename src_fn -> HsigFile
| otherwise -> HsSrcFile
when (mod_name /= wanted_mod) $
throwOneError $ mkPlainErrMsg dflags' mod_loc $
text "File name does not match module name:"
$$ text "Saw:" <+> quotes (ppr mod_name)
$$ text "Expected:" <+> quotes (ppr wanted_mod)
-- Find the object timestamp, and return the summary
obj_timestamp <-
if isObjectTarget (hscTarget (hsc_dflags hsc_env))
|| obj_allowed -- bug #1205
then getObjTimestamp location is_boot
else return Nothing
hi_timestamp <- maybeGetIfaceDate dflags location
return (Just (Right (ModSummary { ms_mod = mod,
ms_hsc_src = hsc_src,
ms_location = location,
ms_hspp_file = hspp_fn,
ms_hspp_opts = dflags',
ms_hspp_buf = Just buf,
ms_srcimps = srcimps,
ms_textual_imps = the_imps,
ms_hs_date = src_timestamp,
ms_iface_date = hi_timestamp,
ms_obj_date = obj_timestamp })))
getObjTimestamp :: ModLocation -> IsBoot -> IO (Maybe UTCTime)
getObjTimestamp location is_boot
= if is_boot == IsBoot then return Nothing
else modificationTimeIfExists (ml_obj_file location)
preprocessFile :: HscEnv
-> FilePath
-> Maybe Phase -- ^ Starting phase
-> Maybe (StringBuffer,UTCTime)
-> IO (DynFlags, FilePath, StringBuffer)
preprocessFile hsc_env src_fn mb_phase Nothing
= do
(dflags', hspp_fn) <- preprocess hsc_env (src_fn, mb_phase)
buf <- hGetStringBuffer hspp_fn
return (dflags', hspp_fn, buf)
preprocessFile hsc_env src_fn mb_phase (Just (buf, _time))
= do
let dflags = hsc_dflags hsc_env
let local_opts = getOptions dflags buf src_fn
(dflags', leftovers, warns)
<- parseDynamicFilePragma dflags local_opts
checkProcessArgsResult dflags leftovers
handleFlagWarnings dflags' warns
let needs_preprocessing
| Just (Unlit _) <- mb_phase = True
| Nothing <- mb_phase, Unlit _ <- startPhase src_fn = True
-- note: local_opts is only required if there's no Unlit phase
| xopt LangExt.Cpp dflags' = True
| gopt Opt_Pp dflags' = True
| otherwise = False
when needs_preprocessing $
           throwGhcExceptionIO (ProgramError "buffer needs preprocessing; interactive check disabled")
return (dflags', src_fn, buf)
-----------------------------------------------------------------------------
-- Error messages
-----------------------------------------------------------------------------
noModError :: DynFlags -> SrcSpan -> ModuleName -> FindResult -> ErrMsg
-- ToDo: we don't have a proper line number for this error
noModError dflags loc wanted_mod err
= mkPlainErrMsg dflags loc $ cannotFindModule dflags wanted_mod err
noHsFileErr :: DynFlags -> SrcSpan -> String -> ErrMsg
noHsFileErr dflags loc path
= mkPlainErrMsg dflags loc $ text "Can't find" <+> text path
packageModErr :: DynFlags -> ModuleName -> ErrMsg
packageModErr dflags mod
= mkPlainErrMsg dflags noSrcSpan $
text "module" <+> quotes (ppr mod) <+> text "is a package module"
multiRootsErr :: DynFlags -> [ModSummary] -> IO ()
multiRootsErr _ [] = panic "multiRootsErr"
multiRootsErr dflags summs@(summ1:_)
= throwOneError $ mkPlainErrMsg dflags noSrcSpan $
text "module" <+> quotes (ppr mod) <+>
text "is defined in multiple files:" <+>
sep (map text files)
where
mod = ms_mod summ1
files = map (expectJust "checkDup" . ml_hs_file . ms_location) summs
cyclicModuleErr :: [ModSummary] -> SDoc
-- From a strongly connected component we find
-- a single cycle to report
cyclicModuleErr mss
= ASSERT( not (null mss) )
case findCycle graph of
Nothing -> text "Unexpected non-cycle" <+> ppr mss
Just path -> vcat [ text "Module imports form a cycle:"
, nest 2 (show_path path) ]
where
graph :: [Node NodeKey ModSummary]
graph = [(ms, msKey ms, get_deps ms) | ms <- mss]
get_deps :: ModSummary -> [NodeKey]
get_deps ms = ([ (unLoc m, IsBoot) | m <- ms_home_srcimps ms ] ++
[ (unLoc m, NotBoot) | m <- ms_home_imps ms ])
show_path [] = panic "show_path"
show_path [m] = text "module" <+> ppr_ms m
<+> text "imports itself"
show_path (m1:m2:ms) = vcat ( nest 7 (text "module" <+> ppr_ms m1)
: nest 6 (text "imports" <+> ppr_ms m2)
: go ms )
where
go [] = [text "which imports" <+> ppr_ms m1]
go (m:ms) = (text "which imports" <+> ppr_ms m) : go ms
ppr_ms :: ModSummary -> SDoc
ppr_ms ms = quotes (ppr (moduleName (ms_mod ms))) <+>
(parens (text (msHsFilePath ms)))
|
oldmanmike/ghc
|
compiler/main/GhcMake.hs
|
bsd-3-clause
| 88,323 | 9 | 35 | 27,926 | 14,465 | 7,397 | 7,068 | -1 | -1 |
module Utils.Misc where
import Data.Time
import Data.Char (isSpace)
maybeRead :: Read a => String -> Maybe a
maybeRead xs = case reads xs of
[(y,xs)] | all isSpace xs -> Just y
_ -> Nothing
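-- For example:
--   maybeRead "42"  :: Maybe Int evaluates to Just 42
--   maybeRead "42 " :: Maybe Int evaluates to Just 42 (trailing spaces are fine)
--   maybeRead "42x" :: Maybe Int evaluates to Nothing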
whenJust :: Monad m => Maybe a -> (a -> m ()) -> m ()
whenJust (Just x) f = f x
whenJust _ _ = return ()
drop_prefix :: String -> String -> Maybe String
drop_prefix [] ys = Just ys
drop_prefix (x:xs) (y:ys) | x == y = drop_prefix xs ys
drop_prefix _ _ = Nothing
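-- For example:
--   drop_prefix "foo" "foobar" evaluates to Just "bar"
--   drop_prefix "foo" "bar"    evaluates to Nothing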
parse_time :: String -> Maybe UTCTime
parse_time = parseTimeM True defaultTimeLocale "%Y-%m-%d %H:%M:%S%Q"
show_rfc1123 :: UTCTime -> String
show_rfc1123 = formatTime defaultTimeLocale rfc1123_format
read_rfc1123 :: String -> Maybe UTCTime
read_rfc1123 = parseTimeM True defaultTimeLocale rfc1123_format
rfc1123_format :: String
rfc1123_format = "%a, %d %b %Y %H:%M:%S GMT"
rfc822_named_format_str :: String
rfc822_named_format_str = "%a, %d %b %Y %H:%M:%S %Z"
|
glguy/hpaste
|
src/Utils/Misc.hs
|
bsd-3-clause
| 1,041 | 0 | 11 | 278 | 331 | 166 | 165 | 24 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DoAndIfThenElse #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
{- |
Module : Verifier.SAW.Simulator.Prims
Copyright : Galois, Inc. 2012-2015
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : non-portable (language extensions)
-}
module Verifier.SAW.Simulator.Prims
( Prim(..)
, BasePrims(..)
, constMap
-- * primitive function constructors
, primFun
, strictFun
, constFun
, boolFun
, natFun
, intFun
, intModFun
, tvalFun
, stringFun
, wordFun
, vectorFun
, Pack
, Unpack
-- * primitive computations
, selectV
, expByNatOp
, intToNatOp
, natToIntOp
, vRotateL
, vRotateR
, vShiftL
, vShiftR
, muxValue
, shifter
) where
import Prelude hiding (sequence, mapM)
import GHC.Stack( HasCallStack )
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative
#endif
import Control.Monad (liftM, unless, mzero)
import Control.Monad.Fix (MonadFix(mfix))
import Control.Monad.Trans
import Control.Monad.Trans.Maybe
import Control.Monad.Trans.Except
import Data.Bits
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Vector (Vector)
import qualified Data.Vector as V
import Numeric.Natural (Natural)
import Verifier.SAW.Term.Functor (Ident, primType, primName)
import Verifier.SAW.Simulator.Value
import Verifier.SAW.Prim
import qualified Verifier.SAW.Prim as Prim
import qualified Verifier.SAW.Utils as Panic (panic)
-- | A utility type for implementing primitive functions.
-- This type allows primitives to more easily define
-- functions that expect certain kinds of arguments,
-- and allows the simulator to respond gracefully if
-- the actual arguments don't match the expected filters.
data Prim l
= PrimFun (Thunk l -> Prim l)
| PrimStrict (Value l -> Prim l)
| forall a. PrimFilterFun Text (Value l -> MaybeT (EvalM l) a) (a -> Prim l)
| PrimExcept (ExceptT Text (EvalM l) (Value l))
| Prim (EvalM l (Value l))
| PrimValue (Value l)
-- | A primitive that takes a nonstrict argument
primFun :: (Thunk l -> Prim l) -> Prim l
primFun = PrimFun
-- | A primitive that takes a strict argument
strictFun :: (Value l -> Prim l) -> Prim l
strictFun = PrimStrict
-- | A primitive that ignores an argument
constFun :: Prim l -> Prim l
constFun p = PrimFun (const p)
-- | A primitive that requires a boolean argument
boolFun :: VMonad l => (VBool l -> Prim l) -> Prim l
boolFun = PrimFilterFun "expected Bool" r
where r (VBool b) = pure b
r _ = mzero
-- | A primitive that requires a concrete natural argument
natFun :: VMonad l => (Natural -> Prim l) -> Prim l
natFun = PrimFilterFun "expected Nat" r
where r (VNat n) = pure n
r (VCtorApp (primName -> "Prelude.Zero") [] []) = pure 0
r (VCtorApp (primName -> "Prelude.Succ") [] [x]) = succ <$> (r =<< lift (force x))
r _ = mzero
-- | A primitive that requires an integer argument
intFun :: VMonad l => (VInt l -> Prim l) -> Prim l
intFun = PrimFilterFun "expected Integer" r
where r (VInt i) = pure i
r _ = mzero
-- | A primitive that requires a (Z n) argument
intModFun :: VMonad l => (VInt l -> Prim l) -> Prim l
intModFun = PrimFilterFun "expected IntMod" r
where r (VIntMod _ i) = pure i
r _ = mzero
-- | A primitive that requires a type argument
tvalFun :: VMonad l => (TValue l -> Prim l) -> Prim l
tvalFun = PrimFilterFun "expected type value" r
where r (TValue tv) = pure tv
r _ = mzero
stringFun :: VMonad l => (Text -> Prim l) -> Prim l
stringFun = PrimFilterFun "expected string value" r
where r (VString x) = pure x
r _ = mzero
-- | A primitive that requires a packed word argument
wordFun :: VMonad l => Pack l -> (VWord l -> Prim l) -> Prim l
wordFun pack = PrimFilterFun "expected word" r
where r (VWord w) = pure w
r (VVector xs) = lift . pack =<< V.mapM (\x -> r' =<< lift (force x)) xs
r _ = mzero
r' (VBool b) = pure b
r' _ = mzero
-- | A primitive that requires a vector argument
vectorFun :: (VMonad l, Show (Extra l)) =>
Unpack l -> (Vector (Thunk l) -> Prim l) -> Prim l
vectorFun unpack = PrimFilterFun "expected vector" r
where r (VVector xs) = pure xs
r (VWord w) = fmap (ready . VBool) <$> lift (unpack w)
r _ = mzero
------------------------------------------------------------
--
-- | A collection of implementations of primitives on base types.
-- These can be used to derive other primitives on higher types.
data BasePrims l =
BasePrims
{ -- | This flag lets us know if we should attempt to build mux trees
-- for vector selects, push @ite@ inside structures, etc.
bpIsSymbolicEvaluator :: Bool
, bpAsBool :: VBool l -> Maybe Bool
-- Bitvectors
, bpUnpack :: VWord l -> EvalM l (Vector (VBool l))
, bpPack :: Vector (VBool l) -> MWord l
, bpBvAt :: VWord l -> Int -> MBool l
, bpBvLit :: Int -> Integer -> MWord l
, bpBvSize :: VWord l -> Int
, bpBvJoin :: VWord l -> VWord l -> MWord l
, bpBvSlice :: Int -> Int -> VWord l -> MWord l
-- Conditionals
, bpMuxBool :: VBool l -> VBool l -> VBool l -> MBool l
, bpMuxWord :: VBool l -> VWord l -> VWord l -> MWord l
, bpMuxInt :: VBool l -> VInt l -> VInt l -> MInt l
, bpMuxArray :: VBool l -> VArray l -> VArray l -> MArray l
, bpMuxExtra :: TValue l -> VBool l -> Extra l -> Extra l -> EvalM l (Extra l)
-- Booleans
, bpTrue :: VBool l
, bpFalse :: VBool l
, bpNot :: VBool l -> MBool l
, bpAnd :: VBool l -> VBool l -> MBool l
, bpOr :: VBool l -> VBool l -> MBool l
, bpXor :: VBool l -> VBool l -> MBool l
, bpBoolEq :: VBool l -> VBool l -> MBool l
-- Bitvector logical
, bpBvNot :: VWord l -> MWord l
, bpBvAnd :: VWord l -> VWord l -> MWord l
, bpBvOr :: VWord l -> VWord l -> MWord l
, bpBvXor :: VWord l -> VWord l -> MWord l
-- Bitvector arithmetic
, bpBvNeg :: VWord l -> MWord l
, bpBvAdd :: VWord l -> VWord l -> MWord l
, bpBvSub :: VWord l -> VWord l -> MWord l
, bpBvMul :: VWord l -> VWord l -> MWord l
, bpBvUDiv :: VWord l -> VWord l -> MWord l
, bpBvURem :: VWord l -> VWord l -> MWord l
, bpBvSDiv :: VWord l -> VWord l -> MWord l
, bpBvSRem :: VWord l -> VWord l -> MWord l
, bpBvLg2 :: VWord l -> MWord l
-- Bitvector comparisons
, bpBvEq :: VWord l -> VWord l -> MBool l
, bpBvsle :: VWord l -> VWord l -> MBool l
, bpBvslt :: VWord l -> VWord l -> MBool l
, bpBvule :: VWord l -> VWord l -> MBool l
, bpBvult :: VWord l -> VWord l -> MBool l
, bpBvsge :: VWord l -> VWord l -> MBool l
, bpBvsgt :: VWord l -> VWord l -> MBool l
, bpBvuge :: VWord l -> VWord l -> MBool l
, bpBvugt :: VWord l -> VWord l -> MBool l
-- Bitvector shift/rotate
, bpBvRolInt :: VWord l -> Integer -> MWord l
, bpBvRorInt :: VWord l -> Integer -> MWord l
, bpBvShlInt :: VBool l -> VWord l -> Integer -> MWord l
, bpBvShrInt :: VBool l -> VWord l -> Integer -> MWord l
, bpBvRol :: VWord l -> VWord l -> MWord l
, bpBvRor :: VWord l -> VWord l -> MWord l
, bpBvShl :: VBool l -> VWord l -> VWord l -> MWord l
, bpBvShr :: VBool l -> VWord l -> VWord l -> MWord l
-- Bitvector misc
, bpBvPopcount :: VWord l -> MWord l
, bpBvCountLeadingZeros :: VWord l -> MWord l
, bpBvCountTrailingZeros :: VWord l -> MWord l
, bpBvForall :: Natural -> (VWord l -> MBool l) -> MBool l
-- Integer operations
, bpIntAdd :: VInt l -> VInt l -> MInt l
, bpIntSub :: VInt l -> VInt l -> MInt l
, bpIntMul :: VInt l -> VInt l -> MInt l
, bpIntDiv :: VInt l -> VInt l -> MInt l
, bpIntMod :: VInt l -> VInt l -> MInt l
, bpIntNeg :: VInt l -> MInt l
, bpIntAbs :: VInt l -> MInt l
, bpIntEq :: VInt l -> VInt l -> MBool l
, bpIntLe :: VInt l -> VInt l -> MBool l
, bpIntLt :: VInt l -> VInt l -> MBool l
, bpIntMin :: VInt l -> VInt l -> MInt l
, bpIntMax :: VInt l -> VInt l -> MInt l
-- Array operations
, bpArrayConstant :: TValue l -> TValue l -> Value l -> MArray l
, bpArrayLookup :: VArray l -> Value l -> MValue l
, bpArrayUpdate :: VArray l -> Value l -> Value l -> MArray l
, bpArrayEq :: VArray l -> VArray l -> MBool l
, bpArrayCopy :: VArray l -> VWord l -> VArray l -> VWord l -> VWord l -> MArray l
, bpArraySet :: VArray l -> VWord l -> Value l -> VWord l -> MArray l
, bpArrayRangeEq :: VArray l -> VWord l -> VArray l -> VWord l -> VWord l -> MBool l
}
bpBool :: VMonad l => BasePrims l -> Bool -> MBool l
bpBool bp True = return (bpTrue bp)
bpBool bp False = return (bpFalse bp)
-- | Given implementations of the base primitives, construct a table
-- containing implementations of all primitives.
constMap ::
forall l.
(VMonadLazy l, MonadFix (EvalM l), Show (Extra l)) =>
BasePrims l ->
Map Ident (Prim l)
constMap bp = Map.fromList
-- Boolean
[ ("Prelude.Bool" , PrimValue (TValue VBoolType))
, ("Prelude.True" , PrimValue (VBool (bpTrue bp)))
, ("Prelude.False" , PrimValue (VBool (bpFalse bp)))
, ("Prelude.not" , boolFun (Prim . liftM VBool . bpNot bp))
, ("Prelude.and" , boolBinOp (bpAnd bp))
, ("Prelude.or" , boolBinOp (bpOr bp))
, ("Prelude.xor" , boolBinOp (bpXor bp))
, ("Prelude.boolEq", boolBinOp (bpBoolEq bp))
-- Bitwise
, ("Prelude.bvAnd" , wordBinOp (bpPack bp) (bpBvAnd bp))
, ("Prelude.bvOr" , wordBinOp (bpPack bp) (bpBvOr bp))
, ("Prelude.bvXor" , wordBinOp (bpPack bp) (bpBvXor bp))
, ("Prelude.bvNot" , wordUnOp (bpPack bp) (bpBvNot bp))
-- Arithmetic
, ("Prelude.bvNeg" , wordUnOp (bpPack bp) (bpBvNeg bp))
, ("Prelude.bvAdd" , wordBinOp (bpPack bp) (bpBvAdd bp))
, ("Prelude.bvSub" , wordBinOp (bpPack bp) (bpBvSub bp))
, ("Prelude.bvMul" , wordBinOp (bpPack bp) (bpBvMul bp))
, ("Prelude.bvUDiv", wordBinOp (bpPack bp) (bpBvUDiv bp))
, ("Prelude.bvURem", wordBinOp (bpPack bp) (bpBvURem bp))
, ("Prelude.bvSDiv", wordBinOp (bpPack bp) (bpBvSDiv bp))
, ("Prelude.bvSRem", wordBinOp (bpPack bp) (bpBvSRem bp))
, ("Prelude.bvLg2" , wordUnOp (bpPack bp) (bpBvLg2 bp))
-- Comparisons
, ("Prelude.bvEq" , wordBinRel (bpPack bp) (bpBvEq bp))
, ("Prelude.bvsle" , wordBinRel (bpPack bp) (bpBvsle bp))
, ("Prelude.bvslt" , wordBinRel (bpPack bp) (bpBvslt bp))
, ("Prelude.bvule" , wordBinRel (bpPack bp) (bpBvule bp))
, ("Prelude.bvult" , wordBinRel (bpPack bp) (bpBvult bp))
, ("Prelude.bvsge" , wordBinRel (bpPack bp) (bpBvsge bp))
, ("Prelude.bvsgt" , wordBinRel (bpPack bp) (bpBvsgt bp))
, ("Prelude.bvuge" , wordBinRel (bpPack bp) (bpBvuge bp))
, ("Prelude.bvugt" , wordBinRel (bpPack bp) (bpBvugt bp))
-- Bitvector misc
, ("Prelude.bvPopcount", wordUnOp (bpPack bp) (bpBvPopcount bp))
, ("Prelude.bvCountLeadingZeros", wordUnOp (bpPack bp) (bpBvCountLeadingZeros bp))
, ("Prelude.bvCountTrailingZeros", wordUnOp (bpPack bp) (bpBvCountTrailingZeros bp))
, ("Prelude.bvForall",
natFun $ \n ->
strictFun $ \f ->
Prim (VBool <$> bpBvForall bp n (toWordPred f))
)
-- Nat
, ("Prelude.Succ", succOp)
, ("Prelude.addNat", addNatOp)
, ("Prelude.subNat", subNatOp bp)
, ("Prelude.mulNat", mulNatOp)
, ("Prelude.minNat", minNatOp)
, ("Prelude.maxNat", maxNatOp)
, ("Prelude.divModNat", divModNatOp)
, ("Prelude.expNat", expNatOp)
, ("Prelude.widthNat", widthNatOp)
, ("Prelude.natCase", natCaseOp)
, ("Prelude.equalNat", equalNatOp bp)
, ("Prelude.ltNat", ltNatOp bp)
-- Integers
, ("Prelude.Integer", PrimValue (TValue VIntType))
, ("Prelude.intAdd", intBinOp (bpIntAdd bp))
, ("Prelude.intSub", intBinOp (bpIntSub bp))
, ("Prelude.intMul", intBinOp (bpIntMul bp))
, ("Prelude.intDiv", intBinOp (bpIntDiv bp))
, ("Prelude.intMod", intBinOp (bpIntMod bp))
, ("Prelude.intNeg", intUnOp (bpIntNeg bp))
, ("Prelude.intAbs", intUnOp (bpIntAbs bp))
, ("Prelude.intEq" , intBinCmp (bpIntEq bp))
, ("Prelude.intLe" , intBinCmp (bpIntLe bp))
, ("Prelude.intLt" , intBinCmp (bpIntLt bp))
, ("Prelude.intMin", intBinOp (bpIntMin bp))
, ("Prelude.intMax", intBinOp (bpIntMax bp))
-- Modular Integers
, ("Prelude.IntMod", natFun $ \n -> PrimValue (TValue (VIntModType n)))
-- Vectors
, ("Prelude.Vec", vecTypeOp)
, ("Prelude.gen", genOp)
, ("Prelude.atWithDefault", atWithDefaultOp bp)
, ("Prelude.upd", updOp bp)
, ("Prelude.take", takeOp bp)
, ("Prelude.drop", dropOp bp)
, ("Prelude.append", appendOp bp)
, ("Prelude.join", joinOp bp)
, ("Prelude.split", splitOp bp)
, ("Prelude.zip", vZipOp (bpUnpack bp))
, ("Prelude.foldr", foldrOp (bpUnpack bp))
, ("Prelude.foldl", foldlOp (bpUnpack bp))
, ("Prelude.scanl", scanlOp (bpUnpack bp))
, ("Prelude.rotateL", rotateLOp bp)
, ("Prelude.rotateR", rotateROp bp)
, ("Prelude.shiftL", shiftLOp bp)
, ("Prelude.shiftR", shiftROp bp)
, ("Prelude.EmptyVec", emptyVec)
-- Miscellaneous
, ("Prelude.coerce", coerceOp)
, ("Prelude.bvNat", bvNatOp bp)
, ("Prelude.bvToNat", bvToNatOp)
, ("Prelude.fix", fixOp)
, ("Prelude.error", errorOp)
-- Strings
, ("Prelude.String", PrimValue (TValue VStringType))
, ("Prelude.equalString", equalStringOp bp)
-- Overloaded
, ("Prelude.ite", iteOp bp)
, ("Prelude.iteDep", iteDepOp bp)
-- SMT Arrays
, ("Prelude.Array", arrayTypeOp)
, ("Prelude.arrayConstant", arrayConstantOp bp)
, ("Prelude.arrayLookup", arrayLookupOp bp)
, ("Prelude.arrayUpdate", arrayUpdateOp bp)
, ("Prelude.arrayEq", arrayEqOp bp)
, ("Prelude.arrayCopy", arrayCopyOp bp)
, ("Prelude.arraySet", arraySetOp bp)
, ("Prelude.arrayRangeEq", arrayRangeEqOp bp)
]
-- | Call this function to indicate that a programming error has
-- occurred, e.g. a datatype invariant has been violated.
panic :: HasCallStack => String -> a
panic msg = Panic.panic "Verifier.SAW.Simulator.Prims" [msg]
------------------------------------------------------------
-- Value accessors and constructors
vNat :: Natural -> Value l
vNat n = VNat n
toBool :: Show (Extra l) => Value l -> VBool l
toBool (VBool b) = b
toBool x = panic $ unwords ["Verifier.SAW.Simulator.toBool", show x]
type Pack l = Vector (VBool l) -> MWord l
type Unpack l = VWord l -> EvalM l (Vector (VBool l))
toWord :: (VMonad l, Show (Extra l)) => Pack l -> Value l -> MWord l
toWord _ (VWord w) = return w
toWord pack (VVector vv) = pack =<< V.mapM (liftM toBool . force) vv
toWord _ x = panic $ unwords ["Verifier.SAW.Simulator.toWord", show x]
toWordPred :: (VMonad l, Show (Extra l)) => Value l -> VWord l -> MBool l
toWordPred (VFun _ f) = fmap toBool . f . ready . VWord
toWordPred x = panic $ unwords ["Verifier.SAW.Simulator.toWordPred", show x]
toBits :: (VMonad l, Show (Extra l)) => Unpack l -> Value l ->
EvalM l (Vector (VBool l))
toBits unpack (VWord w) = unpack w
toBits _ (VVector v) = V.mapM (liftM toBool . force) v
toBits _ x = panic $ unwords ["Verifier.SAW.Simulator.toBits", show x]
toVector :: (VMonad l, Show (Extra l)) => Unpack l
-> Value l -> ExceptT Text (EvalM l) (Vector (Thunk l))
toVector _ (VVector v) = return v
toVector unpack (VWord w) = lift (liftM (fmap (ready . VBool)) (unpack w))
toVector _ x = throwE $ "Verifier.SAW.Simulator.toVector " <> Text.pack (show x)
vecIdx :: a -> Vector a -> Int -> a
vecIdx err v n =
case (V.!?) v n of
Just a -> a
Nothing -> err
toArray :: (VMonad l, Show (Extra l)) => Value l -> MArray l
toArray (VArray f) = return f
toArray x = panic $ unwords ["Verifier.SAW.Simulator.toArray", show x]
------------------------------------------------------------
-- Standard operator types
-- op :: Bool -> Bool -> Bool;
boolBinOp ::
(VMonad l, Show (Extra l)) =>
(VBool l -> VBool l -> MBool l) -> Prim l
boolBinOp op =
boolFun $ \x ->
boolFun $ \y ->
Prim (VBool <$> op x y)
-- op : (n : Nat) -> Vec n Bool -> Vec n Bool;
wordUnOp ::
(VMonad l, Show (Extra l)) =>
Pack l -> (VWord l -> MWord l) -> Prim l
wordUnOp pack op =
constFun $
wordFun pack $ \x ->
Prim (VWord <$> op x)
-- op : (n : Nat) -> Vec n Bool -> Vec n Bool -> Vec n Bool;
wordBinOp ::
(VMonad l, Show (Extra l)) =>
Pack l -> (VWord l -> VWord l -> MWord l) -> Prim l
wordBinOp pack op =
constFun $
wordFun pack $ \x ->
wordFun pack $ \y ->
Prim (VWord <$> op x y)
-- op : (n : Nat) -> Vec n Bool -> Vec n Bool -> Bool;
wordBinRel ::
(VMonad l, Show (Extra l)) =>
Pack l -> (VWord l -> VWord l -> MBool l) -> Prim l
wordBinRel pack op =
constFun $
wordFun pack $ \x ->
wordFun pack $ \y ->
Prim (VBool <$> op x y)
------------------------------------------------------------
-- Utility functions
-- @selectV mux maxValue valueFn v@ treats the vector @v@ as an
-- index, represented as a big-endian list of bits. It does a binary
-- lookup, using @mux@ as an if-then-else operator. If the index is
-- greater than @maxValue@, then it returns @valueFn maxValue@.
selectV :: (b -> a -> a -> a) -> Int -> (Int -> a) -> Vector b -> a
selectV mux maxValue valueFn v = impl len 0
where
len = V.length v
err = panic "selectV: impossible"
impl _ x | x > maxValue || x < 0 = valueFn maxValue
impl 0 x = valueFn x
impl i x = mux (vecIdx err v (len - i)) (impl j (x `setBit` j)) (impl j x) where j = i - 1
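-- A small worked example: with an if-then-else mux and show as the value
-- function,
--   selectV (\b t f -> if b then t else f) 7 show (V.fromList [True,False,True])
-- reads the bits as the big-endian index 0b101 = 5, which is within the
-- bound 7, so the result is show 5 = "5".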
------------------------------------------------------------
-- Values for common primitives
-- bvNat : (n : Nat) -> Nat -> Vec n Bool;
bvNatOp :: (VMonad l, Show (Extra l)) => BasePrims l -> Prim l
bvNatOp bp =
natFun $ \w ->
natFun $ \x ->
Prim (VWord <$> bpBvLit bp (fromIntegral w) (toInteger x)) -- FIXME check for overflow on w
-- bvToNat : (n : Nat) -> Vec n Bool -> Nat;
bvToNatOp :: VMonad l => Prim l
bvToNatOp =
natFun $ \n ->
primFun $ \x ->
Prim (liftM (VBVToNat (fromIntegral n)) (force x)) -- TODO, bad fromIntegral
-- coerce :: (a b :: sort 0) -> Eq (sort 0) a b -> a -> b;
coerceOp :: VMonad l => Prim l
coerceOp =
constFun $
constFun $
constFun $
primFun (\x -> Prim (force x))
------------------------------------------------------------
-- Nat primitives
-- | Return the number of bits necessary to represent the given value,
-- which should be a value of type Nat.
natSize :: HasCallStack => BasePrims l -> Value l -> Natural
natSize _bp val =
case val of
VNat n -> widthNat n
VBVToNat n _ -> fromIntegral n -- TODO, remove this fromIntegral
VIntToNat _ -> panic "natSize: symbolic integer (TODO)"
_ -> panic "natSize: expected Nat"
-- | Convert the given value (which should be of type Nat) to a word
-- of the given bit-width. The bit-width must be at least as large as
-- that returned by @natSize@.
natToWord :: (VMonad l, Show (Extra l)) => BasePrims l -> Int -> Value l -> MWord l
natToWord bp w val =
case val of
VNat n -> bpBvLit bp w (toInteger n)
VIntToNat _i -> panic "natToWord of VIntToNat TODO!"
VBVToNat xsize v ->
do x <- toWord (bpPack bp) v
case compare xsize w of
GT -> panic "natToWord: not enough bits"
EQ -> return x
LT -> -- zero-extend x to width w
do pad <- bpBvLit bp (w - xsize) 0
bpBvJoin bp pad x
_ -> panic "natToWord: expected Nat"
-- Succ :: Nat -> Nat;
succOp :: VMonad l => Prim l
succOp =
natFun $ \n -> PrimValue (vNat (succ n))
-- addNat :: Nat -> Nat -> Nat;
addNatOp :: VMonad l => Prim l
addNatOp =
natFun $ \m ->
natFun $ \n ->
PrimValue (vNat (m + n))
-- subNat :: Nat -> Nat -> Nat;
subNatOp :: (VMonad l, Show (Extra l)) => BasePrims l -> Prim l
subNatOp bp =
strictFun $ \x ->
strictFun $ \y -> Prim (g x y)
where
g (VNat i) (VNat j) = return $ VNat (if i < j then 0 else i - j)
g v1 v2 =
do let w = toInteger (max (natSize bp v1) (natSize bp v2))
unless (w <= toInteger (maxBound :: Int))
(panic "subNatOp" ["width too large", show w])
x1 <- natToWord bp (fromInteger w) v1
x2 <- natToWord bp (fromInteger w) v2
lt <- bpBvult bp x1 x2
z <- bpBvLit bp (fromInteger w) 0
d <- bpBvSub bp x1 x2
VBVToNat (fromInteger w) . VWord <$> bpMuxWord bp lt z d -- TODO, boo fromInteger
-- mulNat :: Nat -> Nat -> Nat;
mulNatOp :: VMonad l => Prim l
mulNatOp =
natFun $ \m ->
natFun $ \n ->
PrimValue (vNat (m * n))
-- minNat :: Nat -> Nat -> Nat;
minNatOp :: VMonad l => Prim l
minNatOp =
natFun $ \m ->
natFun $ \n ->
PrimValue (vNat (min m n))
-- maxNat :: Nat -> Nat -> Nat;
maxNatOp :: VMonad l => Prim l
maxNatOp =
natFun $ \m ->
natFun $ \n ->
PrimValue (vNat (max m n))
-- divModNat :: Nat -> Nat -> #(Nat, Nat);
divModNatOp :: VMonad l => Prim l
divModNatOp =
natFun $ \m ->
natFun $ \n -> PrimValue $
let (q,r) = divMod m n in
vTuple [ready $ vNat q, ready $ vNat r]
-- expNat :: Nat -> Nat -> Nat;
expNatOp :: VMonad l => Prim l
expNatOp =
natFun $ \m ->
natFun $ \n ->
PrimValue (vNat (m ^ n))
-- widthNat :: Nat -> Nat;
widthNatOp :: VMonad l => Prim l
widthNatOp =
natFun $ \n ->
PrimValue (vNat (widthNat n))
-- equalNat :: Nat -> Nat -> Bool;
equalNatOp :: (VMonad l, Show (Extra l)) => BasePrims l -> Prim l
equalNatOp bp =
strictFun $ \x ->
strictFun $ \y -> Prim (g x y)
where
g (VNat i) (VNat j) = VBool <$> bpBool bp (i == j)
g v1 v2 =
do let w = toInteger (max (natSize bp v1) (natSize bp v2))
unless (w <= toInteger (maxBound :: Int))
(panic "equalNatOp" ["width too large", show w])
x1 <- natToWord bp (fromInteger w) v1
x2 <- natToWord bp (fromInteger w) v2
VBool <$> bpBvEq bp x1 x2
-- ltNat :: Nat -> Nat -> Bool;
ltNatOp :: (VMonad l, Show (Extra l)) => BasePrims l -> Prim l
ltNatOp bp =
strictFun $ \x ->
strictFun $ \y -> Prim (g x y)
where
g (VNat i) (VNat j) = VBool <$> bpBool bp (i < j)
g v1 v2 =
do let w = toInteger (max (natSize bp v1) (natSize bp v2))
unless (w <= toInteger (maxBound :: Int))
(panic "ltNatOp" ["width too large", show w])
x1 <- natToWord bp (fromInteger w) v1
x2 <- natToWord bp (fromInteger w) v2
VBool <$> bpBvult bp x1 x2
-- natCase :: (p :: Nat -> sort 0) -> p Zero -> ((n :: Nat) -> p (Succ n)) -> (n :: Nat) -> p n;
natCaseOp :: (VMonad l, Show (Extra l)) => Prim l
natCaseOp =
constFun $
primFun $ \z ->
primFun $ \s ->
natFun $ \n -> Prim $
if n == 0
then force z
else do s' <- force s
apply s' (ready (VNat (n - 1)))
--------------------------------------------------------------------------------
-- Strings
equalStringOp :: VMonad l => BasePrims l -> Prim l
equalStringOp bp =
stringFun $ \x ->
stringFun $ \y ->
Prim (VBool <$> bpBool bp (x == y))
--------------------------------------------------------------------------------
-- Vec :: (n :: Nat) -> (a :: sort 0) -> sort 0;
vecTypeOp :: VMonad l => Prim l
vecTypeOp =
natFun $ \n ->
tvalFun $ \a ->
PrimValue (TValue (VVecType n a))
-- gen :: (n :: Nat) -> (a :: sort 0) -> (Nat -> a) -> Vec n a;
genOp :: (VMonadLazy l, Show (Extra l)) => Prim l
genOp =
natFun $ \n ->
constFun $
strictFun $ \f -> Prim $
do let g i = delay $ apply f (ready (VNat (fromIntegral i)))
if toInteger n > toInteger (maxBound :: Int) then
panic ("Verifier.SAW.Simulator.gen: vector size too large: " ++ show n)
else liftM VVector $ V.generateM (fromIntegral n) g
-- atWithDefault :: (n :: Nat) -> (a :: sort 0) -> a -> Vec n a -> Nat -> a;
atWithDefaultOp :: (VMonadLazy l, Show (Extra l)) => BasePrims l -> Prim l
atWithDefaultOp bp =
natFun $ \n ->
tvalFun $ \tp ->
primFun $ \d ->
strictFun $ \x ->
strictFun $ \idx ->
PrimExcept $
case idx of
VNat i ->
case x of
VVector xv -> lift (force (vecIdx d xv (fromIntegral i))) -- FIXME dangerous fromIntegral
VWord xw -> lift (VBool <$> bpBvAt bp xw (fromIntegral i)) -- FIXME dangerous fromIntegral
_ -> throwE "atOp: expected vector"
VBVToNat _sz i | bpIsSymbolicEvaluator bp -> do
iv <- lift (toBits (bpUnpack bp) i)
case x of
VVector xv ->
lift $ selectV (lazyMuxValue bp tp) (fromIntegral n - 1) (force . vecIdx d xv) iv -- FIXME dangerous fromIntegral
VWord xw ->
lift $ selectV (lazyMuxValue bp tp) (fromIntegral n - 1) (liftM VBool . bpBvAt bp xw) iv -- FIXME dangerous fromIntegral
_ -> throwE "atOp: expected vector"
VIntToNat _i | bpIsSymbolicEvaluator bp -> panic "atWithDefault: symbolic integer TODO"
_ -> throwE $ "atOp: expected Nat, got " <> Text.pack (show idx)
-- upd :: (n :: Nat) -> (a :: sort 0) -> Vec n a -> Nat -> a -> Vec n a;
updOp :: (VMonadLazy l, Show (Extra l)) => BasePrims l -> Prim l
updOp bp =
natFun $ \n ->
tvalFun $ \tp ->
vectorFun (bpUnpack bp) $ \xv ->
strictFun $ \idx ->
primFun $ \y ->
PrimExcept $
case idx of
VNat i
| toInteger i < toInteger (V.length xv)
-> return (VVector (xv V.// [(fromIntegral i, y)]))
| otherwise -> return (VVector xv)
VBVToNat wsize (VWord w) | bpIsSymbolicEvaluator bp -> lift $
do let f i = do b <- bpBvEq bp w =<< bpBvLit bp wsize (toInteger i)
if wsize < 64 && toInteger i >= 2 ^ wsize
then return (xv V.! i)
else delay (lazyMuxValue bp tp b (force y) (force (xv V.! i)))
yv <- V.generateM (V.length xv) f
return (VVector yv)
VBVToNat _sz (VVector iv) | bpIsSymbolicEvaluator bp -> lift $
do let update i = return (VVector (xv V.// [(i, y)]))
iv' <- V.mapM (liftM toBool . force) iv
selectV (lazyMuxValue bp (VVecType n tp)) (fromIntegral n - 1) update iv' -- FIXME dangerous fromIntegral
VIntToNat _ | bpIsSymbolicEvaluator bp -> panic "updOp: symbolic integer TODO"
_ -> throwE $ "updOp: expected Nat, got " <> Text.pack (show idx)
-- primitive EmptyVec :: (a :: sort 0) -> Vec 0 a;
emptyVec :: VMonad l => Prim l
emptyVec = constFun (PrimValue (VVector V.empty))
-- take :: (a :: sort 0) -> (m n :: Nat) -> Vec (addNat m n) a -> Vec m a;
takeOp :: (VMonad l, Show (Extra l)) => BasePrims l -> Prim l
takeOp bp =
constFun $
natFun $ \(fromIntegral -> m) -> -- FIXME dangerous fromIntegral
constFun $
strictFun $ \v ->
PrimExcept $
case v of
VVector vv -> return (VVector (V.take m vv))
VWord vw -> lift (VWord <$> bpBvSlice bp 0 m vw)
_ -> throwE $ "takeOp: " <> Text.pack (show v)
-- drop :: (a :: sort 0) -> (m n :: Nat) -> Vec (addNat m n) a -> Vec n a;
dropOp :: (VMonad l, Show (Extra l)) => BasePrims l -> Prim l
dropOp bp =
constFun $
natFun $ \(fromIntegral -> m) -> -- FIXME dangerous fromIntegral
constFun $
strictFun $ \v ->
PrimExcept $
case v of
VVector vv -> return (VVector (V.drop m vv))
VWord vw -> lift (VWord <$> bpBvSlice bp m (bpBvSize bp vw - m) vw)
_ -> throwE $ "dropOp: " <> Text.pack (show v)
-- append :: (m n :: Nat) -> (a :: sort 0) -> Vec m a -> Vec n a -> Vec (addNat m n) a;
appendOp :: (VMonad l, Show (Extra l)) => BasePrims l -> Prim l
appendOp bp =
constFun $
constFun $
constFun $
strictFun $ \xs ->
strictFun $ \ys ->
PrimExcept (appV bp xs ys)
appV :: (VMonad l, Show (Extra l)) => BasePrims l -> Value l -> Value l -> ExceptT Text (EvalM l) (Value l)
appV bp xs ys =
case (xs, ys) of
(VVector xv, _) | V.null xv -> return ys
(_, VVector yv) | V.null yv -> return xs
(VWord xw, VWord yw) -> lift (VWord <$> bpBvJoin bp xw yw)
(VVector xv, VVector yv) -> return $ VVector ((V.++) xv yv)
(VVector xv, VWord yw) -> lift (liftM (\yv -> VVector ((V.++) xv (fmap (ready . VBool) yv))) (bpUnpack bp yw))
(VWord xw, VVector yv) -> lift (liftM (\xv -> VVector ((V.++) (fmap (ready . VBool) xv) yv)) (bpUnpack bp xw))
_ -> throwE $ "Verifier.SAW.Simulator.Prims.appendOp: " <> Text.pack (show xs) <> ", " <> Text.pack (show ys)
-- join :: (m n :: Nat) -> (a :: sort 0) -> Vec m (Vec n a) -> Vec (mulNat m n) a;
joinOp :: (VMonad l, Show (Extra l)) => BasePrims l -> Prim l
joinOp bp =
constFun $
constFun $
constFun $
strictFun $ \x ->
PrimExcept $
case x of
VVector xv -> do
vv <- lift (V.mapM force xv)
V.foldM (appV bp) (VVector V.empty) vv
_ -> throwE "Verifier.SAW.Simulator.Prims.joinOp"
-- split :: (m n :: Nat) -> (a :: sort 0) -> Vec (mulNat m n) a -> Vec m (Vec n a);
splitOp :: (VMonad l, Show (Extra l)) => BasePrims l -> Prim l
splitOp bp =
natFun $ \(fromIntegral -> m) -> -- FIXME dangerous fromIntegral
natFun $ \(fromIntegral -> n) -> -- FIXME dangerous fromIntegral
constFun $
strictFun $ \x ->
PrimExcept $
case x of
VVector xv ->
let f i = ready (VVector (V.slice (i*n) n xv))
in return (VVector (V.generate m f))
VWord xw ->
let f i = (ready . VWord) <$> bpBvSlice bp (i*n) n xw
in lift (VVector <$> V.generateM m f)
_ -> throwE "Verifier.SAW.Simulator.SBV.splitOp"
-- vZip :: (a b :: sort 0) -> (m n :: Nat) -> Vec m a -> Vec n b -> Vec (minNat m n) #(a, b);
vZipOp :: (VMonadLazy l, Show (Extra l)) => Unpack l -> Prim l
vZipOp unpack =
constFun $
constFun $
constFun $
constFun $
strictFun $ \x ->
strictFun $ \y ->
PrimExcept $
do xv <- toVector unpack x
yv <- toVector unpack y
let pair a b = ready (vTuple [a, b])
return (VVector (V.zipWith pair xv yv))
--------------------------------------------------------------------------
-- Generic square-and-multiply
-- primitive expByNat : (a:sort 0) -> a -> (a -> a -> a) -> a -> Nat -> a;
expByNatOp :: (MonadLazy (EvalM l), VMonad l, Show (Extra l)) => BasePrims l -> Prim l
expByNatOp bp =
tvalFun $ \tp ->
strictFun $ \one ->
strictFun $ \mul ->
strictFun $ \x ->
strictFun $ \e ->
PrimExcept $
case e of
VBVToNat _sz w | bpIsSymbolicEvaluator bp -> lift $
do let loop acc [] = return acc
loop acc (b:bs)
| Just False <- bpAsBool bp b
= do sq <- applyAll mul [ ready acc, ready acc ]
loop sq bs
| Just True <- bpAsBool bp b
= do sq <- applyAll mul [ ready acc, ready acc ]
sq_x <- applyAll mul [ ready sq, ready x ]
loop sq_x bs
| otherwise
= do sq <- applyAll mul [ ready acc, ready acc ]
sq_x <- applyAll mul [ ready sq, ready x ]
acc' <- muxValue bp tp b sq_x sq
loop acc' bs
loop one . V.toList =<< toBits (bpUnpack bp) w
      -- This can't really be implemented; we should throw an unsupported exception
-- of some kind instead
VIntToNat _ | bpIsSymbolicEvaluator bp -> panic "expByNat: symbolic integer"
VNat n ->
do let loop acc [] = return acc
loop acc (False:bs) =
do sq <- applyAll mul [ ready acc, ready acc ]
loop sq bs
loop acc (True:bs) =
do sq <- applyAll mul [ ready acc, ready acc ]
sq_x <- applyAll mul [ ready sq, ready x ]
loop sq_x bs
w = toInteger (widthNat n)
if w > toInteger (maxBound :: Int) then
panic "expByNatOp" ["Exponent too large", show n]
else
lift (loop one [ testBit n (fromInteger i) | i <- reverse [ 0 .. w-1 ]])
v -> throwE $ "expByNatOp: Expected Nat value " <> Text.pack (show v)
------------------------------------------------------------
-- Shifts and Rotates
-- | Barrel-shifter algorithm. Takes a list of bits in big-endian order.
-- TODO use Natural instead of Integer
shifter :: Monad m => (b -> a -> a -> m a) -> (a -> Integer -> m a) -> a -> [b] -> m a
shifter mux op = go
where
go x [] = return x
go x (b : bs) = do
x' <- op x (2 ^ length bs)
y <- mux b x' x
go y bs
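-- A small worked example in the Maybe monad, shifting the integer 1 left by
-- the big-endian bits [True,False,True] (i.e. by 5):
--   shifter (\b t f -> Just (if b then t else f)) (\x i -> Just (x * 2^i)) 1 [True,False,True]
-- evaluates to Just 32.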
-- shift{L,R} :: (n :: Nat) -> (a :: sort 0) -> a -> Vec n a -> Nat -> Vec n a;
shiftOp :: forall l.
(HasCallStack, VMonadLazy l, Show (Extra l)) =>
BasePrims l ->
-- TODO use Natural instead of Integer
(Thunk l -> Vector (Thunk l) -> Integer -> Vector (Thunk l)) ->
(VBool l -> VWord l -> Integer -> MWord l) ->
(VBool l -> VWord l -> VWord l -> MWord l) ->
Prim l
shiftOp bp vecOp wordIntOp wordOp =
natFun $ \n ->
tvalFun $ \tp ->
primFun $ \z ->
strictFun $ \xs ->
strictFun $ \y ->
PrimExcept $
case y of
VNat i ->
case xs of
VVector xv -> return $ VVector (vecOp z xv (toInteger i))
VWord xw -> lift $ do
zb <- toBool <$> force z
VWord <$> wordIntOp zb xw (toInteger (min i n))
_ -> throwE $ "shiftOp: " <> Text.pack (show xs)
VBVToNat _sz (VVector iv) | bpIsSymbolicEvaluator bp -> do
bs <- lift (V.toList <$> traverse (fmap toBool . force) iv)
case xs of
VVector xv -> VVector <$> shifter (muxVector n tp) (\v i -> return (vecOp z v i)) xv bs
VWord xw -> lift $ do
zb <- toBool <$> force z
VWord <$> shifter (bpMuxWord bp) (wordIntOp zb) xw bs
_ -> throwE $ "shiftOp: " <> Text.pack (show xs)
VBVToNat _sz (VWord iw) | bpIsSymbolicEvaluator bp ->
case xs of
VVector xv -> do
bs <- lift (V.toList <$> bpUnpack bp iw)
VVector <$> shifter (muxVector n tp) (\v i -> return (vecOp z v i)) xv bs
VWord xw -> lift $ do
zb <- toBool <$> force z
VWord <$> wordOp zb xw iw
_ -> throwE $ "shiftOp: " <> Text.pack (show xs)
VIntToNat _i | bpIsSymbolicEvaluator bp -> panic "shiftOp: symbolic integer TODO"
_ -> throwE $ "shiftOp: " <> Text.pack (show y)
where
muxVector :: Natural -> TValue l -> VBool l ->
Vector (Thunk l) -> Vector (Thunk l) -> ExceptT Text (EvalM l) (Vector (Thunk l))
muxVector n tp b v1 v2 = toVector (bpUnpack bp) =<< muxVal (VVecType n tp) b (VVector v1) (VVector v2)
muxVal :: TValue l -> VBool l -> Value l -> Value l -> ExceptT Text (EvalM l) (Value l)
muxVal tv p x y = lift (muxValue bp tv p x y)
-- rotate{L,R} :: (n :: Nat) -> (a :: sort 0) -> Vec n a -> Nat -> Vec n a;
rotateOp :: forall l.
(HasCallStack, VMonadLazy l, Show (Extra l)) =>
BasePrims l ->
-- TODO use Natural instead of Integer?
(Vector (Thunk l) -> Integer -> Vector (Thunk l)) ->
(VWord l -> Integer -> MWord l) ->
(VWord l -> VWord l -> MWord l) ->
Prim l
rotateOp bp vecOp wordIntOp wordOp =
natFun $ \n ->
tvalFun $ \tp ->
strictFun $ \xs ->
strictFun $ \y ->
PrimExcept $
case y of
VNat i ->
case xs of
VVector xv -> return $ VVector (vecOp xv (toInteger i))
VWord xw -> lift (VWord <$> wordIntOp xw (toInteger i))
_ -> throwE $ "rotateOp: " <> Text.pack (show xs)
VBVToNat _sz (VVector iv) | bpIsSymbolicEvaluator bp -> do
bs <- lift (V.toList <$> traverse (fmap toBool . force) iv)
case xs of
VVector xv -> VVector <$> shifter (muxVector n tp) (\v i -> return (vecOp v i)) xv bs
VWord xw -> lift (VWord <$> shifter (bpMuxWord bp) wordIntOp xw bs)
_ -> throwE $ "rotateOp: " <> Text.pack (show xs)
VBVToNat _sz (VWord iw) | bpIsSymbolicEvaluator bp ->
case xs of
VVector xv -> do
bs <- lift (V.toList <$> bpUnpack bp iw)
VVector <$> shifter (muxVector n tp) (\v i -> return (vecOp v i)) xv bs
VWord xw -> lift (VWord <$> wordOp xw iw)
_ -> throwE $ "rotateOp: " <> Text.pack (show xs)
VIntToNat _i | bpIsSymbolicEvaluator bp -> panic "rotateOp: symbolic integer TODO"
_ -> throwE $ "rotateOp: " <> Text.pack (show y)
where
muxVector :: HasCallStack => Natural -> TValue l -> VBool l ->
Vector (Thunk l) -> Vector (Thunk l) -> ExceptT Text (EvalM l) (Vector (Thunk l))
muxVector n tp b v1 v2 = toVector (bpUnpack bp) =<< lift (muxValue bp (VVecType n tp) b (VVector v1) (VVector v2))
vRotateL :: Vector a -> Integer -> Vector a
vRotateL xs i
| V.null xs = xs
| otherwise = (V.++) (V.drop j xs) (V.take j xs)
where j = fromInteger (i `mod` toInteger (V.length xs))
vRotateR :: Vector a -> Integer -> Vector a
vRotateR xs i = vRotateL xs (- i)
vShiftL :: a -> Vector a -> Integer -> Vector a
vShiftL x xs i = (V.++) (V.drop j xs) (V.replicate j x)
where j = fromInteger (i `min` toInteger (V.length xs))
vShiftR :: a -> Vector a -> Integer -> Vector a
vShiftR x xs i = (V.++) (V.replicate j x) (V.take (V.length xs - j) xs)
where j = fromInteger (i `min` toInteger (V.length xs))
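-- For example:
--   vRotateL (V.fromList [1,2,3,4]) 1 == V.fromList [2,3,4,1]
--   vRotateR (V.fromList [1,2,3,4]) 1 == V.fromList [4,1,2,3]
--   vShiftL 0 (V.fromList [1,2,3,4]) 1 == V.fromList [2,3,4,0]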
rotateLOp :: (VMonadLazy l, Show (Extra l)) => BasePrims l -> Prim l
rotateLOp bp = rotateOp bp vRotateL (bpBvRolInt bp) (bpBvRol bp)
rotateROp :: (VMonadLazy l, Show (Extra l)) => BasePrims l -> Prim l
rotateROp bp = rotateOp bp vRotateR (bpBvRorInt bp) (bpBvRor bp)
shiftLOp :: (VMonadLazy l, Show (Extra l)) => BasePrims l -> Prim l
shiftLOp bp = shiftOp bp vShiftL (bpBvShlInt bp) (bpBvShl bp)
shiftROp :: (VMonadLazy l, Show (Extra l)) => BasePrims l -> Prim l
shiftROp bp = shiftOp bp vShiftR (bpBvShrInt bp) (bpBvShr bp)
-- foldr :: (a b :: sort 0) -> (n :: Nat) -> (a -> b -> b) -> b -> Vec n a -> b;
foldrOp :: (VMonadLazy l, Show (Extra l)) => Unpack l -> Prim l
foldrOp unpack =
constFun $
constFun $
constFun $
strictFun $ \f ->
primFun $ \z ->
strictFun $ \xs ->
PrimExcept $ do
let g x m = do fx <- apply f x
y <- delay m
apply fx y
xv <- toVector unpack xs
lift (V.foldr g (force z) xv)
-- foldl : (a b : sort 0) -> (n : Nat) -> (b -> a -> b) -> b -> Vec n a -> b;
foldlOp :: (VMonadLazy l, Show (Extra l)) => Unpack l -> Prim l
foldlOp unpack =
constFun $
constFun $
constFun $
strictFun $ \f ->
primFun $ \z ->
strictFun $ \xs ->
PrimExcept $ do
let g m x = do f1 <- apply f =<< delay m
apply f1 x
xv <- toVector unpack xs
lift (V.foldl g (force z) xv)
-- scanl : (a b : sort 0) -> (n : Nat) -> (b -> a -> b) -> b -> Vec n a -> Vec (addNat n 1) b;
scanlOp :: forall l. (VMonadLazy l, Show (Extra l)) => Unpack l -> Prim l
scanlOp unpack =
constFun $ -- a
constFun $ -- b
constFun $ -- n
strictFun $ \f ->
primFun $ \z ->
strictFun $ \xs ->
PrimExcept $ do
let g :: Vector (Thunk l) ->
Thunk l ->
EvalM l (Vector (Thunk l))
g bs v = do b <- delay (applyAll f [V.last bs, v])
return (V.snoc bs b)
xv <- toVector unpack xs
lift (VVector <$> V.foldM g (V.singleton z) xv)
-- op :: Integer -> Integer;
intUnOp :: VMonad l => (VInt l -> MInt l) -> Prim l
intUnOp f =
intFun $ \x ->
Prim (VInt <$> f x)
-- op :: Integer -> Integer -> Integer;
intBinOp :: VMonad l => (VInt l -> VInt l -> MInt l) -> Prim l
intBinOp f =
intFun $ \x ->
intFun $ \y ->
Prim (VInt <$> f x y)
-- op :: Integer -> Integer -> Bool;
intBinCmp :: VMonad l => (VInt l -> VInt l -> MBool l) -> Prim l
intBinCmp f =
intFun $ \x ->
intFun $ \y ->
Prim (VBool <$> f x y)
-- primitive intToNat :: Integer -> Nat;
intToNatOp :: (VMonad l, VInt l ~ Integer) => Prim l
intToNatOp =
intFun $ \x -> PrimValue $!
if x >= 0 then VNat (fromInteger x) else VNat 0
-- primitive natToInt :: Nat -> Integer;
natToIntOp :: (VMonad l, VInt l ~ Integer) => Prim l
natToIntOp = natFun $ \x -> PrimValue $ VInt (toInteger x)
-- primitive error :: (a :: sort 0) -> String -> a;
errorOp :: VMonad l => Prim l
errorOp =
constFun $
stringFun $ \msg ->
Prim $ Prim.userError (Text.unpack msg)
------------------------------------------------------------
-- Conditionals
iteDepOp :: (HasCallStack, VMonadLazy l, Show (Extra l)) => BasePrims l -> Prim l
iteDepOp bp =
primFun $ \_p ->
boolFun $ \b ->
primFun $ \x ->
primFun $ \y ->
PrimExcept $
case bpAsBool bp b of
Just True -> lift (force x)
Just False -> lift (force y)
Nothing -> throwE "unsupported symbolic operation: iteDep"
iteOp :: (HasCallStack, VMonadLazy l, Show (Extra l)) => BasePrims l -> Prim l
iteOp bp =
tvalFun $ \tp ->
boolFun $ \b ->
primFun $ \x ->
primFun $ \y ->
PrimExcept $
case bpAsBool bp b of
Just True -> lift (force x)
Just False -> lift (force y)
Nothing
| bpIsSymbolicEvaluator bp -> lift (lazyMuxValue bp tp b (force x) (force y))
| otherwise -> throwE "iteOp"
lazyMuxValue ::
(HasCallStack, VMonadLazy l, Show (Extra l)) =>
BasePrims l ->
TValue l ->
VBool l ->
EvalM l (Value l) ->
EvalM l (Value l) ->
EvalM l (Value l)
lazyMuxValue bp tp b x y =
case bpAsBool bp b of
Just True -> x
Just False -> y
Nothing ->
do x' <- x
y' <- y
muxValue bp tp b x' y'
muxValue :: forall l.
(HasCallStack, VMonadLazy l, Show (Extra l)) =>
BasePrims l ->
TValue l ->
VBool l -> Value l -> Value l -> EvalM l (Value l)
muxValue bp tp0 b = value tp0
where
value :: TValue l -> Value l -> Value l -> EvalM l (Value l)
value _ (VNat m) (VNat n) | m == n = return $ VNat m
value _ (VString x) (VString y) | x == y = return $ VString x
value (VPiType _ _tp body) (VFun nm f) (VFun _ g) =
return $ VFun nm $ \a ->
do tp' <- applyPiBody body a
x <- f a
y <- g a
value tp' x y
value VUnitType VUnit VUnit = return VUnit
value (VPairType t1 t2) (VPair x1 x2) (VPair y1 y2) =
VPair <$> thunk t1 x1 y1 <*> thunk t2 x2 y2
value (VRecordType fs) (VRecordValue elems1) (VRecordValue elems2) =
do let em1 = Map.fromList elems1
let em2 = Map.fromList elems2
let build (f,tp) = case (Map.lookup f em1, Map.lookup f em2) of
(Just v1, Just v2) ->
do v <- thunk tp v1 v2
pure (f,v)
_ -> panic "muxValue" ["Record field missing!", show f]
VRecordValue <$> traverse build fs
value (VDataType _nm _ps _ixs) (VCtorApp i ps xv) (VCtorApp j _ yv)
| i == j = VCtorApp i ps <$> ctorArgs (primType i) ps xv yv
| otherwise = unsupportedPrimitive "muxValue"
("cannot mux different data constructors " <> show i <> " " <> show j)
value (VVecType _ tp) (VVector xv) (VVector yv) =
VVector <$> thunks tp xv yv
value tp (VExtra x) (VExtra y) =
VExtra <$> bpMuxExtra bp tp b x y
value _ (VBool x) (VBool y) = VBool <$> bpMuxBool bp b x y
value _ (VWord x) (VWord y) = VWord <$> bpMuxWord bp b x y
value _ (VInt x) (VInt y) = VInt <$> bpMuxInt bp b x y
value _ (VArray x) (VArray y) = VArray <$> bpMuxArray bp b x y
value _ (VIntMod n x) (VIntMod _ y) = VIntMod n <$> bpMuxInt bp b x y
value tp x@(VWord _) y = do xv <- toVector' x
value tp (VVector xv) y
value tp x y@(VWord _) = do yv <- toVector' y
value tp x (VVector yv)
value _ x@(VNat _) y = nat x y
value _ x@(VBVToNat _ _) y = nat x y
value _ x@(VIntToNat _) y = nat x y
value _ (TValue x) (TValue y) = TValue <$> tvalue x y
value tp x y =
panic $ "Verifier.SAW.Simulator.Prims.iteOp: malformed arguments: " <>
show x <> " " <> show y <> " " <> show tp
ctorArgs :: TValue l -> [Thunk l] -> [Thunk l] -> [Thunk l] -> EvalM l [Thunk l]
-- consume the data type parameters and compute the type of the constructor
ctorArgs (VPiType _nm _t1 body) (p:ps) xs ys =
do t' <- applyPiBody body p
ctorArgs t' ps xs ys
-- mux the arguments one at a time, as long as the constructor type is not
-- a dependent function
    ctorArgs (VPiType _nm t1 (VNondependentPi t2)) [] (x:xs) (y:ys) =
do z <- thunk t1 x y
zs <- ctorArgs t2 [] xs ys
pure (z:zs)
ctorArgs _ [] [] [] = pure []
ctorArgs (VPiType _nm _t1 (VDependentPi _)) [] _ _ =
unsupportedPrimitive "muxValue" "cannot mux constructors with dependent types"
ctorArgs _ _ _ _ =
panic $ "Verifier.SAW.Simulator.Prims.iteOp: constructor arguments mismtch"
tvalue :: TValue l -> TValue l -> EvalM l (TValue l)
tvalue (VSort x) (VSort y) | x == y = return $ VSort y
tvalue x y =
panic $ "Verifier.SAW.Simulator.Prims.iteOp: malformed arguments: "
++ show x ++ " " ++ show y
toVector' :: Value l -> EvalM l (Vector (Thunk l))
toVector' v =
let err msg = unsupportedPrimitive "muxValue: expected vector" (Text.unpack msg)
in runExceptT (toVector (bpUnpack bp) v) >>= either err pure
thunks :: TValue l -> Vector (Thunk l) -> Vector (Thunk l) -> EvalM l (Vector (Thunk l))
thunks tp xv yv
| V.length xv == V.length yv = V.zipWithM (thunk tp) xv yv
| otherwise = panic "Verifier.SAW.Simulator.Prims.iteOp: malformed arguments"
thunk :: TValue l -> Thunk l -> Thunk l -> EvalM l (Thunk l)
thunk tp x y = delay $
do x' <- force x
y' <- force y
value tp x' y'
nat :: Value l -> Value l -> MValue l
nat v1 v2 =
do let w = toInteger (max (natSize bp v1) (natSize bp v2))
unless (w <= toInteger (maxBound :: Int))
(panic "muxValue" ["width too large", show w])
x1 <- natToWord bp (fromInteger w) v1
x2 <- natToWord bp (fromInteger w) v2
VBVToNat (fromInteger w) . VWord <$> bpMuxWord bp b x1 x2
-- fix :: (a :: sort 0) -> (a -> a) -> a;
fixOp :: (VMonadLazy l, MonadFix (EvalM l), Show (Extra l)) => Prim l
fixOp =
constFun $
strictFun $ \f -> Prim
(force =<< mfix (\x -> delay (apply f x)))
------------------------------------------------------------
-- SMT Array
-- Array :: sort 0 -> sort 0 -> sort 0
arrayTypeOp :: VMonad l => Prim l
arrayTypeOp =
tvalFun $ \a ->
tvalFun $ \b ->
PrimValue (TValue (VArrayType a b))
-- arrayConstant :: (a b :: sort 0) -> b -> (Array a b);
arrayConstantOp :: VMonad l => BasePrims l -> Prim l
arrayConstantOp bp =
tvalFun $ \a ->
tvalFun $ \b ->
strictFun $ \e ->
Prim (VArray <$> bpArrayConstant bp a b e)
-- arrayLookup :: (a b :: sort 0) -> (Array a b) -> a -> b;
arrayLookupOp :: (VMonad l, Show (Extra l)) => BasePrims l -> Prim l
arrayLookupOp bp =
constFun $
constFun $
strictFun $ \f ->
strictFun $ \i -> Prim $
do f' <- toArray f
bpArrayLookup bp f' i
-- arrayUpdate :: (a b :: sort 0) -> (Array a b) -> a -> b -> (Array a b);
arrayUpdateOp :: (VMonad l, Show (Extra l)) => BasePrims l -> Prim l
arrayUpdateOp bp =
constFun $
constFun $
strictFun $ \f ->
strictFun $ \i ->
strictFun $ \e -> Prim $
do f' <- toArray f
VArray <$> bpArrayUpdate bp f' i e
-- arrayEq : (a b : sort 0) -> (Array a b) -> (Array a b) -> Bool;
arrayEqOp :: (VMonad l, Show (Extra l)) => BasePrims l -> Prim l
arrayEqOp bp =
constFun $
constFun $
strictFun $ \x ->
strictFun $ \y -> Prim $
do x' <- toArray x
y' <- toArray y
VBool <$> bpArrayEq bp x' y'
-- arrayCopy : (n : Nat) -> (a : sort 0) -> Array (Vec n Bool) a -> Vec n Bool -> Array (Vec n Bool) a -> Vec n Bool -> Vec n Bool -> Array (Vec n Bool) a;
arrayCopyOp :: (VMonad l, Show (Extra l)) => BasePrims l -> Prim l
arrayCopyOp bp =
constFun $
constFun $
strictFun $ \f ->
strictFun $ \i ->
strictFun $ \g ->
strictFun $ \j ->
strictFun $ \l -> Prim $
do f' <- toArray f
i' <- toWord (bpPack bp) i
g' <- toArray g
j' <- toWord (bpPack bp) j
l' <- toWord (bpPack bp) l
VArray <$> (bpArrayCopy bp) f' i' g' j' l'
-- arraySet : (n : Nat) -> (a : sort 0) -> Array (Vec n Bool) a -> Vec n Bool -> a -> Vec n Bool -> Array (Vec n Bool) a;
arraySetOp :: (VMonad l, Show (Extra l)) => BasePrims l -> Prim l
arraySetOp bp =
constFun $
constFun $
strictFun $ \f ->
strictFun $ \i ->
strictFun $ \e ->
strictFun $ \l -> Prim $
do f' <- toArray f
i' <- toWord (bpPack bp) i
l' <- toWord (bpPack bp) l
VArray <$> (bpArraySet bp) f' i' e l'
-- arrayRangeEq : (n : Nat) -> (a : sort 0) -> Array (Vec n Bool) a -> Vec n Bool -> Array (Vec n Bool) a -> Vec n Bool -> Vec n Bool -> Bool;
arrayRangeEqOp :: (VMonad l, Show (Extra l)) => BasePrims l -> Prim l
arrayRangeEqOp bp =
constFun $
constFun $
strictFun $ \f ->
strictFun $ \i ->
strictFun $ \g ->
strictFun $ \j ->
strictFun $ \l -> Prim $
do f' <- toArray f
i' <- toWord (bpPack bp) i
g' <- toArray g
j' <- toWord (bpPack bp) j
l' <- toWord (bpPack bp) l
VBool <$> (bpArrayRangeEq bp) f' i' g' j' l'
|
GaloisInc/saw-script
|
saw-core/src/Verifier/SAW/Simulator/Prims.hs
|
bsd-3-clause
| 49,399 | 0 | 35 | 13,516 | 18,857 | 9,402 | 9,455 | 1,073 | 27 |
module Exercises912 where
import Data.Char
-- Data.Char problems
-- 1. isUpper :: Char -> Bool
-- toUpper :: Char -> Char
-- 2.
removeLower :: [Char] -> [Char]
removeLower = filter isUpper
-- 3.
capitalize :: [Char] -> [Char]
capitalize (x:xs) = (toUpper x):xs
capitalize _ = []
-- 4.
allCaps :: [Char] -> [Char]
allCaps [] = []
allCaps (x:xs) = toUpper x : allCaps xs
-- 5.
firstLetterCap :: [Char] -> Maybe Char
firstLetterCap xs
| length xs >= 1 = Just (toUpper $ head xs)
| otherwise = Nothing
-- 6.
firstLetterCapComposed :: [Char] -> Maybe Char
firstLetterCapComposed xs
| length xs >= 1 = Just . toUpper . head $ xs
| otherwise = Nothing
firstLetterCapPF :: [Char] -> Char
firstLetterCapPF = toUpper . head
-- Ciphers - See Ciphers.hs
-- Writing your own Standard Functions
-- Takes a list of Bools and returns True if any Bool in the list is True
myOr :: [Bool] -> Bool
myOr [] = False
myOr (x:xs) = if x == True then True else myOr xs
-- returns True if a -> Bool applied to any of the values in the list returns True.
myAny :: (a -> Bool) -> [a] -> Bool
myAny _ [] = False
myAny f (x:xs) = if f x == True then True else myAny f xs
-- returns True if the element is present in the list
myElem :: Eq a => a -> [a] -> Bool
myElem _ [] = False
myElem x (y:ys) = if x == y then True else myElem x ys
myElem' :: Eq a => a -> [a] -> Bool
myElem' _ [] = False
myElem' x y = myAny ((==) x) y
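-- For example:
-- Prelude> myElem 3 [1, 2, 3]
-- True
-- Prelude> myElem' 5 [1, 2, 3]
-- False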
-- Reverse a list
myReverse :: [a] -> [a]
myReverse [] = []
myReverse (x:xs) = myReverse xs ++ [x]
-- flattens a list of lists into a single list
squish :: [[a]] -> [a]
squish [] = []
squish (x:xs) = x ++ squish xs
-- maps a function over a list and concatenates the results.
-- Prelude> squishMap (\x -> [1, x, 3]) [2]
-- [1,2,3]
-- Prelude> squishMap (\x -> "WO "++[x]++" HOO ") "123"
-- "WO 1 HOO WO 2 HOO WO 3 HOO "
squishMap :: (a -> [b]) -> [a] -> [b]
squishMap _ [] = []
squishMap f (x:xs) = f x ++ squishMap f xs
-- flattens a list of lists into a list. This time re-use the squishMap function.
squishAgain :: [[a]] -> [a]
squishAgain = squishMap id
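-- Illustrative checks (assumed, not part of the exercise text):
-- squish [[1,2],[3]] == [1,2,3]
-- squishAgain [[1,2],[3]] == [1,2,3]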
-- takes a comparison function and a list and returns the greatest element of the
-- list based on the last value that the comparison returned GT for.
-- Prelude> let xs = [1, 53, 9001, 10]
-- Prelude> myMaximumBy compare xs
-- 9001
myMaximumBy :: (a -> a -> Ordering) -> [a] -> a
myMaximumBy f (x:xs) = go f xs x
where go _ [] acc = acc
go f (x:xs) acc
| f x acc == GT = go f xs x
| otherwise = go f xs acc
-- myMinimumBy takes a comparison function and a list and returns the least element of the
-- list based on the last value that the comparison returned LT for.
myMinimumBy :: (a -> a -> Ordering) -> [a] -> a
myMinimumBy f (x:xs) = go f xs x
where go _ [] acc = acc
go f (x:xs) acc
| f x acc == LT = go f xs x
| otherwise = go f xs acc
myMaximum :: (Ord a) => [a] -> a
myMaximum = myMaximumBy compare
myMinimum :: (Ord a) => [a] -> a
myMinimum = myMinimumBy compare
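-- Illustrative checks (assumed, not part of the exercise text):
-- myMaximum [1, 53, 9001, 10] == 9001
-- myMinimum [1, 53, 9001, 10] == 1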
|
pdmurray/haskell-book-ex
|
src/ch9/Exercises9.12.hs
|
bsd-3-clause
| 3,051 | 0 | 11 | 757 | 1,057 | 562 | 495 | 59 | 2 |
module Game.LambdaPad.Core.Run
( Stop, stop
, PadConfigSelector, runPadConfigSelector
, padConfigByName
, padConfigByShortName
, padConfigByDefault
, startLambdaPad
, rawLambdaPad
) where
import Game.LambdaPad.Core.Internal
|
zearen-wover/lambda-pad-core
|
src/Game/LambdaPad/Core/Run.hs
|
bsd-3-clause
| 241 | 0 | 4 | 39 | 44 | 30 | 14 | 9 | 0 |
{-# LANGUAGE CPP,TemplateHaskell #-}
{- This module is used to create arrays from lists in Template Haskell -}
module Data.Encoding.Helper.Template where
import Data.Char
import Data.Word
import Data.Array.IArray (Array,array)
import Language.Haskell.TH
createCharArray :: [(Integer,Char)] -> Integer -> Integer -> Q Exp
#ifndef __HADDOCK__
createCharArray lst = createArray (map (\(x,y) -> (x,LitE $ CharL y)) lst)
#endif
createArray :: [(Integer,Exp)] -> Integer -> Integer -> Q Exp
#ifndef __HADDOCK__
createArray lst from to = return $ AppE
(AppE
(VarE 'array)
(TupE [LitE $ IntegerL from,LitE $ IntegerL to]))
(ListE [ TupE [LitE $ IntegerL x,y]
| (x,y) <- lst ])
#endif
xmlArray :: [(Char,[Word8])] -> Integer -> Integer -> Q Exp
#ifndef __HADDOCK__
xmlArray lst l u = do
let trans = map (\(ch,bin) ->
(toInteger $ ord ch
,TupE [LitE $ IntegerL (toInteger $ length bin),TupE $ map (\b -> LitE $ IntegerL (fromIntegral b)) bin ++ replicate (4-length bin) (LitE $ IntegerL 0)]
)) (filter (\(c,_) -> ord c <= fromInteger u && ord c >= fromInteger l) lst)
createArray trans l u
#endif
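-- Illustrative use (a sketch; the concrete element type is an assumption):
-- $(createCharArray [(0,'a'),(1,'b')] 0 1) :: Array Integer Char
-- would splice to roughly: array (0,1) [(0,'a'),(1,'b')]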
|
abuiles/turbinado-blog
|
tmp/dependencies/encoding-0.4.1/Data/Encoding/Helper/Template.hs
|
bsd-3-clause
| 1,108 | 14 | 20 | 195 | 475 | 260 | 215 | 22 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Compiler
-- Copyright : Isaac Jones 2003-2004
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- This should be a much more sophisticated abstraction than it is. Currently
-- it's just a bit of data about the compiler, like it's flavour and name and
-- version. The reason it's just data is because currently it has to be in
-- 'Read' and 'Show' so it can be saved along with the 'LocalBuildInfo'. The
-- only interesting bit of info it contains is a mapping between language
-- extensions and compiler command line flags. This module also defines a
-- 'PackageDB' type which is used to refer to package databases. Most compilers
-- only know about a single global package collection but GHC has a global and
-- per-user one and it lets you create arbitrary other package databases. We do
-- not yet fully support this latter feature.
{- All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Isaac Jones nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
module Distribution.Simple.Compiler (
-- * Haskell implementations
module Distribution.Compiler,
Compiler(..),
showCompilerId, compilerFlavor, compilerVersion,
-- * Support for package databases
PackageDB(..),
PackageDBStack,
registrationPackageDB,
-- * Support for optimisation levels
OptimisationLevel(..),
flagToOptimisationLevel,
-- * Support for language extensions
Flag,
languageToFlags,
unsupportedLanguages,
extensionsToFlags,
unsupportedExtensions
) where
import Distribution.Compiler
import Distribution.Version (Version(..))
import Distribution.Text (display)
import Language.Haskell.Extension (Language(Haskell98), Extension)
import Data.List (nub)
import Data.Maybe (catMaybes, isNothing)
data Compiler = Compiler {
compilerId :: CompilerId,
compilerLanguages :: [(Language, Flag)],
compilerExtensions :: [(Extension, Flag)]
}
deriving (Show, Read)
showCompilerId :: Compiler -> String
showCompilerId = display . compilerId
compilerFlavor :: Compiler -> CompilerFlavor
compilerFlavor = (\(CompilerId f _) -> f) . compilerId
compilerVersion :: Compiler -> Version
compilerVersion = (\(CompilerId _ v) -> v) . compilerId
-- ------------------------------------------------------------
-- * Package databases
-- ------------------------------------------------------------
-- |Some compilers have a notion of a database of available packages.
-- For some there is just one global db of packages, other compilers
-- support a per-user or an arbitrary db specified at some location in
-- the file system. This can be used to build isolated environments of
-- packages, for example to build a collection of related packages
-- without installing them globally.
--
data PackageDB = GlobalPackageDB
| UserPackageDB
| SpecificPackageDB FilePath
deriving (Eq, Ord, Show, Read)
-- | We typically get packages from several databases, and stack them
-- together. This type lets us be explicit about that stacking. For example
-- typical stacks include:
--
-- > [GlobalPackageDB]
-- > [GlobalPackageDB, UserPackageDB]
-- > [GlobalPackageDB, SpecificPackageDB "package.conf.inplace"]
--
-- Note that the 'GlobalPackageDB' is invariably at the bottom since it
-- contains the rts, base and other special compiler-specific packages.
--
-- We are not restricted to using just the above combinations. In particular
-- we can use several custom package dbs and the user package db together.
--
-- When it comes to writing, the top most (last) package is used.
--
type PackageDBStack = [PackageDB]
-- | Return the package that we should register into. This is the package db at
-- the top of the stack.
--
registrationPackageDB :: PackageDBStack -> PackageDB
registrationPackageDB [] = error "internal error: empty package db set"
registrationPackageDB dbs = last dbs
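-- For example (illustrative): registrationPackageDB [GlobalPackageDB, UserPackageDB]
-- evaluates to UserPackageDB, the top of the stack.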
-- ------------------------------------------------------------
-- * Optimisation levels
-- ------------------------------------------------------------
-- | Some compilers support optimising. Some have different levels.
-- For compilers that do not, the level is just capped to the level
-- they do support.
--
data OptimisationLevel = NoOptimisation
| NormalOptimisation
| MaximumOptimisation
deriving (Eq, Show, Read, Enum, Bounded)
flagToOptimisationLevel :: Maybe String -> OptimisationLevel
flagToOptimisationLevel Nothing = NormalOptimisation
flagToOptimisationLevel (Just s) = case reads s of
[(i, "")]
| i >= fromEnum (minBound :: OptimisationLevel)
&& i <= fromEnum (maxBound :: OptimisationLevel)
-> toEnum i
| otherwise -> error $ "Bad optimisation level: " ++ show i
++ ". Valid values are 0..2"
_ -> error $ "Can't parse optimisation level " ++ s
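-- Illustrative behaviour (not part of the original module):
-- flagToOptimisationLevel Nothing == NormalOptimisation
-- flagToOptimisationLevel (Just "2") == MaximumOptimisation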
-- ------------------------------------------------------------
-- * Languages and Extensions
-- ------------------------------------------------------------
unsupportedLanguages :: Compiler -> [Language] -> [Language]
unsupportedLanguages comp langs =
[ lang | lang <- langs
, isNothing (languageToFlag comp lang) ]
languageToFlags :: Compiler -> Maybe Language -> [Flag]
languageToFlags comp = filter (not . null)
. catMaybes . map (languageToFlag comp)
. maybe [Haskell98] (\x->[x])
languageToFlag :: Compiler -> Language -> Maybe Flag
languageToFlag comp ext = lookup ext (compilerLanguages comp)
-- |For the given compiler, return the extensions it does not support.
unsupportedExtensions :: Compiler -> [Extension] -> [Extension]
unsupportedExtensions comp exts =
[ ext | ext <- exts
, isNothing (extensionToFlag comp ext) ]
type Flag = String
-- |For the given compiler, return the flags for the supported extensions.
extensionsToFlags :: Compiler -> [Extension] -> [Flag]
extensionsToFlags comp = nub . filter (not . null)
. catMaybes . map (extensionToFlag comp)
extensionToFlag :: Compiler -> Extension -> Maybe Flag
extensionToFlag comp ext = lookup ext (compilerExtensions comp)
|
alphaHeavy/cabal
|
Cabal/Distribution/Simple/Compiler.hs
|
bsd-3-clause
| 7,751 | 0 | 15 | 1,546 | 915 | 533 | 382 | 73 | 2 |
{-# LANGUAGE
DeriveFunctor
, DeriveFoldable
, DeriveTraversable
, TemplateHaskell
, EmptyDataDecls
, TypeFamilies
#-}
module Data.Tree.Abstract where
import Data.Binary
import Data.Foldable
import Data.Traversable
import Data.Fixpoint
import Generics.Regular hiding (Fix (..))
import qualified Generics.Regular.Functions.Binary as G
-- | Binary search tree datatype parametrized with key/value types and
-- recursive positions.
data TreeF k v f = Leaf | Branch k v f f
deriving ( Eq, Ord, Show
, Functor, Foldable, Traversable
)
type Tree k v = Fix (TreeF k v)
-- Derive generic representation using Regular.
$(deriveAll ''TreeF "PFTree")
type instance PF (TreeF k v f) = PFTree k v f
-- Binary instance, we get this for free using the generic binary function.
instance (Binary k, Binary v, Binary f) => Binary (TreeF k v f) where
put = G.gput
get = G.gget
-- Destructor.
tree :: r -> (k -> v -> f -> f -> r) -> TreeF k v f -> r
tree l _ Leaf = l
tree _ b (Branch k v l r) = b k v l r
-- Smart constructors.
leaf :: Tree k v
leaf = In Leaf
branch :: k -> v -> Tree k v -> Tree k v -> Tree k v
branch k v l r = In (Branch k v l r)
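-- Illustrative example (assumed, not part of the original module):
-- example :: Tree Int String
-- example = branch 1 "root" (branch 0 "left" leaf leaf) leaf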
|
sebastiaanvisser/fixpoints
|
src/Data/Tree/Abstract.hs
|
bsd-3-clause
| 1,200 | 0 | 11 | 288 | 391 | 212 | 179 | 30 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE UndecidableInstances #-}
-------------------------------------------------------------------------------
-- |
-- Module : Database.Bloodhound.Types
-- Copyright : (C) 2014 Chris Allen
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Chris Allen <[email protected]>
-- Stability : provisional
-- Portability : DeriveGeneric, RecordWildCards
--
-- Data types describing the actions and data structures used to interact
-- with Elasticsearch. The two main buckets your queries against Elasticsearch
-- will fall into are 'Query's and 'Filter's. 'Filter's are more like
-- traditional database constraints and often have preferable performance
-- properties. 'Query's support human-written textual queries, such as fuzzy
-- queries.
-------------------------------------------------------------------------------
module Database.Bloodhound.Types
( defaultCache
, defaultIndexSettings
, mkSort
, showText
, unpackId
, mkMatchQuery
, mkMultiMatchQuery
, mkBoolQuery
, mkRangeQuery
, mkQueryStringQuery
, mkAggregations
, mkTermsAggregation
, mkTermsScriptAggregation
, mkDateHistogram
, toTerms
, toDateHistogram
, omitNulls
, BH
, runBH
, BHEnv(..)
, MonadBH(..)
, Version(..)
, Status(..)
, Existence(..)
, NullValue(..)
, IndexSettings(..)
, Server(..)
, Reply
, EsResult(..)
, Query(..)
, Search(..)
, SearchResult(..)
, SearchHits(..)
, TrackSortScores
, From(..)
, Size(..)
, ShardResult(..)
, Hit(..)
, Filter(..)
, Seminearring(..)
, BoolMatch(..)
, Term(..)
, GeoPoint(..)
, GeoBoundingBoxConstraint(..)
, GeoBoundingBox(..)
, GeoFilterType(..)
, Distance(..)
, DistanceUnit(..)
, DistanceType(..)
, DistanceRange(..)
, OptimizeBbox(..)
, LatLon(..)
, RangeValue(..)
, RangeExecution(..)
, LessThan(..)
, LessThanEq(..)
, GreaterThan(..)
, GreaterThanEq(..)
, LessThanD(..)
, LessThanEqD(..)
, GreaterThanD(..)
, GreaterThanEqD(..)
, Regexp(..)
, RegexpFlags(..)
, RegexpFlag(..)
, FieldName(..)
, IndexName(..)
, MappingName(..)
, DocId(..)
, CacheName(..)
, CacheKey(..)
, BulkOperation(..)
, ReplicaCount(..)
, ShardCount(..)
, Sort
, SortMode(..)
, SortOrder(..)
, SortSpec(..)
, DefaultSort(..)
, Missing(..)
, OpenCloseIndex(..)
, Method
, Boost(..)
, MatchQuery(..)
, MultiMatchQuery(..)
, BoolQuery(..)
, BoostingQuery(..)
, CommonTermsQuery(..)
, DisMaxQuery(..)
, FilteredQuery(..)
, FuzzyLikeThisQuery(..)
, FuzzyLikeFieldQuery(..)
, FuzzyQuery(..)
, HasChildQuery(..)
, HasParentQuery(..)
, IndicesQuery(..)
, MoreLikeThisQuery(..)
, MoreLikeThisFieldQuery(..)
, NestedQuery(..)
, PrefixQuery(..)
, QueryStringQuery(..)
, SimpleQueryStringQuery(..)
, RangeQuery(..)
, RegexpQuery(..)
, QueryString(..)
, BooleanOperator(..)
, ZeroTermsQuery(..)
, CutoffFrequency(..)
, Analyzer(..)
, MaxExpansions(..)
, Lenient(..)
, MatchQueryType(..)
, MultiMatchQueryType(..)
, Tiebreaker(..)
, MinimumMatch(..)
, DisableCoord(..)
, CommonMinimumMatch(..)
, MinimumMatchHighLow(..)
, PrefixLength(..)
, Fuzziness(..)
, IgnoreTermFrequency(..)
, MaxQueryTerms(..)
, ScoreType(..)
, Score
, Cache
, TypeName(..)
, BoostTerms(..)
, MaxWordLength(..)
, MinWordLength(..)
, MaxDocFrequency(..)
, MinDocFrequency(..)
, PhraseSlop(..)
, StopWord(..)
, QueryPath(..)
, MinimumTermFrequency(..)
, PercentMatch(..)
, FieldDefinition(..)
, MappingField(..)
, Mapping(..)
, AllowLeadingWildcard(..)
, LowercaseExpanded(..)
, GeneratePhraseQueries(..)
, Locale(..)
, AnalyzeWildcard(..)
, EnablePositionIncrements(..)
, SimpleQueryFlag(..)
, FieldOrFields(..)
, Monoid(..)
, ToJSON(..)
, Interval(..)
, TimeInterval(..)
, ExecutionHint(..)
, CollectionMode(..)
, TermOrder(..)
, TermInclusion(..)
, Aggregation(..)
, Aggregations
, AggregationResults
, Bucket(..)
, BucketAggregation(..)
, TermsAggregation(..)
, DateHistogramAggregation(..)
, Highlights(..)
, FieldHighlight(..)
, HighlightSettings(..)
, PlainHighlight(..)
, PostingsHighlight(..)
, FastVectorHighlight(..)
, CommonHighlight(..)
, NonPostings(..)
, HighlightEncoder(..)
, HighlightTag(..)
, HitHighlight
, TermsResult(..)
, DateHistogramResult(..)
) where
import Control.Applicative
import Control.Monad.Error
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Writer
import Data.Aeson
import Data.Aeson.Types (Pair, emptyObject, parseMaybe)
import qualified Data.ByteString.Lazy.Char8 as L
import Data.List (nub)
import Data.List.NonEmpty (NonEmpty (..), toList)
import qualified Data.Map.Strict as M
import Data.Text (Text)
import qualified Data.Text as T
import Data.Time.Clock (UTCTime)
import qualified Data.Vector as V
import GHC.Generics (Generic)
import Network.HTTP.Client
import qualified Network.HTTP.Types.Method as NHTM
import Database.Bloodhound.Types.Class
-- $setup
-- >>> :set -XOverloadedStrings
-- >>> import Database.Bloodhound
-- >>> let testServer = (Server "http://localhost:9200")
-- >>> let testIndex = IndexName "twitter"
-- >>> let testMapping = MappingName "tweet"
-- >>> let defaultIndexSettings = IndexSettings (ShardCount 3) (ReplicaCount 2)
-- defaultIndexSettings is exported by Database.Bloodhound as well
-- no trailing slashes in servers, library handles building the path.
{-| Common environment for Elasticsearch calls. Connections will be
pipelined according to the provided HTTP connection manager.
-}
data BHEnv = BHEnv { bhServer :: Server
, bhManager :: Manager
}
{-| All API calls to Elasticsearch operate within
MonadBH. The idea is that it can be easily embedded in your
own monad transformer stack. A default instance for a ReaderT and
alias 'BH' is provided for the simple case.
-}
class (Functor m, Applicative m, MonadIO m) => MonadBH m where
getBHEnv :: m BHEnv
newtype BH m a = BH {
unBH :: ReaderT BHEnv m a
} deriving ( Functor
, Applicative
, Monad
, MonadIO
, MonadState s
, MonadWriter w
, MonadError e
, Alternative
, MonadPlus
, MonadFix)
instance MonadTrans BH where
lift = BH . lift
instance (MonadReader r m) => MonadReader r (BH m) where
ask = lift ask
local f (BH (ReaderT m)) = BH $ ReaderT $ \r ->
local f (m r)
instance (Functor m, Applicative m, MonadIO m) => MonadBH (BH m) where
getBHEnv = BH getBHEnv
instance (Functor m, Applicative m, MonadIO m) => MonadBH (ReaderT BHEnv m) where
getBHEnv = ask
runBH :: BHEnv -> BH m a -> m a
runBH e f = runReaderT (unBH f) e
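-- Illustrative sketch (assumed names; 'someBHAction' is hypothetical and the
-- 'Manager' comes from http-client):
--
-- > example = do
-- >   mgr <- newManager defaultManagerSettings
-- >   runBH (BHEnv (Server "http://localhost:9200") mgr) someBHAction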
{-| 'Version' is embedded in 'Status' -}
data Version = Version { number :: Text
, build_hash :: Text
, build_timestamp :: UTCTime
, build_snapshot :: Bool
, lucene_version :: Text } deriving (Eq, Show, Generic)
{-| 'Status' is a data type for describing the JSON body returned by
Elasticsearch when you query its status. This was deprecated in 1.2.0.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-status.html#indices-status>
-}
data Status = Status { ok :: Maybe Bool
, status :: Int
, name :: Text
, version :: Version
, tagline :: Text } deriving (Eq, Show)
{-| 'IndexSettings' is used to configure the shards and replicas when you create
an Elasticsearch Index.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-create-index.html>
-}
data IndexSettings =
IndexSettings { indexShards :: ShardCount
, indexReplicas :: ReplicaCount } deriving (Eq, Show)
{-| 'defaultIndexSettings' is an 'IndexSettings' with 3 shards and 2 replicas. -}
defaultIndexSettings :: IndexSettings
defaultIndexSettings = IndexSettings (ShardCount 3) (ReplicaCount 2)
{-| 'Reply' and 'Method' are type synonyms from 'Network.HTTP.Types.Method.Method' -}
type Reply = Network.HTTP.Client.Response L.ByteString
type Method = NHTM.Method
{-| 'OpenCloseIndex' is a sum type for opening and closing indices.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-open-close.html>
-}
data OpenCloseIndex = OpenIndex | CloseIndex deriving (Eq, Show)
data FieldType = GeoPointType
| GeoShapeType
| FloatType
| IntegerType
| LongType
| ShortType
| ByteType deriving (Eq, Show)
data FieldDefinition =
FieldDefinition { fieldType :: FieldType } deriving (Eq, Show)
data MappingField =
MappingField { mappingFieldName :: FieldName
, fieldDefinition :: FieldDefinition } deriving (Eq, Show)
{-| Support for type reification of 'Mapping's is currently incomplete, for
now the mapping API verbiage expects a 'ToJSON'able blob.
Indexes have mappings; mappings are schemas for the documents contained in the
index. I'd recommend having only one mapping per index, always having a mapping,
and keeping different kinds of documents separated if possible.
-}
data Mapping = Mapping { typeName :: TypeName
, mappingFields :: [MappingField] } deriving (Eq, Show)
{-| 'BulkOperation' is a sum type for expressing the four kinds of bulk
operation: index, create, delete, and update. 'BulkIndex' behaves like an
"upsert", 'BulkCreate' will fail if a document already exists at the DocId.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/docs-bulk.html#docs-bulk>
-}
data BulkOperation =
BulkIndex IndexName MappingName DocId Value
| BulkCreate IndexName MappingName DocId Value
| BulkDelete IndexName MappingName DocId
| BulkUpdate IndexName MappingName DocId Value deriving (Eq, Show)
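-- Illustrative example (assumed names; 'tweet' is a hypothetical ToJSON value):
-- BulkIndex (IndexName "twitter") (MappingName "tweet") (DocId "1") (toJSON tweet)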
{-| 'EsResult' describes the standard wrapper JSON document that you see in
successful Elasticsearch responses.
-}
data EsResult a = EsResult { _index :: Text
, _type :: Text
, _id :: Text
, _version :: Int
, found :: Maybe Bool
, _source :: a } deriving (Eq, Show)
{-| 'Sort' is a synonym for a list of 'SortSpec's. Sort behavior is order
dependent with later sorts acting as tie-breakers for earlier sorts.
-}
type Sort = [SortSpec]
{-| The two main kinds of 'SortSpec' are 'DefaultSortSpec' and
'GeoDistanceSortSpec'. The latter takes a 'SortOrder', 'GeoPoint', and
'DistanceUnit' to express "nearness" to a single geographical point as a
sort specification.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#search-request-sort>
-}
data SortSpec = DefaultSortSpec DefaultSort
| GeoDistanceSortSpec SortOrder GeoPoint DistanceUnit deriving (Eq, Show)
{-| 'DefaultSort' is usually the kind of 'SortSpec' you'll want. There's a
'mkSort' convenience function for when you want to specify only the most
common parameters.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#search-request-sort>
-}
data DefaultSort =
DefaultSort { sortFieldName :: FieldName
, sortOrder :: SortOrder
-- default False
, ignoreUnmapped :: Bool
, sortMode :: Maybe SortMode
, missingSort :: Maybe Missing
, nestedFilter :: Maybe Filter } deriving (Eq, Show)
{-| 'SortOrder' is 'Ascending' or 'Descending', as you might expect. These get
encoded into "asc" or "desc" when turned into JSON.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#search-request-sort>
-}
data SortOrder = Ascending
| Descending deriving (Eq, Show)
{-| 'Missing' prescribes how to handle missing fields. A missing field can be
sorted last, first, or using a custom value as a substitute.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#_missing_values>
-}
data Missing = LastMissing
| FirstMissing
| CustomMissing Text deriving (Eq, Show)
{-| 'SortMode' prescribes how to handle sorting array/multi-valued fields.
http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#_sort_mode_option
-}
data SortMode = SortMin
| SortMax
| SortSum
| SortAvg deriving (Eq, Show)
{-| 'mkSort' defaults everything but the 'FieldName' and the 'SortOrder' so
that you can concisely describe the usual kind of 'SortSpec's you want.
-}
mkSort :: FieldName -> SortOrder -> DefaultSort
mkSort fieldName sOrder = DefaultSort fieldName sOrder False Nothing Nothing Nothing
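-- Illustrative example (assumed field name):
-- DefaultSortSpec (mkSort (FieldName "created_at") Descending) :: SortSpec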
{-| 'Cache' is for telling ES whether or not it should cache a 'Filter'.
'Query's cannot be cached.
-}
type Cache = Bool -- caching on/off
defaultCache :: Cache
defaultCache = False
{-| 'PrefixValue' is used in 'PrefixQuery' as the main query component.
-}
type PrefixValue = Text
{-| 'BooleanOperator' is the usual And/Or operators with an ES compatible
JSON encoding baked in. Used all over the place.
-}
data BooleanOperator = And | Or deriving (Eq, Show)
{-| 'ShardCount' is part of 'IndexSettings'
-}
newtype ShardCount = ShardCount Int deriving (Eq, Show, Generic)
{-| 'ReplicaCount' is part of 'IndexSettings'
-}
newtype ReplicaCount = ReplicaCount Int deriving (Eq, Show, Generic)
{-| 'Server' is used with the client functions to point at the ES instance
-}
newtype Server = Server Text deriving (Eq, Show)
{-| 'IndexName' is used to describe which index to query/create/delete
-}
newtype IndexName = IndexName Text deriving (Eq, Generic, Show)
{-| 'MappingName' is part of mappings which are how ES describes and schematizes
the data in the indices.
-}
newtype MappingName = MappingName Text deriving (Eq, Generic, Show)
{-| 'DocId' is a generic wrapper value for expressing unique Document IDs.
Can be set by the user or created by ES itself. Often used in client
functions for poking at specific documents.
-}
newtype DocId = DocId Text deriving (Eq, Generic, Show)
{-| 'QueryString' is used to wrap query text bodies, be they human written or not.
-}
newtype QueryString = QueryString Text deriving (Eq, Generic, Show)
{-| 'FieldName' is used all over the place wherever a specific field within
a document needs to be specified, usually in 'Query's or 'Filter's.
-}
newtype FieldName = FieldName Text deriving (Eq, Show)
{-| 'CacheName' is used in 'RegexpFilter' for describing the
'CacheKey' keyed caching behavior.
-}
newtype CacheName = CacheName Text deriving (Eq, Show)
{-| 'CacheKey' is used in 'RegexpFilter' to key regex caching.
-}
newtype CacheKey =
CacheKey Text deriving (Eq, Show)
newtype Existence =
Existence Bool deriving (Eq, Show)
newtype NullValue =
NullValue Bool deriving (Eq, Show)
newtype CutoffFrequency =
CutoffFrequency Double deriving (Eq, Show, Generic)
newtype Analyzer =
Analyzer Text deriving (Eq, Show, Generic)
newtype MaxExpansions =
MaxExpansions Int deriving (Eq, Show, Generic)
{-| 'Lenient', if set to true, will cause format based failures to be
    ignored. I don't know what the bloody default is; the Elasticsearch
documentation didn't say what it was. Let me know if you figure it out.
-}
newtype Lenient =
Lenient Bool deriving (Eq, Show, Generic)
newtype Tiebreaker =
Tiebreaker Double deriving (Eq, Show, Generic)
newtype Boost =
Boost Double deriving (Eq, Show, Generic)
newtype BoostTerms =
BoostTerms Double deriving (Eq, Show, Generic)
{-| 'MinimumMatch' controls how many should clauses in the bool query should
match. Can be an absolute value (2) or a percentage (30%) or a
combination of both.
-}
newtype MinimumMatch =
MinimumMatch Int deriving (Eq, Show, Generic)
newtype MinimumMatchText =
MinimumMatchText Text deriving (Eq, Show)
newtype DisableCoord =
DisableCoord Bool deriving (Eq, Show, Generic)
newtype IgnoreTermFrequency =
IgnoreTermFrequency Bool deriving (Eq, Show, Generic)
newtype MinimumTermFrequency =
MinimumTermFrequency Int deriving (Eq, Show, Generic)
newtype MaxQueryTerms =
MaxQueryTerms Int deriving (Eq, Show, Generic)
newtype Fuzziness =
Fuzziness Double deriving (Eq, Show, Generic)
{-| 'PrefixLength' is the prefix length used in queries, defaults to 0. -}
newtype PrefixLength =
PrefixLength Int deriving (Eq, Show, Generic)
newtype TypeName =
TypeName Text deriving (Eq, Show, Generic)
newtype PercentMatch =
PercentMatch Double deriving (Eq, Show, Generic)
newtype StopWord =
StopWord Text deriving (Eq, Show, Generic)
newtype QueryPath =
QueryPath Text deriving (Eq, Show, Generic)
{-| Allowing a wildcard at the beginning of a word (e.g. "*ing") is particularly
heavy, because all terms in the index need to be examined, just in case
they match. Leading wildcards can be disabled by setting
'AllowLeadingWildcard' to false. -}
newtype AllowLeadingWildcard =
AllowLeadingWildcard Bool deriving (Eq, Show, Generic)
newtype LowercaseExpanded =
LowercaseExpanded Bool deriving (Eq, Show, Generic)
newtype EnablePositionIncrements =
EnablePositionIncrements Bool deriving (Eq, Show, Generic)
{-| By default, wildcard terms in a query are not analyzed.
Setting 'AnalyzeWildcard' to true enables best-effort analysis.
-}
newtype AnalyzeWildcard = AnalyzeWildcard Bool deriving (Eq, Show, Generic)
{-| 'GeneratePhraseQueries' defaults to false.
-}
newtype GeneratePhraseQueries =
GeneratePhraseQueries Bool deriving (Eq, Show, Generic)
{-| 'Locale' is used for string conversions - defaults to ROOT.
-}
newtype Locale = Locale Text deriving (Eq, Show, Generic)
newtype MaxWordLength = MaxWordLength Int deriving (Eq, Show, Generic)
newtype MinWordLength = MinWordLength Int deriving (Eq, Show, Generic)
{-| 'PhraseSlop' sets the default slop for phrases; 0 means exact
phrase matches. Default is 0.
-}
newtype PhraseSlop = PhraseSlop Int deriving (Eq, Show, Generic)
newtype MinDocFrequency = MinDocFrequency Int deriving (Eq, Show, Generic)
newtype MaxDocFrequency = MaxDocFrequency Int deriving (Eq, Show, Generic)
{-| 'unpackId' is a silly convenience function that gets used once.
-}
unpackId :: DocId -> Text
unpackId (DocId docId) = docId
type TrackSortScores = Bool
newtype From = From Int deriving (Eq, Show, ToJSON)
newtype Size = Size Int deriving (Eq, Show, ToJSON)
data Search = Search { queryBody :: Maybe Query
, filterBody :: Maybe Filter
, sortBody :: Maybe Sort
, aggBody :: Maybe Aggregations
, highlight :: Maybe Highlights
-- default False
, trackSortScores :: TrackSortScores
, from :: From
, size :: Size } deriving (Eq, Show)
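-- Illustrative example (assumed; 'q' is some 'Query'):
-- Search (Just q) Nothing Nothing Nothing Nothing False (From 0) (Size 10)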
data Highlights = Highlights { globalsettings :: Maybe HighlightSettings
, highlightFields :: [FieldHighlight]
} deriving (Show, Eq)
data FieldHighlight = FieldHighlight FieldName (Maybe HighlightSettings)
deriving (Show, Eq)
data HighlightSettings = Plain PlainHighlight
| Postings PostingsHighlight
| FastVector FastVectorHighlight
deriving (Show, Eq)
data PlainHighlight =
PlainHighlight { plainCommon :: Maybe CommonHighlight
, plainNonPost :: Maybe NonPostings } deriving (Show, Eq)
-- This requires that index_options are set to 'offset' in the mapping.
data PostingsHighlight = PostingsHighlight (Maybe CommonHighlight) deriving (Show, Eq)
-- This requires that term_vector is set to 'with_positions_offsets' in the mapping.
data FastVectorHighlight =
FastVectorHighlight { fvCommon :: Maybe CommonHighlight
, fvNonPostSettings :: Maybe NonPostings
, boundaryChars :: Maybe Text
, boundaryMaxScan :: Maybe Int
, fragmentOffset :: Maybe Int
, matchedFields :: [Text]
, phraseLimit :: Maybe Int
} deriving (Show, Eq)
data CommonHighlight =
CommonHighlight { order :: Maybe Text
, forceSource :: Maybe Bool
, tag :: Maybe HighlightTag
, encoder :: Maybe HighlightEncoder
, noMatchSize :: Maybe Int
, highlightQuery :: Maybe Query
, requireFieldMatch :: Maybe Bool
} deriving (Show, Eq)
-- Settings that are only applicable to FastVector and Plain highlighters.
data NonPostings =
NonPostings { fragmentSize :: Maybe Int
, numberOfFragments :: Maybe Int} deriving (Show, Eq)
data HighlightEncoder = DefaultEncoder
| HTMLEncoder
deriving (Show, Eq)
-- NOTE: Should the tags use some kind of HTML type, rather than Text?
data HighlightTag = TagSchema Text
| CustomTags ([Text], [Text]) -- Only uses more than the first value in the lists if fvh
deriving (Show, Eq)
data Query =
TermQuery Term (Maybe Boost)
| TermsQuery (NonEmpty Term)
| QueryMatchQuery MatchQuery
| QueryMultiMatchQuery MultiMatchQuery
| QueryBoolQuery BoolQuery
| QueryBoostingQuery BoostingQuery
| QueryCommonTermsQuery CommonTermsQuery
| ConstantScoreFilter Filter Boost
| ConstantScoreQuery Query Boost
| QueryDisMaxQuery DisMaxQuery
| QueryFilteredQuery FilteredQuery
| QueryFuzzyLikeThisQuery FuzzyLikeThisQuery
| QueryFuzzyLikeFieldQuery FuzzyLikeFieldQuery
| QueryFuzzyQuery FuzzyQuery
| QueryHasChildQuery HasChildQuery
| QueryHasParentQuery HasParentQuery
| IdsQuery MappingName [DocId]
| QueryIndicesQuery IndicesQuery
| MatchAllQuery (Maybe Boost)
| QueryMoreLikeThisQuery MoreLikeThisQuery
| QueryMoreLikeThisFieldQuery MoreLikeThisFieldQuery
| QueryNestedQuery NestedQuery
| QueryPrefixQuery PrefixQuery
| QueryQueryStringQuery QueryStringQuery
| QuerySimpleQueryStringQuery SimpleQueryStringQuery
| QueryRangeQuery RangeQuery
| QueryRegexpQuery RegexpQuery
deriving (Eq, Show)
data RegexpQuery =
RegexpQuery { regexpQueryField :: FieldName
, regexpQuery :: Regexp
, regexpQueryFlags :: RegexpFlags
, regexpQueryBoost :: Maybe Boost
} deriving (Eq, Show)
data RangeQuery =
RangeQuery { rangeQueryField :: FieldName
, rangeQueryRange :: RangeValue
, rangeQueryBoost :: Boost } deriving (Eq, Show)
mkRangeQuery :: FieldName -> RangeValue -> RangeQuery
mkRangeQuery f r = RangeQuery f r (Boost 1.0)
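-- Illustrative example (assumed field name):
-- mkRangeQuery (FieldName "age") (RangeDoubleGte (GreaterThanEq 21))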
data SimpleQueryStringQuery =
SimpleQueryStringQuery
{ simpleQueryStringQuery :: QueryString
, simpleQueryStringField :: Maybe FieldOrFields
, simpleQueryStringOperator :: Maybe BooleanOperator
, simpleQueryStringAnalyzer :: Maybe Analyzer
, simpleQueryStringFlags :: Maybe [SimpleQueryFlag]
, simpleQueryStringLowercaseExpanded :: Maybe LowercaseExpanded
, simpleQueryStringLocale :: Maybe Locale
} deriving (Eq, Show)
data SimpleQueryFlag =
SimpleQueryAll
| SimpleQueryNone
| SimpleQueryAnd
| SimpleQueryOr
| SimpleQueryPrefix
| SimpleQueryPhrase
| SimpleQueryPrecedence
| SimpleQueryEscape
| SimpleQueryWhitespace
| SimpleQueryFuzzy
| SimpleQueryNear
| SimpleQuerySlop deriving (Eq, Show)
-- use_dis_max and tie_breaker when fields are plural?
data QueryStringQuery =
QueryStringQuery
{ queryStringQuery :: QueryString
, queryStringDefaultField :: Maybe FieldName
, queryStringOperator :: Maybe BooleanOperator
, queryStringAnalyzer :: Maybe Analyzer
, queryStringAllowLeadingWildcard :: Maybe AllowLeadingWildcard
, queryStringLowercaseExpanded :: Maybe LowercaseExpanded
, queryStringEnablePositionIncrements :: Maybe EnablePositionIncrements
, queryStringFuzzyMaxExpansions :: Maybe MaxExpansions
, queryStringFuzziness :: Maybe Fuzziness
, queryStringFuzzyPrefixLength :: Maybe PrefixLength
, queryStringPhraseSlop :: Maybe PhraseSlop
, queryStringBoost :: Maybe Boost
, queryStringAnalyzeWildcard :: Maybe AnalyzeWildcard
, queryStringGeneratePhraseQueries :: Maybe GeneratePhraseQueries
, queryStringMinimumShouldMatch :: Maybe MinimumMatch
, queryStringLenient :: Maybe Lenient
, queryStringLocale :: Maybe Locale
} deriving (Eq, Show)
mkQueryStringQuery :: QueryString -> QueryStringQuery
mkQueryStringQuery qs =
QueryStringQuery qs Nothing Nothing
Nothing Nothing Nothing Nothing
Nothing Nothing Nothing Nothing
Nothing Nothing Nothing Nothing
Nothing Nothing
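-- Illustrative example (assumed query text):
-- QueryQueryStringQuery (mkQueryStringQuery (QueryString "haskell"))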
data FieldOrFields = FofField FieldName
| FofFields [FieldName] deriving (Eq, Show)
data PrefixQuery =
PrefixQuery
{ prefixQueryField :: FieldName
, prefixQueryPrefixValue :: Text
, prefixQueryBoost :: Maybe Boost } deriving (Eq, Show)
data NestedQuery =
NestedQuery
{ nestedQueryPath :: QueryPath
, nestedQueryScoreType :: ScoreType
, nestedQuery :: Query } deriving (Eq, Show)
data MoreLikeThisFieldQuery =
MoreLikeThisFieldQuery
{ moreLikeThisFieldText :: Text
, moreLikeThisFieldFields :: FieldName
-- default 0.3 (30%)
, moreLikeThisFieldPercentMatch :: Maybe PercentMatch
, moreLikeThisFieldMinimumTermFreq :: Maybe MinimumTermFrequency
, moreLikeThisFieldMaxQueryTerms :: Maybe MaxQueryTerms
, moreLikeThisFieldStopWords :: Maybe [StopWord]
, moreLikeThisFieldMinDocFrequency :: Maybe MinDocFrequency
, moreLikeThisFieldMaxDocFrequency :: Maybe MaxDocFrequency
, moreLikeThisFieldMinWordLength :: Maybe MinWordLength
, moreLikeThisFieldMaxWordLength :: Maybe MaxWordLength
, moreLikeThisFieldBoostTerms :: Maybe BoostTerms
, moreLikeThisFieldBoost :: Maybe Boost
, moreLikeThisFieldAnalyzer :: Maybe Analyzer
} deriving (Eq, Show)
data MoreLikeThisQuery =
MoreLikeThisQuery
{ moreLikeThisText :: Text
, moreLikeThisFields :: Maybe [FieldName]
-- default 0.3 (30%)
, moreLikeThisPercentMatch :: Maybe PercentMatch
, moreLikeThisMinimumTermFreq :: Maybe MinimumTermFrequency
, moreLikeThisMaxQueryTerms :: Maybe MaxQueryTerms
, moreLikeThisStopWords :: Maybe [StopWord]
, moreLikeThisMinDocFrequency :: Maybe MinDocFrequency
, moreLikeThisMaxDocFrequency :: Maybe MaxDocFrequency
, moreLikeThisMinWordLength :: Maybe MinWordLength
, moreLikeThisMaxWordLength :: Maybe MaxWordLength
, moreLikeThisBoostTerms :: Maybe BoostTerms
, moreLikeThisBoost :: Maybe Boost
, moreLikeThisAnalyzer :: Maybe Analyzer
} deriving (Eq, Show)
data IndicesQuery =
IndicesQuery
{ indicesQueryIndices :: [IndexName]
, indicesQuery :: Query
-- default "all"
, indicesQueryNoMatch :: Maybe Query } deriving (Eq, Show)
data HasParentQuery =
HasParentQuery
{ hasParentQueryType :: TypeName
, hasParentQuery :: Query
, hasParentQueryScoreType :: Maybe ScoreType } deriving (Eq, Show)
data HasChildQuery =
HasChildQuery
{ hasChildQueryType :: TypeName
, hasChildQuery :: Query
, hasChildQueryScoreType :: Maybe ScoreType } deriving (Eq, Show)
data ScoreType =
ScoreTypeMax
| ScoreTypeSum
| ScoreTypeAvg
| ScoreTypeNone deriving (Eq, Show)
data FuzzyQuery =
FuzzyQuery { fuzzyQueryField :: FieldName
, fuzzyQueryValue :: Text
, fuzzyQueryPrefixLength :: PrefixLength
, fuzzyQueryMaxExpansions :: MaxExpansions
, fuzzyQueryFuzziness :: Fuzziness
, fuzzyQueryBoost :: Maybe Boost
} deriving (Eq, Show)
data FuzzyLikeFieldQuery =
FuzzyLikeFieldQuery
{ fuzzyLikeField :: FieldName
-- anaphora is good for the soul.
, fuzzyLikeFieldText :: Text
, fuzzyLikeFieldMaxQueryTerms :: MaxQueryTerms
, fuzzyLikeFieldIgnoreTermFrequency :: IgnoreTermFrequency
, fuzzyLikeFieldFuzziness :: Fuzziness
, fuzzyLikeFieldPrefixLength :: PrefixLength
, fuzzyLikeFieldBoost :: Boost
, fuzzyLikeFieldAnalyzer :: Maybe Analyzer
} deriving (Eq, Show)
data FuzzyLikeThisQuery =
FuzzyLikeThisQuery
{ fuzzyLikeFields :: [FieldName]
, fuzzyLikeText :: Text
, fuzzyLikeMaxQueryTerms :: MaxQueryTerms
, fuzzyLikeIgnoreTermFrequency :: IgnoreTermFrequency
, fuzzyLikeFuzziness :: Fuzziness
, fuzzyLikePrefixLength :: PrefixLength
, fuzzyLikeBoost :: Boost
, fuzzyLikeAnalyzer :: Maybe Analyzer
} deriving (Eq, Show)
data FilteredQuery =
FilteredQuery
{ filteredQuery :: Query
, filteredFilter :: Filter } deriving (Eq, Show)
data DisMaxQuery =
DisMaxQuery { disMaxQueries :: [Query]
-- default 0.0
, disMaxTiebreaker :: Tiebreaker
, disMaxBoost :: Maybe Boost
} deriving (Eq, Show)
data MatchQuery =
MatchQuery { matchQueryField :: FieldName
, matchQueryQueryString :: QueryString
, matchQueryOperator :: BooleanOperator
, matchQueryZeroTerms :: ZeroTermsQuery
, matchQueryCutoffFrequency :: Maybe CutoffFrequency
, matchQueryMatchType :: Maybe MatchQueryType
, matchQueryAnalyzer :: Maybe Analyzer
, matchQueryMaxExpansions :: Maybe MaxExpansions
, matchQueryLenient :: Maybe Lenient } deriving (Eq, Show)
{-| 'mkMatchQuery' is a convenience function that defaults the less common parameters,
enabling you to provide only the 'FieldName' and 'QueryString' to make a 'MatchQuery'
-}
mkMatchQuery :: FieldName -> QueryString -> MatchQuery
mkMatchQuery field query = MatchQuery field query Or ZeroTermsNone Nothing Nothing Nothing Nothing Nothing
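-- Illustrative example (assumed field and query text):
-- QueryMatchQuery (mkMatchQuery (FieldName "user") (QueryString "bitemyapp"))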
data MatchQueryType =
MatchPhrase
| MatchPhrasePrefix deriving (Eq, Show)
data MultiMatchQuery =
MultiMatchQuery { multiMatchQueryFields :: [FieldName]
, multiMatchQueryString :: QueryString
, multiMatchQueryOperator :: BooleanOperator
, multiMatchQueryZeroTerms :: ZeroTermsQuery
, multiMatchQueryTiebreaker :: Maybe Tiebreaker
, multiMatchQueryType :: Maybe MultiMatchQueryType
, multiMatchQueryCutoffFrequency :: Maybe CutoffFrequency
, multiMatchQueryAnalyzer :: Maybe Analyzer
, multiMatchQueryMaxExpansions :: Maybe MaxExpansions
, multiMatchQueryLenient :: Maybe Lenient } deriving (Eq, Show)
{-| 'mkMultiMatchQuery' is a convenience function that defaults the less common parameters,
enabling you to provide only the list of 'FieldName's and 'QueryString' to
make a 'MultiMatchQuery'.
-}
mkMultiMatchQuery :: [FieldName] -> QueryString -> MultiMatchQuery
mkMultiMatchQuery matchFields query =
MultiMatchQuery matchFields query
Or ZeroTermsNone Nothing Nothing Nothing Nothing Nothing Nothing
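-- Illustrative example (assumed field names and query text):
-- mkMultiMatchQuery [FieldName "title", FieldName "body"] (QueryString "haskell")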
data MultiMatchQueryType =
MultiMatchBestFields
| MultiMatchMostFields
| MultiMatchCrossFields
| MultiMatchPhrase
| MultiMatchPhrasePrefix deriving (Eq, Show)
data BoolQuery =
BoolQuery { boolQueryMustMatch :: [Query]
, boolQueryMustNotMatch :: [Query]
, boolQueryShouldMatch :: [Query]
, boolQueryMinimumShouldMatch :: Maybe MinimumMatch
, boolQueryBoost :: Maybe Boost
, boolQueryDisableCoord :: Maybe DisableCoord
} deriving (Eq, Show)
mkBoolQuery :: [Query] -> [Query] -> [Query] -> BoolQuery
mkBoolQuery must mustNot should =
BoolQuery must mustNot should Nothing Nothing Nothing
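-- Illustrative example (assumed terms): require one term and forbid another:
-- mkBoolQuery [TermQuery (Term "user" "bitemyapp") Nothing]
--             [TermQuery (Term "user" "spammer") Nothing] []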
data BoostingQuery =
BoostingQuery { positiveQuery :: Query
, negativeQuery :: Query
, negativeBoost :: Boost } deriving (Eq, Show)
data CommonTermsQuery =
CommonTermsQuery { commonField :: FieldName
, commonQuery :: QueryString
, commonCutoffFrequency :: CutoffFrequency
, commonLowFreqOperator :: BooleanOperator
, commonHighFreqOperator :: BooleanOperator
, commonMinimumShouldMatch :: Maybe CommonMinimumMatch
, commonBoost :: Maybe Boost
, commonAnalyzer :: Maybe Analyzer
, commonDisableCoord :: Maybe DisableCoord
} deriving (Eq, Show)
data CommonMinimumMatch =
CommonMinimumMatchHighLow MinimumMatchHighLow
| CommonMinimumMatch MinimumMatch
deriving (Eq, Show)
data MinimumMatchHighLow =
MinimumMatchHighLow { lowFreq :: MinimumMatch
, highFreq :: MinimumMatch } deriving (Eq, Show)
data Filter = AndFilter [Filter] Cache
| OrFilter [Filter] Cache
| NotFilter Filter Cache
| IdentityFilter
| BoolFilter BoolMatch
| ExistsFilter FieldName -- always cached
| GeoBoundingBoxFilter GeoBoundingBoxConstraint
| GeoDistanceFilter GeoPoint Distance DistanceType OptimizeBbox Cache
| GeoDistanceRangeFilter GeoPoint DistanceRange
| GeoPolygonFilter FieldName [LatLon]
| IdsFilter MappingName [DocId]
| LimitFilter Int
| MissingFilter FieldName Existence NullValue
| PrefixFilter FieldName PrefixValue Cache
| QueryFilter Query Cache
| RangeFilter FieldName RangeValue RangeExecution Cache
| RegexpFilter FieldName Regexp RegexpFlags CacheName Cache CacheKey
| TermFilter Term Cache
deriving (Eq, Show)
data ZeroTermsQuery = ZeroTermsNone
| ZeroTermsAll deriving (Eq, Show)
data RangeExecution = RangeExecutionIndex
| RangeExecutionFielddata deriving (Eq, Show)
newtype Regexp = Regexp Text deriving (Eq, Show)
data RegexpFlags = AllRegexpFlags
| NoRegexpFlags
| SomeRegexpFlags (NonEmpty RegexpFlag) deriving (Eq, Show)
data RegexpFlag = AnyString
| Automaton
| Complement
| Empty
| Intersection
| Interval deriving (Eq, Show)
newtype LessThan = LessThan Double deriving (Eq, Show)
newtype LessThanEq = LessThanEq Double deriving (Eq, Show)
newtype GreaterThan = GreaterThan Double deriving (Eq, Show)
newtype GreaterThanEq = GreaterThanEq Double deriving (Eq, Show)
newtype LessThanD = LessThanD UTCTime deriving (Eq, Show)
newtype LessThanEqD = LessThanEqD UTCTime deriving (Eq, Show)
newtype GreaterThanD = GreaterThanD UTCTime deriving (Eq, Show)
newtype GreaterThanEqD = GreaterThanEqD UTCTime deriving (Eq, Show)
data RangeValue = RangeDateLte LessThanEqD
| RangeDateLt LessThanD
| RangeDateGte GreaterThanEqD
| RangeDateGt GreaterThanD
| RangeDateGtLt GreaterThanD LessThanD
| RangeDateGteLte GreaterThanEqD LessThanEqD
| RangeDateGteLt GreaterThanEqD LessThanD
| RangeDateGtLte GreaterThanD LessThanEqD
| RangeDoubleLte LessThanEq
| RangeDoubleLt LessThan
| RangeDoubleGte GreaterThanEq
| RangeDoubleGt GreaterThan
| RangeDoubleGtLt GreaterThan LessThan
| RangeDoubleGteLte GreaterThanEq LessThanEq
| RangeDoubleGteLt GreaterThanEq LessThan
| RangeDoubleGtLte GreaterThan LessThanEq
deriving (Eq, Show)
rangeValueToPair :: RangeValue -> [Pair]
rangeValueToPair rv = case rv of
RangeDateLte (LessThanEqD t) -> ["lte" .= t]
RangeDateGte (GreaterThanEqD t) -> ["gte" .= t]
RangeDateLt (LessThanD t) -> ["lt" .= t]
RangeDateGt (GreaterThanD t) -> ["gt" .= t]
RangeDateGteLte (GreaterThanEqD l) (LessThanEqD g) -> ["gte" .= l, "lte" .= g]
RangeDateGtLte (GreaterThanD l) (LessThanEqD g) -> ["gt" .= l, "lte" .= g]
RangeDateGteLt (GreaterThanEqD l) (LessThanD g) -> ["gte" .= l, "lt" .= g]
RangeDateGtLt (GreaterThanD l) (LessThanD g) -> ["gt" .= l, "lt" .= g]
RangeDoubleLte (LessThanEq t) -> ["lte" .= t]
RangeDoubleGte (GreaterThanEq t) -> ["gte" .= t]
RangeDoubleLt (LessThan t) -> ["lt" .= t]
RangeDoubleGt (GreaterThan t) -> ["gt" .= t]
RangeDoubleGteLte (GreaterThanEq l) (LessThanEq g) -> ["gte" .= l, "lte" .= g]
RangeDoubleGtLte (GreaterThan l) (LessThanEq g) -> ["gt" .= l, "lte" .= g]
RangeDoubleGteLt (GreaterThanEq l) (LessThan g) -> ["gte" .= l, "lt" .= g]
RangeDoubleGtLt (GreaterThan l) (LessThan g) -> ["gt" .= l, "lt" .= g]
data Term = Term { termField :: Text
, termValue :: Text } deriving (Eq, Show)
data BoolMatch = MustMatch Term Cache
| MustNotMatch Term Cache
| ShouldMatch [Term] Cache deriving (Eq, Show)
-- "memory" or "indexed"
data GeoFilterType = GeoFilterMemory
| GeoFilterIndexed deriving (Eq, Show)
data LatLon = LatLon { lat :: Double
, lon :: Double } deriving (Eq, Show)
data GeoBoundingBox =
GeoBoundingBox { topLeft :: LatLon
, bottomRight :: LatLon } deriving (Eq, Show)
data GeoBoundingBoxConstraint =
GeoBoundingBoxConstraint { geoBBField :: FieldName
, constraintBox :: GeoBoundingBox
, bbConstraintcache :: Cache
, geoType :: GeoFilterType
} deriving (Eq, Show)
data GeoPoint =
GeoPoint { geoField :: FieldName
, latLon :: LatLon} deriving (Eq, Show)
data DistanceUnit = Miles
| Yards
| Feet
| Inches
| Kilometers
| Meters
| Centimeters
| Millimeters
| NauticalMiles deriving (Eq, Show)
data DistanceType = Arc
| SloppyArc -- doesn't exist <1.0
| Plane deriving (Eq, Show)
data OptimizeBbox = OptimizeGeoFilterType GeoFilterType
| NoOptimizeBbox deriving (Eq, Show)
data Distance =
Distance { coefficient :: Double
, unit :: DistanceUnit } deriving (Eq, Show)
data DistanceRange =
DistanceRange { distanceFrom :: Distance
, distanceTo :: Distance } deriving (Eq, Show)
data SearchResult a =
SearchResult { took :: Int
, timedOut :: Bool
, shards :: ShardResult
, searchHits :: SearchHits a
, aggregations :: Maybe AggregationResults } deriving (Eq, Show)
type Score = Maybe Double
data SearchHits a =
SearchHits { hitsTotal :: Int
, maxScore :: Score
, hits :: [Hit a] } deriving (Eq, Show)
instance Monoid (SearchHits a) where
mempty = SearchHits 0 Nothing mempty
mappend (SearchHits ta ma ha) (SearchHits tb mb hb) =
SearchHits (ta + tb) (max ma mb) (ha <> hb)
data Hit a =
Hit { hitIndex :: IndexName
, hitType :: MappingName
, hitDocId :: DocId
, hitScore :: Score
, hitSource :: a
, hitHighlight :: Maybe HitHighlight } deriving (Eq, Show)
data ShardResult =
ShardResult { shardTotal :: Int
, shardsSuccessful :: Int
, shardsFailed :: Int } deriving (Eq, Show, Generic)
type HitHighlight = M.Map Text [Text]
showText :: Show a => a -> Text
showText = T.pack . show
type Aggregations = M.Map Text Aggregation
emptyAggregations :: Aggregations
emptyAggregations = M.empty
mkAggregations :: Text -> Aggregation -> Aggregations
mkAggregations name aggregation = M.insert name aggregation emptyAggregations
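-- Illustrative example (assumed names):
-- mkAggregations "users" (TermsAgg (mkTermsAggregation "user"))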
data TermOrder = TermOrder{ termSortField :: Text
, termSortOrder :: SortOrder } deriving (Eq, Show)
data TermInclusion = TermInclusion Text
| TermPattern Text Text deriving (Eq, Show)
data CollectionMode = BreadthFirst
| DepthFirst deriving (Eq, Show)
data ExecutionHint = Ordinals
| GlobalOrdinals
| GlobalOrdinalsHash
| GlobalOrdinalsLowCardinality
| Map deriving (Eq, Show)
data TimeInterval = Weeks
| Days
| Hours
| Minutes
| Seconds deriving (Eq)
data Interval = Year
| Quarter
| Month
| Week
| Day
| Hour
| Minute
| Second
| FractionalInterval Float TimeInterval deriving (Eq, Show)
data Aggregation = TermsAgg TermsAggregation
| DateHistogramAgg DateHistogramAggregation deriving (Eq, Show)
data TermsAggregation = TermsAggregation { term :: Either Text Text
, termInclude :: Maybe TermInclusion
, termExclude :: Maybe TermInclusion
, termOrder :: Maybe TermOrder
, termMinDocCount :: Maybe Int
, termSize :: Maybe Int
, termShardSize :: Maybe Int
, termCollectMode :: Maybe CollectionMode
, termExecutionHint :: Maybe ExecutionHint
, termAggs :: Maybe Aggregations
} deriving (Eq, Show)
data DateHistogramAggregation = DateHistogramAggregation { dateField :: FieldName
, dateInterval :: Interval
, dateFormat :: Maybe Text
-- pre and post deprecated in 1.5
, datePreZone :: Maybe Text
, datePostZone :: Maybe Text
, datePreOffset :: Maybe Text
, datePostOffset :: Maybe Text
, dateAggs :: Maybe Aggregations
} deriving (Eq, Show)
mkTermsAggregation :: Text -> TermsAggregation
mkTermsAggregation t = TermsAggregation (Left t) Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
mkTermsScriptAggregation :: Text -> TermsAggregation
mkTermsScriptAggregation t = TermsAggregation (Right t) Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
mkDateHistogram :: FieldName -> Interval -> DateHistogramAggregation
mkDateHistogram t i = DateHistogramAggregation t i Nothing Nothing Nothing Nothing Nothing Nothing
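-- Illustrative example (assumed field name):
-- mkDateHistogram (FieldName "created_at") Week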
instance ToJSON TermOrder where
toJSON (TermOrder termSortField termSortOrder) = object [termSortField .= termSortOrder]
instance ToJSON TermInclusion where
toJSON (TermInclusion x) = toJSON x
toJSON (TermPattern pattern flags) = omitNulls [ "pattern" .= pattern,
"flags" .= flags]
instance ToJSON CollectionMode where
toJSON BreadthFirst = "breadth_first"
toJSON DepthFirst = "depth_first"
instance ToJSON ExecutionHint where
toJSON Ordinals = "ordinals"
toJSON GlobalOrdinals = "global_ordinals"
toJSON GlobalOrdinalsHash = "global_ordinals_hash"
toJSON GlobalOrdinalsLowCardinality = "global_ordinals_low_cardinality"
toJSON Map = "map"
instance ToJSON Interval where
toJSON Year = "year"
toJSON Quarter = "quarter"
toJSON Month = "month"
toJSON Week = "week"
toJSON Day = "day"
toJSON Hour = "hour"
toJSON Minute = "minute"
toJSON Second = "second"
toJSON (FractionalInterval fraction interval) = toJSON $ show fraction ++ show interval
instance Show TimeInterval where
show Weeks = "w"
show Days = "d"
show Hours = "h"
show Minutes = "m"
show Seconds = "s"
instance ToJSON Aggregation where
toJSON (TermsAgg (TermsAggregation term include exclude order minDocCount size shardSize collectMode executionHint termAggs)) =
omitNulls ["terms" .= omitNulls [ toJSON' term,
"include" .= include,
"exclude" .= exclude,
"order" .= order,
"min_doc_count" .= minDocCount,
"size" .= size,
"shard_size" .= shardSize,
"collect_mode" .= collectMode,
"execution_hint" .= executionHint
],
"aggs" .= termAggs ]
where
toJSON' x = case x of { Left y -> "field" .= y; Right y -> "script" .= y }
toJSON (DateHistogramAgg (DateHistogramAggregation field interval format preZone postZone preOffset postOffset dateHistoAggs)) =
omitNulls ["date_histogram" .= omitNulls [ "field" .= field,
"interval" .= interval,
"format" .= format,
"pre_zone" .= preZone,
"post_zone" .= postZone,
"pre_offset" .= preOffset,
"post_offset" .= postOffset
],
"aggs" .= dateHistoAggs ]
type AggregationResults = M.Map Text Value
class BucketAggregation a where
key :: a -> Text
docCount :: a -> Int
aggs :: a -> Maybe AggregationResults
data Bucket a = Bucket { buckets :: [a]} deriving (Show)
data TermsResult = TermsResult { termKey :: Text
, termsDocCount :: Int
, termsAggs :: Maybe AggregationResults } deriving (Show)
data DateHistogramResult = DateHistogramResult { dateKey :: Int
, dateKeyStr :: Maybe Text
, dateDocCount :: Int
, dateHistogramAggs :: Maybe AggregationResults } deriving (Show)
toTerms :: Text -> AggregationResults -> Maybe (Bucket TermsResult)
toTerms t a = M.lookup t a >>= deserialize
where deserialize = parseMaybe parseJSON
toDateHistogram :: Text -> AggregationResults -> Maybe (Bucket DateHistogramResult)
toDateHistogram t a = M.lookup t a >>= deserialize
where deserialize = parseMaybe parseJSON
instance BucketAggregation TermsResult where
key = termKey
docCount = termsDocCount
aggs = termsAggs
instance BucketAggregation DateHistogramResult where
key = showText . dateKey
docCount = dateDocCount
aggs = dateHistogramAggs
instance (FromJSON a, BucketAggregation a) => FromJSON (Bucket a) where
parseJSON (Object v) = Bucket <$>
v .: "buckets"
parseJSON _ = mempty
instance FromJSON TermsResult where
parseJSON (Object v) = TermsResult <$>
v .: "key" <*>
v .: "doc_count" <*>
v .:? "aggregations"
parseJSON _ = mempty
instance FromJSON DateHistogramResult where
parseJSON (Object v) = DateHistogramResult <$>
v .: "key" <*>
v .:? "key_as_string" <*>
v .: "doc_count" <*>
v .:? "aggregations"
parseJSON _ = mempty
instance Monoid Filter where
mempty = IdentityFilter
mappend a b = AndFilter [a, b] defaultCache
instance Seminearring Filter where
a <||> b = OrFilter [a, b] defaultCache
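-- Illustrative behaviour of the instances above (not part of the original module):
-- mappend (ExistsFilter (FieldName "user")) IdentityFilter
--   == AndFilter [ExistsFilter (FieldName "user"), IdentityFilter] False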
instance ToJSON Filter where
toJSON (AndFilter filters cache) =
object ["and" .=
object [ "filters" .= fmap toJSON filters
, "_cache" .= cache]]
toJSON (OrFilter filters cache) =
object ["or" .=
object [ "filters" .= fmap toJSON filters
, "_cache" .= cache]]
toJSON (NotFilter notFilter cache) =
object ["not" .=
object ["filter" .= notFilter
, "_cache" .= cache]]
toJSON (IdentityFilter) =
object ["match_all" .= object []]
toJSON (TermFilter (Term termFilterField termFilterValue) cache) =
object ["term" .= object base]
where base = [termFilterField .= termFilterValue,
"_cache" .= cache]
toJSON (ExistsFilter (FieldName fieldName)) =
object ["exists" .= object
["field" .= fieldName]]
toJSON (BoolFilter boolMatch) =
object ["bool" .= boolMatch]
toJSON (GeoBoundingBoxFilter bbConstraint) =
object ["geo_bounding_box" .= bbConstraint]
toJSON (GeoDistanceFilter (GeoPoint (FieldName distanceGeoField) geoDistLatLon)
distance distanceType optimizeBbox cache) =
object ["geo_distance" .=
object ["distance" .= distance
, "distance_type" .= distanceType
, "optimize_bbox" .= optimizeBbox
, distanceGeoField .= geoDistLatLon
, "_cache" .= cache]]
toJSON (GeoDistanceRangeFilter (GeoPoint (FieldName gddrField) drLatLon)
(DistanceRange geoDistRangeDistFrom drDistanceTo)) =
object ["geo_distance_range" .=
object ["from" .= geoDistRangeDistFrom
, "to" .= drDistanceTo
, gddrField .= drLatLon]]
toJSON (GeoPolygonFilter (FieldName geoPolygonFilterField) latLons) =
object ["geo_polygon" .=
object [geoPolygonFilterField .=
object ["points" .= fmap toJSON latLons]]]
toJSON (IdsFilter (MappingName mappingName) values) =
object ["ids" .=
object ["type" .= mappingName
, "values" .= fmap unpackId values]]
toJSON (LimitFilter limit) =
object ["limit" .= object ["value" .= limit]]
toJSON (MissingFilter (FieldName fieldName) (Existence existence) (NullValue nullValue)) =
object ["missing" .=
object ["field" .= fieldName
, "existence" .= existence
, "null_value" .= nullValue]]
toJSON (PrefixFilter (FieldName fieldName) fieldValue cache) =
object ["prefix" .=
object [fieldName .= fieldValue
, "_cache" .= cache]]
toJSON (QueryFilter query False) =
object ["query" .= toJSON query ]
toJSON (QueryFilter query True) =
object ["fquery" .=
object [ "query" .= toJSON query
, "_cache" .= True ]]
toJSON (RangeFilter (FieldName fieldName) rangeValue rangeExecution cache) =
object ["range" .=
object [ fieldName .= object (rangeValueToPair rangeValue)
, "execution" .= rangeExecution
, "_cache" .= cache]]
toJSON (RegexpFilter (FieldName fieldName)
(Regexp regexText) flags (CacheName cacheName) cache (CacheKey cacheKey)) =
object ["regexp" .=
object [fieldName .=
object ["value" .= regexText
, "flags" .= flags]
, "_name" .= cacheName
, "_cache" .= cache
, "_cache_key" .= cacheKey]]
instance ToJSON GeoPoint where
toJSON (GeoPoint (FieldName geoPointField) geoPointLatLon) =
object [ geoPointField .= geoPointLatLon ]
instance ToJSON Query where
toJSON (TermQuery (Term termQueryField termQueryValue) boost) =
object [ "term" .=
object [termQueryField .= object merged]]
where
base = [ "value" .= termQueryValue ]
boosted = maybe [] (return . ("boost" .=)) boost
merged = mappend base boosted
toJSON (TermsQuery terms) =
object [ "terms" .= object conjoined ]
where conjoined = [ getTermsField terms .=
fmap (toJSON . getTermValue) (toList terms)]
getTermsField ((Term f _ ) :| _) = f
getTermValue (Term _ v) = v
toJSON (IdsQuery idsQueryMappingName docIds) =
object [ "ids" .= object conjoined ]
where conjoined = [ "type" .= idsQueryMappingName
, "values" .= fmap toJSON docIds ]
toJSON (QueryQueryStringQuery qQueryStringQuery) =
object [ "query_string" .= qQueryStringQuery ]
toJSON (QueryMatchQuery matchQuery) =
object [ "match" .= matchQuery ]
toJSON (QueryMultiMatchQuery multiMatchQuery) =
toJSON multiMatchQuery
toJSON (QueryBoolQuery boolQuery) =
object [ "bool" .= boolQuery ]
toJSON (QueryBoostingQuery boostingQuery) =
object [ "boosting" .= boostingQuery ]
toJSON (QueryCommonTermsQuery commonTermsQuery) =
object [ "common" .= commonTermsQuery ]
toJSON (ConstantScoreFilter csFilter boost) =
object [ "constant_score" .= csFilter
, "boost" .= boost]
toJSON (ConstantScoreQuery query boost) =
object [ "constant_score" .= query
, "boost" .= boost]
toJSON (QueryDisMaxQuery disMaxQuery) =
object [ "dis_max" .= disMaxQuery ]
toJSON (QueryFilteredQuery qFilteredQuery) =
object [ "filtered" .= qFilteredQuery ]
toJSON (QueryFuzzyLikeThisQuery fuzzyQuery) =
object [ "fuzzy_like_this" .= fuzzyQuery ]
toJSON (QueryFuzzyLikeFieldQuery fuzzyFieldQuery) =
object [ "fuzzy_like_this_field" .= fuzzyFieldQuery ]
toJSON (QueryFuzzyQuery fuzzyQuery) =
object [ "fuzzy" .= fuzzyQuery ]
toJSON (QueryHasChildQuery childQuery) =
object [ "has_child" .= childQuery ]
toJSON (QueryHasParentQuery parentQuery) =
object [ "has_parent" .= parentQuery ]
toJSON (QueryIndicesQuery qIndicesQuery) =
object [ "indices" .= qIndicesQuery ]
toJSON (MatchAllQuery boost) =
object [ "match_all" .= omitNulls [ "boost" .= boost ] ]
toJSON (QueryMoreLikeThisQuery query) =
object [ "more_like_this" .= query ]
toJSON (QueryMoreLikeThisFieldQuery query) =
object [ "more_like_this_field" .= query ]
toJSON (QueryNestedQuery query) =
object [ "nested" .= query ]
toJSON (QueryPrefixQuery query) =
object [ "prefix" .= query ]
toJSON (QueryRangeQuery query) =
object [ "range" .= query ]
toJSON (QueryRegexpQuery query) =
object [ "regexp" .= query ]
toJSON (QuerySimpleQueryStringQuery query) =
object [ "simple_query_string" .= query ]
omitNulls :: [(Text, Value)] -> Value
omitNulls = object . filter notNull where
notNull (_, Null) = False
notNull (_, Array a) = (not . V.null) a
notNull _ = True
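-- A hedged usage sketch of 'omitNulls' (the field names below are purely
-- illustrative, and the printed form of the result depends on the aeson
-- version in use):
--
-- >>> omitNulls [ "query" .= String "foo", "boost" .= Null ]
-- Object (fromList [("query",String "foo")])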
instance ToJSON SimpleQueryStringQuery where
toJSON SimpleQueryStringQuery {..} =
omitNulls (base ++ maybeAdd)
where base = [ "query" .= simpleQueryStringQuery ]
maybeAdd = [ "fields" .= simpleQueryStringField
, "default_operator" .= simpleQueryStringOperator
, "analyzer" .= simpleQueryStringAnalyzer
, "flags" .= simpleQueryStringFlags
, "lowercase_expanded_terms" .= simpleQueryStringLowercaseExpanded
, "locale" .= simpleQueryStringLocale ]
instance ToJSON FieldOrFields where
toJSON (FofField fieldName) =
toJSON fieldName
toJSON (FofFields fieldNames) =
toJSON fieldNames
instance ToJSON SimpleQueryFlag where
toJSON SimpleQueryAll = "ALL"
toJSON SimpleQueryNone = "NONE"
toJSON SimpleQueryAnd = "AND"
toJSON SimpleQueryOr = "OR"
toJSON SimpleQueryPrefix = "PREFIX"
toJSON SimpleQueryPhrase = "PHRASE"
toJSON SimpleQueryPrecedence = "PRECEDENCE"
toJSON SimpleQueryEscape = "ESCAPE"
toJSON SimpleQueryWhitespace = "WHITESPACE"
toJSON SimpleQueryFuzzy = "FUZZY"
toJSON SimpleQueryNear = "NEAR"
toJSON SimpleQuerySlop = "SLOP"
instance ToJSON RegexpQuery where
toJSON (RegexpQuery (FieldName rqQueryField)
(Regexp regexpQueryQuery) rqQueryFlags
rqQueryBoost) =
object [ rqQueryField .= omitNulls base ]
where base = [ "value" .= regexpQueryQuery
, "flags" .= rqQueryFlags
, "boost" .= rqQueryBoost ]
instance ToJSON QueryStringQuery where
toJSON (QueryStringQuery qsQueryString
qsDefaultField qsOperator
qsAnalyzer qsAllowWildcard
qsLowercaseExpanded qsEnablePositionIncrements
qsFuzzyMaxExpansions qsFuzziness
qsFuzzyPrefixLength qsPhraseSlop
qsBoost qsAnalyzeWildcard
qsGeneratePhraseQueries qsMinimumShouldMatch
qsLenient qsLocale) =
omitNulls base
where
base = [ "query" .= qsQueryString
, "default_field" .= qsDefaultField
, "default_operator" .= qsOperator
, "analyzer" .= qsAnalyzer
, "allow_leading_wildcard" .= qsAllowWildcard
, "lowercase_expanded_terms" .= qsLowercaseExpanded
, "enable_position_increments" .= qsEnablePositionIncrements
, "fuzzy_max_expansions" .= qsFuzzyMaxExpansions
, "fuzziness" .= qsFuzziness
, "fuzzy_prefix_length" .= qsFuzzyPrefixLength
, "phrase_slop" .= qsPhraseSlop
, "boost" .= qsBoost
, "analyze_wildcard" .= qsAnalyzeWildcard
, "auto_generate_phrase_queries" .= qsGeneratePhraseQueries
, "minimum_should_match" .= qsMinimumShouldMatch
, "lenient" .= qsLenient
, "locale" .= qsLocale ]
instance ToJSON RangeQuery where
toJSON (RangeQuery (FieldName fieldName) range boost) =
object [ fieldName .= conjoined ]
where conjoined = [ "boost" .= boost ] ++ (rangeValueToPair range)
instance ToJSON PrefixQuery where
toJSON (PrefixQuery (FieldName fieldName) queryValue boost) =
object [ fieldName .= omitNulls base ]
where base = [ "value" .= queryValue
, "boost" .= boost ]
instance ToJSON NestedQuery where
toJSON (NestedQuery nqPath nqScoreType nqQuery) =
object [ "path" .= nqPath
, "score_mode" .= nqScoreType
, "query" .= nqQuery ]
instance ToJSON MoreLikeThisFieldQuery where
toJSON (MoreLikeThisFieldQuery text (FieldName fieldName)
percent mtf mqt stopwords mindf maxdf
minwl maxwl boostTerms boost analyzer) =
object [ fieldName .= omitNulls base ]
where base = [ "like_text" .= text
, "percent_terms_to_match" .= percent
, "min_term_freq" .= mtf
, "max_query_terms" .= mqt
, "stop_words" .= stopwords
, "min_doc_freq" .= mindf
, "max_doc_freq" .= maxdf
, "min_word_length" .= minwl
, "max_word_length" .= maxwl
, "boost_terms" .= boostTerms
, "boost" .= boost
, "analyzer" .= analyzer ]
instance ToJSON MoreLikeThisQuery where
toJSON (MoreLikeThisQuery text fields percent
mtf mqt stopwords mindf maxdf
minwl maxwl boostTerms boost analyzer) =
omitNulls base
where base = [ "like_text" .= text
, "fields" .= fields
, "percent_terms_to_match" .= percent
, "min_term_freq" .= mtf
, "max_query_terms" .= mqt
, "stop_words" .= stopwords
, "min_doc_freq" .= mindf
, "max_doc_freq" .= maxdf
, "min_word_length" .= minwl
, "max_word_length" .= maxwl
, "boost_terms" .= boostTerms
, "boost" .= boost
, "analyzer" .= analyzer ]
instance ToJSON IndicesQuery where
toJSON (IndicesQuery indices query noMatch) =
omitNulls [ "indices" .= indices
, "no_match_query" .= noMatch
, "query" .= query ]
instance ToJSON HasParentQuery where
toJSON (HasParentQuery queryType query scoreType) =
omitNulls [ "parent_type" .= queryType
, "score_type" .= scoreType
, "query" .= query ]
instance ToJSON HasChildQuery where
toJSON (HasChildQuery queryType query scoreType) =
omitNulls [ "query" .= query
, "score_type" .= scoreType
, "type" .= queryType ]
instance ToJSON FuzzyQuery where
toJSON (FuzzyQuery (FieldName fieldName) queryText
prefixLength maxEx fuzziness boost) =
object [ fieldName .= omitNulls base ]
where base = [ "value" .= queryText
, "fuzziness" .= fuzziness
, "prefix_length" .= prefixLength
, "boost" .= boost
, "max_expansions" .= maxEx ]
instance ToJSON FuzzyLikeFieldQuery where
toJSON (FuzzyLikeFieldQuery (FieldName fieldName)
fieldText maxTerms ignoreFreq fuzziness prefixLength
boost analyzer) =
object [ fieldName .=
omitNulls [ "like_text" .= fieldText
, "max_query_terms" .= maxTerms
, "ignore_tf" .= ignoreFreq
, "fuzziness" .= fuzziness
, "prefix_length" .= prefixLength
, "analyzer" .= analyzer
, "boost" .= boost ]]
instance ToJSON FuzzyLikeThisQuery where
toJSON (FuzzyLikeThisQuery fields text maxTerms
ignoreFreq fuzziness prefixLength boost analyzer) =
omitNulls base
where base = [ "fields" .= fields
, "like_text" .= text
, "max_query_terms" .= maxTerms
, "ignore_tf" .= ignoreFreq
, "fuzziness" .= fuzziness
, "prefix_length" .= prefixLength
, "analyzer" .= analyzer
, "boost" .= boost ]
instance ToJSON FilteredQuery where
toJSON (FilteredQuery query fFilter) =
object [ "query" .= query
, "filter" .= fFilter ]
instance ToJSON DisMaxQuery where
toJSON (DisMaxQuery queries tiebreaker boost) =
omitNulls base
where base = [ "queries" .= queries
, "boost" .= boost
, "tie_breaker" .= tiebreaker ]
instance ToJSON CommonTermsQuery where
toJSON (CommonTermsQuery (FieldName fieldName)
(QueryString query) cf lfo hfo msm
boost analyzer disableCoord) =
object [fieldName .= omitNulls base ]
where base = [ "query" .= query
, "cutoff_frequency" .= cf
, "low_freq_operator" .= lfo
, "minimum_should_match" .= msm
, "boost" .= boost
, "analyzer" .= analyzer
, "disable_coord" .= disableCoord
, "high_freq_operator" .= hfo ]
instance ToJSON CommonMinimumMatch where
toJSON (CommonMinimumMatch mm) = toJSON mm
toJSON (CommonMinimumMatchHighLow (MinimumMatchHighLow lowF highF)) =
object [ "low_freq" .= lowF
, "high_freq" .= highF ]
instance ToJSON BoostingQuery where
toJSON (BoostingQuery bqPositiveQuery bqNegativeQuery bqNegativeBoost) =
object [ "positive" .= bqPositiveQuery
, "negative" .= bqNegativeQuery
, "negative_boost" .= bqNegativeBoost ]
instance ToJSON BoolQuery where
toJSON (BoolQuery mustM notM shouldM bqMin boost disableCoord) =
omitNulls base
where base = [ "must" .= mustM
, "must_not" .= notM
, "should" .= shouldM
, "minimum_should_match" .= bqMin
, "boost" .= boost
, "disable_coord" .= disableCoord ]
instance ToJSON MatchQuery where
toJSON (MatchQuery (FieldName fieldName)
(QueryString mqQueryString) booleanOperator
zeroTermsQuery cutoffFrequency matchQueryType
analyzer maxExpansions lenient) =
object [ fieldName .= omitNulls base ]
where base = [ "query" .= mqQueryString
, "operator" .= booleanOperator
, "zero_terms_query" .= zeroTermsQuery
, "cutoff_frequency" .= cutoffFrequency
, "type" .= matchQueryType
, "analyzer" .= analyzer
, "max_expansions" .= maxExpansions
, "lenient" .= lenient ]
instance ToJSON MultiMatchQuery where
toJSON (MultiMatchQuery fields (QueryString query) boolOp
ztQ tb mmqt cf analyzer maxEx lenient) =
object ["multi_match" .= omitNulls base]
where base = [ "fields" .= fmap toJSON fields
, "query" .= query
, "operator" .= boolOp
, "zero_terms_query" .= ztQ
, "tiebreaker" .= tb
, "type" .= mmqt
, "cutoff_frequency" .= cf
, "analyzer" .= analyzer
, "max_expansions" .= maxEx
, "lenient" .= lenient ]
instance ToJSON MultiMatchQueryType where
toJSON MultiMatchBestFields = "best_fields"
toJSON MultiMatchMostFields = "most_fields"
toJSON MultiMatchCrossFields = "cross_fields"
toJSON MultiMatchPhrase = "phrase"
toJSON MultiMatchPhrasePrefix = "phrase_prefix"
instance ToJSON BooleanOperator where
toJSON And = String "and"
toJSON Or = String "or"
instance ToJSON ZeroTermsQuery where
toJSON ZeroTermsNone = String "none"
toJSON ZeroTermsAll = String "all"
instance ToJSON MatchQueryType where
toJSON MatchPhrase = "phrase"
toJSON MatchPhrasePrefix = "phrase_prefix"
instance ToJSON FieldName where
toJSON (FieldName fieldName) = String fieldName
instance ToJSON ReplicaCount
instance ToJSON ShardCount
instance ToJSON CutoffFrequency
instance ToJSON Analyzer
instance ToJSON MaxExpansions
instance ToJSON Lenient
instance ToJSON Boost
instance ToJSON Version
instance ToJSON Tiebreaker
instance ToJSON MinimumMatch
instance ToJSON DisableCoord
instance ToJSON PrefixLength
instance ToJSON Fuzziness
instance ToJSON IgnoreTermFrequency
instance ToJSON MaxQueryTerms
instance ToJSON TypeName
instance ToJSON IndexName
instance ToJSON BoostTerms
instance ToJSON MaxWordLength
instance ToJSON MinWordLength
instance ToJSON MaxDocFrequency
instance ToJSON MinDocFrequency
instance ToJSON PhraseSlop
instance ToJSON StopWord
instance ToJSON QueryPath
instance ToJSON MinimumTermFrequency
instance ToJSON PercentMatch
instance ToJSON MappingName
instance ToJSON DocId
instance ToJSON QueryString
instance ToJSON AllowLeadingWildcard
instance ToJSON LowercaseExpanded
instance ToJSON AnalyzeWildcard
instance ToJSON GeneratePhraseQueries
instance ToJSON Locale
instance ToJSON EnablePositionIncrements
instance FromJSON Version
instance FromJSON IndexName
instance FromJSON MappingName
instance FromJSON DocId
instance FromJSON Status where
parseJSON (Object v) = Status <$>
v .:? "ok" <*>
v .: "status" <*>
v .: "name" <*>
v .: "version" <*>
v .: "tagline"
parseJSON _ = empty
instance ToJSON IndexSettings where
toJSON (IndexSettings s r) = object ["settings" .= object ["shards" .= s, "replicas" .= r]]
instance (FromJSON a) => FromJSON (EsResult a) where
parseJSON (Object v) = EsResult <$>
v .: "_index" <*>
v .: "_type" <*>
v .: "_id" <*>
v .: "_version" <*>
v .:? "found" <*>
v .: "_source"
parseJSON _ = empty
instance ToJSON Search where
toJSON (Search query sFilter sort searchAggs highlight sTrackSortScores sFrom sSize) =
omitNulls [ "query" .= query
, "filter" .= sFilter
, "sort" .= sort
, "aggregations" .= searchAggs
, "highlight" .= highlight
, "from" .= sFrom
, "size" .= sSize
, "track_scores" .= sTrackSortScores]
instance ToJSON FieldHighlight where
toJSON (FieldHighlight (FieldName fName) (Just fSettings)) =
object [ fName .= fSettings ]
toJSON (FieldHighlight (FieldName fName) Nothing) =
object [ fName .= emptyObject ]
instance ToJSON Highlights where
toJSON (Highlights global fields) =
omitNulls (("fields" .= fields)
: highlightSettingsPairs global)
instance ToJSON HighlightSettings where
toJSON hs = omitNulls (highlightSettingsPairs (Just hs))
highlightSettingsPairs :: Maybe HighlightSettings -> [Pair]
highlightSettingsPairs Nothing = []
highlightSettingsPairs (Just (Plain plh)) = plainHighPairs (Just plh)
highlightSettingsPairs (Just (Postings ph)) = postHighPairs (Just ph)
highlightSettingsPairs (Just (FastVector fvh)) = fastVectorHighPairs (Just fvh)
plainHighPairs :: Maybe PlainHighlight -> [Pair]
plainHighPairs Nothing = []
plainHighPairs (Just (PlainHighlight plCom plNonPost)) =
[ "type" .= String "plain"]
++ commonHighlightPairs plCom
++ nonPostingsToPairs plNonPost
postHighPairs :: Maybe PostingsHighlight -> [Pair]
postHighPairs Nothing = []
postHighPairs (Just (PostingsHighlight pCom)) =
("type" .= String "postings")
: commonHighlightPairs pCom
fastVectorHighPairs :: Maybe FastVectorHighlight -> [Pair]
fastVectorHighPairs Nothing = []
fastVectorHighPairs (Just
(FastVectorHighlight fvCom fvNonPostSettings fvBoundChars
fvBoundMaxScan fvFragOff fvMatchedFields
fvPhraseLim)) =
[ "type" .= String "fvh"
, "boundary_chars" .= fvBoundChars
, "boundary_max_scan" .= fvBoundMaxScan
, "fragment_offset" .= fvFragOff
, "matched_fields" .= fvMatchedFields
, "phraseLimit" .= fvPhraseLim]
++ commonHighlightPairs fvCom
++ nonPostingsToPairs fvNonPostSettings
commonHighlightPairs :: Maybe CommonHighlight -> [Pair]
commonHighlightPairs Nothing = []
commonHighlightPairs (Just (CommonHighlight chScore chForceSource chTag chEncoder
chNoMatchSize chHighlightQuery
chRequireFieldMatch)) =
[ "order" .= chScore
, "force_source" .= chForceSource
, "encoder" .= chEncoder
, "no_match_size" .= chNoMatchSize
, "highlight_query" .= chHighlightQuery
, "require_fieldMatch" .= chRequireFieldMatch]
++ highlightTagToPairs chTag
nonPostingsToPairs :: Maybe NonPostings -> [Pair]
nonPostingsToPairs Nothing = []
nonPostingsToPairs (Just (NonPostings npFragSize npNumOfFrags)) =
[ "fragment_size" .= npFragSize
, "number_of_fragments" .= npNumOfFrags]
instance ToJSON HighlightEncoder where
toJSON DefaultEncoder = String "default"
toJSON HTMLEncoder = String "html"
highlightTagToPairs :: Maybe HighlightTag -> [Pair]
highlightTagToPairs (Just (TagSchema _)) = [ "scheme" .= String "default"]
highlightTagToPairs (Just (CustomTags (pre, post))) = [ "pre_tags" .= pre
, "post_tags" .= post]
highlightTagToPairs Nothing = []
instance ToJSON SortSpec where
toJSON (DefaultSortSpec
(DefaultSort (FieldName dsSortFieldName) dsSortOrder dsIgnoreUnmapped
dsSortMode dsMissingSort dsNestedFilter)) =
object [dsSortFieldName .= omitNulls base] where
base = [ "order" .= dsSortOrder
, "ignore_unmapped" .= dsIgnoreUnmapped
, "mode" .= dsSortMode
, "missing" .= dsMissingSort
, "nested_filter" .= dsNestedFilter ]
toJSON (GeoDistanceSortSpec gdsSortOrder (GeoPoint (FieldName field) gdsLatLon) units) =
object [ "unit" .= units
, field .= gdsLatLon
, "order" .= gdsSortOrder ]
instance ToJSON SortOrder where
toJSON Ascending = String "asc"
toJSON Descending = String "desc"
instance ToJSON SortMode where
toJSON SortMin = String "min"
toJSON SortMax = String "max"
toJSON SortSum = String "sum"
toJSON SortAvg = String "avg"
instance ToJSON Missing where
toJSON LastMissing = String "_last"
toJSON FirstMissing = String "_first"
toJSON (CustomMissing txt) = String txt
instance ToJSON ScoreType where
toJSON ScoreTypeMax = "max"
toJSON ScoreTypeAvg = "avg"
toJSON ScoreTypeSum = "sum"
toJSON ScoreTypeNone = "none"
instance ToJSON Distance where
toJSON (Distance dCoefficient dUnit) =
String boltedTogether where
coefText = showText dCoefficient
(String unitText) = toJSON dUnit
boltedTogether = mappend coefText unitText
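-- For example, a rough sketch of the encoding (the exact text produced for
-- the coefficient depends on its Show instance):
--
-- >>> toJSON (Distance 10.5 Kilometers)
-- String "10.5km"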
instance ToJSON DistanceUnit where
toJSON Miles = String "mi"
toJSON Yards = String "yd"
toJSON Feet = String "ft"
toJSON Inches = String "in"
toJSON Kilometers = String "km"
toJSON Meters = String "m"
toJSON Centimeters = String "cm"
toJSON Millimeters = String "mm"
toJSON NauticalMiles = String "nmi"
instance ToJSON DistanceType where
toJSON Arc = String "arc"
toJSON SloppyArc = String "sloppy_arc"
toJSON Plane = String "plane"
instance ToJSON OptimizeBbox where
toJSON NoOptimizeBbox = String "none"
toJSON (OptimizeGeoFilterType gft) = toJSON gft
instance ToJSON GeoBoundingBoxConstraint where
toJSON (GeoBoundingBoxConstraint
(FieldName gbbcGeoBBField) gbbcConstraintBox cache type') =
object [gbbcGeoBBField .= gbbcConstraintBox
, "_cache" .= cache
, "type" .= type']
instance ToJSON GeoFilterType where
toJSON GeoFilterMemory = String "memory"
toJSON GeoFilterIndexed = String "indexed"
instance ToJSON GeoBoundingBox where
toJSON (GeoBoundingBox gbbTopLeft gbbBottomRight) =
object ["top_left" .= gbbTopLeft
, "bottom_right" .= gbbBottomRight]
instance ToJSON LatLon where
toJSON (LatLon lLat lLon) =
object ["lat" .= lLat
, "lon" .= lLon]
-- index for smaller ranges, fielddata for longer ranges
instance ToJSON RangeExecution where
toJSON RangeExecutionIndex = "index"
toJSON RangeExecutionFielddata = "fielddata"
instance ToJSON RegexpFlags where
toJSON AllRegexpFlags = String "ALL"
toJSON NoRegexpFlags = String "NONE"
toJSON (SomeRegexpFlags (h :| fs)) = String $ T.intercalate "|" flagStrs
where flagStrs = map flagStr . nub $ h:fs
flagStr AnyString = "ANYSTRING"
flagStr Automaton = "AUTOMATON"
flagStr Complement = "COMPLEMENT"
flagStr Empty = "EMPTY"
flagStr Intersection = "INTERSECTION"
flagStr Interval = "INTERVAL"
instance ToJSON Term where
toJSON (Term field value) = object ["term" .= object
[field .= value]]
instance ToJSON BoolMatch where
toJSON (MustMatch term cache) = object ["must" .= term,
"_cache" .= cache]
toJSON (MustNotMatch term cache) = object ["must_not" .= term,
"_cache" .= cache]
toJSON (ShouldMatch terms cache) = object ["should" .= fmap toJSON terms,
"_cache" .= cache]
instance (FromJSON a) => FromJSON (SearchResult a) where
parseJSON (Object v) = SearchResult <$>
v .: "took" <*>
v .: "timed_out" <*>
v .: "_shards" <*>
v .: "hits" <*>
v .:? "aggregations"
parseJSON _ = empty
instance (FromJSON a) => FromJSON (SearchHits a) where
parseJSON (Object v) = SearchHits <$>
v .: "total" <*>
v .: "max_score" <*>
v .: "hits"
parseJSON _ = empty
instance (FromJSON a) => FromJSON (Hit a) where
parseJSON (Object v) = Hit <$>
v .: "_index" <*>
v .: "_type" <*>
v .: "_id" <*>
v .: "_score" <*>
v .: "_source" <*>
v .:? "highlight"
parseJSON _ = empty
instance FromJSON ShardResult where
parseJSON (Object v) = ShardResult <$>
v .: "total" <*>
v .: "successful" <*>
v .: "failed"
parseJSON _ = empty
|
bnordbo/bloodhound
|
src/Database/Bloodhound/Types.hs
|
bsd-3-clause
| 80,749 | 0 | 17 | 25,115 | 16,585 | 9,152 | 7,433 | 1,651 | 16 |
{-# LANGUAGE UnicodeSyntax #-}
module AXT.TicTacToe.Graphics
(
display,
idle
) where
import Data.Ratio(Ratio, (%))
import Control.Monad(forM_)
import AXT.TicTacToe.Types as AT (Coor3DRI(..), CoorOnField, GameField, GameType(..), Field(F), GameLevel(..), RangeCoor(..), RatI, SCoor(..), State(..), StepResult(..))
import GL.Types as GLT (V2FL, V3FL, Coor2(..), Coor3(..), Coor3D(..), fromDegrees)
import Data.IORef as DI(IORef)
import Graphics.UI.GLUT as GUG(ClearBuffer(ColorBuffer, DepthBuffer), Color3, DisplayCallback, GLfloat, IdleCallback, PrimitiveMode(Lines), StrokeFont( Roman ), Vector3,
clear, color, get, postRedisplay, preservingMatrix, renderPrimitive, renderString, swapBuffers, translate, vertex)
import Graphics.Rendering.OpenGL.GL.VertexSpec as GROGL( Color3(..) )
import Graphics.Rendering.OpenGL.GL.Tensor as GROGL ( Vector3(..), Vertex3(..) )
import GL.Types(V2FL, V3FL, Coor2(..), Coor3(..), Coor3D(..), fromDegrees)
import AXT.TicTacToe.Conversions (c3D, cv, cher, re, toGLFloat)
xₘᵢₙ = negate xₘₐₓ
xₘₐₓ = 300 ∷ RatI
yₘᵢₙ = negate yₘₐₓ
yₘₐₓ = xₘₐₓ
nₓ = 3
ny = nₓ
stₓ = (xₘₐₓ - xₘᵢₙ) /nₓ
sty = (yₘₐₓ - yₘᵢₙ) /ny
offsetₓ = 50∷RatI
offsety = 80∷RatI
step = 100 -- for text 0.6
idle ∷ IORef (StepResult, GameField) → IdleCallback
idle reWF = do
-- d <- get delta
-- angle $~! (+ d)
-- (F fi) ← get fs
-- ds
postRedisplay Nothing
showState ∷ GameField → IO ()
showState (F fs) = do
let
dd = zip [0..] $ map (zip [0..]) fs
forM_ dd $ \(i,ss) → do
-- putStrLn $ show i
-- raw X (C3D 0 0 (0∷RatI))
-- line3D (C3D (stₓ/2) (-50) 0) (C3D 40 (stₓ/2) 0)
forM_ ss $ \(j,ts) → do
let sd = cv ts
case sd of
X → showR X $ c3D j i
O → showR O $ c3D j i
otherwise → return ()
display ∷ IORef GLfloat → IORef (StepResult, GameField) → DisplayCallback
display ang gs = do
clear [ColorBuffer, DepthBuffer]
a ← get ang
preservingMatrix $ do
-- scale 0.5 0.5 (0.5∷GLfloat)
color $ Color3 (0.2 ∷ GLfloat) 0.8 0.8
-- rotate a $ Vector3 1 0 0
(w, F fi) ← get gs
renderPrimitive Lines $ do
field3D
-- rotate a $ Vector3 0 0 1
-- translate $ Vector3 (-200∷GLfloat) 0 0
-- rasterPos (Vertex2 0 0)
-- renderString Roman "Xк☺-✔+×"
-- scale 1.0 1.0 (1.0∷GLfloat)
showState (F fi)
swapBuffers
{-
showA∷Field String → IO ()
showA (F fi) = forM_ [0..2] $ \x → do
forM_ [0..2] fi
-}
field3D ∷ IO ()
field3D = forM_ [negate stₓ/2,stₓ/2] $ \x → do
line3D (C3D x yₘᵢₙ 0) (C3D x yₘₐₓ 0)
line3D (C3D xₘᵢₙ x 0) (C3D xₘₐₓ x 0)
xToRawX, yToRawY ∷ RatI → GLfloat
xToRawX x = toGLFloat $ xₘᵢₙ + x
yToRawY y = toGLFloat $ yₘₐₓ - y
showS ∷ State → SCoor → IO ()
showS s (C3D x y z) = rawT s (C3D (xₘᵢₙ + (150 * x % 1)) (yₘₐₓ - (150 * y % 1)) 0)
showR ∷ State → SCoor → IO ()
showR s (C3D x y z) = raw s (C3D (xₘᵢₙ + (stₓ* (x % 1))) (yₘₐₓ - (sty * (y % 1))) 0)
-- | Draw a mark (X or O).
raw ∷ State → Coor3DRI → IO ()
raw s (C3D x y z) =
case s of
X → do
line3D (C3D x₁ y₁ z) (C3D x₂ y₂ z)
line3D (C3D x₁ y₂ z) (C3D x₂ y₁ z)
-- line3D (C3D x₂ y₁ z) (C3D x₂ y₂ z)
-- line3D (C3D x₁ y₁ z) (C3D x₁ y₂ z)
O → do
line3D (C3D x₁ y₁ z) (C3D x₂ y₁ z)
line3D (C3D x₁ y₁ z) (C3D x₁ y₂ z)
line3D (C3D x₁ y₂ z) (C3D x₂ y₂ z)
line3D (C3D x₂ y₁ z) (C3D x₂ y₂ z)
where
x₁ = x + offsetₓ
x₂ = x₁ + step
y₁ = y - offsety
y₂ = y₁ - step
rawT ∷ State → Coor3DRI → IO ()
rawT s (C3D x y z) =
do
translate $ Vector3 (toGLFloat x₁) (toGLFloat y₁) 0
renderString Roman $ re s
where
x₁ = x + offsetₓ
y₁ = y - offsety
line3D ∷ Coor3DRI → Coor3DRI → IO ()
line3D beg end = do
vertex (Vertex3 (toGLFloat (x beg)) (toGLFloat (y beg)) (toGLFloat (z beg)))
vertex (Vertex3 (toGLFloat (x end)) (toGLFloat (y end)) (toGLFloat (z end)))
|
xruzzz/axt-tic-tac-toe-gl-haskell
|
src/AXT/TicTacToe/Graphics.hs
|
bsd-3-clause
| 4,405 | 130 | 19 | 1,160 | 1,732 | 936 | 796 | 89 | 3 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ScopedTypeVariables #-}
module RetrySpec where
-------------------------------------------------------------------------------
import Control.Applicative
import Control.Exception (MaskingState (..), getMaskingState)
import Control.Monad.Catch
import Data.Default.Class (def)
import Data.IORef
import Data.Monoid
import Data.Time.Clock
import Data.Time.LocalTime ()
import Data.Typeable
import System.IO.Error
import Test.Hspec
import Test.Hspec.QuickCheck
import Test.HUnit (Test (TestCase), (@?=))
import Test.QuickCheck
import Test.QuickCheck.Function
import Test.QuickCheck.Monadic as QCM
-------------------------------------------------------------------------------
import Control.Retry
-------------------------------------------------------------------------------
isLeftAnd :: (a -> Bool) -> Either a b -> Bool
isLeftAnd f ei = case ei of
Left v -> f v
_ -> False
testHandlers :: [Int -> Handler IO Bool]
testHandlers = [const $ Handler (\(_::SomeException) -> return True)]
data Custom1 = Custom1 deriving (Eq,Show,Read,Ord,Typeable)
data Custom2 = Custom2 deriving (Eq,Show,Read,Ord,Typeable)
instance Exception Custom1
instance Exception Custom2
-- | Create an action that will fail exactly N times with the given
-- exception and will then return () in any subsequent calls.
mkFailN :: (Num a, Ord a, Exception e) => e -> a -> IO (IO ())
mkFailN e n = do
r <- newIORef 0
return $ do
old <- atomicModifyIORef' r $ \ old -> (old+1, old)
case old >= n of
True -> return ()
False -> throwM e
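-- A brief sketch of the intended behaviour, as exercised by the specs below:
--
-- > f <- mkFailN Custom1 2
-- > f   -- throws Custom1 (1st call)
-- > f   -- throws Custom1 (2nd call)
-- > f   -- returns () from here on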
{-# ANN spec ("HLint: ignore Redundant do"::String) #-}
spec :: Spec
spec = parallel $ describe "retry" $ do
it "recovering test without quadratic retry delay"
. property . monadicIO $ do
startTime <- run getCurrentTime
timeout <- pick . choose $ (0,15)
retries <- getSmall . getPositive <$> pick arbitrary
res <- run . try $ recovering
(constantDelay timeout <> limitRetries retries)
testHandlers
(throwM (userError "booo"))
endTime <- run getCurrentTime
QCM.assert (isLeftAnd isUserError res)
let ms' = (fromInteger . toInteger $ (timeout * retries)) / 1000000.0
QCM.assert (diffUTCTime endTime startTime >= ms')
describe "recovering - exception hierarcy semantics" $ do
it "recovers from custom exceptions" $ do
f <- mkFailN Custom1 2
res <- try $ recovering
(constantDelay 5000 <> limitRetries 3)
[const $ Handler $ \ Custom1 -> return True]
f
(res :: Either Custom1 ()) `shouldBe` Right ()
it "fails beyond policy using custom exceptions" $ do
f <- mkFailN Custom1 3
res <- try $ recovering
(constantDelay 5000 <> limitRetries 2)
[const $ Handler $ \ Custom1 -> return True]
f
(res :: Either Custom1 ()) `shouldBe` Left Custom1
it "does not recover from unhandled exceptions" $ do
f <- mkFailN Custom2 2
res <- try $ recovering
(constantDelay 5000 <> limitRetries 5)
[const $ Handler $ \ Custom1 -> return True]
f
(res :: Either Custom2 ()) `shouldBe` Left Custom2
it "recovers in presence of multiple handlers" $ do
f <- mkFailN Custom2 2
res <- try $ recovering
(constantDelay 5000 <> limitRetries 5)
[ const $ Handler $ \ Custom1 -> return True
, const $ Handler $ \ Custom2 -> return True ]
f
(res :: Either Custom2 ()) `shouldBe` Right ()
it "general exceptions catch specific ones" $ do
f <- mkFailN Custom2 2
res <- try $ recovering
(constantDelay 5000 <> limitRetries 5)
[ const $ Handler $ \ (_::SomeException) -> return True ]
f
(res :: Either Custom2 ()) `shouldBe` Right ()
it "(redundant) even general catchers don't go beyond policy" $ do
f <- mkFailN Custom2 3
res <- try $ recovering
(constantDelay 5000 <> limitRetries 2)
[ const $ Handler $ \ (_::SomeException) -> return True ]
f
(res :: Either Custom2 ()) `shouldBe` Left Custom2
it "works as expected in presence of failed exception casts" $ do
f <- mkFailN Custom2 3
flip shouldThrow anyException $ do
res <- try $ recovering
(constantDelay 5000 <> limitRetries 2)
[ const $ Handler $ \ (_::SomeException) -> return True ]
f
(res :: Either Custom1 ()) `shouldBe` Left Custom1
describe "Policy is a monoid" $ do
let toPolicy = RetryPolicy . apply
let prop left right =
property $ \a x ->
let applyPolicy f = getRetryPolicy (f $ toPolicy a) x
l = applyPolicy left
r = applyPolicy right
validRes = maybe True (>= 0)
in (validRes r && validRes l) ==> l == r
let prop3 left right =
property $ \a b c x ->
let applyPolicy f = getRetryPolicy (f (toPolicy a) (toPolicy b) (toPolicy c)) x
in applyPolicy left == applyPolicy right
it "left identity" $
prop (\p -> mempty <> p) id
it "right identity" $
prop (\p -> p <> mempty) id
it "associativity" $
prop3 (\x y z -> x <> (y <> z)) (\x y z -> (x <> y) <> z)
describe "masking state" $ do
it "shouldn't change masking state in a recovered action" $ do
maskingState <- getMaskingState
shouldThrow
(recovering def testHandlers $ do
maskingState' <- getMaskingState
maskingState' @?= maskingState
fail "Retrying...")
anyIOException
it "should mask asynchronous exceptions in exception handlers" $ do
let checkMaskingStateHandlers =
[ const $ Handler $ \(_ :: SomeException) -> do
maskingState <- getMaskingState
maskingState @?= MaskedInterruptible
return True
]
shouldThrow
(recovering def checkMaskingStateHandlers $ fail "Retrying...")
anyIOException
|
bergmark/retry
|
test/RetrySpec.hs
|
bsd-3-clause
| 6,242 | 9 | 21 | 1,844 | 1,863 | 934 | 929 | 143 | 2 |
{-# LANGUAGE ImplicitParams #-}
import Graphics.FreeGame
import Data.Vect
import Lib.Action
import Lib.Field
import Lib.Settings
test :: (?player :: Picture, ?block :: Picture) => Game ()
test = do
drawPicture $ Translate startPos ?player
drawField
tick
test
main :: IO (Maybe ())
main = runGame gameParam $ do
player <- loadPictureFromFile "images/player.png"
block <- loadPictureFromFile "images/block.png"
let ?player = player
?block = block
run $ action startPos (Vec2 0 0) Air
where
run game = do
game' <- untickGame game
tick
run game'
|
amutake/free-game-ex
|
main.hs
|
bsd-3-clause
| 615 | 0 | 11 | 155 | 191 | 93 | 98 | 23 | 1 |
{-# LANGUAGE TypeSynonymInstances #-}
-- | All of the datatypes and class definitions which are required to
-- control the process, represent the state, and report on the status, of
-- crawling a domain. The common feature of everything included in this module
-- is that it supports this core functionality.
module Network.SiteCheck.Data
( URLish(..)
-- * Links
, Link(..), StatusCode(..)
, isRedirect, wasRedirected, isStatusOk, newLinkWithPrev, distinctLinks
-- * Scripts
, Script(..), Decision(..), Option(..)
, getResultFile, isOptionSet, getLimit
-- * Configuration
, Curls(..), Config
, filterResults, isOverLimit
-- * Visited Links
, VisitStore(..), Visited
, emptyVisited
-- * Crawl State
, State, Mapping, Mappings(..), Action(..)
, initState, isNextVisited
-- * Stack functions
, getStack, stackAsStrings, stackAsURLs
, stackLength, isStackEmpty
, initStack, pushStack, popStack, deleteStack, tailStack, mergeWithStack
-- * Response
, Response(..), header
) where
import Network.Curl
import Data.List (nubBy)
import qualified Data.Map as M
import Network.SiteCheck.URL
import Network.SiteCheck.Util
-- | Some functions only need to know that a datatype is URLish.
class URLish a where
toURL :: a -> URL
toLink :: a -> Link
instance URLish URL where
toURL u = u
toLink u = urlToLink u
instance URLish String where
toURL s = case importURL s of
Just x -> x
Nothing -> emptyURL
toLink s = toLink $ toURL s
-- | The HTTP status code for a 'Link'.
data StatusCode = NoCode
| Code Int
deriving (Show, Eq)
-- | A Link holds the URL being tracked, the parent URL where this URL
-- was found, the final StatusCode and a list of the previous URLs which
-- redirected to this page.
--
-- When a URL returns a 301 or 302 status code and a location header, a new
-- Link is created with the location as the URL and the old URL added to
-- the previous list.
data Link = Link {
parent :: URL -- ^ the URL where this Link was found
, previous :: [URL] -- ^ a list of URLs which redirected to this one
, status :: StatusCode -- ^ the final status code for this URL
, theURL :: URL -- ^ the URL being tracked
} deriving (Show, Eq)
-- | A Link is 'URLish'.
instance URLish Link where
toURL l = (theURL l)
toLink l = l
-- | Create a Link from a URL.
urlToLink :: URL -> Link
urlToLink url = (Link emptyURL [] NoCode url)
-- | Create a new Link from something URLish adding the 'URL' from the old
-- Link to the list of previous URLs.
newLinkWithPrev :: URLish a => Link -> a -> Link
newLinkWithPrev old new =
(old { previous = (toURL old) : (previous old), theURL = (toURL new)})
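-- An illustrative sketch (URLs shortened for readability):
--
-- > let old = toLink "http://example.com/a"   -- theURL = /a, previous = []
-- > newLinkWithPrev old "http://example.com/b" -- theURL = /b, previous = [/a]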
-- | Is the 'StatusCode' in a Link 301 or 302?
isRedirect :: Link -> Bool
isRedirect (Link _ _ (Code 301) _) = True
isRedirect (Link _ _ (Code 302) _) = True
isRedirect (Link _ _ _ _) = False
-- | Was this Link the result of a redirect?
wasRedirected :: Link -> Bool
wasRedirected (Link _ (_:_) _ _) = True
wasRedirected (Link _ _ _ _) = False
-- | Is the 'StatusCode' for this Link 200?
isStatusOk :: Link -> Bool
isStatusOk (Link _ _ (Code 200) _) = True
isStatusOk (Link _ _ _ _) = False
-- | Returns a list of distinct Links. Two Links are considered the same if
-- they have the same textual representation.
distinctLinks :: [Link] -> [Link]
distinctLinks =
nubBy (\a b -> (u a) == (u b))
where u = (exportURL . toURL)
-- | As SiteCheck runs, it can be configured to print status information using
-- the following options. When a crawl is complete, any non-200 status codes
-- are reported. If you set AllResults, all results are shown; if you set
-- RedirectResults, redirects are shown in addition to failures.
data Option = Limit Int -- ^ the maximum number of URLs to crawl
| ResultFile String -- ^ a file name where results are printed
| PrintStatus -- ^ print each URL as it is retrieved
| PrintStack -- ^ print the complete 'Stack'
| PrintTopStack -- ^ print the top five Stack entries
| PrintActions -- ^ print the actions chosen for each URL
| PrintPosts -- ^ print each post action
| PrintParent -- ^ print the parent of each URL
| AllResults -- ^ show all results when finished
| RedirectResults -- ^ show errors and redirects
deriving (Show, Eq)
-- | Get the 'Limit' value from a list of Options.
getLimit :: [Option] -> Maybe Int
getLimit (x:xs) =
case x of
(Limit l) -> Just l
_ -> getLimit xs
getLimit [] = Nothing
-- | Get the 'ResultFile' value from a list of Options.
getResultFile :: [Option] -> Maybe String
getResultFile (x:xs) =
case x of
ResultFile name -> Just name
_ -> getResultFile xs
getResultFile [] = Nothing
type Mapping a = (a -> Maybe a)
-- | Mappings control how a string, which comes from an anchor href attribute
-- becomes a 'URL' which will be reported. Each mapping function returns a
-- Maybe value. Because a mapping may return Nothing, it can be used for
-- filtering as well as transforming URLs.
--
-- During the link filtering process raw strings are first passed through
-- the stringToString mappings then the urlToUrl mappings and finally the
-- urlToListOfURL mappings.
data Mappings = Mappings {
stringToString :: [Mapping String]
, urlToURL :: [Mapping URL]
, urlToListOfURL :: [(URL -> [Maybe URL])]
}
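-- A minimal sketch of assembling a 'Mappings' value; the mapping functions
-- shown here are hypothetical examples (not part of this module) and assume
-- Data.List.isSuffixOf is in scope:
--
-- > exampleMappings :: Mappings
-- > exampleMappings = Mappings
-- >   { stringToString = [\s -> if ".pdf" `isSuffixOf` s then Nothing else Just s]
-- >   , urlToURL       = [Just]
-- >   , urlToListOfURL = []
-- >   }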
data Script = Script { dn :: String
, options :: [Option]
, intOpts :: [CurlOption]
, extOpts :: [CurlOption]
, seed :: [String]
, decisions :: [Decision]
, mappings :: Mappings
}
data Decision = Decision { isMatch :: String -> Bool
, actions :: [Action]
}
data Action = Ignore
| PushLinks
| Post String [(String, String)]
| Get String [(String, String)]
deriving (Show)
data Curls = Curls { intra :: Curl, extra :: Curl }
type Config = (Curls, Script)
isOptionSet :: Option -> Config -> Bool
isOptionSet opt (_, script) =
opt `elem` (options script)
initStack :: Config -> Stack
initStack (_, script) = map urlToLink .
fromMaybe .
map importURL .
seed $ script
isOverLimit :: Config -> State -> Bool
isOverLimit (_, script) state =
case getLimit $ options script of
Just x -> x <= visitedLength state
Nothing -> False
filterResults :: Config -> [Link] -> [Link]
filterResults config links
| (isOptionSet AllResults config) = links
| (isOptionSet RedirectResults config) = filter redirectedOrNon200 links
| otherwise = filter (not . isStatusOk) links
where redirectedOrNon200 x = wasRedirected x || (not . isStatusOk) x
class VisitStore a where
addVisited :: (URLish b) => b -> StatusCode -> a -> a
deleteVisited :: (URLish b) => b -> a -> a
visitedLength :: a -> Int
exportVisited :: a -> [Link]
newtype Visited = V (M.Map String Link) deriving (Show, Eq)
instance VisitStore Visited where
addVisited u code (V visited) =
let k = (exportURL (toURL u))
v = ((toLink u) {status = code})
in
(V (M.insert k v visited))
deleteVisited u (V visited) =
let k = (exportURL (toURL u)) in
(V (M.delete k visited))
visitedLength (V v) = M.size v
exportVisited (V v) = M.elems v
emptyVisited :: Visited
emptyVisited = V M.empty
isVisited :: (URLish a) => Visited -> a -> Bool
isVisited (V visited) u = M.member (exportURL (toURL u)) visited
notVisited :: (URLish a) => Visited -> a -> Bool
notVisited visited u = (not (isVisited visited u))
distinct :: Visited -> [Link] -> [URL] -> [URL]
distinct visited queue new =
filter (`notElem` queueStrings) $
filter (notVisited visited) new
where queueStrings = map theURL queue
type Stack = [Link]
data State = State Visited Stack
deriving (Show, Eq)
instance VisitStore State where
addVisited l code (State visited stack) =
(State (addVisited l code visited) stack)
deleteVisited l (State v s) = (State (deleteVisited l v) s)
visitedLength (State v _) = visitedLength v
exportVisited (State v _) = exportVisited v
initState :: Visited -> Stack -> State
initState v s = State v s
isNextVisited :: State -> Bool
isNextVisited (State v (x:_)) = isVisited v x
isNextVisited (State _ []) = False
tailStack :: State -> State
tailStack (State v (_:xs)) = State v xs
tailStack state@(State _ []) = state
deleteStack :: State -> State
deleteStack (State v _) = State v []
isStackEmpty :: State -> Bool
isStackEmpty (State _ (_:_)) = False
isStackEmpty _ = True
popStack :: State -> (Maybe Link, State)
popStack (State v (x:xs)) = (Just x, (State v xs))
popStack s@(State _ []) = (Nothing, s)
pushStack :: (URLish a) => a -> State -> State
pushStack l (State v xs) = (State v ((toLink l) : xs))
getStack :: State -> [Link]
getStack (State _ xs) = xs
stackLength :: State -> Int
stackLength (State _ stack) = length stack
stackAsURLs :: State -> [URL]
stackAsURLs = map toURL . getStack
stackAsStrings :: State -> [String]
stackAsStrings = map exportURL . stackAsURLs
mergeWithStack :: Link -> [URL] -> State -> State
mergeWithStack pnt new (State visited stack) =
(State visited) $
(stack ++) $
(map (Link (theURL pnt) [] NoCode)) $
distinct visited stack new
data Response = Error
| Response { rStatus :: Int
, rHeaders :: [(String, String)]
, rBody :: String}
deriving (Show, Eq)
-- | Lookup a header and remove any leading spaces.
header :: Response -> String -> Maybe String
header r k = case r of
Error -> Nothing
(Response _ hs _) -> do
h <- lookup k hs
return (dropWhile (== ' ') h)
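-- For example (a sketch):
--
-- > header (Response 302 [("Location", " http://example.com/")] "") "Location"
-- >   -- == Just "http://example.com/"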
|
brentonashworth/sitecheck
|
src/Network/SiteCheck/Data.hs
|
bsd-3-clause
| 10,231 | 6 | 13 | 2,795 | 2,829 | 1,560 | 1,269 | 206 | 2 |
module PostgREST.RangeQuery (
rangeParse
, rangeRequested
, rangeLimit
, rangeOffset
, NonnegRange
) where
import Control.Applicative
import Network.HTTP.Types.Header
import PostgREST.Types ()
import qualified Data.ByteString.Char8 as BS
import Data.Ranged.Boundaries
import Data.Ranged.Ranges
import Data.String.Conversions (cs)
import Text.Read (readMaybe)
import Text.Regex.TDFA ((=~))
import Data.Maybe (fromMaybe, listToMaybe)
import Prelude
type NonnegRange = Range Int
rangeParse :: BS.ByteString -> Maybe NonnegRange
rangeParse range = do
let rangeRegex = "^([0-9]+)-([0-9]*)$" :: BS.ByteString
parsedRange <- listToMaybe (range =~ rangeRegex :: [[BS.ByteString]])
let [_, from, to] = readMaybe . cs <$> parsedRange
let lower = fromMaybe emptyRange (rangeGeq <$> from)
let upper = fromMaybe (rangeGeq 0) (rangeLeq <$> to)
return $ rangeIntersection lower upper
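-- A few illustrative cases (a sketch; the concrete 'Range' values come from
-- the ranged-sets package):
--
-- > rangeParse "0-99"  -- offset 0, limit Just 100
-- > rangeParse "5-"    -- offset 5, no upper bound (limit Nothing)
-- > rangeParse "oops"  -- Nothing: not a valid range value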
rangeRequested :: RequestHeaders -> Maybe NonnegRange
rangeRequested = (rangeParse =<<) . lookup hRange
rangeLimit :: NonnegRange -> Maybe Int
rangeLimit range =
case [rangeLower range, rangeUpper range] of
[BoundaryBelow from, BoundaryAbove to] -> Just (1 + to - from)
_ -> Nothing
rangeOffset :: NonnegRange -> Int
rangeOffset range =
case rangeLower range of
BoundaryBelow from -> from
_ -> error "range without lower bound" -- should never happen
rangeGeq :: Int -> NonnegRange
rangeGeq n =
Range (BoundaryBelow n) BoundaryAboveAll
rangeLeq :: Int -> NonnegRange
rangeLeq n =
Range BoundaryBelowAll (BoundaryAbove n)
|
pap/postgrest
|
src/PostgREST/RangeQuery.hs
|
mit
| 1,703 | 0 | 12 | 412 | 475 | 256 | 219 | 44 | 2 |
-- Copyright (c) 2014 Contributors as noted in the AUTHORS file
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>.
import Arduino.Uno
main = compileProgram $ do
let doubleOutput = output2 (digitalOutput pin12) (digitalOutput pin13)
doubleOutput =: every 5000 ~> flip2TupleStream
flip2TupleStream :: Stream a -> Stream (Bit, Bit)
flip2TupleStream = foldpS (\_ -> flip2Tuple) (pack2 (bitLow, bitHigh))
where
flip2Tuple :: Expression (a, b) -> Expression (b, a)
flip2Tuple tuple = let (aValue, bValue) = unpack2 tuple
in pack2 (bValue, aValue)
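-- A worked sketch of the fold: starting from (bitLow, bitHigh), every
-- occurrence on the input stream swaps the pair, so pins 12 and 13 toggle in
-- opposition: (low,high) -> (high,low) -> (low,high) -> ...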
|
frp-arduino/frp-arduino
|
examples/DoubleBlink.hs
|
gpl-3.0
| 1,199 | 3 | 13 | 246 | 189 | 103 | 86 | 9 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.CognitoIdentity.LookupDeveloperIdentity
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Retrieves the 'IdentityID' associated with a 'DeveloperUserIdentifier' or the
-- list of 'DeveloperUserIdentifier's associated with an 'IdentityId' for an
-- existing identity. Either 'IdentityID' or 'DeveloperUserIdentifier' must not be
-- null. If you supply only one of these values, the other value will be
-- searched in the database and returned as a part of the response. If you
-- supply both, 'DeveloperUserIdentifier' will be matched against 'IdentityID'. If
-- the values are verified against the database, the response returns both
-- values and is the same as the request. Otherwise a 'ResourceConflictException'
-- is thrown.
--
-- <http://docs.aws.amazon.com/cognitoidentity/latest/APIReference/API_LookupDeveloperIdentity.html>
module Network.AWS.CognitoIdentity.LookupDeveloperIdentity
(
-- * Request
LookupDeveloperIdentity
-- ** Request constructor
, lookupDeveloperIdentity
-- ** Request lenses
, ldiDeveloperUserIdentifier
, ldiIdentityId
, ldiIdentityPoolId
, ldiMaxResults
, ldiNextToken
-- * Response
, LookupDeveloperIdentityResponse
-- ** Response constructor
, lookupDeveloperIdentityResponse
-- ** Response lenses
, ldirDeveloperUserIdentifierList
, ldirIdentityId
, ldirNextToken
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.CognitoIdentity.Types
import qualified GHC.Exts
data LookupDeveloperIdentity = LookupDeveloperIdentity
{ _ldiDeveloperUserIdentifier :: Maybe Text
, _ldiIdentityId :: Maybe Text
, _ldiIdentityPoolId :: Text
, _ldiMaxResults :: Maybe Nat
, _ldiNextToken :: Maybe Text
} deriving (Eq, Ord, Read, Show)
-- | 'LookupDeveloperIdentity' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ldiDeveloperUserIdentifier' @::@ 'Maybe' 'Text'
--
-- * 'ldiIdentityId' @::@ 'Maybe' 'Text'
--
-- * 'ldiIdentityPoolId' @::@ 'Text'
--
-- * 'ldiMaxResults' @::@ 'Maybe' 'Natural'
--
-- * 'ldiNextToken' @::@ 'Maybe' 'Text'
--
lookupDeveloperIdentity :: Text -- ^ 'ldiIdentityPoolId'
-> LookupDeveloperIdentity
lookupDeveloperIdentity p1 = LookupDeveloperIdentity
{ _ldiIdentityPoolId = p1
, _ldiIdentityId = Nothing
, _ldiDeveloperUserIdentifier = Nothing
, _ldiMaxResults = Nothing
, _ldiNextToken = Nothing
}
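-- A hedged usage sketch (the pool id and e-mail below are made-up
-- placeholders, and the (&) and (?~) lens operators are assumed to be in
-- scope via "Network.AWS.Prelude"):
--
-- > lookupDeveloperIdentity "us-east-1:EXAMPLE-GUID"
-- >     & ldiDeveloperUserIdentifier ?~ "[email protected]"
-- >     & ldiMaxResults ?~ 10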
-- | A unique ID used by your backend authentication process to identify a user.
-- Typically, a developer identity provider would issue many developer user
-- identifiers, in keeping with the number of users.
ldiDeveloperUserIdentifier :: Lens' LookupDeveloperIdentity (Maybe Text)
ldiDeveloperUserIdentifier =
lens _ldiDeveloperUserIdentifier
(\s a -> s { _ldiDeveloperUserIdentifier = a })
-- | A unique identifier in the format REGION:GUID.
ldiIdentityId :: Lens' LookupDeveloperIdentity (Maybe Text)
ldiIdentityId = lens _ldiIdentityId (\s a -> s { _ldiIdentityId = a })
-- | An identity pool ID in the format REGION:GUID.
ldiIdentityPoolId :: Lens' LookupDeveloperIdentity Text
ldiIdentityPoolId =
lens _ldiIdentityPoolId (\s a -> s { _ldiIdentityPoolId = a })
-- | The maximum number of identities to return.
ldiMaxResults :: Lens' LookupDeveloperIdentity (Maybe Natural)
ldiMaxResults = lens _ldiMaxResults (\s a -> s { _ldiMaxResults = a }) . mapping _Nat
-- | A pagination token. The first call you make will have 'NextToken' set to null.
-- After that the service will return 'NextToken' values as needed. For example,
-- let's say you make a request with 'MaxResults' set to 10, and there are 20
-- matches in the database. The service will return a pagination token as a part
-- of the response. This token can be used to call the API again and get results
-- starting from the 11th match.
ldiNextToken :: Lens' LookupDeveloperIdentity (Maybe Text)
ldiNextToken = lens _ldiNextToken (\s a -> s { _ldiNextToken = a })
data LookupDeveloperIdentityResponse = LookupDeveloperIdentityResponse
{ _ldirDeveloperUserIdentifierList :: List "DeveloperUserIdentifierList" Text
, _ldirIdentityId :: Maybe Text
, _ldirNextToken :: Maybe Text
} deriving (Eq, Ord, Read, Show)
-- | 'LookupDeveloperIdentityResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ldirDeveloperUserIdentifierList' @::@ ['Text']
--
-- * 'ldirIdentityId' @::@ 'Maybe' 'Text'
--
-- * 'ldirNextToken' @::@ 'Maybe' 'Text'
--
lookupDeveloperIdentityResponse :: LookupDeveloperIdentityResponse
lookupDeveloperIdentityResponse = LookupDeveloperIdentityResponse
{ _ldirIdentityId = Nothing
, _ldirDeveloperUserIdentifierList = mempty
, _ldirNextToken = Nothing
}
-- | This is the list of developer user identifiers associated with an identity
-- ID. Cognito supports the association of multiple developer user identifiers
-- with an identity ID.
ldirDeveloperUserIdentifierList :: Lens' LookupDeveloperIdentityResponse [Text]
ldirDeveloperUserIdentifierList =
lens _ldirDeveloperUserIdentifierList
(\s a -> s { _ldirDeveloperUserIdentifierList = a })
. _List
-- | A unique identifier in the format REGION:GUID.
ldirIdentityId :: Lens' LookupDeveloperIdentityResponse (Maybe Text)
ldirIdentityId = lens _ldirIdentityId (\s a -> s { _ldirIdentityId = a })
-- | A pagination token. The first call you make will have 'NextToken' set to null.
-- After that the service will return 'NextToken' values as needed. For example,
-- let's say you make a request with 'MaxResults' set to 10, and there are 20
-- matches in the database. The service will return a pagination token as a part
-- of the response. This token can be used to call the API again and get results
-- starting from the 11th match.
ldirNextToken :: Lens' LookupDeveloperIdentityResponse (Maybe Text)
ldirNextToken = lens _ldirNextToken (\s a -> s { _ldirNextToken = a })
instance ToPath LookupDeveloperIdentity where
toPath = const "/"
instance ToQuery LookupDeveloperIdentity where
toQuery = const mempty
instance ToHeaders LookupDeveloperIdentity
instance ToJSON LookupDeveloperIdentity where
toJSON LookupDeveloperIdentity{..} = object
[ "IdentityPoolId" .= _ldiIdentityPoolId
, "IdentityId" .= _ldiIdentityId
, "DeveloperUserIdentifier" .= _ldiDeveloperUserIdentifier
, "MaxResults" .= _ldiMaxResults
, "NextToken" .= _ldiNextToken
]
instance AWSRequest LookupDeveloperIdentity where
type Sv LookupDeveloperIdentity = CognitoIdentity
type Rs LookupDeveloperIdentity = LookupDeveloperIdentityResponse
request = post "LookupDeveloperIdentity"
response = jsonResponse
instance FromJSON LookupDeveloperIdentityResponse where
parseJSON = withObject "LookupDeveloperIdentityResponse" $ \o -> LookupDeveloperIdentityResponse
<$> o .:? "DeveloperUserIdentifierList" .!= mempty
<*> o .:? "IdentityId"
<*> o .:? "NextToken"
|
romanb/amazonka
|
amazonka-cognito-identity/gen/Network/AWS/CognitoIdentity/LookupDeveloperIdentity.hs
|
mpl-2.0
| 8,265 | 0 | 14 | 1,730 | 919 | 552 | 367 | 98 | 1 |
{-# LANGUAGE FlexibleContexts #-}
import System.Environment(getArgs)
import Control.Monad.IO.Class(liftIO)
import Text.Printf(printf)
import qualified Data.Vector.Storable as DVS(mapM_)
import Clang.String(unpack)
import Clang.TranslationUnit(getCursor)
import Clang(parseSourceFile, getChildren)
import Clang.Cursor(getKind, getDisplayName, getCursorKindSpelling)
test tu = getCursor tu >>= getChildren >>= DVS.mapM_ printInfo
where
printInfo c = do
name <- getDisplayName c >>= unpack
tstr <- getCursorKindSpelling (getKind c) >>= unpack
liftIO $ printf "Name:%s, Kind:%s\n" name tstr
main = do
(arg:args) <- getArgs
parseSourceFile arg args test
|
chetant/LibClang
|
test/Test_ChildVisitor.hs
|
bsd-3-clause
| 680 | 1 | 11 | 102 | 210 | 111 | 99 | 17 | 1 |
{-# LANGUAGE FlexibleContexts #-}
module Futhark.Representation.AST.Attributes.Context
( expExtContext
)
where
import qualified Data.HashMap.Lazy as HM
import Futhark.Representation.AST.Attributes.Types
import Futhark.Representation.AST.Attributes.Patterns
import Futhark.Representation.AST.Attributes.TypeOf
import Futhark.Representation.AST.Syntax
expExtContext :: (Annotations lore, HasScope lore m, Monad m) =>
Pattern lore -> Exp lore -> m [Maybe SubExp]
expExtContext pat (If _ tbranch fbranch _) = do
ttype <- bodyExtType tbranch
ftype <- bodyExtType fbranch
let combtype = generaliseExtTypes ttype ftype
extdims = map (extShapeDims . arrayShape) combtype
ext_mapping :: HM.HashMap VName ExtDimSize
ext_mapping = shapeMapping' (patternValueTypes pat) extdims
hasFreeDim name = case HM.lookup name ext_mapping of
Just (Free se) -> Just se
_ -> Nothing
return $ map (hasFreeDim . identName) $ patternContextIdents pat
expExtContext pat _ =
return $ replicate (length $ patternContextElements pat) Nothing
|
CulpaBS/wbBach
|
src/Futhark/Representation/AST/Attributes/Context.hs
|
bsd-3-clause
| 1,099 | 0 | 15 | 210 | 303 | 159 | 144 | 23 | 2 |
{-# LANGUAGE LambdaCase #-}
module OpenCog.Lojban
( WordList
, initParserPrinter
, lojbanToAtomese
, lojbanToAtomeseRaw
, atomeseToLojban
, loadWordLists
) where
import OpenCog.Lojban.Syntax
import OpenCog.Lojban.Util
import OpenCog.Lojban.WordList
import OpenCog.Lojban.Syntax.Types (WordList)
import OpenCog.AtomSpace
import Control.Monad.IO.Class
import Control.Monad.Trans.Reader
import Control.Exception
import System.Random
import Data.Char (chr)
import Data.Maybe
import qualified Data.Map as M
import Text.Syntax.Parser.Naive
import qualified Text.Syntax.Printer.Naive as P
initParserPrinter :: String -> IO (String -> Maybe Atom, Atom -> Maybe String)
initParserPrinter path = do
wordlist <- loadWordLists path
return (lojbanToAtomese wordlist,atomeseToLojban wordlist)
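-- A rough usage sketch (the word-list path and the sentence are placeholders):
--
-- > (toAtom, toText) <- initParserPrinter "/path/to/lojban-wordlists"
-- > let parsed = toAtom "mi prami do"   -- :: Maybe Atom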
lojbanToAtomese :: WordList -> String -> Maybe Atom
lojbanToAtomese state text =
wrapAtom <$> listToMaybe (parse (runReaderT lojban state) (text++" "))
lojbanToAtomeseRaw :: WordList -> String -> Maybe (Atom,String)
lojbanToAtomeseRaw state text =
listToMaybe (rawparse (runReaderT lojban state) (text++" "))
wrapAtom :: Atom -> Atom
wrapAtom atom@(Link "SatisfactionLink" _ _) = cLL [cAN "QuestionAnchor" , atom]
wrapAtom atom@(Link "PutLink" _ _) = cLL [cAN "QuestionAnchor" , atom]
wrapAtom atom = cLL [cAN "StatementAnchor", atom]
atomeseToLojban :: WordList -> Atom -> Maybe String
atomeseToLojban state a@(LL [_an,s]) = P.print (runReaderT preti state) s
{-tvToLojban :: TruthVal -> String
tvToLojban tv
| tvMean tv > 0.5 = "go'i"
| tvMean tv <= 0.5 = "nago'i"-}
{-EquivalenceLink
EvaluationLink
VariableNode "var1!!!"
ListLink
VariableNode "var2"
ConceptNode "vo'a"
EvaluationLink
PredicateNode "sumti1"
ListLink "var2"
VariableNode "var2"
ConceptNode "something"
-}
|
ruiting/opencog
|
opencog/nlp/lojban/HaskellLib/src/OpenCog/Lojban.hs
|
agpl-3.0
| 1,932 | 0 | 10 | 392 | 461 | 255 | 206 | 38 | 1 |
module GoToSymbolFunction_RecordsType where
data Pool a = Pool (Maybe a)
getResource :: P<caret>ool a -> Maybe a
getResource (Pool maybeA) = maybeA
|
charleso/intellij-haskforce
|
tests/gold/codeInsight/GoToSymbolFunction_RecordsType.hs
|
apache-2.0
| 149 | 0 | 8 | 23 | 57 | 30 | 27 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ru-RU">
<title>Местоположение изображения и сканер конфиденциальности | ZAP-расширение </title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Содержание</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Индекс</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Поиск</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Избранное</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
kingthorin/zap-extensions
|
addOns/imagelocationscanner/src/main/javahelp/org/zaproxy/zap/extension/imagelocationscanner/resources/help_ru_RU/helpset_ru_RU.hs
|
apache-2.0
| 1,109 | 78 | 67 | 162 | 586 | 293 | 293 | -1 | -1 |
-- !!! A rules test
-- At one time the rule got too specialised a type:
--
-- _R "ffoo" forall {@ a1 v :: (a1, ((), ()))}
-- fst @ a1 @ () (sndSnd @ a1 @ () @ () v) = fst @ a1 @ ((), ()) v
module Main where
import System.IO
import System.IO.Unsafe ( unsafePerformIO )
{-# NOINLINE [0] sndSnd #-}
-- Don't inline till last, to give the rule a chance
sndSnd (a,(b,c)) = (a,c)
{-# NOINLINE [2] myFst #-}
-- Don't inline till last, to give the rule a chance
myFst (a,b) = a
trace x y = unsafePerformIO (hPutStr stderr x >> hPutStr stderr "\n" >> return y)
{-# RULES "foo" forall v . myFst (sndSnd v) = trace "Yes" (fst v) #-}
main :: IO ()
main = print (myFst (sndSnd (True, (False,True))))
|
ezyang/ghc
|
testsuite/tests/simplCore/should_run/simplrun002.hs
|
bsd-3-clause
| 709 | 0 | 11 | 162 | 149 | 87 | 62 | 11 | 1 |
module Bugs where
data A a = A a (a -> Int)
|
Acidburn0zzz/haddock
|
html-test/src/Bugs.hs
|
bsd-2-clause
| 45 | 0 | 8 | 13 | 23 | 14 | 9 | 2 | 0 |
{-# LANGUAGE RankNTypes, PolyKinds, TypeInType, GADTs, UndecidableSuperClasses #-}
module T11520 where
import GHC.Types hiding (TyCon)
data TypeRep (a :: k)
class Typeable k => Typeable (a :: k) where
typeRep :: TypeRep a
data Compose (f :: k1 -> *) (g :: k2 -> k1) (a :: k2) = Compose (f (g a))
-- Note how the kind signature on g is incorrect
instance (Typeable f, Typeable (g :: k), Typeable k) => Typeable (Compose f g) where
typeRep = undefined
|
ezyang/ghc
|
testsuite/tests/polykinds/T11520.hs
|
bsd-3-clause
| 464 | 3 | 9 | 95 | 159 | 90 | 69 | -1 | -1 |
module T8469 where
import T8469a
|
sdiehl/ghc
|
testsuite/tests/ghci/scripts/T8469.hs
|
bsd-3-clause
| 34 | 0 | 3 | 6 | 7 | 5 | 2 | 2 | 0 |
module A (a) where
a = "2.0"
|
forked-upstream-packages-for-ghcjs/ghc
|
testsuite/tests/driver/recomp007/a2/A.hs
|
bsd-3-clause
| 30 | 0 | 4 | 8 | 14 | 9 | 5 | 2 | 1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
module Sproxy.Application
( sproxy
, redirect
) where
import Blaze.ByteString.Builder (toByteString)
import Blaze.ByteString.Builder.ByteString (fromByteString)
import Control.Exception
(Exception, Handler(..), SomeException, catches, displayException)
import qualified Data.Aeson as JSON
import Data.ByteString (ByteString)
import Data.ByteString as BS (break, intercalate)
import Data.ByteString.Char8 (pack, unpack)
import Data.ByteString.Lazy (fromStrict)
import Data.Conduit (Flush(Chunk), mapOutput)
import Data.HashMap.Strict as HM (HashMap, foldrWithKey, lookup)
import Data.List (find, partition)
import Data.Map as Map
(delete, fromListWith, insert, insertWith, toList)
import Data.Maybe (fromJust, fromMaybe)
import Data.Monoid ((<>))
import Data.Text (Text)
import Data.Text.Encoding (decodeUtf8, encodeUtf8)
import Data.Time.Clock.POSIX (posixSecondsToUTCTime)
import Data.Word (Word16)
import Data.Word8 (_colon)
import Foreign.C.Types (CTime(..))
import qualified Network.HTTP.Client as BE
import Network.HTTP.Client.Conduit (bodyReaderSource)
import Network.HTTP.Conduit
(requestBodySourceChunkedIO, requestBodySourceIO)
import Network.HTTP.Types
(RequestHeaders, ResponseHeaders, methodGet, methodPost)
import Network.HTTP.Types.Header
(hConnection, hContentLength, hContentType, hCookie, hLocation,
hTransferEncoding)
import Network.HTTP.Types.Status
(Status(..), badGateway502, badRequest400, forbidden403, found302,
internalServerError500, methodNotAllowed405, movedPermanently301,
networkAuthenticationRequired511, notFound404, ok200, seeOther303,
temporaryRedirect307)
import Network.Socket (NameInfoFlag(NI_NUMERICHOST), getNameInfo)
import qualified Network.Wai as W
import Network.Wai.Conduit (responseSource, sourceRequestBody)
import System.FilePath.Glob (Pattern, match)
import System.Posix.Time (epochTime)
import Text.InterpolatedString.Perl6 (qc)
import Web.Cookie (Cookies, parseCookies, renderCookies)
import qualified Web.Cookie as WC
import Sproxy.Application.Cookie
(AuthCookie(..), AuthUser, cookieDecode, cookieEncode, getEmail,
getEmailUtf8, getFamilyNameUtf8, getGivenNameUtf8)
import Sproxy.Application.OAuth2.Common (OAuth2Client(..))
import qualified Sproxy.Application.State as State
import Sproxy.Config (BackendConf(..))
import qualified Sproxy.Logging as Log
import Sproxy.Server.DB
(Database, userAccess, userExists, userGroups)
redirect :: Word16 -> W.Application
redirect p req resp =
case requestDomain req of
Nothing -> badRequest "missing host" req resp
Just domain -> do
Log.info $ "redirecting to " ++ show location ++ ": " ++ showReq req
resp $ W.responseBuilder status [(hLocation, location)] mempty
where status =
if W.requestMethod req == methodGet
then movedPermanently301
else temporaryRedirect307
newhost =
if p == 443
then domain
else domain <> ":" <> pack (show p)
location =
"https://" <> newhost <> W.rawPathInfo req <> W.rawQueryString req
sproxy ::
ByteString
-> Database
-> HashMap Text OAuth2Client
-> [(Pattern, BackendConf, BE.Manager)]
-> W.Application
sproxy key db oa2 backends =
logException $ \req resp -> do
Log.debug $ "sproxy <<< " ++ showReq req
case requestDomain req of
Nothing -> badRequest "missing host" req resp
Just domain ->
case find (\(p, _, _) -> match p (unpack domain)) backends of
Nothing -> notFound "backend" req resp
Just (_, be, mgr) -> do
let cookieName = pack $ beCookieName be
cookieDomain = pack <$> beCookieDomain be
case W.pathInfo req of
["robots.txt"] -> get robots req resp
(".sproxy":proxy) ->
case proxy of
["logout"] ->
get (logout key cookieName cookieDomain) req resp
["oauth2", provider] ->
case HM.lookup provider oa2 of
Nothing -> notFound "OAuth2 provider" req resp
Just oa2c ->
get (oauth2callback key db (provider, oa2c) be) req resp
["access"] -> do
now <- Just <$> epochTime
case extractCookie key now cookieName req of
Nothing -> authenticationRequired key oa2 req resp
Just (authCookie, _) ->
post (checkAccess db authCookie) req resp
_ -> notFound "proxy" req resp
_ -> do
now <- Just <$> epochTime
case extractCookie key now cookieName req of
Nothing -> authenticationRequired key oa2 req resp
Just cs@(authCookie, _) ->
authorize db cs req >>= \case
Nothing -> forbidden authCookie req resp
Just req' -> forward mgr req' resp
robots :: W.Application
robots _ resp =
resp $
W.responseLBS
ok200
[(hContentType, "text/plain; charset=utf-8")]
"User-agent: *\nDisallow: /"
oauth2callback ::
ByteString
-> Database
-> (Text, OAuth2Client)
-> BackendConf
-> W.Application
oauth2callback key db (provider, oa2c) be req resp =
case param "code" of
Nothing -> badRequest "missing auth code" req resp
Just code ->
case param "state" of
Nothing -> badRequest "missing auth state" req resp
Just state ->
case State.decode key state of
Left msg -> badRequest ("invalid state: " ++ msg) req resp
Right url -> do
au <- oauth2Authenticate oa2c code (redirectURL req provider)
let email = getEmail au
Log.info $ "login " ++ show email ++ " by " ++ show provider
exists <- userExists db email
if exists
then authenticate key be au url req resp
else userNotFound au req resp
where
param p = do
(_, v) <- find ((==) p . fst) $ W.queryString req
v
-- XXX: RFC6265: the user agent MUST NOT attach more than one Cookie header field
extractCookie ::
ByteString
-> Maybe CTime
-> ByteString
-> W.Request
-> Maybe (AuthCookie, Cookies)
extractCookie key now name req = do
(_, cookies) <- find ((==) hCookie . fst) $ W.requestHeaders req
(auth, others) <- discriminate cookies
case cookieDecode key auth of
Left _ -> Nothing
Right cookie ->
if maybe True (acExpiry cookie >) now
then Just (cookie, others)
else Nothing
where
discriminate cs =
case partition ((==) name . fst) $ parseCookies cs of
((_, x):_, xs) -> Just (x, xs)
_ -> Nothing
authenticate ::
ByteString -> BackendConf -> AuthUser -> ByteString -> W.Application
authenticate key be user url _req resp = do
now <- epochTime
let domain = pack <$> beCookieDomain be
expiry = now + CTime (beCookieMaxAge be)
authCookie = AuthCookie {acUser = user, acExpiry = expiry}
cookie =
WC.def
{ WC.setCookieName = pack $ beCookieName be
, WC.setCookieHttpOnly = True
, WC.setCookiePath = Just "/"
, WC.setCookieSameSite = Nothing
, WC.setCookieSecure = True
, WC.setCookieValue = cookieEncode key authCookie
, WC.setCookieDomain = domain
, WC.setCookieExpires =
Just . posixSecondsToUTCTime . realToFrac $ expiry
}
resp $
W.responseLBS
seeOther303
[ (hLocation, url)
, ("Set-Cookie", toByteString $ WC.renderSetCookie cookie)
]
""
authorize ::
Database -> (AuthCookie, Cookies) -> W.Request -> IO (Maybe W.Request)
authorize db (authCookie, otherCookies) req = do
let user = acUser authCookie
domain = decodeUtf8 . fromJust $ requestDomain req
email = getEmail user
emailUtf8 = getEmailUtf8 user
familyUtf8 = getFamilyNameUtf8 user
givenUtf8 = getGivenNameUtf8 user
method = decodeUtf8 $ W.requestMethod req
path = decodeUtf8 $ W.rawPathInfo req
grps <- userGroups db email domain path method
if null grps
then return Nothing
else do
ip <-
pack . fromJust . fst <$>
getNameInfo [NI_NUMERICHOST] True False (W.remoteHost req)
return . Just $
req
{ W.requestHeaders =
toList $
insert "From" emailUtf8 $
insert "X-Groups" (BS.intercalate "," $ encodeUtf8 <$> grps) $
insert "X-Given-Name" givenUtf8 $
insert "X-Family-Name" familyUtf8 $
insert "X-Forwarded-Proto" "https" $
insertWith (flip combine) "X-Forwarded-For" ip $
setCookies otherCookies $
fromListWith combine $ W.requestHeaders req
}
where
combine a b = a <> "," <> b
setCookies [] = delete hCookie
setCookies cs = insert hCookie (toByteString . renderCookies $ cs)
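-- For illustration (hypothetical values): a request authorized for
-- [email protected] in groups ["dev", "ops"] is forwarded with headers such as
-- From: [email protected], X-Groups: dev,ops, X-Given-Name, X-Family-Name,
-- X-Forwarded-Proto: https, and X-Forwarded-For extended with the client IP;
-- if no group matches, the function returns Nothing and the request is denied.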
checkAccess :: Database -> AuthCookie -> W.Application
checkAccess db authCookie req resp = do
let email = getEmail . acUser $ authCookie
domain = decodeUtf8 . fromJust $ requestDomain req
body <- W.strictRequestBody req
case JSON.eitherDecode' body of
Left err -> badRequest err req resp
Right inq -> do
Log.debug $ "access <<< " ++ show inq
tags <- userAccess db email domain inq
Log.debug $ "access >>> " ++ show tags
resp $
W.responseLBS
ok200
[(hContentType, "application/json")]
(JSON.encode tags)
-- XXX If something seems strange, think about HTTP/1.1 <-> HTTP/1.0.
-- FIXME For HTTP/1.0 backends we might need an option
-- FIXME in config file. HTTP Client does HTTP/1.1 by default.
forward :: BE.Manager -> W.Application
forward mgr req resp = do
let beReq =
BE.defaultRequest
{ BE.method = W.requestMethod req
, BE.path = W.rawPathInfo req
, BE.queryString = W.rawQueryString req
, BE.requestHeaders = modifyRequestHeaders $ W.requestHeaders req
, BE.redirectCount = 0
, BE.decompress = const False
, BE.requestBody =
case W.requestBodyLength req of
W.ChunkedBody ->
requestBodySourceChunkedIO (sourceRequestBody req)
W.KnownLength l ->
requestBodySourceIO (fromIntegral l) (sourceRequestBody req)
}
msg =
unpack (BE.method beReq <> " " <> BE.path beReq <> BE.queryString beReq)
Log.debug $ "BACKEND <<< " ++ msg ++ " " ++ show (BE.requestHeaders beReq)
BE.withResponse beReq mgr $ \res -> do
let status = BE.responseStatus res
headers = BE.responseHeaders res
body =
mapOutput (Chunk . fromByteString) . bodyReaderSource $
BE.responseBody res
logging =
if statusCode status `elem` [400, 500]
then Log.warn
else Log.debug
logging $
"BACKEND >>> " ++
show (statusCode status) ++ " on " ++ msg ++ " " ++ show headers ++ "\n"
resp $ responseSource status (modifyResponseHeaders headers) body
modifyRequestHeaders :: RequestHeaders -> RequestHeaders
modifyRequestHeaders = filter (\(n, _) -> n `notElem` ban)
where
ban =
[ hConnection
      , hContentLength -- XXX This is set automatically before sending the request to the backend
, hTransferEncoding -- XXX Likewise
]
modifyResponseHeaders :: ResponseHeaders -> ResponseHeaders
modifyResponseHeaders = filter (\(n, _) -> n `notElem` ban)
where
ban =
[ hConnection
-- XXX WAI docs say we MUST NOT add (keep) Content-Length, Content-Range, and Transfer-Encoding,
-- XXX but we use streaming body, which may add Transfer-Encoding only.
-- XXX Thus we keep Content-* headers.
, hTransferEncoding
]
authenticationRequired ::
ByteString -> HashMap Text OAuth2Client -> W.Application
authenticationRequired key oa2 req resp = do
Log.info $ "511 Unauthenticated: " ++ showReq req
resp $
W.responseLBS
networkAuthenticationRequired511
[(hContentType, "text/html; charset=utf-8")]
page
where
path =
if W.requestMethod req == methodGet
then W.rawPathInfo req <> W.rawQueryString req
else "/"
state =
State.encode key $
"https://" <> fromJust (W.requestHeaderHost req) <> path
authLink :: Text -> OAuth2Client -> ByteString -> ByteString
authLink provider oa2c html =
let u = oauth2AuthorizeURL oa2c state (redirectURL req provider)
d = pack $ oauth2Description oa2c
in [qc|{html}<p><a href="{u}">Authenticate with {d}</a></p>|]
authHtml = foldrWithKey authLink "" oa2
page =
fromStrict
[qc|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Authentication required</title>
</head>
<body style="text-align:center;">
<h1>Authentication required</h1>
{authHtml}
</body>
</html>
|]
forbidden :: AuthCookie -> W.Application
forbidden ac req resp = do
Log.info $ "403 Forbidden: " ++ show email ++ ": " ++ showReq req
resp $
W.responseLBS forbidden403 [(hContentType, "text/html; charset=utf-8")] page
where
email = getEmailUtf8 . acUser $ ac
page =
fromStrict
[qc|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Access Denied</title>
</head>
<body>
<h1>Access Denied</h1>
<p>You are currently logged in as <strong>{email}</strong></p>
<p><a href="/.sproxy/logout">Logout</a></p>
</body>
</html>
|]
userNotFound :: AuthUser -> W.Application
userNotFound au _ resp = do
Log.info $ "404 User not found: " ++ show email
resp $
W.responseLBS notFound404 [(hContentType, "text/html; charset=utf-8")] page
where
email = getEmailUtf8 au
page =
fromStrict
[qc|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Access Denied</title>
</head>
<body>
<h1>Access Denied</h1>
<p>You are not allowed to login as <strong>{email}</strong></p>
<p><a href="/">Main page</a></p>
</body>
</html>
|]
logout :: ByteString -> ByteString -> Maybe ByteString -> W.Application
logout key cookieName cookieDomain req resp = do
let host = fromJust $ W.requestHeaderHost req
case extractCookie key Nothing cookieName req of
Nothing ->
resp $ W.responseLBS found302 [(hLocation, "https://" <> host)] ""
Just _ -> do
let cookie =
WC.def
{ WC.setCookieName = cookieName
, WC.setCookieHttpOnly = True
, WC.setCookiePath = Just "/"
, WC.setCookieSameSite = Just WC.sameSiteStrict
, WC.setCookieSecure = True
, WC.setCookieValue = "goodbye"
, WC.setCookieDomain = cookieDomain
, WC.setCookieExpires =
Just . posixSecondsToUTCTime . realToFrac $ CTime 0
}
resp $
W.responseLBS
found302
[ (hLocation, "https://" <> host)
, ("Set-Cookie", toByteString $ WC.renderSetCookie cookie)
]
""
badRequest :: String -> W.Application
badRequest msg req resp = do
Log.warn $ "400 Bad Request (" ++ msg ++ "): " ++ showReq req
resp $ W.responseLBS badRequest400 [] "Bad Request"
notFound :: String -> W.Application
notFound msg req resp = do
Log.warn $ "404 Not Found (" ++ msg ++ "): " ++ showReq req
resp $ W.responseLBS notFound404 [] "Not Found"
logException :: W.Middleware
logException app req resp =
catches (app req resp) [Handler badGateway, Handler internalError]
where
internalError :: SomeException -> IO W.ResponseReceived
internalError = response internalServerError500
badGateway :: BE.HttpException -> IO W.ResponseReceived
badGateway = response badGateway502
response :: Exception e => Status -> e -> IO W.ResponseReceived
response st e = do
Log.error $
show (statusCode st) ++
" " ++
unpack (statusMessage st) ++
": " ++ displayException e ++ " on " ++ showReq req
resp $
W.responseLBS
st
[(hContentType, "text/plain")]
(fromStrict $ statusMessage st)
get :: W.Middleware
get app req resp
| W.requestMethod req == methodGet = app req resp
| otherwise = do
Log.warn $ "405 Method Not Allowed: " ++ showReq req
resp $
W.responseLBS methodNotAllowed405 [("Allow", "GET")] "Method Not Allowed"
post :: W.Middleware
post app req resp
| W.requestMethod req == methodPost = app req resp
| otherwise = do
Log.warn $ "405 Method Not Allowed: " ++ showReq req
resp $
W.responseLBS methodNotAllowed405 [("Allow", "POST")] "Method Not Allowed"
redirectURL :: W.Request -> Text -> ByteString
redirectURL req provider =
"https://" <> fromJust (W.requestHeaderHost req) <> "/.sproxy/oauth2/" <>
encodeUtf8 provider
requestDomain :: W.Request -> Maybe ByteString
requestDomain req = do
h <- W.requestHeaderHost req
return . fst . BS.break (== _colon) $ h
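-- For example: a Host header of "example.com:8443" yields Just "example.com",
-- "example.com" without a port is returned unchanged, and a request without
-- a Host header yields Nothing.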
-- XXX: make sure not to reveal the cookie, which can be valid (!)
showReq :: W.Request -> String
showReq req =
unpack
(W.requestMethod req <> " " <>
fromMaybe "<no host>" (W.requestHeaderHost req) <>
W.rawPathInfo req <>
W.rawQueryString req <>
" ") ++
show (W.httpVersion req) ++
" " ++
show (fromMaybe "-" $ W.requestHeaderReferer req) ++
" " ++
show (fromMaybe "-" $ W.requestHeaderUserAgent req) ++
" from " ++ show (W.remoteHost req)
|
zalora/sproxy
|
src/Sproxy/Application.hs
|
mit
| 17,758 | 0 | 30 | 4,783 | 4,794 | 2,500 | 2,294 | 418 | 12 |
------------------------------------------------------------------------------
-- |
-- Module      : Mahjong.Hand.Yaku
-- Copyright : (C) 2014 Samuli Thomasson
-- License : MIT (see the file LICENSE)
-- Maintainer : Samuli Thomasson <[email protected]>
-- Stability : experimental
-- Portability : non-portable
------------------------------------------------------------------------------
module Mahjong.Hand.Yaku
( standardCheck, checkYaku, runYakuCheck) where
------------------------------------------------------------------------------
import Import
import Control.Monad (msum)
------------------------------------------------------------------------------
import Mahjong.Hand.Algo (Grouping)
import Mahjong.Hand.Yaku.Builder
import Mahjong.Hand.Yaku.Standard
------------------------------------------------------------------------------
import Mahjong.Kyoku.Internal
------------------------------------------------------------------------------
-- | All standard Yaku. Internal list consists of mutually exclusive yaku.
--
-- We try to specify as much as possible in the YakuCheck code so that
-- mutual exclusivity is necessary at this level only for some exceptions,
-- i.e. yaku that wholly include some other yaku (iipeikou and ryanpeikou).
standardYaku :: [[YakuCheck Yaku]]
standardYaku =
[ [ chankan ]
, [ menzenTsumo ]
, [ haiteiRaoyui ]
, [ houteiRaoyui ]
, [ ippatsu ]
, [ riichi ]
, [ rinshanKaihou ]
, [ nagashiMangan ]
, [ yakuhaiRoundWind ]
, [ yakuhaiSeatWind ]
, [ yakuhaiRed ]
, [ yakuhaiGreen ]
, [ yakuhaiWhite ]
, [ shouSangen ]
, [ chanta ]
, [ ryanpeikou, chiitoitsu, iipeikou ]
, [ ittsuu ]
, [ pinfu ]
, [ chinitsu ]
, [ honitsu ]
, [ honroutou ]
, [ junchan ]
, [ sanAnkou ]
, [ sanKantsu ]
, [ sanshokuDoujin ]
, [ sanshokuDoukou ]
, [ tanyao ]
, [ kuitan ]
, [ toitoi ]
, [ countingDora ]
, [ countingUraDora ]
, [ countingAkaDora ]
]
standardYakumans :: [YakuCheck Yaku]
standardYakumans = [ kokushiMusou, daisangen, suuankou, suushiihou, tsuuiisou, ryuuiisou, chinroutou, chuurenPoutou, suuKantsu, tenhouOrChiihou ]
-- | Check a list of yaku groups; the yaku within each group are mutually
-- exclusive, so at most one (the first matching) yaku is yielded per group.
checkYaku :: [[YakuCheck Yaku]] -> ValueInfo -> Grouping -> [Yaku]
checkYaku yakus vi grp = mapMaybe (msum . map (runYakuCheck vi grp)) yakus
-- | The "standard" checker. If any yakuman match, yield only those.
-- Otherwise resolve the normal yaku. If the normal yaku add up to 13 han or
-- more, yield a Kazoe Yakuman; otherwise return all matched yaku.
standardCheck :: ValueInfo -> Grouping -> [Yaku]
standardCheck vi grp
| yakumans <- mapMaybe (runYakuCheck vi grp) standardYakumans, not (null yakumans) = yakumans
| yaku <- checkYaku standardYaku vi grp = if sumOf (each.yHan) yaku >= 13 then [Yaku 13 "Kazoe Yakuman"] else yaku
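-- For example (hypothetical han values): if the matched normal yaku are worth
-- 6, 4 and 3 han, the total reaches 13 and the result collapses to
-- [Yaku 13 "Kazoe Yakuman"]; with 12 han or fewer all matched yaku are
-- returned unchanged.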
|
SimSaladin/hajong
|
hajong-server/src/Mahjong/Hand/Yaku.hs
|
mit
| 2,971 | 0 | 11 | 635 | 523 | 323 | 200 | 50 | 2 |
{-# LANGUAGE RecordWildCards #-}
module Learning.IOHMM.Internal
( IOHMM (..)
, LogLikelihood
, init
, withEmission
, euclideanDistance
, viterbi
, baumWelch
, baumWelch'
-- , baumWelch1
-- , forward
-- , backward
-- , posterior
) where
import Control.Applicative ( (<$>) )
import Control.DeepSeq ( NFData, force, rnf )
import Control.Monad ( forM_, replicateM )
import Control.Monad.ST ( runST )
import qualified Data.Map.Strict as M ( findWithDefault )
import Data.Random.Distribution.Simplex ( stdSimplex )
import Data.Random.RVar ( RVar )
import qualified Data.Vector as V ( Vector, filter, foldl', foldl1', generate, map, replicateM, unsafeFreeze, unsafeIndex , unsafeTail , zip, zipWith3 )
import qualified Data.Vector.Generic as G ( convert )
import qualified Data.Vector.Generic.Extra as G ( frequencies )
import qualified Data.Vector.Mutable as MV ( unsafeNew, unsafeRead, unsafeWrite )
import qualified Data.Vector.Unboxed as U ( Vector, fromList, length, map, sum, unsafeFreeze, unsafeIndex, unsafeTail, unzip, zip )
import qualified Data.Vector.Unboxed.Mutable as MU ( unsafeNew, unsafeRead, unsafeWrite )
import qualified Numeric.LinearAlgebra.Data as H ( (!), Matrix, Vector, diag, fromColumns, fromList, fromLists, fromRows, ident, konst, maxElement, maxIndex, toColumns, tr )
import qualified Numeric.LinearAlgebra.HMatrix as H ( (<>), (#>), sumElements )
import Prelude hiding ( init )
type LogLikelihood = Double
-- | A more efficient internal representation of the 'IOHMM' model. The
-- 'inputs', 'states', and 'outputs' of 'IOHMM' are represented by their
-- indices. The 'initialStateDist', 'transitionDist', and 'emissionDist' are
-- represented by vectors and matrices. 'emissionDistT' is the transposed
-- emission matrix, which simplifies the calculations.
data IOHMM = IOHMM { nInputs :: Int -- ^ Number of inputs
, nStates :: Int -- ^ Number of states
, nOutputs :: Int -- ^ Number of outputs
, initialStateDist :: H.Vector Double
, transitionDist :: V.Vector (H.Matrix Double)
, emissionDistT :: H.Matrix Double
}
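-- As a concrete reading of the fields (hypothetical sizes): for a model with
-- 2 inputs, 3 states and 4 outputs, 'initialStateDist' is a length-3 vector,
-- 'transitionDist' holds one 3x3 matrix per input, and 'emissionDistT' has
-- one row per output and one column per state, i.e. the transpose of the
-- usual states-by-outputs emission matrix.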
instance NFData IOHMM where
rnf IOHMM {..} = rnf nInputs `seq`
rnf nStates `seq`
rnf nOutputs `seq`
rnf initialStateDist `seq`
rnf transitionDist `seq`
rnf emissionDistT
init :: Int -> Int -> Int -> RVar IOHMM
init m k l = do
pi0 <- H.fromList <$> stdSimplex (k-1)
w <- V.replicateM m (H.fromLists <$> replicateM k (stdSimplex (k-1)))
phi <- H.fromLists <$> replicateM k (stdSimplex (l-1))
return IOHMM { nInputs = m
, nStates = k
, nOutputs = l
, initialStateDist = q_ H.#> pi0
, transitionDist = V.map (H.<> q_) w
, emissionDistT = q_ H.<> H.tr phi
}
where
q_ = q k -- Error matrix
withEmission :: IOHMM -> U.Vector (Int, Int) -> IOHMM
withEmission (model @ IOHMM {..}) xys = model'
where
n = U.length xys
ss = [0..(nStates - 1)]
os = [0..(nOutputs - 1)]
ys = U.map snd xys
step m = fst $ baumWelch1 (m { emissionDistT = H.tr phi }) n xys
where
phi :: H.Matrix Double
phi = let zs = fst $ viterbi m xys
fs = G.frequencies $ U.zip zs ys
hs = H.fromLists $ map (\s -> map (\o ->
M.findWithDefault 0 (s, o) fs) os) ss
                  -- hs' is needed to avoid yielding NaN vectors
hs' = hs + H.konst 1e-9 (nStates, nOutputs)
ns = hs' H.#> H.konst 1 nStates
in hs' / H.fromColumns (replicate nOutputs ns)
ms = iterate step model
ms' = tail ms
ds = zipWith euclideanDistance ms ms'
model' = fst $ head $ dropWhile ((> 1e-9) . snd) $ zip ms' ds
euclideanDistance :: IOHMM -> IOHMM -> Double
euclideanDistance model model' =
sqrt $ sum $ H.sumElements ((phi - phi') ** 2) :
map (\i -> H.sumElements ((w i - w' i) ** 2)) is
where
is = [0..(nInputs model - 1)]
w = V.unsafeIndex (transitionDist model)
w' = V.unsafeIndex (transitionDist model')
phi = emissionDistT model
phi' = emissionDistT model'
viterbi :: IOHMM -> U.Vector (Int, Int) -> (U.Vector Int, LogLikelihood)
viterbi IOHMM {..} xys = (path, logL)
where
n = U.length xys
-- First, we calculate the value function and the state maximizing it
-- for each time.
deltas :: V.Vector (H.Vector Double)
psis :: V.Vector (U.Vector Int)
(deltas, psis) = runST $ do
ds <- MV.unsafeNew n
ps <- MV.unsafeNew n
let (_, y0) = U.unsafeIndex xys 0
MV.unsafeWrite ds 0 $ log (emissionDistT H.! y0) + log initialStateDist
forM_ [1..(n-1)] $ \t -> do
d <- MV.unsafeRead ds (t-1)
let (x, y) = U.unsafeIndex xys t
dws = map (\wj -> d + log wj) (w' x)
MV.unsafeWrite ds t $ log (emissionDistT H.! y) + H.fromList (map H.maxElement dws)
MV.unsafeWrite ps t $ U.fromList (map H.maxIndex dws)
ds' <- V.unsafeFreeze ds
ps' <- V.unsafeFreeze ps
return (ds', ps')
where
w' = H.toColumns . V.unsafeIndex transitionDist
deltaE = V.unsafeIndex deltas (n-1)
-- The most likely path and corresponding log likelihood are as follows.
path = runST $ do
ix <- MU.unsafeNew n
MU.unsafeWrite ix (n-1) $ H.maxIndex deltaE
forM_ [n-l | l <- [1..(n-1)]] $ \t -> do
i <- MU.unsafeRead ix t
let psi = V.unsafeIndex psis t
MU.unsafeWrite ix (t-1) $ U.unsafeIndex psi i
U.unsafeFreeze ix
logL = H.maxElement deltaE
baumWelch :: IOHMM -> U.Vector (Int, Int) -> [(IOHMM, LogLikelihood)]
baumWelch model xys = zip models (tail logLs)
where
n = U.length xys
step (m, _) = baumWelch1 m n xys
(models, logLs) = unzip $ iterate step (model, undefined)
baumWelch' :: IOHMM -> U.Vector (Int, Int) -> (IOHMM, LogLikelihood)
baumWelch' model xys = go (10000 :: Int) (undefined, -1/0) (baumWelch1 model n xys)
where
n = U.length xys
go k (m, l) (m', l')
| k > 0 && l' - l > 1.0e-9 = go (k - 1) (m', l') (baumWelch1 m' n xys)
| otherwise = (m, l')
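-- In other words, 'baumWelch'' repeatedly applies 'baumWelch1' and stops as
-- soon as an iteration improves the log-likelihood by no more than 1.0e-9,
-- or once the iteration limit (10000) is exhausted, returning the model and
-- log-likelihood reached at that point.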
-- | Perform one step of the Baum-Welch algorithm and return the updated
-- model and the likelihood of the old model.
baumWelch1 :: IOHMM -> Int -> U.Vector (Int, Int) -> (IOHMM, LogLikelihood)
baumWelch1 (model @ IOHMM {..}) n xys = force (model', logL)
where
(xs, ys) = U.unzip xys
-- First, we calculate the alpha, beta, and scaling values using the
-- forward-backward algorithm.
(alphas, cs) = forward model n xys
betas = backward model n xys cs
-- Based on the alpha, beta, and scaling values, we calculate the
-- posterior distribution, i.e., gamma and xi values.
(gammas, xis) = posterior model n xys alphas betas cs
-- Error matrix
q_ = q nStates
-- Using the gamma and xi values, we obtain the optimal initial state
-- probability vector, transition probability matrix, and emission
-- probability matrix.
-- Each simplex in pi0, w, and phi' remains old if their numerators are
-- zero.
pi0 = let g0 = V.unsafeIndex gammas 0
g0_ = g0 / H.konst (H.sumElements g0) nStates
in q_ H.#> g0_
w = let xis' i = V.map snd $ V.filter ((== i) . fst) $ V.zip (G.convert $ U.unsafeTail xs) xis
ds = V.foldl1' (+) . xis' -- denominators
ns i = ds i H.#> H.konst 1 nStates -- numerators
w_ i = H.diag (H.konst 1 nStates / ns i) H.<> ds i
in flip V.map (V.generate nInputs id) $ \i -> w_ i H.<> q_
{- H.fromRows $ zipWith3 (\n_ t t0 -> if n_ > eps then t else t0)
- (H.toList $ ns i)
- (H.toRows $ w_ i H.<> q_)
- (H.toRows $ V.unsafeIndex transitionDist i)
-}
phi' = let gs' o = V.map snd $ V.filter ((== o) . fst) $ V.zip (G.convert ys) gammas
ds = V.foldl' (+) (H.konst 0 nStates) . gs' -- denominators
ns = V.foldl1' (+) gammas -- numerators
phi_ = H.fromRows $ map (\o -> ds o / ns) [0..(nOutputs - 1)]
in q_ H.<> phi_
{- in H.fromColumns $ zipWith3 (\n_ e e0 -> if n_ > eps then e else e0)
- (H.toList ns)
- (H.toColumns $ q_ H.<> phi_)
- (H.toColumns emissionDistT)
-}
-- We finally obtain the new model and the likelihood for the old model.
model' = model { initialStateDist = pi0
, transitionDist = w
, emissionDistT = phi'
}
logL = - (U.sum $ U.map log cs)
-- | Return alphas and scaling variables.
forward :: IOHMM -> Int -> U.Vector (Int, Int) -> (V.Vector (H.Vector Double), U.Vector Double)
{-# INLINE forward #-}
forward IOHMM {..} n xys = runST $ do
as <- MV.unsafeNew n
cs <- MU.unsafeNew n
let (_, y0) = U.unsafeIndex xys 0
a0 = (emissionDistT H.! y0) * initialStateDist
c0 = 1 / H.sumElements a0
MV.unsafeWrite as 0 (H.konst c0 nStates * a0)
MU.unsafeWrite cs 0 c0
forM_ [1..(n-1)] $ \t -> do
a <- MV.unsafeRead as (t-1)
let (x, y) = U.unsafeIndex xys t
a' = (emissionDistT H.! y) * (w' x H.#> a)
c' = 1 / H.sumElements a'
MV.unsafeWrite as t (H.konst c' nStates * a')
MU.unsafeWrite cs t c'
as' <- V.unsafeFreeze as
cs' <- U.unsafeFreeze cs
return (as', cs')
where
w' = H.tr . V.unsafeIndex transitionDist
-- | Return betas using scaling variables.
backward :: IOHMM -> Int -> U.Vector (Int, Int) -> U.Vector Double -> V.Vector (H.Vector Double)
{-# INLINE backward #-}
backward IOHMM {..} n xys cs = runST $ do
bs <- MV.unsafeNew n
let bE = H.konst 1 nStates
cE = U.unsafeIndex cs (n-1)
MV.unsafeWrite bs (n-1) (H.konst cE nStates * bE)
forM_ [n-l | l <- [1..(n-1)]] $ \t -> do
b <- MV.unsafeRead bs t
let (x, y) = U.unsafeIndex xys t
b' = w x H.#> ((emissionDistT H.! y) * b)
c' = U.unsafeIndex cs (t-1)
MV.unsafeWrite bs (t-1) (H.konst c' nStates * b')
V.unsafeFreeze bs
where
w = V.unsafeIndex transitionDist
-- | Return the posterior distribution.
posterior :: IOHMM -> Int -> U.Vector (Int, Int) -> V.Vector (H.Vector Double) -> V.Vector (H.Vector Double) -> U.Vector Double -> (V.Vector (H.Vector Double), V.Vector (H.Matrix Double))
{-# INLINE posterior #-}
posterior IOHMM {..} _ xys alphas betas cs = (gammas, xis)
where
gammas = V.zipWith3 (\a b c -> a * b / H.konst c nStates)
alphas betas (G.convert cs)
xis = V.zipWith3 (\a b (x, y) -> H.diag a H.<> w x H.<> H.diag (b * (emissionDistT H.! y)))
alphas (V.unsafeTail betas) (G.convert $ U.unsafeTail xys)
w = V.unsafeIndex transitionDist
-- | Global error threshold.
{-# INLINE eps #-}
eps :: Double
eps = 1e-4
-- | Error matrix @q k@ is required to guarantee that the elements of initial
-- states vector and emission/transition matrix are all larger than zero.
-- @k@ is assumed to be the number of states. @q k@ is given by
-- [ 1 - eps, (1/k') eps, ..., (1/k') eps ]
-- [ (1/k') eps, 1 - eps, ..., (1/k') eps ]
-- [ ... ]
-- [ (1/k') eps, ..., (1/k') eps, 1 - eps ],
-- where the diagonal elements are @1 - eps@ and the remains are @(1/k')
-- eps@. Here @eps@ is a small error value (given by @1e-4@) and
-- @k' = k - 1@.
q :: Int -> H.Matrix Double
{-# INLINE q #-}
q k = H.konst (1 - eps) (k, k) * e + H.konst (eps / k') (k, k) * (one - e)
where
e = H.ident k
one = H.konst 1 (k, k)
k' = fromIntegral (k - 1)
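-- A small worked example: for @k = 3@ we have @1 - eps = 0.9999@ and
-- @eps / k' = 1e-4 / 2 = 5.0e-5@, so
--   q 3 = [ 0.9999, 5.0e-5, 5.0e-5 ]
--         [ 5.0e-5, 0.9999, 5.0e-5 ]
--         [ 5.0e-5, 5.0e-5, 0.9999 ]
-- and each row sums to 1.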
|
mnacamura/learning-hmm
|
src/Learning/IOHMM/Internal.hs
|
mit
| 12,449 | 53 | 22 | 4,068 | 3,537 | 1,996 | 1,541 | 198 | 1 |
module Network.BitFunctor.Crypto.Signing ( Signature
, sign
, verify
) where
import Network.BitFunctor.Crypto.Signing.Types
import Network.BitFunctor.Crypto.Key.Types
import qualified Crypto.PubKey.Ed25519 as C (sign, verify)
import Data.ByteArray
sign :: ByteArrayAccess ba => SecretKey -> PublicKey -> ba -> Signature
sign = C.sign
verify :: ByteArrayAccess ba => PublicKey -> ba -> Signature -> Bool
verify = C.verify
|
BitFunctor/bitfunctor
|
src/Network/BitFunctor/Crypto/Signing.hs
|
mit
| 550 | 0 | 8 | 182 | 119 | 72 | 47 | 11 | 1 |
module Proteome.Test.AddFTest where
import qualified Data.Map as Map (fromList)
import Path (Dir, Rel, parseAbsDir, reldir, (</>))
import Ribosome.Api.Path (nvimCwd)
import Ribosome.Control.Ribosome (newRibosome)
import Ribosome.Nvim.Api.IO (vimCallFunction, vimCommand, vimSetVar)
import Ribosome.Test.Await (awaitEqual_)
import Ribosome.Test.Embed (integrationTestDef)
import Ribosome.Test.Run (UnitTest)
import Ribosome.Test.Unit (fixture)
import Proteome.Data.Env (Proteome)
import Proteome.Data.ProjectConfig (ProjectConfig (ProjectConfig))
import Proteome.Plugin (plugin')
import Proteome.Test.Unit (ProteomeTest)
addTestN :: Proteome () -> ProteomeTest ()
addTestN request = do
projectsDir <- parseAbsDir =<< fixture "projects"
() <- vimSetVar "proteome_project_config" (toMsgpack $ ProjectConfig [projectsDir] def def def def def def)
() <- lift request
awaitEqual_ (projectsDir </> [reldir|haskell|] </> [reldir|cilia|]) (parseAbsDir =<< nvimCwd)
addTest :: Proteome () -> UnitTest
addTest request = do
ribo <- newRibosome "proteome" def
integrationTestDef (plugin' ribo) (addTestN request)
test_addFunction :: UnitTest
test_addFunction =
addTest request
where
request =
vimCallFunction "ProAddProject" [toMsgpack $ Map.fromList [
("name" :: Text, toMsgpack ("cilia" :: Text)),
("tpe", toMsgpack ("haskell" :: Text)),
("activate", toMsgpack True)
]]
test_addCommand :: UnitTest
test_addCommand =
addTest request
where
request =
vimCommand "ProAdd! haskell/cilia"
|
tek/proteome
|
packages/test/test/Proteome/Test/AddFTest.hs
|
mit
| 1,550 | 0 | 14 | 239 | 464 | 264 | 200 | -1 | -1 |
module PPOL.Permutation.APermutation
(
)
where
import qualified Data.Array as Array
-- Assumed imports: the original refers to LP and MP without importing them;
-- these module names follow the PPOL.Permutation.* naming convention used here.
import qualified PPOL.Permutation.LPermutation as LP
import qualified PPOL.Permutation.MPermutation as MP
type APermutation = Array.Array Int Int
newAPermutation :: Int -> APermutation
newAPermutation n = Array.array (1,n) [(i, i) | i <- [1..n]]
-- Build a permutation from its one-line notation, indexed from 1.
fromList :: [Int] -> APermutation
fromList xs = Array.listArray (1, length xs) xs
-- The conversions below are stubs: their implementations depend on the
-- LPermutation and MPermutation representations, which are not shown here.
fromLPermutation :: LP.LPermutation -> APermutation
fromLPermutation = undefined
toLPermutation :: APermutation -> LP.LPermutation
toLPermutation = undefined
fromMPermutation :: MP.MPermutation -> APermutation
fromMPermutation = undefined
toMPermutation :: APermutation -> MP.MPermutation
toMPermutation = undefined
|
vialette/PPOL
|
src/PPOL/Permutation/APermutation.hs
|
mit
| 469 | 0 | 9 | 64 | 139 | 80 | 59 | 11 | 1 |
{-# LANGUAGE LambdaCase, NamedFieldPuns, OverloadedStrings, ScopedTypeVariables #-}
module Main where
import Prelude hiding (FilePath)
import Data.String (fromString)
import Data.Maybe (fromMaybe)
import Compile
import Control.Arrow
import Control.Monad
import Control.Monad.Except
import Control.Monad.Catch
import Control.Concurrent (threadDelay)
import qualified Data.Text.IO as T
import Options.Applicative hiding (Parser)
import Text.LaTeX.Base.Parser
import TranslateTex (translate)
import DecoratedTex (decorate)
import Filesystem as F
import Filesystem.Path.CurrentOS
import System.FSNotify
import Types
import Paths_proof (getDataFileName)
data AppData = AppData
{ inputPath :: FilePath
, outputDir :: Maybe FilePath
, watch :: Bool
}
opts :: ParserInfo AppData
opts = info (helper <*> optParser)
( fullDesc
<> progDesc "Compile FILEPATH to html"
<> header "proof - a markup language for structured mathematics")
where
optParser = AppData
<$> argument path (metavar "FILE" <> help "Path to the input proof file")
<*> optional (option path (
long "output" <>
short 'o' <>
metavar "OUTPUTDIR" <>
help "Path of output directory. Default is '.'"))
<*> switch (long "watch" <> short 'w' <> help "Recompile on file-change")
where
path :: ReadM FilePath
path = eitherReader (Right . fromString)
outputPath :: FilePath -> FilePath
outputPath p = replaceExtension p "html"
texReader :: (MonadIO m, Functor m) => FilePath -> Err m RawDocument
texReader = (ExceptT . liftIO . fmap (left show) . parseLaTeXFile . encodeString) >=> decorate >=> translate
-- `copyDirectory src dst` works as follows:
-- Let src = initialpath/dir. Then src gets copied to dst/dir, assuming src
-- and dst are both existing directories.
copyDirectory :: (MonadIO m, MonadCatch m) => FilePath -> FilePath -> Err m ()
copyDirectory = \src dst -> do
e <- liftIO ((&&) <$> isDirectory src <*> isDirectory dst)
if e then go src dst else throwError "Directories do not exist" -- TODO: Better error
where
leafName = last . splitDirectories
copyError p = throwError ("Error copying file \"" ++ show p ++ "\"")
go :: (MonadIO m, MonadCatch m) => FilePath -> FilePath -> Err m ()
go src dst = do
catch (liftIO $ createDirectory False dst')
(\(_::IOError) -> throwError ("Directory \"" ++ show dst' ++ "\" already exists."))
fs <- liftIO $ listDirectory src
forM_ fs $ \p ->
liftIO (isFile p) >>= \case
True -> liftIO (copyFile p (dst' </> filename p)) `catch` (\(_::IOError) -> copyError p)
False -> go p dst'
where dst' = dst </> leafName src
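-- For example (hypothetical paths): @copyDirectory "/tmp/work/assets" "/srv/out"@
-- recreates the tree under "/srv/out/assets"; it fails if either argument is
-- not an existing directory or if "/srv/out/assets" already exists.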
loadResources :: (MonadIO m, MonadCatch m, Applicative m) => Err m Resources
loadResources =
Resources <$> mapM readDataFile ["src/css/proof.css"]
<*> mapM readDataFile ["lib/js/jquery.min.js", "src/js/proof.js"]
where
readDataFile = (`catch` (\(_::IOError) ->throwError "Could not read data files"))
. liftIO . (T.readFile <=< getDataFileName)
pkgPath :: FilePath -> FilePath -> FilePath
pkgPath inputPath outputDir = outputDir </> addExtension (basename inputPath) "proofpkg"
compileAndOutput :: (MonadIO m, MonadCatch m, Applicative m) => FilePath -> FilePath -> Err m ()
compileAndOutput inputPath outputDir = do
let p = pkgPath inputPath outputDir
htmlPath = p </> "index.html"
html <- join (compile <$> loadResources <*> texReader inputPath)
-- TODO: Dangerous to remove directories. Remove when you merge
liftIO $ do
isDirectory p >>= flip when (removeTree p)
createDirectory False p
liftIO (getDataFilePath "lib") >>= \l -> copyDirectory l p
liftIO $ do
forM_ ["src/js/proof.js", "src/css/proof.css"] $ \q ->
getDataFilePath q >>= \q' -> copyFile q' (p </> filename q)
T.writeFile (encodeString htmlPath) html
where
getDataFilePath = fmap decodeString . getDataFileName . encodeString
main :: IO ()
main = do
AppData { inputPath, outputDir, watch } <- execParser opts
run inputPath outputDir
when watch (setupWatch inputPath outputDir)
where
setupWatch inputPath outputDir =
withManager $ \wm -> void $ do
watchDir wm (directory inputPath) fEvent $ \_ -> do
putStr "File changed. Recompiling..."
run inputPath outputDir
putStrLn "Done."
forever $ threadDelay maxBound
where
fEvent = \case { Modified p _ -> p == inputPath; _ -> False }
run inputPath outputDir =
runExceptT (compileAndOutput inputPath out) >>= \case
Left e -> putStrLn e
Right _ -> return ()
where out = fromMaybe (directory inputPath) outputDir
|
imeckler/proof
|
Main.hs
|
mit
| 4,653 | 0 | 21 | 980 | 1,422 | 728 | 694 | 100 | 3 |
module Chimera.Engine.Core (
module M
) where
import Chimera.Engine.Core.Types as M
import Chimera.Engine.Core.Field as M
import Chimera.Engine.Core.Layers as M
import Chimera.Engine.Core.Menu as M
import Chimera.Engine.Core.Util as M
|
myuon/Chimera
|
Chimera/Engine/Core.hs
|
mit
| 240 | 0 | 4 | 32 | 58 | 44 | 14 | 7 | 0 |
module GHCJS.DOM.SVGPathSegLinetoVerticalRel (
) where
|
manyoo/ghcjs-dom
|
ghcjs-dom-webkit/src/GHCJS/DOM/SVGPathSegLinetoVerticalRel.hs
|
mit
| 57 | 0 | 3 | 7 | 10 | 7 | 3 | 1 | 0 |
lucky :: (Integral a) => a -> String
lucky 7 = "lucky number seven"
lucky x = "sorry"
factorial :: (Integral a) => a -> a
factorial 0 = 1
factorial n = n * factorial (n - 1)
addVectors :: (Num a) => (a, a) -> (a, a) -> (a, a)
addVectors (x1, y1) (x2, y2) = (x1 + x2, y1 + y2)
-- Pattern match with x:xs
-- [a+b+c | a:b:c:xs <- [[1..5], [1..6], [1..7]]]
head' :: [a] -> a
head' [] = error "List empty"
head' (x:_) = x
tell :: (Show a) => [a] -> String
tell [] = "Empty list"
tell (x:[]) = "One elem: " ++ show x
tell (x:y:[]) = "Two elem: " ++ show x ++ " and " ++ show y
tell (x:y:_) = "Long list. First two: " ++ show x ++ " and " ++ show y
length' :: (Num b) => [a] -> b
length' [] = 0
length' (_:xs) = 1 + length' xs
sum' :: (Num a) => [a] -> a
sum' [] = 0
sum' (x:xs) = x + sum' xs
-- as pattern
capital :: String -> String
capital "" = "Empty"
capital all@(x:xs) = "Head of " ++ all ++ " is " ++ [x]
-- guards
bmiTell :: (RealFloat a) => a -> String
bmiTell b -- note no `=` here
| b <= 18.5 = "underweight"
| b <= 25.0 = "normal"
| b <= 30.0 = "fat"
| otherwise = "whale" -- otherwise equivalent to true
bmi :: (RealFloat a) => a -> a -> a
bmi w h = w / h^2
max' :: (Ord a) => a -> a -> a
-- can be written inline
-- max' a b | a > b = a | otherwise = b
max' a b
| a > b = a
| otherwise = b
myCompare :: (Ord a) => a -> a -> Ordering
a `myCompare` b
| a > b = GT
| a == b = EQ
| otherwise = LT
-- can also bind function to a name in the guard with `where`
bmiTell' :: (RealFloat a) => a -> a -> String
bmiTell' weight height
| bmi <= skinny = "underweight"
| bmi <= normal = "normal"
| bmi <= fat = "fat"
| otherwise = "whale"
where bmi = weight / height^2 -- weight, height types already defined
(skinny, normal, fat) = (18.5, 25.0, 30.0) -- type inferred
initials :: String -> String -> String
initials firstname lastname = [f] ++ ". " ++ [l] ++ "."
where (f:_) = firstname
(l:_) = lastname
calcBmis :: (RealFloat a) => [(a, a)] -> [a]
calcBmis xs = [bmi w h | (w, h) <- xs]
where bmi weight height = weight / height^2
-- can use let to bind names to values
-- useful for definining repeated intermediate values
cylinder :: (RealFloat a) => a -> a -> a
cylinder r h =
let sideArea = 2 * pi * r * h
topArea = pi * r^2
in sideArea + 2 * topArea
-- with let instead
calcBmis' :: (RealFloat a) => [(a, a)] -> [a]
-- can also add predicate to the end to filter
calcBmis' xs = [bmi | (w, h) <- xs, let bmi = w / h^2, bmi >= 25.0]
-- case
descList :: [a] -> String
descList xs = "List is" ++ case xs of [] -> "empty"
[x] -> "singleton"
xs -> "longer"
-- which is equivalent to
descList' :: [a] -> String
descList' xs = "List is " ++ what xs
where what [] = "empty"
what [x] = "singleton"
what xs = "longer"
|
autocorr/lyah
|
chap4.hs
|
mit
| 2,948 | 0 | 11 | 858 | 1,263 | 670 | 593 | 73 | 3 |
-- Copyright (c) 2016-present, SoundCloud Ltd.
-- All rights reserved.
--
-- This source code is distributed under the terms of a MIT license,
-- found in the LICENSE file.
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
module Kubernetes.Model.V1.EnvVarSource
( EnvVarSource (..)
, fieldRef
, configMapKeyRef
, secretKeyRef
, mkEnvVarSource
) where
import Control.Lens.TH (makeLenses)
import Data.Aeson.TH (defaultOptions,
deriveJSON,
fieldLabelModifier)
import GHC.Generics (Generic)
import Kubernetes.Model.V1.ConfigMapKeySelector (ConfigMapKeySelector)
import Kubernetes.Model.V1.ObjectFieldSelector (ObjectFieldSelector)
import Kubernetes.Model.V1.SecretKeySelector (SecretKeySelector)
import Prelude hiding (drop, error,
max, min)
import qualified Prelude as P
import Test.QuickCheck (Arbitrary, arbitrary)
import Test.QuickCheck.Instances ()
-- | EnvVarSource represents a source for the value of an EnvVar.
data EnvVarSource = EnvVarSource
{ _fieldRef :: !(Maybe ObjectFieldSelector)
, _configMapKeyRef :: !(Maybe ConfigMapKeySelector)
, _secretKeyRef :: !(Maybe SecretKeySelector)
} deriving (Show, Eq, Generic)
makeLenses ''EnvVarSource
$(deriveJSON defaultOptions{fieldLabelModifier = (\n -> if n == "_type_" then "type" else P.drop 1 n)} ''EnvVarSource)
instance Arbitrary EnvVarSource where
arbitrary = EnvVarSource <$> arbitrary <*> arbitrary <*> arbitrary
-- | Use this method to build an EnvVarSource
mkEnvVarSource :: EnvVarSource
mkEnvVarSource = EnvVarSource Nothing Nothing Nothing
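-- For example, with the generated lenses and Control.Lens (hypothetical
-- selector value): @mkEnvVarSource & fieldRef ?~ someFieldSelector@ builds a
-- source whose value is read from an object field.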
|
soundcloud/haskell-kubernetes
|
lib/Kubernetes/Model/V1/EnvVarSource.hs
|
mit
| 2,104 | 0 | 14 | 724 | 321 | 194 | 127 | 39 | 1 |
-- ex7.2.hs
import Control.Monad
find_ :: (a -> Bool) -> [a] -> Maybe a
find_ p l =
let pl = map (\e -> if p e then Just e else Nothing) l in
msum pl
|
hnfmr/beginning_haskell
|
ex7.2.hs
|
mit
| 175 | 0 | 13 | 61 | 84 | 43 | 41 | 5 | 2 |
module Main where
import Data.Char
import qualified Brainfuck.Types as BF
import Brainfuck.Interpreter
import Brixy.AST
import Brixy.Compiler
{-
simpl = Module "Main"
[Function (Ident "test") [Ident "x"]
[Print (VL (Ident "x"))]
,Function (Ident "main") []
[CallE (CallF (Ident "test") [Lit 48])]
]
prog1 = Module "Main"
[Function (Ident "not") [Ident "x"]
[IfThenElse (Equal (VL (Ident "x")) (Lit 0))
[Return (Lit 1)]
[Return (Lit 0)]
]
,Function (Ident "main") []
[Print (CallF (Ident "not") [Lit 0])]]
prog2 = Module "Main"
[Function (Ident "not") [Ident "x"]
[IfThenElse (Equal (VL (Ident "x")) (Lit 0))
[Return (Lit 1)]
[Return (Lit 0)]
]
,Function (Ident "mult") [Ident "x", Ident "y"]
[Decl (Ident "acc")
,Assign (Ident "acc") (Lit 0)
,While (CallF (Ident "not") [Equal (VL (Ident "y")) (Lit 0)])
[Assign (Ident "acc") (Add (VL (Ident "acc")) (VL (Ident "x")))
,Assign (Ident "y") (Add (VL (Ident "y")) (Lit (-1)))]
,Return (VL (Ident "acc"))
]
,Function (Ident "main") []
[Print (CallF (Ident "mult") [Lit 3, Lit 2])]]
prog3 =
Module "Main"
[Function (Ident "not") [Ident "x"]
[IfThenElse (Equal (VL (Ident "x")) (Lit 0))
[Return (Lit 1)]
[Return (Lit 0)]]
,Function (Ident "mult") [Ident "x", Ident "y"]
[Decl (Ident "acc")
,Assign (Ident "acc") (Lit 0)
,While (CallF (Ident "not") [Equal (VL (Ident "y")) (Lit 0)])
[Assign (Ident "acc") (Add (VL (Ident "acc")) (VL (Ident "x")))
,Assign (Ident "y") (Add (VL (Ident "y")) (Lit (-1)))]
,Return (VL (Ident "acc"))]
,Function (Ident "factorial") [Ident "x"]
[Decl (Ident "acc")
,Assign (Ident "acc") (Lit 1)
,While (CallF (Ident "not") [Equal (VL (Ident "x")) (Lit 0)])
[
Assign (Ident "acc") (CallF (Ident "mult") [(VL (Ident "acc")),(VL (Ident "x"))])
,Assign (Ident "x" ) (Minus (VL (Ident "x")) (Lit 1))
]
-- ,Assign (Ident "acc") (Add (VL (Ident "acc")) (Lit 48))
,Print (VL (Ident "acc"))
]
,Function (Ident "main") []
[CallE (CallF (Ident "factorial") [Lit 3])
--,CallE (CallF (Ident "factorial") [Lit 4])
]
]
prog4 =
Module "Main"
[Function (Ident "add") [Ident "i1", Ident "i2"]
[EBF (EBWhile (EIndir (Ident "i2"))
[EBValInc (EIndir (Ident "i2")) (-1)
,EBValInc (EIndir (Ident "i1")) 1
])
,Return (VL (Ident "i1"))
]
,Function (Ident "main") []
[Print (CallF (Ident "add") [Lit 2, Lit 3])
]
]
-}
math :: Module
math = Module "Math"
[Function (Ident "not") [ByValue (Ident "x")]
[IfThenElse (CallF (Ident "equal") [VL (Ident "x"),Lit 0])
[Return (Lit 1)]
[Return (Lit 0)]]
,Function (Ident "add") [ByValue (Ident "i1"), ByValue (Ident "i2")]
[EBF (EBWhile (EIndir (Ident "i2"))
[EBValInc (EIndir (Ident "i2")) (-1)
,EBValInc (EIndir (Ident "i1")) 1
])
,Return (VL (Ident "i1"))
]
,Function (Ident "minus") [ByValue (Ident "i1"), ByValue (Ident "i2")]
[EBF (EBWhile (EIndir (Ident "i2"))
[EBValInc (EIndir (Ident "i2")) (-1)
,EBValInc (EIndir (Ident "i1")) (-1)
])
,Return (VL (Ident "i1"))
]
,Function (Ident "equal") [ByValue (Ident "i1"), ByValue (Ident "i2")]
[EBF (EBWhile (EIndir (Ident "i1"))
[EBValInc (EIndir (Ident "i1")) (-1)
,EBValInc (EIndir (Ident "i2")) (-1)
])
,EBF (EBValInc (EIndir (Ident "i1")) 1)
,EBF (EBWhile (EIndir (Ident "i2"))
[EBValInc (EIndir (Ident "i1")) (-1)
,EBSet (EIndir (Ident "i2")) 0
])
,Return (VL (Ident "i1"))
]
,Function (Ident "mult") [ByValue (Ident "x"), ByValue (Ident "y")]
[Decl (Ident "acc")
,Assign (Ident "acc") (Lit 0)
,While (CallF (Ident "not") [CallF (Ident "equal") [VL (Ident "y"),Lit 0]])
[Assign (Ident "acc") (CallF (Ident "add") [VL (Ident "acc"),VL (Ident "x")])
,Assign (Ident "y") (CallF (Ident "add") [VL (Ident "y"),Lit (-1)])]
,Return (VL (Ident "acc"))]
]
prog5 =
Program . (:[math])$ Module "Main"
[Import "Math"
,Function (Ident "print") [ByValue (Ident "x")]
[EBF (EBIOOutput (EIndir (Ident "x")))
,Return (Lit 0)
]
,Function (Ident "read") []
[Decl (Ident "ret")
,EBF (EBIORead (EIndir (Ident "ret")))
,Return (VL (Ident "ret"))
]
,Function (Ident "factorial") [ByValue (Ident "x")]
[Decl (Ident "acc")
,Assign (Ident "acc") (Lit 1)
,While (CallF (Ident "Math.not") [CallF (Ident "Math.equal") [VL (Ident "x"),Lit 0]])
[
Assign (Ident "acc") (CallF (Ident "Math.mult") [(VL (Ident "acc")),(VL (Ident "x"))])
,Assign (Ident "x" ) (CallF (Ident "Math.minus") [VL (Ident "x"),Lit 1])
]
,CallE (CallF (Ident "add48") [VL (Ident "acc")])
,CallE (CallF (Ident "print") [VL (Ident "acc")])
]
,Function (Ident "add48") [ByRef (Ident "x")]
[Assign (Ident "x") (CallF (Ident "Math.add") [VL (Ident "x"), Lit 48])
,Return (Lit 0)
]
,Function (Ident "main") []
[Decl (Ident "x")
,Assign (Ident "x") (CallF (Ident "read") [])
,CallE (CallF (Ident "factorial") [VL (Ident "x")])
]
]
{-
compileExpr resAddr (Add e1 e2) = do dk_enter "#addition"
i1 <- dk_declare "#e1"
i2 <- dk_declare "#e2"
v1 <- compileExpr i1 e1
v2 <- compileExpr i2 e2
dk_leave
return (
[L1Set i1 0
,L1Set i2 0] ++ v1 ++ v2 ++
[L1While i2
[L1ValInc i2 (-1)
,L1ValInc i1 1
]
,L1Copy resAddr i1
])
-}
run xs p = ir_stdout $ eval_naive (case compile defSettings p of Right w -> w) xs
gen fn mainBody = case fn defSettings (Module "Main" [Function (Ident "main") [] mainBody]) of
Left{} -> error ".."
Right p -> p
var = Decl . Ident
(=:) a b = Ident a `Assign` b
v = Ident
main :: IO ()
main = putStrLn "Hello, Haskell!"
|
EXio4/brixy
|
src/Main.hs
|
mit
| 7,654 | 0 | 19 | 3,265 | 1,671 | 848 | 823 | 73 | 2 |
{-# LANGUAGE UndecidableInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Edison.Concrete.FingerTree
-- Copyright : (c) Ross Paterson, Ralf Hinze 2006
-- License : BSD-style
-- Maintainer : robdockins AT fastmail DOT fm
-- Stability : internal (non-stable)
-- Portability : non-portable (MPTCs and functional dependencies)
--
-- A general sequence representation with arbitrary annotations, for
-- use as a base for implementations of various collection types, as
-- described in section 4 of
--
-- * Ralf Hinze and Ross Paterson,
-- \"Finger trees: a simple general-purpose data structure\",
-- /Journal of Functional Programming/ 16:2 (2006) pp 197-217.
-- <http://www.soi.city.ac.uk/~ross/papers/FingerTree.html>
--
-- This data structure forms the basis of the "Data.Edison.Seq.FingerSeq"
-- sequence data structure.
--
-- An amortized running time is given for each operation, with /n/
-- referring to the length of the sequence. These bounds hold even in
-- a persistent (shared) setting.
--
-----------------------------------------------------------------------------
{------------------------------------------------------------------
Copyright 2004, 2008, The University Court of the University of Glasgow.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither name of the University nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY COURT OF THE UNIVERSITY OF
GLASGOW AND THE CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
UNIVERSITY COURT OF THE UNIVERSITY OF GLASGOW OR THE CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
-----------------------------------------------------------------------------}
module Data.Edison.Concrete.FingerTree (
FingerTree,
Split(..),
empty, singleton, lcons, rcons, append,
fromList, toList, null, size, lview, rview,
split, takeUntil, dropUntil, splitTree,
reverse, mapTree, foldFT, reduce1, reduce1',
strict, strictWith, structuralInvariant
-- traverse'
) where
import Prelude hiding (null, reverse)
import Data.Monoid
import Test.QuickCheck
import Data.Edison.Prelude
import Control.Monad (liftM2, liftM3, liftM4)
infixr 5 `lcons`
infixl 5 `rcons0`
data Digit a
= One a
| Two a a
| Three a a a
| Four a a a a
deriving Show
foldDigit :: (b -> b -> b) -> (a -> b) -> Digit a -> b
foldDigit _ f (One a) = f a
foldDigit mapp f (Two a b) = f a `mapp` f b
foldDigit mapp f (Three a b c) = f a `mapp` f b `mapp` f c
foldDigit mapp f (Four a b c d) = f a `mapp` f b `mapp` f c `mapp` f d
reduceDigit :: (b -> b -> b) -> (a -> b) -> Digit a -> b
reduceDigit _ f (One a) = f a
reduceDigit mapp f (Two a b) = f a `mapp` f b
reduceDigit mapp f (Three a b c) = f a `mapp` f b `mapp` f c
reduceDigit mapp f (Four a b c d) = (f a `mapp` f b) `mapp` (f c `mapp` f d)
digitToList :: Digit a -> [a] -> [a]
digitToList (One a) xs = a : xs
digitToList (Two a b) xs = a : b : xs
digitToList (Three a b c) xs = a : b : c : xs
digitToList (Four a b c d) xs = a : b : c : d : xs
sizeDigit :: (a -> Int) -> Digit a -> Int
sizeDigit f (One x) = f x
sizeDigit f (Two x y) = f x + f y
sizeDigit f (Three x y z) = f x + f y + f z
sizeDigit f (Four x y z w) = f x + f y + f z + f w
instance (Measured v a) => Measured v (Digit a) where
measure = foldDigit mappend measure
data Node v a = Node2 !v a a | Node3 !v a a a
deriving Show
sizeNode :: (a -> Int) -> Node v a -> Int
sizeNode f (Node2 _ x y) = f x + f y
sizeNode f (Node3 _ x y z) = f x + f y + f z
foldNode :: (b -> b -> b) -> (a -> b) -> Node v a -> b
foldNode mapp f (Node2 _ a b) = f a `mapp` f b
foldNode mapp f (Node3 _ a b c) = f a `mapp` f b `mapp` f c
nodeToList :: Node v a -> [a] -> [a]
nodeToList (Node2 _ a b) xs = a : b : xs
nodeToList (Node3 _ a b c) xs = a : b : c : xs
node2 :: (Measured v a) => a -> a -> Node v a
node2 a b = Node2 (measure a `mappend` measure b) a b
node3 :: (Measured v a) => a -> a -> a -> Node v a
node3 a b c = Node3 (measure a `mappend` measure b `mappend` measure c) a b c
instance (Monoid v) => Measured v (Node v a) where
measure (Node2 v _ _) = v
measure (Node3 v _ _ _) = v
nodeToDigit :: Node v a -> Digit a
nodeToDigit (Node2 _ a b) = Two a b
nodeToDigit (Node3 _ a b c) = Three a b c
-- | Finger trees with element type @a@, annotated with measures of type @v@.
-- The operations enforce the constraint @'Measured' v a@.
data FingerTree v a
= Empty
| Single a
| Deep !v !(Digit a) (FingerTree v (Node v a)) !(Digit a)
deep :: (Measured v a) =>
Digit a -> FingerTree v (Node v a) -> Digit a -> FingerTree v a
deep pr m sf = Deep ((measure pr `mappendVal` m) `mappend` measure sf) pr m sf
structuralInvariant :: (Eq v, Measured v a) => FingerTree v a -> Bool
structuralInvariant Empty = True
structuralInvariant (Single _) = True
structuralInvariant (Deep v pr m sf) =
v == foldDigit mappend measure pr `mappend`
foldFT mempty mappend (foldNode mappend measure) m `mappend`
foldDigit mappend measure sf
instance (Measured v a) => Measured v (FingerTree v a) where
measure Empty = mempty
measure (Single x) = measure x
measure (Deep v _ _ _) = v
sizeFT :: (a -> Int) -> FingerTree v a -> Int
sizeFT _ Empty = 0
sizeFT f (Single x) = f x
sizeFT f (Deep _ d1 m d2) = sizeDigit f d1 + sizeFT (sizeNode f) m + sizeDigit f d2
size :: FingerTree v a -> Int
size = sizeFT (const 1)
foldFT :: b -> (b -> b -> b) -> (a -> b) -> FingerTree v a -> b
foldFT mz _ _ Empty = mz
foldFT _ _ f (Single x) = f x
foldFT mz mapp f (Deep _ pr m sf) =
foldDigit mapp f pr `mapp` foldFT mz mapp (foldNode mapp f) m `mapp` foldDigit mapp f sf
ftToList :: FingerTree v a -> [a] -> [a]
ftToList Empty xs = xs
ftToList (Single a) xs = a : xs
ftToList (Deep _ d1 ft d2) xs = digitToList d1 (foldr nodeToList [] . ftToList ft $ []) ++ (digitToList d2 xs)
toList :: FingerTree v a -> [a]
toList ft = ftToList ft []
reduce1_aux :: (b -> b -> b) -> (a -> b) -> Digit a -> FingerTree v (Node v a) -> Digit a -> b
reduce1_aux mapp f pr Empty sf =
(reduceDigit mapp f pr) `mapp`
(reduceDigit mapp f sf)
reduce1_aux mapp f pr (Single x) sf =
(reduceDigit mapp f pr) `mapp`
(foldNode mapp f x) `mapp`
(reduceDigit mapp f sf)
reduce1_aux mapp f pr (Deep _ pr' m sf') sf =
(reduceDigit mapp f pr) `mapp`
(reduce1_aux mapp
(foldNode mapp f)
pr' m sf') `mapp`
(reduceDigit mapp f sf)
reduce1 :: (a -> a -> a) -> FingerTree v a -> a
reduce1 _ Empty = error "FingerTree.reduce1: empty tree"
reduce1 _ (Single x) = x
reduce1 mapp (Deep _ pr m sf) = reduce1_aux mapp id pr m sf
reduce1' :: (a -> a -> a) -> FingerTree v a -> a
reduce1' _ Empty = error "FingerTree.reduce1': empty tree"
reduce1' _ (Single x) = x
reduce1' mapp (Deep _ pr m sf) = reduce1_aux mapp' id pr m sf
where mapp' x y = x `seq` y `seq` mapp x y
strict :: FingerTree v a -> FingerTree v a
strict xs = foldFT () seq (const ()) xs `seq` xs
strictWith :: (a -> b) -> FingerTree v a -> FingerTree v a
strictWith f xs = foldFT () seq (\x -> f x `seq` ()) xs `seq` xs
instance (Measured v a, Eq a) => Eq (FingerTree v a) where
xs == ys = toList xs == toList ys
instance (Measured v a, Ord a) => Ord (FingerTree v a) where
compare xs ys = compare (toList xs) (toList ys)
instance (Measured v a, Show a) => Show (FingerTree v a) where
showsPrec p xs = showParen (p > 10) $
showString "fromList " . shows (toList xs)
mapTree :: (Measured v2 a2) =>
(a1 -> a2) -> FingerTree v1 a1 -> FingerTree v2 a2
mapTree _ Empty = Empty
mapTree f (Single x) = Single (f x)
mapTree f (Deep _ pr m sf) =
deep (mapDigit f pr) (mapTree (mapNode f) m) (mapDigit f sf)
mapNode :: (Measured v2 a2) =>
(a1 -> a2) -> Node v1 a1 -> Node v2 a2
mapNode f (Node2 _ a b) = node2 (f a) (f b)
mapNode f (Node3 _ a b c) = node3 (f a) (f b) (f c)
mapDigit :: (a -> b) -> Digit a -> Digit b
mapDigit f (One a) = One (f a)
mapDigit f (Two a b) = Two (f a) (f b)
mapDigit f (Three a b c) = Three (f a) (f b) (f c)
mapDigit f (Four a b c d) = Four (f a) (f b) (f c) (f d)
{-
-- | Like 'traverse', but with a more constrained type.
traverse' :: (Measured v1 a1, Measured v2 a2, Applicative f) =>
(a1 -> f a2) -> FingerTree v1 a1 -> f (FingerTree v2 a2)
traverse' = traverseTree
traverseTree :: (Measured v2 a2, Applicative f) =>
(a1 -> f a2) -> FingerTree v1 a1 -> f (FingerTree v2 a2)
traverseTree _ Empty = pure Empty
traverseTree f (Single x) = Single <$> f x
traverseTree f (Deep _ pr m sf) =
deep <$> traverseDigit f pr <*> traverseTree (traverseNode f) m <*> traverseDigit f sf
traverseNode :: (Measured v2 a2, Applicative f) =>
(a1 -> f a2) -> Node v1 a1 -> f (Node v2 a2)
traverseNode f (Node2 _ a b) = node2 <$> f a <*> f b
traverseNode f (Node3 _ a b c) = node3 <$> f a <*> f b <*> f c
traverseDigit :: (Applicative f) => (a -> f b) -> Digit a -> f (Digit b)
traverseDigit f (One a) = One <$> f a
traverseDigit f (Two a b) = Two <$> f a <*> f b
traverseDigit f (Three a b c) = Three <$> f a <*> f b <*> f c
traverseDigit f (Four a b c d) = Four <$> f a <*> f b <*> f c <*> f d
-}
-- | /O(1)/. The empty sequence.
empty :: Measured v a => FingerTree v a
empty = Empty
-- | /O(1)/. A singleton sequence.
singleton :: Measured v a => a -> FingerTree v a
singleton = Single
-- | /O(n)/. Create a sequence from a finite list of elements.
fromList :: (Measured v a) => [a] -> FingerTree v a
fromList = foldr lcons Empty
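-- Given a lawful 'Measured' instance, construction preserves order and
-- measures: @toList (fromList xs) == xs@ and @measure (fromList xs)@ equals
-- @foldMap measure xs@.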
-- | /O(1)/. Add an element to the left end of a sequence.
lcons :: (Measured v a) => a -> FingerTree v a -> FingerTree v a
a `lcons` Empty = Single a
a `lcons` Single b = deep (One a) Empty (One b)
a `lcons` Deep _ (Four b c d e) m sf = m `seq`
deep (Two a b) (node3 c d e `lcons` m) sf
a `lcons` Deep _ pr m sf = deep (consDigit a pr) m sf
consDigit :: a -> Digit a -> Digit a
consDigit a (One b) = Two a b
consDigit a (Two b c) = Three a b c
consDigit a (Three b c d) = Four a b c d
consDigit _ _ = error "FingerTree.consDigit: bug!"
-- | /O(1)/. Add an element to the right end of a sequence.
rcons :: (Measured v a) => a -> FingerTree v a -> FingerTree v a
rcons = flip rcons0
rcons0 :: (Measured v a) => FingerTree v a -> a -> FingerTree v a
Empty `rcons0` a = Single a
Single a `rcons0` b = deep (One a) Empty (One b)
Deep _ pr m (Four a b c d) `rcons0` e = m `seq`
deep pr (m `rcons0` node3 a b c) (Two d e)
Deep _ pr m sf `rcons0` x = deep pr m (snocDigit sf x)
snocDigit :: Digit a -> a -> Digit a
snocDigit (One a) b = Two a b
snocDigit (Two a b) c = Three a b c
snocDigit (Three a b c) d = Four a b c d
snocDigit _ _ = error "FingerTree.snocDigit: bug!"
-- | /O(1)/. Is this the empty sequence?
null :: (Measured v a) => FingerTree v a -> Bool
null Empty = True
null _ = False
-- | /O(1)/. Analyse the left end of a sequence.
lview :: (Measured v a, Monad m) => FingerTree v a -> m (a,FingerTree v a)
lview Empty = fail "FingerTree.lview: empty tree"
lview (Single x) = return (x, Empty)
lview (Deep _ (One x) m sf) = return . (,) x $
case lview m of
Nothing -> digitToTree sf
Just (a,m') -> deep (nodeToDigit a) m' sf
lview (Deep _ pr m sf) = return (lheadDigit pr, deep (ltailDigit pr) m sf)
lheadDigit :: Digit a -> a
lheadDigit (One a) = a
lheadDigit (Two a _) = a
lheadDigit (Three a _ _) = a
lheadDigit (Four a _ _ _) = a
ltailDigit :: Digit a -> Digit a
ltailDigit (Two _ b) = One b
ltailDigit (Three _ b c) = Two b c
ltailDigit (Four _ b c d) = Three b c d
ltailDigit _ = error "FingerTree.ltailDigit: bug!"
-- | /O(1)/. Analyse the right end of a sequence.
rview :: (Measured v a, Monad m) => FingerTree v a -> m (a, FingerTree v a)
rview Empty = fail "FingerTree.rview: empty tree"
rview (Single x) = return (x, Empty)
rview (Deep _ pr m (One x)) = return . (,) x $
case rview m of
Nothing -> digitToTree pr
Just (a,m') -> deep pr m' (nodeToDigit a)
rview (Deep _ pr m sf) = return (rheadDigit sf, deep pr m (rtailDigit sf))
rheadDigit :: Digit a -> a
rheadDigit (One a) = a
rheadDigit (Two _ b) = b
rheadDigit (Three _ _ c) = c
rheadDigit (Four _ _ _ d) = d
rtailDigit :: Digit a -> Digit a
rtailDigit (Two a _) = One a
rtailDigit (Three a b _) = Two a b
rtailDigit (Four a b c _) = Three a b c
rtailDigit _ = error "FingerTree.rtailDigit: bug!"
digitToTree :: (Measured v a) => Digit a -> FingerTree v a
digitToTree (One a) = Single a
digitToTree (Two a b) = deep (One a) Empty (One b)
digitToTree (Three a b c) = deep (Two a b) Empty (One c)
digitToTree (Four a b c d) = deep (Two a b) Empty (Two c d)
-- | /O(log(min(n1,n2)))/. Concatenate two sequences.
append :: (Measured v a) => FingerTree v a -> FingerTree v a -> FingerTree v a
append = appendTree0
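-- Hedged usage sketch (hypothetical elements): append (fromList [a,b])
-- (fromList [c,d]) behaves like fromList [a,b,c,d]. The appendTreeN/addDigitsN
-- families below only repack the digits that meet at the seam into 2- and
-- 3-nodes of the middle tree.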
appendTree0 :: (Measured v a) => FingerTree v a -> FingerTree v a -> FingerTree v a
appendTree0 Empty xs =
xs
appendTree0 xs Empty =
xs
appendTree0 (Single x) xs =
x `lcons` xs
appendTree0 xs (Single x) =
xs `rcons0` x
appendTree0 (Deep _ pr1 m1 sf1) (Deep _ pr2 m2 sf2) =
deep pr1 (addDigits0 m1 sf1 pr2 m2) sf2
addDigits0 :: (Measured v a) => FingerTree v (Node v a) -> Digit a -> Digit a -> FingerTree v (Node v a) -> FingerTree v (Node v a)
addDigits0 m1 (One a) (One b) m2 =
appendTree1 m1 (node2 a b) m2
addDigits0 m1 (One a) (Two b c) m2 =
appendTree1 m1 (node3 a b c) m2
addDigits0 m1 (One a) (Three b c d) m2 =
appendTree2 m1 (node2 a b) (node2 c d) m2
addDigits0 m1 (One a) (Four b c d e) m2 =
appendTree2 m1 (node3 a b c) (node2 d e) m2
addDigits0 m1 (Two a b) (One c) m2 =
appendTree1 m1 (node3 a b c) m2
addDigits0 m1 (Two a b) (Two c d) m2 =
appendTree2 m1 (node2 a b) (node2 c d) m2
addDigits0 m1 (Two a b) (Three c d e) m2 =
appendTree2 m1 (node3 a b c) (node2 d e) m2
addDigits0 m1 (Two a b) (Four c d e f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits0 m1 (Three a b c) (One d) m2 =
appendTree2 m1 (node2 a b) (node2 c d) m2
addDigits0 m1 (Three a b c) (Two d e) m2 =
appendTree2 m1 (node3 a b c) (node2 d e) m2
addDigits0 m1 (Three a b c) (Three d e f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits0 m1 (Three a b c) (Four d e f g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits0 m1 (Four a b c d) (One e) m2 =
appendTree2 m1 (node3 a b c) (node2 d e) m2
addDigits0 m1 (Four a b c d) (Two e f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits0 m1 (Four a b c d) (Three e f g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits0 m1 (Four a b c d) (Four e f g h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
appendTree1 :: (Measured v a) => FingerTree v a -> a -> FingerTree v a -> FingerTree v a
appendTree1 Empty a xs =
a `lcons` xs
appendTree1 xs a Empty =
xs `rcons0` a
appendTree1 (Single x) a xs =
x `lcons` (a `lcons` xs)
appendTree1 xs a (Single x) =
xs `rcons0` a `rcons0` x
appendTree1 (Deep _ pr1 m1 sf1) a (Deep _ pr2 m2 sf2) =
deep pr1 (addDigits1 m1 sf1 a pr2 m2) sf2
addDigits1 :: (Measured v a) => FingerTree v (Node v a) -> Digit a -> a -> Digit a -> FingerTree v (Node v a) -> FingerTree v (Node v a)
addDigits1 m1 (One a) b (One c) m2 =
appendTree1 m1 (node3 a b c) m2
addDigits1 m1 (One a) b (Two c d) m2 =
appendTree2 m1 (node2 a b) (node2 c d) m2
addDigits1 m1 (One a) b (Three c d e) m2 =
appendTree2 m1 (node3 a b c) (node2 d e) m2
addDigits1 m1 (One a) b (Four c d e f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits1 m1 (Two a b) c (One d) m2 =
appendTree2 m1 (node2 a b) (node2 c d) m2
addDigits1 m1 (Two a b) c (Two d e) m2 =
appendTree2 m1 (node3 a b c) (node2 d e) m2
addDigits1 m1 (Two a b) c (Three d e f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits1 m1 (Two a b) c (Four d e f g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits1 m1 (Three a b c) d (One e) m2 =
appendTree2 m1 (node3 a b c) (node2 d e) m2
addDigits1 m1 (Three a b c) d (Two e f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits1 m1 (Three a b c) d (Three e f g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits1 m1 (Three a b c) d (Four e f g h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
addDigits1 m1 (Four a b c d) e (One f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits1 m1 (Four a b c d) e (Two f g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits1 m1 (Four a b c d) e (Three f g h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
addDigits1 m1 (Four a b c d) e (Four f g h i) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node3 g h i) m2
appendTree2 :: (Measured v a) => FingerTree v a -> a -> a -> FingerTree v a -> FingerTree v a
appendTree2 Empty a b xs =
a `lcons` (b `lcons` xs)
appendTree2 xs a b Empty =
xs `rcons0` a `rcons0` b
appendTree2 (Single x) a b xs =
x `lcons` (a `lcons` (b `lcons` xs))
appendTree2 xs a b (Single x) =
xs `rcons0` a `rcons0` b `rcons0` x
appendTree2 (Deep _ pr1 m1 sf1) a b (Deep _ pr2 m2 sf2) =
deep pr1 (addDigits2 m1 sf1 a b pr2 m2) sf2
addDigits2 :: (Measured v a) => FingerTree v (Node v a) -> Digit a -> a -> a -> Digit a -> FingerTree v (Node v a) -> FingerTree v (Node v a)
addDigits2 m1 (One a) b c (One d) m2 =
appendTree2 m1 (node2 a b) (node2 c d) m2
addDigits2 m1 (One a) b c (Two d e) m2 =
appendTree2 m1 (node3 a b c) (node2 d e) m2
addDigits2 m1 (One a) b c (Three d e f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits2 m1 (One a) b c (Four d e f g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits2 m1 (Two a b) c d (One e) m2 =
appendTree2 m1 (node3 a b c) (node2 d e) m2
addDigits2 m1 (Two a b) c d (Two e f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits2 m1 (Two a b) c d (Three e f g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits2 m1 (Two a b) c d (Four e f g h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
addDigits2 m1 (Three a b c) d e (One f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits2 m1 (Three a b c) d e (Two f g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits2 m1 (Three a b c) d e (Three f g h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
addDigits2 m1 (Three a b c) d e (Four f g h i) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node3 g h i) m2
addDigits2 m1 (Four a b c d) e f (One g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits2 m1 (Four a b c d) e f (Two g h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
addDigits2 m1 (Four a b c d) e f (Three g h i) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node3 g h i) m2
addDigits2 m1 (Four a b c d) e f (Four g h i j) m2 =
appendTree4 m1 (node3 a b c) (node3 d e f) (node2 g h) (node2 i j) m2
appendTree3 :: (Measured v a) => FingerTree v a -> a -> a -> a -> FingerTree v a -> FingerTree v a
appendTree3 Empty a b c xs =
a `lcons` (b `lcons` (c `lcons` xs))
appendTree3 xs a b c Empty =
xs `rcons0` a `rcons0` b `rcons0` c
appendTree3 (Single x) a b c xs =
x `lcons` (a `lcons` (b `lcons` (c `lcons` xs)))
appendTree3 xs a b c (Single x) =
xs `rcons0` a `rcons0` b `rcons0` c `rcons0` x
appendTree3 (Deep _ pr1 m1 sf1) a b c (Deep _ pr2 m2 sf2) =
deep pr1 (addDigits3 m1 sf1 a b c pr2 m2) sf2
addDigits3 :: (Measured v a) => FingerTree v (Node v a) -> Digit a -> a -> a -> a -> Digit a -> FingerTree v (Node v a) -> FingerTree v (Node v a)
addDigits3 m1 (One a) b c d (One e) m2 =
appendTree2 m1 (node3 a b c) (node2 d e) m2
addDigits3 m1 (One a) b c d (Two e f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits3 m1 (One a) b c d (Three e f g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits3 m1 (One a) b c d (Four e f g h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
addDigits3 m1 (Two a b) c d e (One f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits3 m1 (Two a b) c d e (Two f g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits3 m1 (Two a b) c d e (Three f g h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
addDigits3 m1 (Two a b) c d e (Four f g h i) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node3 g h i) m2
addDigits3 m1 (Three a b c) d e f (One g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits3 m1 (Three a b c) d e f (Two g h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
addDigits3 m1 (Three a b c) d e f (Three g h i) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node3 g h i) m2
addDigits3 m1 (Three a b c) d e f (Four g h i j) m2 =
appendTree4 m1 (node3 a b c) (node3 d e f) (node2 g h) (node2 i j) m2
addDigits3 m1 (Four a b c d) e f g (One h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
addDigits3 m1 (Four a b c d) e f g (Two h i) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node3 g h i) m2
addDigits3 m1 (Four a b c d) e f g (Three h i j) m2 =
appendTree4 m1 (node3 a b c) (node3 d e f) (node2 g h) (node2 i j) m2
addDigits3 m1 (Four a b c d) e f g (Four h i j k) m2 =
appendTree4 m1 (node3 a b c) (node3 d e f) (node3 g h i) (node2 j k) m2
appendTree4 :: (Measured v a) => FingerTree v a -> a -> a -> a -> a -> FingerTree v a -> FingerTree v a
appendTree4 Empty a b c d xs =
a `lcons` b `lcons` c `lcons` d `lcons` xs
appendTree4 xs a b c d Empty =
xs `rcons0` a `rcons0` b `rcons0` c `rcons0` d
appendTree4 (Single x) a b c d xs =
x `lcons` a `lcons` b `lcons` c `lcons` d `lcons` xs
appendTree4 xs a b c d (Single x) =
xs `rcons0` a `rcons0` b `rcons0` c `rcons0` d `rcons0` x
appendTree4 (Deep _ pr1 m1 sf1) a b c d (Deep _ pr2 m2 sf2) =
deep pr1 (addDigits4 m1 sf1 a b c d pr2 m2) sf2
addDigits4 :: (Measured v a) => FingerTree v (Node v a) -> Digit a -> a -> a -> a -> a -> Digit a -> FingerTree v (Node v a) -> FingerTree v (Node v a)
addDigits4 m1 (One a) b c d e (One f) m2 =
appendTree2 m1 (node3 a b c) (node3 d e f) m2
addDigits4 m1 (One a) b c d e (Two f g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits4 m1 (One a) b c d e (Three f g h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
addDigits4 m1 (One a) b c d e (Four f g h i) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node3 g h i) m2
addDigits4 m1 (Two a b) c d e f (One g) m2 =
appendTree3 m1 (node3 a b c) (node2 d e) (node2 f g) m2
addDigits4 m1 (Two a b) c d e f (Two g h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
addDigits4 m1 (Two a b) c d e f (Three g h i) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node3 g h i) m2
addDigits4 m1 (Two a b) c d e f (Four g h i j) m2 =
appendTree4 m1 (node3 a b c) (node3 d e f) (node2 g h) (node2 i j) m2
addDigits4 m1 (Three a b c) d e f g (One h) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node2 g h) m2
addDigits4 m1 (Three a b c) d e f g (Two h i) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node3 g h i) m2
addDigits4 m1 (Three a b c) d e f g (Three h i j) m2 =
appendTree4 m1 (node3 a b c) (node3 d e f) (node2 g h) (node2 i j) m2
addDigits4 m1 (Three a b c) d e f g (Four h i j k) m2 =
appendTree4 m1 (node3 a b c) (node3 d e f) (node3 g h i) (node2 j k) m2
addDigits4 m1 (Four a b c d) e f g h (One i) m2 =
appendTree3 m1 (node3 a b c) (node3 d e f) (node3 g h i) m2
addDigits4 m1 (Four a b c d) e f g h (Two i j) m2 =
appendTree4 m1 (node3 a b c) (node3 d e f) (node2 g h) (node2 i j) m2
addDigits4 m1 (Four a b c d) e f g h (Three i j k) m2 =
appendTree4 m1 (node3 a b c) (node3 d e f) (node3 g h i) (node2 j k) m2
addDigits4 m1 (Four a b c d) e f g h (Four i j k l) m2 =
appendTree4 m1 (node3 a b c) (node3 d e f) (node3 g h i) (node3 j k l) m2
-- | /O(log(min(i,n-i)))/. Split a sequence at a point where the predicate
-- on the accumulated measure changes from 'False' to 'True'.
split :: (Measured v a) =>
(v -> Bool) -> FingerTree v a -> (FingerTree v a, FingerTree v a)
split _p Empty = (Empty, Empty)
split p xs
| p (measure xs) = (l, x `lcons` r)
| otherwise = (xs, Empty)
where Split l x r = splitTree p mempty xs
takeUntil :: (Measured v a) => (v -> Bool) -> FingerTree v a -> FingerTree v a
takeUntil p = fst . split p
dropUntil :: (Measured v a) => (v -> Bool) -> FingerTree v a -> FingerTree v a
dropUntil p = snd . split p
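-- A minimal usage sketch for 'split' (the names Size, Elem and splitAt' below
-- are hypothetical and not part of this module; they assume the standard
-- size measure):
--
-- newtype Size = Size Int
-- instance Semigroup Size where Size m <> Size n = Size (m + n)
-- instance Monoid Size where mempty = Size 0
-- newtype Elem a = Elem a
-- instance Measured Size (Elem a) where measure _ = Size 1
--
-- splitAt' :: Int -> FingerTree Size (Elem a)
--          -> (FingerTree Size (Elem a), FingerTree Size (Elem a))
-- splitAt' i = split (\(Size n) -> n > i)
--
-- The predicate first holds once the accumulated size exceeds i, so the left
-- result holds the first i elements and the right result the remainder.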
data Split t a = Split t a t
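-- 'Split l x r' is a focused decomposition: 'l' is the material before the
-- point where the predicate flips, 'x' the element at that point, and 'r'
-- everything after it.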
splitTree :: (Measured v a) =>
(v -> Bool) -> v -> FingerTree v a -> Split (FingerTree v a) a
splitTree _ _ Empty = error "FingerTree.splitTree: bug!"
splitTree _p _i (Single x) = Split Empty x Empty
splitTree p i (Deep _ pr m sf)
| p vpr = let Split l x r = splitDigit p i pr
in Split (maybe Empty digitToTree l) x (deepL r m sf)
| p vm = let Split ml xs mr = splitTree p vpr m
Split l x r = splitNode p (vpr `mappendVal` ml) xs
in Split (deepR pr ml l) x (deepL r mr sf)
| otherwise = let Split l x r = splitDigit p vm sf
in Split (deepR pr m l) x (maybe Empty digitToTree r)
where vpr = i `mappend` measure pr
vm = vpr `mappendVal` m
mappendVal :: (Measured v a) => v -> FingerTree v a -> v
mappendVal v Empty = v
mappendVal v t = v `mappend` measure t
deepL :: (Measured v a) =>
Maybe (Digit a) -> FingerTree v (Node v a) -> Digit a -> FingerTree v a
deepL Nothing m sf = case lview m of
Nothing -> digitToTree sf
Just (a,m') -> deep (nodeToDigit a) m' sf
deepL (Just pr) m sf = deep pr m sf
deepR :: (Measured v a) =>
Digit a -> FingerTree v (Node v a) -> Maybe (Digit a) -> FingerTree v a
deepR pr m Nothing = case rview m of
Nothing -> digitToTree pr
Just (a,m') -> deep pr m' (nodeToDigit a)
deepR pr m (Just sf) = deep pr m sf
splitNode :: (Measured v a) => (v -> Bool) -> v -> Node v a ->
Split (Maybe (Digit a)) a
splitNode p i (Node2 _ a b)
| p va = Split Nothing a (Just (One b))
| otherwise = Split (Just (One a)) b Nothing
where va = i `mappend` measure a
splitNode p i (Node3 _ a b c)
| p va = Split Nothing a (Just (Two b c))
| p vab = Split (Just (One a)) b (Just (One c))
| otherwise = Split (Just (Two a b)) c Nothing
where va = i `mappend` measure a
vab = va `mappend` measure b
splitDigit :: (Measured v a) => (v -> Bool) -> v -> Digit a ->
Split (Maybe (Digit a)) a
splitDigit _ i (One a) = i `seq` Split Nothing a Nothing
splitDigit p i (Two a b)
| p va = Split Nothing a (Just (One b))
| otherwise = Split (Just (One a)) b Nothing
where va = i `mappend` measure a
splitDigit p i (Three a b c)
| p va = Split Nothing a (Just (Two b c))
| p vab = Split (Just (One a)) b (Just (One c))
| otherwise = Split (Just (Two a b)) c Nothing
where va = i `mappend` measure a
vab = va `mappend` measure b
splitDigit p i (Four a b c d)
| p va = Split Nothing a (Just (Three b c d))
| p vab = Split (Just (One a)) b (Just (Two c d))
| p vabc = Split (Just (Two a b)) c (Just (One d))
| otherwise = Split (Just (Three a b c)) d Nothing
where va = i `mappend` measure a
vab = va `mappend` measure b
vabc = vab `mappend` measure c
-- | /O(n)/. The reverse of a sequence.
reverse :: (Measured v a) => FingerTree v a -> FingerTree v a
reverse = reverseTree id
reverseTree :: (Measured v2 a2) => (a1 -> a2) -> FingerTree v1 a1 -> FingerTree v2 a2
reverseTree _ Empty = Empty
reverseTree f (Single x) = Single (f x)
reverseTree f (Deep _ pr m sf) =
deep (reverseDigit f sf) (reverseTree (reverseNode f) m) (reverseDigit f pr)
reverseNode :: (Measured v2 a2) => (a1 -> a2) -> Node v1 a1 -> Node v2 a2
reverseNode f (Node2 _ a b) = node2 (f b) (f a)
reverseNode f (Node3 _ a b c) = node3 (f c) (f b) (f a)
reverseDigit :: (a -> b) -> Digit a -> Digit b
reverseDigit f (One a) = One (f a)
reverseDigit f (Two a b) = Two (f b) (f a)
reverseDigit f (Three a b c) = Three (f c) (f b) (f a)
reverseDigit f (Four a b c d) = Four (f d) (f c) (f b) (f a)
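-- Hedged sketch (hypothetical elements): reverse (fromList [a,b,c]) behaves
-- like fromList [c,b,a]; the smart constructors 'deep', 'node2' and 'node3'
-- recompute the cached measures while the spine is rebuilt.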
two :: Monad m => m a -> m (a, a)
two m = liftM2 (,) m m
three :: Monad m => m a -> m (a, a, a)
three m = liftM3 (,,) m m m
four :: Monad m => m a -> m (a, a, a, a)
four m = liftM4 (,,,) m m m m
instance (Arbitrary a) => Arbitrary (Digit a) where
arbitrary = oneof
[ arbitrary >>= \x -> return (One x)
, two arbitrary >>= \(x,y) -> return (Two x y)
, three arbitrary >>= \(x,y,z) -> return (Three x y z)
, four arbitrary >>= \(x,y,z,w) -> return (Four x y z w)
]
instance (CoArbitrary a) => CoArbitrary (Digit a) where
coarbitrary p = case p of
One x -> variant 0 . coarbitrary x
Two x y -> variant 1 . coarbitrary x . coarbitrary y
Three x y z -> variant 2 . coarbitrary x . coarbitrary y
. coarbitrary z
Four x y z w -> variant 3 . coarbitrary x . coarbitrary y
. coarbitrary z . coarbitrary w
instance (Measured v a, Arbitrary a) => Arbitrary (Node v a) where
arbitrary = oneof
[ two arbitrary >>= \(x,y) -> return (node2 x y)
, three arbitrary >>= \(x,y,z) -> return (node3 x y z)
]
instance (Measured v a, CoArbitrary a) => CoArbitrary (Node v a) where
coarbitrary p = case p of
Node2 _ x y -> variant 0 . coarbitrary x . coarbitrary y
Node3 _ x y z -> variant 1 . coarbitrary x . coarbitrary y . coarbitrary z
instance (Measured v a, Arbitrary a) => Arbitrary (FingerTree v a) where
arbitrary = oneof
[ return Empty
, arbitrary >>= return . Single
, do
pf <- arbitrary
m <- arbitrary
sf <- arbitrary
return (deep pf m sf)
]
instance (Measured v a, CoArbitrary a) => CoArbitrary (FingerTree v a) where
coarbitrary p = case p of
Empty -> variant 0
Single x -> variant 1 . coarbitrary x
Deep _ sf m pf -> variant 2 . coarbitrary sf . coarbitrary m . coarbitrary pf
|
robdockins/edison
|
edison-core/src/Data/Edison/Concrete/FingerTree.hs
|
mit
| 32,813 | 0 | 16 | 9,479 | 15,829 | 7,936 | 7,893 | -1 | -1 |
module Relations.Basics.Terms where
import Notes
makeDefs [
"relation"
, "unit relation"
, "inverse relation"
, "binary relation"
, "reflexive"
, "transitive"
, "symmetric"
, "total"
]
makeEx "divides is relation"
makeThm "Inverse of Inverse relation is normal"
|
NorfairKing/the-notes
|
src/Relations/Basics/Terms.hs
|
gpl-2.0
| 314 | 0 | 6 | 89 | 52 | 29 | 23 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DeriveDataTypeable #-}
module Goto.Type where
import RAM.Builtin
import Autolib.ToDoc
import Autolib.Reader
import Autolib.Size
import Autolib.TES.Identifier
import Data.Typeable
import Autolib.Xml
type Register = Int
type Address = Int
data Statement
= Inc Register | Dec Register
| Assign Register Builtin [Register]
| Stop | Goto Address | GotoZ Register Address
deriving ( Eq, Ord, Typeable )
type Program = [ Statement ]
example =
[ GotoZ 1 4 , Inc 0 , Dec 1 , Goto 0 , Stop ]
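-- Reading of 'example' (assuming instruction addresses count from 0): while
-- register 1 is non-zero the loop increments register 0 and decrements
-- register 1, so the program adds r1 to r0, leaves r1 at zero and then stops.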
instance Size Program where
size = fromIntegral . length
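-- | A register index guaranteed not to occur in the program: one more than
-- the largest register mentioned, or 0 if no register occurs at all.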
free_register :: Program -> Register
free_register p = maximum $ 0 : map succ ( do
s <- p
case s of
Inc r -> [r] ; Dec r -> [r]
Assign res fin args -> res : args
GotoZ r _ -> [r]
_ -> [] )
$(derives [makeReader, makeToDoc] [''Statement])
instance Show Statement where show = render . toDoc
|
marcellussiegburg/autotool
|
collection/src/Goto/Type.hs
|
gpl-2.0
| 1,004 | 0 | 13 | 219 | 325 | 179 | 146 | 34 | 5 |
module Patat.Presentation
( PresentationSettings (..)
, defaultPresentationSettings
, Presentation (..)
, readPresentation
, Size
, getDisplaySize
, Display (..)
, displayPresentation
, displayPresentationError
, dumpPresentation
, PresentationCommand (..)
, readPresentationCommand
, UpdatedPresentation (..)
, updatePresentation
) where
import Patat.Presentation.Display
import Patat.Presentation.Interactive
import Patat.Presentation.Internal
import Patat.Presentation.Read
|
jaspervdj/patat
|
lib/Patat/Presentation.hs
|
gpl-2.0
| 582 | 0 | 5 | 150 | 94 | 64 | 30 | 19 | 0 |
{-# LANGUAGE ScopedTypeVariables, OverloadedStrings, LambdaCase #-}
-----------------------------------------------------------------------------
--
-- Module : IDE.Completion
-- Copyright : 2007-2011 Juergen Nicklisch-Franken, Hamish Mackenzie
-- License : GPL
--
-- Maintainer : <[email protected]>
-- Stability : provisional
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
module IDE.Completion (complete, cancel, setCompletionSize) where
import Prelude hiding(getChar, getLine)
import Data.List as List (stripPrefix, isPrefixOf, filter)
import Data.Char
import Data.IORef
import Control.Monad
import IDE.Core.State
import IDE.Metainfo.Provider(getDescription,getCompletionOptions)
import IDE.TextEditor as TE
import Control.Monad.IO.Class (MonadIO(..))
import Control.Monad.Trans.Reader (ask)
import qualified Control.Monad.Reader as Gtk (liftIO)
import Control.Monad.Trans.Class (MonadTrans(..))
import Control.Applicative ((<$>))
import Data.Text (Text)
import qualified Data.Text as T
(empty, commonPrefixes, pack, unpack, null, stripPrefix,
isPrefixOf)
import System.Log.Logger (debugM)
import GI.Gtk.Objects.Window
(windowMove, windowGetScreen, windowGetSize, Window(..),
windowNew, setWindowTransientFor, setWindowDefaultHeight,
setWindowDefaultWidth, setWindowResizable, setWindowDecorated,
setWindowTypeHint, windowResize)
import Data.GI.Base
(unsafeManagedPtrGetPtr, unsafeCastTo, get, set, nullToNothing)
import GI.Gdk.Enums (GrabStatus(..), WindowTypeHint(..))
import GI.Gtk.Objects.Container
(containerRemove, containerAdd, containerSetBorderWidth)
import GI.Gtk.Objects.HPaned (hPanedNew)
import GI.Gtk.Objects.ScrolledWindow (scrolledWindowNew)
import GI.Gtk.Objects.Adjustment (noAdjustment)
import GI.Gtk.Objects.Widget
(Widget(..), widgetShowAll, widgetGetAllocation, widgetGetParent,
widgetHide, onWidgetButtonReleaseEvent, onWidgetMotionNotifyEvent,
widgetGetWindow, onWidgetButtonPressEvent, getWidgetVisible,
widgetModifyFont, widgetSetSizeRequest)
import GI.Gtk.Objects.TreeView
(treeViewSetCursor, onTreeViewRowActivated, treeViewRowActivated,
treeViewScrollToCell, treeViewGetColumn, treeViewGetModel,
TreeView(..), treeViewGetSelection, setTreeViewHeadersVisible,
treeViewAppendColumn, treeViewSetModel, treeViewNew)
import Data.GI.Gtk.ModelView.SeqStore
(seqStoreAppend, seqStoreClear, seqStoreGetValue, SeqStore(..),
seqStoreNew)
import GI.Pango.Structs.FontDescription
(fontDescriptionSetFamily, fontDescriptionNew,
fontDescriptionFromString)
import GI.Gtk.Objects.TreeViewColumn
(noTreeViewColumn, treeViewColumnPackStart, setTreeViewColumnMinWidth,
setTreeViewColumnSizing, treeViewColumnNew)
import GI.Gtk.Enums (TreeViewColumnSizing(..), WindowType(..))
import GI.Gtk.Objects.CellRendererText
(setCellRendererTextText, cellRendererTextNew)
import Data.GI.Gtk.ModelView.CellLayout
(cellLayoutSetDataFunction)
import GI.Gtk.Objects.TreeSelection
(treeSelectionGetSelected, treeSelectionSelectPath,
treeSelectionSelectedForeach,
onTreeSelectionChanged)
import GI.Gtk.Objects.Paned
(panedSetPosition, panedGetPosition, panedGetChild2, Paned(..),
panedGetChild1, panedAdd2, panedAdd1)
import GI.Gdk.Structs.EventKey
(eventKeyReadKeyval, eventKeyReadState)
import GI.Gdk.Functions
(pointerUngrab, pointerGrab, keyvalToUnicode, keyvalName)
import GI.Gtk.Interfaces.TreeModel
(treeModelGetPath, treeModelIterNChildren)
import GI.Gdk.Structs.EventButton
(eventButtonReadTime, eventButtonReadY, eventButtonReadX,
eventButtonReadButton)
import GI.Gdk.Flags (EventMask(..))
import GI.Gdk.Objects.Cursor (noCursor)
import GI.Gdk.Structs.EventMotion
(eventMotionReadY, eventMotionReadX)
import GI.Gtk.Structs.TreePath (treePathGetIndices, TreePath(..))
import Graphics.UI.Frame.Rectangle
(rectangleReadHeight, rectangleReadWidth, rectangleReadY,
rectangleReadX, Rectangle(..))
import GI.Gdk.Objects.Window (windowGetOrigin)
import qualified GI.Gdk.Objects.Window as Gdk (noWindow)
import GI.Gdk.Objects.Screen
(screenGetHeight, screenGetWidth, screenGetMonitorAtPoint)
import Data.GI.Gtk.ModelView.Types
(treePathGetIndices', treePathNewFromIndices')
import Data.Maybe (fromJust)
complete :: TextEditor editor => EditorView editor -> Bool -> IDEAction
complete sourceView always = do
currentState' <- readIDE currentState
prefs' <- readIDE prefs
(_, completion') <- readIDE completion
case (currentState',completion') of
(IsCompleting c, Just (CompletionWindow window tv st)) -> do
isWordChar <- getIsWordChar sourceView
updateOptions window tv st sourceView c isWordChar always
(IsRunning,_) -> when (always || not (completeRestricted prefs'))
(initCompletion sourceView always)
_ -> return ()
cancel :: IDEAction
cancel = do
currentState' <- readIDE currentState
(_, completion') <- readIDE completion
case (currentState',completion') of
(IsCompleting conn , Just (CompletionWindow window tv st)) ->
cancelCompletion window tv st conn
_ -> return ()
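-- | Resize the completion popup; requested sizes of 10 or less in either
-- dimension are ignored (presumably to keep the window from collapsing).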
setCompletionSize :: Int -> Int -> IDEAction
setCompletionSize x y | x > 10 && y > 10 = do
(_, completion) <- readIDE completion
case completion of
Just (CompletionWindow window _ _) -> windowResize window (fromIntegral x) (fromIntegral y)
Nothing -> return ()
modifyIDE_ $ \ide -> ide{completion = ((x, y), completion)}
setCompletionSize _ _ = return ()
getIsWordChar :: forall editor. TextEditor editor => EditorView editor -> IDEM (Char -> Bool)
getIsWordChar sourceView = do
ideR <- ask
buffer <- getBuffer sourceView
(_, end) <- getSelectionBounds buffer
sol <- backwardToLineStartC end
eol <- forwardToLineEndC end
line <- getSlice buffer sol eol False
let isImport = "import " `T.isPrefixOf` line
isIdent a = isAlphaNum a || a == '\'' || a == '_' || (isImport && a == '.')
isOp a = isSymbol a || a == ':' || a == '\\' || a == '*' || a == '/' || a == '-'
|| a == '!' || a == '@' || a == '%' || a == '&' || a == '?'
prev <- backwardCharC end
prevChar <- getChar prev
case prevChar of
Just prevChar | isIdent prevChar -> return isIdent
Just prevChar | isOp prevChar -> return isOp
_ -> return $ const False
initCompletion :: forall editor. TextEditor editor => EditorView editor -> Bool -> IDEAction
initCompletion sourceView always = do
ideR <- ask
((width, height), completion') <- readIDE completion
isWordChar <- getIsWordChar sourceView
case completion' of
Just (CompletionWindow window' tree' store') -> do
cids <- addEventHandling window' sourceView tree' store' isWordChar always
modifyIDE_ (\ide -> ide{currentState = IsCompleting cids})
updateOptions window' tree' store' sourceView cids isWordChar always
Nothing -> do
windows <- getWindows
prefs <- readIDE prefs
window <- windowNew WindowTypePopup
setWindowTypeHint window WindowTypeHintUtility
setWindowDecorated window False
setWindowResizable window True
setWindowDefaultWidth window $ fromIntegral width
setWindowDefaultHeight window $ fromIntegral height
setWindowTransientFor window $ head windows
containerSetBorderWidth window 3
paned <- hPanedNew
containerAdd window paned
nameScrolledWindow <- scrolledWindowNew noAdjustment noAdjustment
widgetSetSizeRequest nameScrolledWindow 250 40
tree <- treeViewNew
containerAdd nameScrolledWindow tree
store <- seqStoreNew []
treeViewSetModel tree (Just store)
font <- case textviewFont prefs of
Just str ->
fontDescriptionFromString str
Nothing -> do
f <- fontDescriptionNew
fontDescriptionSetFamily f "Monospace"
return f
widgetModifyFont tree (Just font)
column <- treeViewColumnNew
setTreeViewColumnSizing column TreeViewColumnSizingFixed
setTreeViewColumnMinWidth column 800 -- OSX does not like it if there is no hscroll
treeViewAppendColumn tree column
renderer <- cellRendererTextNew
treeViewColumnPackStart column renderer True
cellLayoutSetDataFunction column renderer store $ setCellRendererTextText renderer
setTreeViewHeadersVisible tree False
descriptionBuffer <- newDefaultBuffer Nothing ""
descriptionView <- newView descriptionBuffer (textviewFont prefs)
updateStyle descriptionBuffer
descriptionScrolledWindow <- getScrolledWindow descriptionView
visible <- liftIO $ newIORef False
activeView <- liftIO $ newIORef Nothing
treeSelection <- treeViewGetSelection tree
onTreeSelectionChanged treeSelection $
treeSelectionSelectedForeach treeSelection $ \_model treePath _iter ->
reflectIDE (withWord store treePath (\name -> do
description <- getDescription name
setText descriptionBuffer description
)) ideR
panedAdd1 paned nameScrolledWindow
panedAdd2 paned descriptionScrolledWindow
cids <- addEventHandling window sourceView tree store isWordChar always
modifyIDE_ (\ide -> ide{currentState = IsCompleting cids,
completion = ((width, height), Just (CompletionWindow window tree store))})
updateOptions window tree store sourceView cids isWordChar always
addEventHandling :: TextEditor editor => Window -> EditorView editor -> TreeView -> SeqStore Text
-> (Char -> Bool) -> Bool -> IDEM Connections
addEventHandling window sourceView tree store isWordChar always = do
ideR <- ask
cidsPress <- TE.onKeyPress sourceView $ do
e <- lift ask
keyVal <- eventKeyReadKeyval e
name <- keyvalName keyVal
modifier <- eventKeyReadState e
char <- toEnum . fromIntegral <$> keyvalToUnicode keyVal
Just model <- treeViewGetModel tree
selection <- treeViewGetSelection tree
count <- treeModelIterNChildren model Nothing
Just column <- nullToNothing $ treeViewGetColumn tree 0
let whenVisible f = getWidgetVisible tree >>= \case
True -> f
False -> return False
down = whenVisible $ do
maybeRow <- liftIO $ getRow tree
let newRow = maybe 0 (+ 1) maybeRow
when (newRow < count) $ do
path <- treePathNewFromIndices' [newRow]
treeSelectionSelectPath selection path
treeViewScrollToCell tree (Just path) noTreeViewColumn False 0 0
return True
up = whenVisible $ do
maybeRow <- liftIO $ getRow tree
let newRow = maybe 0 (\ row -> row - 1) maybeRow
when (newRow >= 0) $ do
path <- treePathNewFromIndices' [newRow]
treeSelectionSelectPath selection path
treeViewScrollToCell tree (Just path) noTreeViewColumn False 0 0
return True
case (name, modifier, char) of
(Just "Tab", _, _) -> whenVisible . liftIDE $ do
tryToUpdateOptions window tree store sourceView True isWordChar always
return True
(Just "Return", _, _) -> whenVisible $ do
maybeRow <- liftIO $ getRow tree
case maybeRow of
Just row -> do
path <- treePathNewFromIndices' [row]
treeViewRowActivated tree path column
return True
Nothing -> do
liftIDE cancel
return False
(Just "Down", _, _) -> down
(Just "Up", _, _) -> up
(Just super, _, 'a') | super `elem` ["Super_L", "Super_R"] -> do
liftIO $ debugM "leksah" "Completion - Super 'a' key press"
down
(Just super, _, 'l') | super `elem` ["Super_L", "Super_R"] -> do
liftIO $ debugM "leksah" "Completion - Super 'l' key press"
up
(_, _, c) | isWordChar c -> return False
(Just "BackSpace", _, _) -> return False
(Just key, _, _) | key `elem` ["Shift_L", "Shift_R", "Super_L", "Super_R"] -> return False
_ -> do liftIDE cancel
return False
cidsRelease <- TE.onKeyRelease sourceView $ do
e <- lift ask
name <- eventKeyReadKeyval e >>= keyvalName
modifier <- eventKeyReadState e
case (name, modifier) of
(Just "BackSpace", _) -> do
liftIDE $ complete sourceView False
return False
_ -> return False
resizeHandler <- liftIO $ newIORef Nothing
idButtonPress <- ConnectC window <$> onWidgetButtonPressEvent window (\e -> do
button <- eventButtonReadButton e
x <- eventButtonReadX e
y <- eventButtonReadY e
time <- eventButtonReadTime e
nullToNothing (widgetGetWindow window) >>= \case
Nothing -> return ()
Just drawWindow -> do
status <- pointerGrab
drawWindow
False
[EventMaskPointerMotionMask, EventMaskButtonReleaseMask]
Gdk.noWindow
noCursor
time
when (status == GrabStatusSuccess) $ do
(width, height) <- windowGetSize window
liftIO $ writeIORef resizeHandler $ Just $ \newX newY ->
reflectIDE (
setCompletionSize (fromIntegral width + floor (newX - x)) (fromIntegral height + floor (newY - y))) ideR
return True)
idMotion <- ConnectC window <$> onWidgetMotionNotifyEvent window (\e -> do
mbResize <- readIORef resizeHandler
case mbResize of
Just resize -> do
x <- eventMotionReadX e
y <- eventMotionReadY e
resize x y
return True
Nothing -> return False)
idButtonRelease <- ConnectC window <$> onWidgetButtonReleaseEvent window (\e -> do
mbResize <- liftIO $ readIORef resizeHandler
case mbResize of
Just resize -> do
x <- eventButtonReadX e
y <- eventButtonReadY e
resize x y
eventButtonReadTime e >>= pointerUngrab
liftIO $ writeIORef resizeHandler Nothing
return True
Nothing -> return False)
idSelected <- ConnectC tree <$> onTreeViewRowActivated tree (\treePath column -> (`reflectIDE` ideR) $ do
withWord store treePath (replaceWordStart sourceView isWordChar)
postAsyncIDE cancel)
return $ concat [cidsPress, cidsRelease, [idButtonPress, idMotion, idButtonRelease, idSelected]]
withWord :: SeqStore Text -> TreePath -> (Text -> IDEM ()) -> IDEM ()
withWord store treePath f =
treePathGetIndices' treePath >>= \case
[row] -> do
value <- seqStoreGetValue store row
f value
_ -> return ()
replaceWordStart :: TextEditor editor => EditorView editor -> (Char -> Bool) -> Text -> IDEM ()
replaceWordStart sourceView isWordChar name = do
buffer <- getBuffer sourceView
(selStart, selEnd) <- getSelectionBounds buffer
start <- findWordStart selStart isWordChar
wordStart <- getText buffer start selEnd True
case T.stripPrefix wordStart name of
Just extra -> do
end <- findWordEnd selEnd isWordChar
wordFinish <- getText buffer selEnd end True
case T.stripPrefix wordFinish extra of
Just extra2 | not (T.null wordFinish) -> do
selectRange buffer end end
insert buffer end extra2
_ -> insert buffer selEnd extra
Nothing -> return ()
cancelCompletion :: Window -> TreeView -> SeqStore Text -> Connections -> IDEAction
cancelCompletion window tree store connections = do
seqStoreClear (store :: SeqStore Text)
signalDisconnectAll connections
widgetHide window
modifyIDE_ (\ide -> ide{currentState = IsRunning})
updateOptions :: forall editor. TextEditor editor => Window -> TreeView -> SeqStore Text -> EditorView editor -> Connections -> (Char -> Bool) -> Bool -> IDEAction
updateOptions window tree store sourceView connections isWordChar always = do
result <- tryToUpdateOptions window tree store sourceView False isWordChar always
unless result $ cancelCompletion window tree store connections
tryToUpdateOptions :: TextEditor editor => Window -> TreeView -> SeqStore Text -> EditorView editor -> Bool -> (Char -> Bool) -> Bool -> IDEM Bool
tryToUpdateOptions window tree store sourceView selectLCP isWordChar always = do
ideR <- ask
seqStoreClear (store :: SeqStore Text)
buffer <- getBuffer sourceView
(selStart, end) <- getSelectionBounds buffer
start <- findWordStart selStart isWordChar
equal <- iterEqual start end
if equal
then return False
else do
wordStart <- getText buffer start end True
liftIO $ do -- don't use postGUIAsync - it causes bugs related to several repeated tryToUpdateOptions calls in a thread
reflectIDE (do
options <- getCompletionOptions wordStart
processResults window tree store sourceView wordStart options selectLCP isWordChar always) ideR
return ()
return True
findWordStart :: TextEditor editor => EditorIter editor -> (Char -> Bool) -> IDEM (EditorIter editor)
findWordStart iter isWordChar = do
maybeWS <- backwardFindCharC iter (not . isWordChar) Nothing
case maybeWS of
Nothing -> atOffset iter 0
Just ws -> forwardCharC ws
findWordEnd :: TextEditor editor => EditorIter editor -> (Char -> Bool) -> IDEM (EditorIter editor)
findWordEnd iter isWordChar = do
maybeWE <- forwardFindCharC iter (not . isWordChar) Nothing
case maybeWE of
Nothing -> forwardToLineEndC iter
Just we -> return we
longestCommonPrefix :: Text -> Text -> Text
longestCommonPrefix a b = case T.commonPrefixes a b of
Nothing -> T.empty
Just (p, _, _) -> p
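-- e.g. longestCommonPrefix "foldMap" "foldr" == "fold"; two texts with no
-- shared prefix (such as "map" and "zip") give the empty text.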
processResults :: TextEditor editor => Window -> TreeView -> SeqStore Text -> EditorView editor -> Text -> [Text]
-> Bool -> (Char -> Bool) -> Bool -> IDEAction
processResults window tree store sourceView wordStart options selectLCP isWordChar always =
case options of
[] -> cancel
_ | not always && (not . null $ drop 200 options) -> cancel
_ -> do
buffer <- getBuffer sourceView
(selStart, end) <- getSelectionBounds buffer
start <- findWordStart selStart isWordChar
currentWordStart <- getText buffer start end True
let newWordStart = if selectLCP && currentWordStart == wordStart && not (null options)
then foldl1 longestCommonPrefix options
else currentWordStart
when (T.isPrefixOf wordStart newWordStart) $ do
seqStoreClear store
let newOptions = List.filter (T.isPrefixOf newWordStart) options
forM_ (take 200 newOptions) (seqStoreAppend store)
rect <- getIterLocation sourceView start
startx <- rectangleReadX rect
starty <- rectangleReadY rect
width <- rectangleReadWidth rect
height <- rectangleReadHeight rect
(wWindow, hWindow) <- windowGetSize window
(x, y) <- bufferToWindowCoords sourceView (fromIntegral startx, fromIntegral (starty+height))
mbDrawWindow <- getWindow sourceView
case mbDrawWindow of
Nothing -> return ()
Just drawWindow -> do
(_, ox, oy) <- windowGetOrigin drawWindow
Just namesSW <- nullToNothing $ widgetGetParent tree
rNames <- widgetGetAllocation namesSW
wNames <- rectangleReadWidth rNames
hNames <- rectangleReadHeight rNames
paned <- nullToNothing (widgetGetParent namesSW) >>= liftIO . unsafeCastTo Paned . fromJust
Just first <- nullToNothing $ panedGetChild1 paned
Just second <- nullToNothing $ panedGetChild2 paned
screen <- windowGetScreen window
monitor <- screenGetMonitorAtPoint screen (ox+fromIntegral x) (oy+fromIntegral y)
monitorLeft <- screenGetMonitorAtPoint screen (ox+fromIntegral x-wWindow+wNames) (oy+fromIntegral y)
monitorRight <- screenGetMonitorAtPoint screen (ox+fromIntegral x+wWindow) (oy+fromIntegral y)
monitorBelow <- screenGetMonitorAtPoint screen (ox+fromIntegral x) (oy+fromIntegral y+hWindow)
wScreen <- screenGetWidth screen
hScreen <- screenGetHeight screen
top <- if monitorBelow /= monitor || (oy+fromIntegral y+hWindow) > hScreen
then do
sourceSW <- getScrolledWindow sourceView
hSource <- widgetGetAllocation sourceSW >>= rectangleReadHeight
scrollToIter sourceView end 0.1 (Just (1.0, 1.0 - (fromIntegral hWindow / fromIntegral hSource)))
(_, newy) <- bufferToWindowCoords sourceView (fromIntegral startx, fromIntegral (starty+height))
return (oy+fromIntegral newy)
else return (oy+fromIntegral y)
swap <- if (monitorRight /= monitor || (ox+fromIntegral x+wWindow) > wScreen) && monitorLeft == monitor && (ox+fromIntegral x-wWindow+wNames) > 0
then do
windowMove window (ox+fromIntegral x-wWindow+wNames) top
return $ unsafeManagedPtrGetPtr first == unsafeManagedPtrGetPtr namesSW
else do
windowMove window (ox+fromIntegral x) top
return $ unsafeManagedPtrGetPtr first /= unsafeManagedPtrGetPtr namesSW
when swap $ do
pos <- panedGetPosition paned
containerRemove paned first
containerRemove paned second
panedAdd1 paned second
panedAdd2 paned first
panedSetPosition paned (wWindow-pos)
unless (null newOptions) $ do
path <- treePathNewFromIndices' [0]
treeViewSetCursor tree path noTreeViewColumn False
widgetShowAll window
when (newWordStart /= currentWordStart) $
replaceWordStart sourceView isWordChar newWordStart
getRow tree = do
Just model <- treeViewGetModel tree
selection <- treeViewGetSelection tree
treeSelectionGetSelected selection >>= \case
(True, _, iter) -> do
[row] <- treeModelGetPath model iter >>= treePathGetIndices
return $ Just row
_ -> return Nothing
|
JPMoresmau/leksah
|
src/IDE/Completion.hs
|
gpl-2.0
| 24,845 | 0 | 32 | 7,903 | 6,341 | 3,119 | 3,222 | 459 | 16 |
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies
, FlexibleInstances #-}
{- |
Module : $Header$
Description : Symbols and signature morphisms for the CASL logic
Copyright : (c) Christian Maeder, Till Mossakowski and Uni Bremen 2002-2004
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable (MPTC+FD)
Symbols and signature morphisms for the CASL logic
-}
module CASL.Morphism
( SymbolSet
, SymbolMap
, RawSymbol (..)
, Morphism (..)
, idMor
, legalMor
, DefMorExt (..)
, emptyMorExt
, MorphismExtension (..)
, retExtMap
, CASLMor
, isInclusionMorphism
, isSortInjective
, isInjective
, Sort_map
, Pred_map
, Op_map
, embedMorphism
, mapCASLMor
, sigInclusion
, composeM
, plainMorphismUnion
, morphismUnion
, morphismUnionM
, idOrInclMorphism
, morphismToSymbMap
, symsetOf
, symOf
, sigSymsOf
, addSigM
, idToRaw
, typedSymbKindToRaw
, symbolToRaw
, insertRsys
, mapSort
, mapOpSym
, mapPredSym
, mapOpType
, mapPredType
, matches
, compatibleOpTypes
, imageOfMorphism
, RawSymbolMap
, InducedSign
, inducedSignAux
, rawSymName
, inducedOpMap
, inducedPredMap
, statSymbMapItems
, statSymbItems
) where
import CASL.Sign
import CASL.AS_Basic_CASL
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Common.Lib.MapSet as MapSet
import qualified Common.Lib.Rel as Rel
import Common.Doc
import Common.DocUtils
import Common.Id
import Common.Result
import Common.Utils (composeMap)
import Control.Monad
type SymbolSet = Set.Set Symbol
type SymbolMap = Map.Map Symbol Symbol
data RawSymbol = ASymbol Symbol | AKindedSymb SYMB_KIND Id
deriving (Show, Eq, Ord)
instance GetRange RawSymbol where
getRange rs = case rs of
ASymbol s -> getRange s
AKindedSymb _ i -> getRange i
type RawSymbolMap = Map.Map RawSymbol RawSymbol
type Sort_map = Map.Map SORT SORT
-- always use the partial profile as key!
type Op_map = Map.Map (Id, OpType) (Id, OpKind)
type Pred_map = Map.Map (Id, PredType) Id
data Morphism f e m = Morphism
{ msource :: Sign f e
, mtarget :: Sign f e
, sort_map :: Sort_map
, op_map :: Op_map
, pred_map :: Pred_map
, extended_map :: m
} deriving (Show, Eq, Ord)
data DefMorExt e = DefMorExt e
instance Show (DefMorExt e) where
show = const ""
instance Ord (DefMorExt e) where
compare _ = const EQ
instance Eq (DefMorExt e) where
(==) e = (== EQ) . compare e
emptyMorExt :: DefMorExt e
emptyMorExt = DefMorExt $ error "emptyMorExt"
instance Pretty (DefMorExt e) where
pretty _ = empty
class (Pretty e, Pretty m) => MorphismExtension e m | m -> e where
ideMorphismExtension :: e -> m
composeMorphismExtension :: Morphism f e m -> Morphism f e m -> Result m
inverseMorphismExtension :: Morphism f e m -> Result m
inverseMorphismExtension = return . extended_map
isInclusionMorphismExtension :: m -> Bool
prettyMorphismExtension :: Morphism f e m -> Doc
prettyMorphismExtension = pretty . extended_map
legalMorphismExtension :: Morphism f e m -> Result ()
legalMorphismExtension _ = return ()
instance MorphismExtension () () where
ideMorphismExtension _ = ()
composeMorphismExtension _ = return . extended_map
isInclusionMorphismExtension _ = True
instance Pretty e => MorphismExtension e (DefMorExt e) where
ideMorphismExtension _ = emptyMorExt
composeMorphismExtension _ = return . extended_map
isInclusionMorphismExtension _ = True
type CASLMor = Morphism () () ()
isInclusionMorphism :: (m -> Bool) -> Morphism f e m -> Bool
isInclusionMorphism f m = f (extended_map m) && Map.null (sort_map m)
&& Map.null (pred_map m) && isInclOpMap (op_map m)
mapSort :: Sort_map -> SORT -> SORT
mapSort sorts s = Map.findWithDefault s s sorts
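-- e.g. with sm = Map.fromList [(s, t)] (hypothetical sorts s and t),
-- mapSort sm s == t, while any sort missing from the map is returned unchanged.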
mapOpType :: Sort_map -> OpType -> OpType
mapOpType sorts t = if Map.null sorts then t else
t { opArgs = map (mapSort sorts) $ opArgs t
, opRes = mapSort sorts $ opRes t }
makeTotal :: OpKind -> OpType -> OpType
makeTotal fk t = case fk of
Total -> mkTotal t
_ -> t
mapOpSym :: Sort_map -> Op_map -> (Id, OpType) -> (Id, OpType)
mapOpSym sMap oMap (i, ot) = let mot = mapOpType sMap ot in
case Map.lookup (i, mkPartial ot) oMap of
Nothing -> (i, mot)
Just (j, k) -> (j, makeTotal k mot)
-- | Check if two OpTypes are equal modulo totality or partiality
compatibleOpTypes :: OpType -> OpType -> Bool
compatibleOpTypes ot1 ot2 = opSorts ot1 == opSorts ot2
mapPredType :: Sort_map -> PredType -> PredType
mapPredType sorts t = if Map.null sorts then t else
t { predArgs = map (mapSort sorts) $ predArgs t }
mapPredSym :: Sort_map -> Pred_map -> (Id, PredType) -> (Id, PredType)
mapPredSym sMap oMap (i, pt) =
(Map.findWithDefault i (i, pt) oMap, mapPredType sMap pt)
embedMorphism :: m -> Sign f e -> Sign f e -> Morphism f e m
embedMorphism extEm a b = Morphism
{ msource = a
, mtarget = b
, sort_map = Map.empty
, op_map = Map.empty
, pred_map = Map.empty
, extended_map = extEm }
-- | convert a morphism, given (empty) signature and morphism extensions
mapCASLMor :: e -> m -> Morphism f1 e1 m1 -> Morphism f e m
mapCASLMor e me m =
(embedMorphism me (embedSign e $ msource m) $ embedSign e $ mtarget m)
{ sort_map = sort_map m
, op_map = op_map m
, pred_map = pred_map m }
symbolToRaw :: Symbol -> RawSymbol
symbolToRaw = ASymbol
idToRaw :: Id -> RawSymbol
idToRaw = AKindedSymb Implicit
rawSymName :: RawSymbol -> Id
rawSymName rs = case rs of
ASymbol sym -> symName sym
AKindedSymb _ i -> i
sortSyms :: Sign f e -> SymbolSet
sortSyms = Set.map idToSortSymbol . sortSet
opSyms :: Sign f e -> [Symbol]
opSyms = map (uncurry idToOpSymbol) . mapSetToList . opMap
predSyms :: Sign f e -> [Symbol]
predSyms = map (uncurry idToPredSymbol) . mapSetToList . predMap
{- | returns the symbol sets of the signature in the correct dependency order,
i.e., sorts first, then ops and predicates. Result list is of length two. -}
symOf :: Sign f e -> [SymbolSet]
symOf s = [ sortSyms s, Set.fromList $ predSyms s ++ opSyms s ]
sigSymsOf :: Sign f e -> [Symbol]
sigSymsOf s = concat
[ Set.toList $ sortSyms s
, map (\ (a, b) -> Symbol a $ SubsortAsItemType b)
. Rel.toList . Rel.transReduce . Rel.irreflex $ sortRel s
-- assume sort relation to be the transitive closure
, opSyms s
, predSyms s ]
-- | set of symbols for a signature
symsetOf :: Sign f e -> SymbolSet
symsetOf = Set.unions . symOf
checkSymbList :: [SYMB_OR_MAP] -> [Diagnosis]
checkSymbList l = case l of
Symb (Symb_id a) : Symb (Qual_id b t _) : r -> mkDiag Warning
("profile '" ++ showDoc t "' does not apply to '"
++ showId a "' but only to") b : checkSymbList r
_ : r -> checkSymbList r
[] -> []
insertRsys :: (Pretty r, GetRange r, Ord r)
=> (r -> Id) -> (r -> Maybe Id) -> (Id -> r)
-> (r -> Maybe Id) -> (Id -> r) -> Map.Map r r -> (r, r)
-> Result (Map.Map r r)
insertRsys rawId getSort mkSort getImplicit mkImplicit m1 (rsy1, rsy2) =
let m3 = Map.insert rsy1 rsy2 m1 in
case Map.lookup rsy1 m1 of
Nothing -> case getSort rsy1 of
Just i ->
case Map.lookup (mkImplicit i) m1 of
Just r2 | Just (rawId rsy2) == getImplicit r2 ->
warning m1 ("ignoring separate mapping for sort " ++
show i) $ getRange i
_ -> return m3
Nothing -> case getImplicit rsy1 of
Just i -> let rsy3 = mkSort i in case Map.lookup rsy3 m1 of
Just r2 | Just (rawId rsy2) == getSort r2 ->
warning (Map.delete rsy3 m3)
("ignoring extra mapping of sort " ++
show i) $ getRange i
{- this case cannot occur, because unkinded names cannot
follow kinded ones:
in "sort s |-> t, o |-> q" "o" will be a sort, too. -}
_ -> return m3
_ -> return m3
Just rsy3 -> if rsy2 == rsy3 then
hint m1 ("ignoring duplicate mapping of "
++ showDoc rsy1 "") $ getRange rsy1 else
plain_error m1 ("Symbol " ++ showDoc rsy1 " mapped twice to "
++ showDoc rsy2 " and " ++ showDoc rsy3 "") nullRange
statSymbMapItems :: Sign f e -> Maybe (Sign f e) -> [SYMB_MAP_ITEMS]
-> Result RawSymbolMap
statSymbMapItems sig msig sl = do
let st (Symb_map_items kind l _) = do
appendDiags $ checkSymbList l
fmap concat $ mapM (symbOrMapToRaw sig msig kind) l
getSort rsy = case rsy of
ASymbol (Symbol i SortAsItemType) -> Just i
_ -> Nothing
getImplicit rsy = case rsy of
AKindedSymb Implicit i -> Just i
_ -> Nothing
mkSort i = ASymbol $ Symbol i SortAsItemType
mkImplicit = AKindedSymb Implicit
ls <- mapM st sl
foldM (insertRsys rawSymName getSort mkSort getImplicit mkImplicit)
Map.empty (concat ls)
symbOrMapToRaw :: Sign f e -> Maybe (Sign f e) -> SYMB_KIND -> SYMB_OR_MAP
-> Result [(RawSymbol, RawSymbol)]
symbOrMapToRaw sig msig k sm = case sm of
Symb s -> do
v <- symbToRaw True sig k s
return [(v, v)]
Symb_map s t _ -> do
appendDiags $ case (s, t) of
(Symb_id a, Symb_id b) | a == b ->
[mkDiag Hint "unneeded identical mapping of" a]
_ -> []
w <- symbToRaw True sig k s
x <- case msig of
Nothing -> symbToRaw False sig k t
Just tsig -> symbToRaw True tsig k t
let mkS = ASymbol . idToSortSymbol
case (s, t) of
(Qual_id _ t1 _, Qual_id _ t2 _) -> case (t1, t2) of
(O_type (Op_type _ args1 res1 _), O_type (Op_type _ args2 res2 _))
| length args1 == length args2 -> -- ignore partiality
return $ (w, x) : (mkS res1, mkS res2)
: zipWith (\ s1 s2 -> (mkS s1, mkS s2)) args1 args2
(P_type (Pred_type args1 _), P_type (Pred_type args2 _))
| length args1 == length args2 ->
return $ (w, x)
: zipWith (\ s1 s2 -> (mkS s1, mkS s2)) args1 args2
(O_type (Op_type _ [] res1 _), A_type s2) ->
return [(w, x), (mkS res1, mkS s2)]
(A_type s1, O_type (Op_type _ [] res2 _)) ->
return [(w, x), (mkS s1, mkS res2)]
(A_type s1, A_type s2) ->
return [(w, x), (mkS s1, mkS s2)]
_ -> fail $ "profiles '" ++ showDoc t1 "' and '"
++ showDoc t2 "' do not match"
_ -> return [(w, x)]
statSymbItems :: Sign f e -> [SYMB_ITEMS] -> Result [RawSymbol]
statSymbItems sig sl =
let st (Symb_items kind l _) = do
appendDiags $ checkSymbList $ map Symb l
mapM (symbToRaw True sig kind) l
in fmap concat (mapM st sl)
-- | the Bool argument indicates whether a deeper symbol check is possible for target symbols
symbToRaw :: Bool -> Sign f e -> SYMB_KIND -> SYMB -> Result RawSymbol
symbToRaw b sig k si = case si of
Symb_id idt -> return $ case k of
Sorts_kind -> ASymbol $ idToSortSymbol idt
_ -> AKindedSymb k idt
Qual_id idt t _ -> typedSymbKindToRaw b sig k idt t
typedSymbKindToRaw :: Bool -> Sign f e -> SYMB_KIND -> Id -> TYPE
-> Result RawSymbol
typedSymbKindToRaw b sig k idt t = let
pm = predMap sig
om = opMap sig
getSet = MapSet.lookup idt
err = plain_error (AKindedSymb Implicit idt)
(showDoc idt ":" ++ showDoc t
"does not have kind" ++ showDoc k "") (getRange idt)
aSymb = ASymbol $ case t of
O_type ot -> idToOpSymbol idt $ toOpType ot
P_type pt -> idToPredSymbol idt $ toPredType pt
A_type s -> idToOpSymbol idt $ sortToOpType s
unKnown = do
appendDiags [mkDiag Error "unknown symbol" aSymb]
return aSymb
in case k of
Implicit -> case t of
A_type s -> if b then do
let pt = sortToPredType s
ot = sortToOpType s
pot = mkPartial ot
hasPred = Set.member pt $ getSet pm
hasOp = Set.member ot $ getSet om
hasPOp = Set.member pot $ getSet om
bothWarn = when hasPred $
appendDiags [mkDiag Warning "considering operation only" idt]
if hasOp then do
appendDiags [mkDiag Hint "matched constant" idt]
bothWarn
return aSymb
else if hasPOp then do
bothWarn
appendDiags [mkDiag Warning "constant is partial" idt]
return $ ASymbol $ idToOpSymbol idt pot
else if hasPred then do
appendDiags [mkDiag Hint "matched unary predicate" idt]
return $ ASymbol $ idToPredSymbol idt pt
else unKnown
else do
appendDiags [mkDiag Warning "qualify name as pred or op" idt]
return aSymb
_ -> return aSymb
Ops_kind -> case t of
P_type _ -> err
_ ->
let ot = case t of
O_type aot -> toOpType aot
A_type s -> sortToOpType s
P_type _ -> error "CASL.typedSymbKindToRaw.Ops_kind"
pot = mkPartial ot
isMem aot = Set.member aot $ getSet om
in if b then
if isMem ot then return aSymb
else if isMem pot then do
appendDiags [mkDiag Warning "operation is partial" idt]
return $ ASymbol $ idToOpSymbol idt pot
else unKnown
else return aSymb
Preds_kind -> case t of
O_type _ -> err
_ ->
let pt = case t of
A_type s -> sortToPredType s
P_type qt -> toPredType qt
O_type _ -> error "CASL.typedSymbKindToRaw.Preds_kind"
pSymb = ASymbol $ idToPredSymbol idt pt
in if b then
if Set.member pt $ getSet pm then do
appendDiags [mkDiag Hint "matched predicate" idt]
return pSymb
else unKnown
else return pSymb
Sorts_kind -> err
morphismToSymbMap :: Morphism f e m -> SymbolMap
morphismToSymbMap = morphismToSymbMapAux False
morphismToSymbMapAux :: Bool -> Morphism f e m -> SymbolMap
morphismToSymbMapAux b mor = let
src = msource mor
sorts = sort_map mor
ops = op_map mor
preds = pred_map mor
sortSymMap = Set.fold
(\ s -> let t = mapSort sorts s in
if b && s == t then id else
Map.insert (idToSortSymbol s) $ idToSortSymbol t)
Map.empty $ sortSet src
opSymMap = MapSet.foldWithKey
( \ i t -> let (j, k) = mapOpSym sorts ops (i, t) in
if b && i == j && opKind k == opKind t then id else
Map.insert (idToOpSymbol i t) $ idToOpSymbol j k)
Map.empty $ opMap src
predSymMap = MapSet.foldWithKey
( \ i t -> let (j, k) = mapPredSym sorts preds (i, t) in
if b && i == j then id else
Map.insert (idToPredSymbol i t) $ idToPredSymbol j k)
Map.empty $ predMap src
in foldr Map.union sortSymMap [opSymMap, predSymMap]
matches :: Symbol -> RawSymbol -> Bool
matches (Symbol idt k) rs = case rs of
ASymbol (Symbol id2 k2) -> idt == id2 && case (k, k2) of
(OpAsItemType ot, OpAsItemType ot2) -> compatibleOpTypes ot ot2
_ -> k == k2
AKindedSymb rk di -> let res = idt == di in case (k, rk) of
(_, Implicit) -> res
(SortAsItemType, Sorts_kind) -> res
(OpAsItemType _, Ops_kind) -> res
(PredAsItemType _, Preds_kind) -> res
_ -> False
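-- e.g. any symbol named i matches 'AKindedSymb Implicit i' regardless of its
-- kind, whereas 'AKindedSymb Ops_kind i' matches only operation symbols named i.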
idMor :: m -> Sign f e -> Morphism f e m
idMor extEm sigma = embedMorphism extEm sigma sigma
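-- | Compose two morphisms; the first argument combines the two extension
-- parts. Entries that would map a symbol to itself are left out of the
-- resulting op and pred maps.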
composeM :: Eq e => (Morphism f e m -> Morphism f e m -> Result m)
-> Morphism f e m -> Morphism f e m -> Result (Morphism f e m)
composeM comp mor1 mor2 = do
let sMap1 = sort_map mor1
src = msource mor1
tar = mtarget mor2
oMap1 = op_map mor1
pMap1 = pred_map mor1
sMap2 = sort_map mor2
oMap2 = op_map mor2
pMap2 = pred_map mor2
sMap = composeMap (MapSet.setToMap $ sortSet src) sMap1 sMap2
oMap = if Map.null oMap2 then oMap1 else
MapSet.foldWithKey ( \ i ot ->
let (ni, nt) = mapOpSym sMap2 oMap2
$ mapOpSym sMap1 oMap1 (i, ot)
k = opKind nt
in if i == ni && opKind ot == k then id else
Map.insert (i, mkPartial ot) (ni, k))
Map.empty $ opMap src
pMap = if Map.null pMap2 then pMap1 else
MapSet.foldWithKey ( \ i pt ->
let ni = fst $ mapPredSym sMap2 pMap2
$ mapPredSym sMap1 pMap1 (i, pt)
in if i == ni then id else Map.insert (i, pt) ni)
Map.empty $ predMap src
extComp <- comp mor1 mor2
let emb = embedMorphism extComp src tar
return $ cleanMorMaps emb
{ sort_map = sMap
, op_map = oMap
, pred_map = pMap }
legalSign :: Sign f e -> Bool
legalSign sigma =
MapSet.setAll legalSort (MapSet.setElems . Rel.toMap $ sortRel sigma)
&& MapSet.all legalOpType (opMap sigma)
&& MapSet.all legalPredType (predMap sigma)
where sorts = sortSet sigma
legalSort s = Set.member s sorts
legalOpType t = -- omitted for VSE Boolean: legalSort (opRes t)
all legalSort (opArgs t)
legalPredType = all legalSort . predArgs
-- | the image of a signature morphism
imageOfMorphism :: Morphism f e m -> Sign f e
imageOfMorphism m = imageOfMorphismAux (const $ extendedInfo $ mtarget m) m
-- | the generalized image of a signature morphism
imageOfMorphismAux :: (Morphism f e m -> e) -> Morphism f e m -> Sign f e
imageOfMorphismAux fE m =
inducedSignAux (\ _ _ _ _ _ -> fE m)
(sort_map m) (op_map m) (pred_map m) (extended_map m) (msource m)
type InducedSign f e m r =
Sort_map -> Op_map -> Pred_map -> m -> Sign f e -> r
-- | the induced signature image of a signature morphism
inducedSignAux :: InducedSign f e m e -> InducedSign f e m (Sign f e)
inducedSignAux f sm om pm em src =
let ms = mapSort sm
msorts = Set.map ms
in (emptySign $ f sm om pm em src)
{ sortRel = Rel.transClosure . Rel.irreflex . Rel.map ms $ sortRel src
-- sorts may fall together under the map; the resulting trivial (reflexive) pairs are removed
, emptySortSet = msorts $ emptySortSet src
, opMap = inducedOpMap sm om $ opMap src
, assocOps = inducedOpMap sm om $ assocOps src
, predMap = inducedPredMap sm pm $ predMap src
, annoMap = inducedAnnoMap sm om pm $ annoMap src
}
inducedOpMap :: Sort_map -> Op_map -> OpMap -> OpMap
inducedOpMap sm fm = MapSet.foldWithKey
(\ i ot -> let (j, nt) = mapOpSym sm fm (i, ot)
in MapSet.insert j nt) MapSet.empty
inducedPredMap :: Sort_map -> Pred_map -> PredMap -> PredMap
inducedPredMap sm pm = MapSet.foldWithKey
( \ i pt -> let (j, nt) = mapPredSym sm pm (i, pt)
in MapSet.insert j nt) MapSet.empty
inducedAnnoMap :: Sort_map -> Op_map -> Pred_map -> AnnoMap -> AnnoMap
inducedAnnoMap sm om pm = MapSet.foldWithKey
( \ sy s -> MapSet.insert (mapSymbol sm om pm sy) s) MapSet.empty
mapSymbol :: Sort_map -> Op_map -> Pred_map -> Symbol -> Symbol
mapSymbol sm om pm (Symbol i ty) = case ty of
SortAsItemType -> Symbol (mapSort sm i) SortAsItemType
SubsortAsItemType j ->
Symbol (mapSort sm i) $ SubsortAsItemType $ mapSort sm j
OpAsItemType ot -> let (j, nt) = mapOpSym sm om (i, ot) in
Symbol j $ OpAsItemType nt
PredAsItemType pt -> let (j, nt) = mapPredSym sm pm (i, pt) in
Symbol j $ PredAsItemType nt
legalMor :: MorphismExtension e m => Morphism f e m -> Result ()
legalMor mor =
let s1 = msource mor
s2 = mtarget mor
sm = sort_map mor
msorts = Set.map $ mapSort sm
in if legalSign s1
&& Set.isSubsetOf (msorts $ sortSet s1) (sortSet s2)
&& Set.isSubsetOf (msorts $ emptySortSet s1) (emptySortSet s2)
&& isSubOpMap (inducedOpMap sm (op_map mor) $ opMap s1) (opMap s2)
&& isSubMap (inducedPredMap sm (pred_map mor) $ predMap s1) (predMap s2)
&& legalSign s2
then legalMorphismExtension mor else fail "illegal CASL morphism"
isInclOpMap :: Op_map -> Bool
isInclOpMap = all (\ ((i, _), (j, _)) -> i == j) . Map.toList
idOrInclMorphism :: Morphism f e m -> Morphism f e m
idOrInclMorphism m =
let src = opMap $ msource m
tar = opMap $ mtarget m
in if isSubOpMap tar src then m
else let diffOpMap = MapSet.toMap $ MapSet.difference src tar in
m { op_map = Map.fromList $ concatMap (\ (i, s) ->
map (\ t -> ((i, t), (i, Total)))
$ Set.toList s) $ Map.toList diffOpMap }
sigInclusion :: m -- ^ computed extended morphism
-> Sign f e -> Sign f e -> Result (Morphism f e m)
sigInclusion extEm sigma1 =
return . idOrInclMorphism . embedMorphism extEm sigma1
addSigM :: Monad m => (e -> e -> m e) -> Sign f e -> Sign f e -> m (Sign f e)
addSigM f a b = do
e <- f (extendedInfo a) $ extendedInfo b
return $ addSig (const $ const e) a b
plainMorphismUnion :: (e -> e -> e) -- ^ join signature extensions
-> Morphism f e m -> Morphism f e m -> Result (Morphism f e m)
plainMorphismUnion = morphismUnion retExtMap
retExtMap :: b -> Morphism f e m -> Result m
retExtMap = const $ return . extended_map
morphismUnion :: (Morphism f e m -> Morphism f e m -> Result m)
-- ^ join morphism extensions
-> (e -> e -> e) -- ^ join signature extensions
-> Morphism f e m -> Morphism f e m -> Result (Morphism f e m)
morphismUnion uniteM addSigExt =
morphismUnionM uniteM (\ e -> return . addSigExt e)
morphismUnionM :: (Morphism f e m -> Morphism f e m -> Result m)
-- ^ join morphism extensions
-> (e -> e -> Result e) -- ^ join signature extensions
-> Morphism f e m -> Morphism f e m -> Result (Morphism f e m)
-- consider identity mappings but filter them eventually
morphismUnionM uniteM addSigExt mor1 mor2 =
let smap1 = sort_map mor1
smap2 = sort_map mor2
s1 = msource mor1
s2 = msource mor2
us1 = Set.difference (sortSet s1) $ Map.keysSet smap1
us2 = Set.difference (sortSet s2) $ Map.keysSet smap2
omap1 = op_map mor1
omap2 = op_map mor2
uo1 = foldr delOp (opMap s1) $ Map.keys omap1
uo2 = foldr delOp (opMap s2) $ Map.keys omap2
delOp (n, ot) m = diffOpMapSet m $ MapSet.fromList [(n, [mkTotal ot])]
uo = addOpMapSet uo1 uo2
pmap1 = pred_map mor1
pmap2 = pred_map mor2
up1 = foldr delPred (predMap s1) $ Map.keys pmap1
up2 = foldr delPred (predMap s2) $ Map.keys pmap2
up = addMapSet up1 up2
delPred (n, pt) = MapSet.delete n pt
(sds, smap) = foldr ( \ (i, j) (ds, m) -> case Map.lookup i m of
Nothing -> (ds, Map.insert i j m)
Just k -> if j == k then (ds, m) else
(Diag Error
("incompatible mapping of sort " ++ showId i " to "
++ showId j " and " ++ showId k "")
nullRange : ds, m)) ([], smap1)
(Map.toList smap2 ++ map (\ a -> (a, a))
(Set.toList $ Set.union us1 us2))
(ods, omap) = foldr ( \ (isc@(i, ot), jsc@(j, t)) (ds, m) ->
case Map.lookup isc m of
Nothing -> (ds, Map.insert isc jsc m)
Just (k, p) -> if j == k then if p == t then (ds, m)
else (ds, Map.insert isc (j, Total) m) else
(Diag Error
("incompatible mapping of op " ++ showId i ":"
++ showDoc (setOpKind t ot) " to "
++ showId j " and " ++ showId k "") nullRange : ds, m))
(sds, omap1) (Map.toList omap2 ++ map
( \ (a, ot) -> ((a, mkPartial ot), (a, opKind ot)))
(mapSetToList uo))
(pds, pmap) = foldr ( \ (isc@(i, pt), j) (ds, m) ->
case Map.lookup isc m of
Nothing -> (ds, Map.insert isc j m)
Just k -> if j == k then (ds, m) else
(Diag Error
("incompatible mapping of pred " ++ showId i ":"
++ showDoc pt " to " ++ showId j " and "
++ showId k "") nullRange : ds, m)) (ods, pmap1)
(Map.toList pmap2 ++ map ( \ (a, pt) -> ((a, pt), a))
(mapSetToList up))
in if null pds then do
s3 <- addSigM addSigExt s1 s2
s4 <- addSigM addSigExt (mtarget mor1) $ mtarget mor2
extM <- uniteM mor1 mor2
return $ cleanMorMaps
(embedMorphism extM s3 s4)
{ sort_map = smap
, op_map = omap
, pred_map = pmap }
else Result pds Nothing
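-- | remove redundant identity entries from the sort, operation and predicate maps of a morphism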
cleanMorMaps :: Morphism f e m -> Morphism f e m
cleanMorMaps m = m
{ sort_map = Map.filterWithKey (/=) $ sort_map m
, op_map = Map.filterWithKey
(\ (i, ot) (j, k) -> i /= j || k == Total && Set.member ot
(MapSet.lookup i $ opMap $ msource m)) $ op_map m
, pred_map = Map.filterWithKey ((/=) . fst) $ pred_map m }
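-- | check whether the sort map is injective on the source sorts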
isSortInjective :: Morphism f e m -> Bool
isSortInjective m =
let ss = sortSet $ msource m
in Set.size ss == Set.size (Set.map (mapSort $ sort_map m) ss)
sumSize :: MapSet.MapSet a b -> Int
sumSize = sum . map Set.size . Map.elems . MapSet.toMap
-- morphism extension m is not considered here
isInjective :: Morphism f e m -> Bool
isInjective m = isSortInjective m && let
src = msource m
sm = sort_map m
os = opMap src
ps = predMap src
in sumSize os == sumSize (inducedOpMap sm (op_map m) os)
&& sumSize ps == sumSize (inducedPredMap sm (pred_map m) ps)
instance Pretty RawSymbol where
pretty rsym = case rsym of
ASymbol sy -> pretty sy
AKindedSymb k i -> pretty k <+> pretty i
printMorphism :: (e -> e -> Bool) -> (m -> Bool) -> (e -> Doc)
-> (Morphism f e m -> Doc) -> Morphism f e m -> Doc
printMorphism isSubSigExt isInclMorExt fE fM mor =
let src = msource mor
tar = mtarget mor
ops = op_map mor
prSig s = specBraces (space <> printSign fE s)
srcD = prSig src
in fsep $ if isInclusionMorphism isInclMorExt mor then
if isSubSig isSubSigExt tar src then
[text "identity morphism over", srcD]
else
[text "inclusion morphism of", srcD
, if Map.null ops then empty
else fsep
[text "by totalizing",
pretty $ Set.map (uncurry idToOpSymbol) $ Map.keysSet ops]]
else
[ braces $ printMap id sepByCommas pairElems
(morphismToSymbMapAux True mor) $+$ fM mor
, colon <+> srcD, mapsto <+> prSig tar ]
instance (SignExtension e, Pretty e, Show f, MorphismExtension e m)
=> Pretty (Morphism f e m) where
pretty = printMorphism isSubSignExtension isInclusionMorphismExtension
pretty prettyMorphismExtension
|
nevrenato/HetsAlloy
|
CASL/Morphism.hs
|
gpl-2.0
| 26,993 | 0 | 27 | 8,091 | 9,667 | 4,882 | 4,785 | 623 | 22 |
{-# OPTIONS_GHC -Wno-deferred-type-errors #-}
-- | API server routes.
module Routes
( routes,
root,
media,
pub,
static,
gpgKeyFile,
blog,
)
where
import API (API, BlogAPI, BlogArticleAPI, BlogListingAPI, BrowseAPI, ComponentAPI, FeedAPI, GPGAPI, runServerAPI)
import Config (Config (..))
import Control.Monad.Except (MonadError (throwError))
import Control.Monad.IO.Class (MonadIO (..))
import qualified Data.Text as T
import Data.Time.Clock.POSIX (getCurrentTime)
import Feed (rssFeed, rssItem)
import Servant (Server, err404)
import Servant.API (Raw, (:<|>) (..))
import Servant.Server.StaticFiles (serveDirectoryFileServer, serveDirectoryWebApp)
import State (APIState (cachedGPGKeyFile), cachedIndexHtml, getBlogArticleContent, getUploadedFiles, listBlogArticleMetadata)
routes :: Config -> APIState -> Server API
routes config state =
feed state
:<|> media config
:<|> pub config
:<|> gpgKeyFile state
:<|> component state
:<|> static
:<|> (blog state :<|> browse state)
:<|> root config
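-- | Serve the cached index page for every client-side route.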
component :: APIState -> Server ComponentAPI
component state = serveRoot :<|> serveBlogListing :<|> serveBlogArticle :<|> serveBrowse
where
serveIndex = pure (cachedIndexHtml state)
serveRoot = serveIndex
serveBlogListing = serveIndex
serveBlogArticle _ = serveIndex
serveBrowse = serveIndex
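-- | Serve the front-end directory at the site root.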
root :: Config -> Server Raw
root = serveDirectoryWebApp . configFrontDir
media :: Config -> Server Raw
media = serveDirectoryFileServer . configMediaDir
pub :: Config -> Server Raw
pub = serveDirectoryFileServer . configUploadDir
static :: Server Raw
static = serveDirectoryWebApp "static"
gpgKeyFile :: APIState -> Server GPGAPI
gpgKeyFile state = if T.null content then throwError err404 else pure content
where
content = cachedGPGKeyFile state
blog :: APIState -> Server BlogAPI
blog state = blogListing state :<|> article state
browse :: APIState -> Server BrowseAPI
browse = getUploadedFiles
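-- | Build the RSS feed from the blog article metadata.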
feed :: APIState -> Server FeedAPI
feed state = do
now <- liftIO getCurrentTime
items <- fmap (map rssItem) (listBlogArticleMetadata state)
pure $ rssFeed now items
blogListing :: APIState -> Server BlogListingAPI
blogListing = listBlogArticleMetadata
article :: APIState -> Server BlogArticleAPI
article state slug = runServerAPI $ getBlogArticleContent state slug
|
phaazon/phaazon.net
|
backend/src/Routes.hs
|
gpl-3.0
| 2,355 | 0 | 12 | 392 | 649 | 355 | 294 | 61 | 2 |
{-# OPTIONS -fwarn-tabs -fwarn-incomplete-patterns -Wall -fno-warn-type-defaults #-}
{-# LANGUAGE RecordWildCards, MultiParamTypeClasses #-}
module CatanGUI (beginGUI) where
import Prelude hiding(log)
import Control.Monad(when, void)
import qualified Graphics.UI.Threepenny as UI
import Graphics.UI.Threepenny.Core
import qualified Graphics.UI.Threepenny.SVG.Elements as SVG
import qualified Graphics.UI.Threepenny.SVG.Attributes as SVG hiding (filter, mask)
import Control.Concurrent.MVar.Lifted
import Data.Maybe(fromJust)
import Types
import Control.Monad.Base
import Control.Concurrent(threadDelay)
{-----------------------------------------------------------------------------
SVG
------------------------------------------------------------------------------}
-- data WindowSize = WindowSize {x :: Int, y :: Int}
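-- | Hexagon radius, in pixels, used throughout the board rendering.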
hexSize :: Num a => a
hexSize = 60
beginGUI :: CatanMVars -> IO ()
beginGUI cmvars =
startGUI defaultConfig { jsCustomHTML = Just "catan.html",
jsStatic = Just "static",
jsLog = \_ -> putStr "" ,
jsPort = Just 8022,
jsWindowReloadOnDisconnect = True} (setup cmvars)
bootstrapRow :: [UI Element] -> UI Element
bootstrapRow elems = UI.div # set UI.class_ "row" #+ elems
mkButton :: String -> String -> UI Element
mkButton buttonTitle classes =
UI.button #. ("btn myspacing " ++ classes) # set UI.type_ "button" #+ [string buttonTitle]
setup :: CatanMVars -> Window -> UI ()
setup CatanMVars{..} w = void $ do
_ <- return w # set title "Curriers of Catan"
roll <- takeMVar rollVar
game@Game{..} <- takeMVar gameVar
heading <- UI.h1 # set text "Curriers of Catan"
subHeading <- UI.h2 # set text ("Current Player: " ++ show currentPlayer)
# set UI.id_ "player"
rollResult <- UI.h4 # set text ("Roll: " ++ show roll) # set UI.id_ "roll"
endturnbutton <- mkButton "End Turn" "btn-outline-danger btn-sm"
buildRoadButton <- mkButton "Build Road" "btn-outline-primary btn-sm"
buildSettButton <- mkButton "Build Settlement" "btn-outline-primary btn-sm"
buyCardButton <- mkButton "Buy Card" "btn-outline-primary btn-sm"
buildCityButton <- mkButton "Build City" "btn-outline-primary btn-sm"
cheatButton <- mkButton "cheater" "btn-outline-danger btn-sm"
buttons <- bootstrapRow [element buildRoadButton
, element buildSettButton
, element buildCityButton
, element buyCardButton
, element cheatButton
, element endturnbutton]
menu <- UI.div # set UI.id_ "menu"
#+ [element subHeading
, element rollResult
, drawResources game
, drawKnights game
, drawTrading game
, drawCards game]
sidebar <- UI.div # set UI.class_ "col-lg-5"
#+ [ element menu
, UI.div # set UI.class_ "row" #+ [UI.h4 # set text "Actions:"]
, element buttons
]
boardDiv <- UI.div # set UI.class_ "col-lg-7"
#+ [background game]
gameRow <- bootstrapRow [element boardDiv, element sidebar]
container <- UI.div # set UI.class_ "container-fluid main-container" #+ [element heading, element gameRow]
_ <- getBody w #+ [ element container ]
on UI.click endturnbutton $ \_ -> endTurn mvars
on UI.click buildRoadButton $
\_ -> makeCorners $ \i1 -> makeCorners (\i2 -> do _ <- sendAction (BuildRoad i1 i2) mvars
return ())
on UI.click buildSettButton $
\_ -> makeCorners (\index -> do _ <- sendAction (BuildSettlement index) mvars
return ())
on UI.click buildCityButton $
\_ -> makeCorners (\index -> do _ <- sendAction (BuildCity index) mvars
return ())
on UI.click buyCardButton $ \_ -> do
_ <- sendAction BuyCard mvars
return ()
on UI.click cheatButton $ \_ -> do
_ <- sendAction (Cheat resourceRadioValues) mvars
return ()
drawResources :: Game -> UI Element
drawResources Game{..} =
UI.p
# set UI.id_ "resources"
# set UI.text ("Resources: " ++ show (resources (getPlayer currentPlayer players)))
drawKnights :: Game -> UI Element
drawKnights Game{..} =
UI.p
# set UI.id_ "knights"
# set UI.text ("Knights: " ++ (show (knights (getPlayer currentPlayer players))))
drawCards :: Game -> UI Element
drawCards g@Game{..} = do
let devcards = cards (getPlayer currentPlayer players)
let playableCardsList = map (\c -> do
button <- UI.button
# set UI.class_ "card list-group-item list-group-item-action"
# set UI.type_ "button"
#+ [string (show c)]
on UI.click button $ \_ ->
case c of
VictoryPoint -> return ()
Knight -> handlePlayKnight g mvars
Progress Monopoly -> do
maybeRes <- getRadioSelection "from"
case maybeRes of
Just r -> do
_ <- sendAction (PlayMonopoly r) mvars
return ()
_ -> return ()
Progress YearOfPlenty -> do
maybeRes1 <- getRadioSelection "from"
maybeRes2 <- getRadioSelection "to"
case (maybeRes1, maybeRes2) of
(Just r1, Just r2) -> do
_ <- sendAction (PlayYearOfPlenty r1 r2) mvars
return ()
(_,_) -> return ()
Progress _ -> return ()
return button) devcards
let pendingCardsList = map (\c -> UI.button
# set UI.class_ "card list-group-item"
# set UI.enabled False
# set UI.type_ "button"
#+ [string (show c ++ " (PENDING)")]) pendingCards
cardsTitle <- UI.h4 # set text "Development cards:"
list <- UI.div
# set UI.class_ "list-group cards"
#+ (playableCardsList ++ pendingCardsList)
UI.div # set UI.id_ "devcardsdiv" #+ [element cardsTitle, element list]
-- is there a nicer way to do this?
resourceRadioValues :: [Resource]
resourceRadioValues = [Brick, Lumber, Ore, Grain, Wool]
getRadioSelection :: String -> UI (Maybe Resource)
getRadioSelection t = foldr (checkRadio t) (return Nothing) resourceRadioValues
where
checkRadio tag res acc = do
let r = show res
w <- askWindow
let radioid = tag ++ r
maybeElem <- getElementById w radioid
let e = fromJust maybeElem
checked <- get UI.checked e
if checked then return (Just res) else acc
drawTrading :: Game -> UI Element
drawTrading Game{..} = do
let fromResourceRadios = map (makeRadio "from") resourceRadioValues
let toResourceRadios = map (makeRadio "to") resourceRadioValues
  let fromDiv = UI.div # set UI.class_ "form-group" # set UI.id_ "fromDiv" #+ ((UI.legend # set text "Resource to trade"):fromResourceRadios)
  let toDiv = UI.div # set UI.class_ "form-group" # set UI.id_ "toDiv" #+ ((UI.legend # set text "Resource to receive"):toResourceRadios)
submitButton <- UI.button #. "btn btn-outline-success btn-sm" # set text "Trade With Bank"
form <- UI.div #+ [UI.h4 # set text "Trade With Bank:", fromDiv, toDiv, element submitButton]
on UI.click submitButton $ \_ -> do
maybeFrom <- getRadioSelection "from"
maybeTo <- getRadioSelection "to"
case (maybeFrom, maybeTo) of
(Just r1, Just r2) -> do
_ <- sendAction (TradeWithBank r1 r2 1) mvars
return ()
(_, _) -> return ()
return form
where
makeRadio tag res =
let r = show res in
UI.label
# set UI.class_ "btn btn-secondary btn-sm active myspacing-sm"
#+ [UI.input # set UI.type_ "radio" # set UI.id_ (tag ++ r) # set UI.name tag # set UI.value r
, UI.span # set UI.class_ "custom-control-indicator"
, UI.span # set UI.class_ "custom-control-description" # set text (" " ++ r)]
hexPoints :: (Integral t1, Integral t2, Integral t3) => t1 -> t2 -> t3 -> String
hexPoints x1 y1 r1 =
unwords (map hexCorner [0..5])
where hexCorner i =
let x = fromIntegral x1
y = fromIntegral y1
r = fromIntegral r1
angle_deg = 60 * i + 30
angle_rad = pi / 180 * angle_deg in
show (x + r * cos angle_rad) ++ "," ++ show (y + r * sin angle_rad)
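-- | SVG points string for a flat-top hexagon centred at (x1, y1) with radius r1.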
flatHexPoints :: (Integral t1, Integral t2, Integral t3) => t1 -> t2 -> t3 -> String
flatHexPoints x1 y1 r1 =
unwords (map hexCorner [0..5])
where hexCorner i =
let x = fromIntegral x1
y = fromIntegral y1
r = fromIntegral r1
angle_deg = 60 * i
angle_rad = pi / 180 * angle_deg in
show (x + r * cos angle_rad) ++ "," ++ show (y + r * sin angle_rad)
foreground :: Game -> UI Element
foreground Game{..} = do
let bs = map drawBuilding buildings
rs = map drawRoad roads
(x, y) = hexToPixel robberTile
robber <- SVG.circle
# set SVG.class_ "render"
# set SVG.r (show (hexSize / 2))
# set SVG.cx (show x)
# set SVG.cy (show y)
# set SVG.fill "rgb(103, 128, 159)"
# set SVG.opacity ".7"
# set SVG.pointer_events "none"
SVG.g #+ (element robber : (bs ++ rs))
where
drawBuilding (Settlement c l) =
let (x,y) = cornerToPixel l in
SVG.circle
# set SVG.r (show (hexSize/3))
# set SVG.class_ "render"
# set SVG.cx (show x)
# set SVG.cy (show y)
# set SVG.fill (colorToRGB c)
drawBuilding (City c l) = do
let (x, y) = cornerToPixel l
g <- SVG.g # set SVG.class_ "render"
return g #+ [SVG.circle
# set SVG.r (show (hexSize/3 + 5))
# set SVG.cx (show x)
# set SVG.cy (show y)
# set SVG.fill (colorToRGB c),
SVG.circle
# set SVG.r "10"
# set SVG.cx (show x)
# set SVG.cy (show y)
# set SVG.fill "rgb(34, 49, 63)"
]
drawRoad r = do
let (l1, l2, c) = getRoad r
let (x1,y1) = cornerToPixel l1
let (x2,y2) = cornerToPixel l2
g <- SVG.g # set SVG.class_ "render"
return g #+ [SVG.line
# set SVG.r "20"
# set SVG.x1 (show x1)
# set SVG.y1 (show y1)
# set SVG.x2 (show x2)
# set SVG.y2 (show y2)
# set SVG.stroke (colorToRGB c)
# set SVG.stroke_width "10"
, SVG.circle
# set SVG.r "5"
# set SVG.cx (show x1)
# set SVG.cy (show y1)
# set SVG.stroke_width "0"
# set SVG.fill (colorToRGB c)
, SVG.circle
# set SVG.r "5"
# set SVG.cx (show x2)
# set SVG.cy (show y2)
# set SVG.stroke_width "0"
# set SVG.fill (colorToRGB c)
]
getTileColor :: Board -> TileLocation -> String
getTileColor board index =
let tile = getTile board index in
case tile of
Paying t _ -> case t of
Hills -> "rgb(179, 94, 30)"
Forest -> "rgb(30, 130, 76)"
Mountains -> "rgb(190, 144, 212)"
Fields -> "rgb(135, 211, 124)"
Pasture -> "rgb(245, 215, 110)"
Desert -> "rgb(253, 227, 167)"
makeHexGroup :: Board -> [UI Element]
makeHexGroup board = map drawHex tileIndices
where
drawHex index = do
let (x, y) = hexToPixel index
let color = getTileColor board index
let token = getToken index
hex <- SVG.polygon
# set SVG.class_ "hex tile"
# set SVG.points (hexPoints x y hexSize)
# set SVG.stroke "rgb(34, 49, 63)"
# set SVG.stroke_width "2"
# set SVG.fill color
# set UI.value (show index)
if token == "" then SVG.g #+ [element hex] else do
circ <- SVG.circle
# set SVG.r "25"
# set SVG.cx (show x)
# set SVG.cy (show y)
# set SVG.stroke "rgb(34, 49, 63)"
# set SVG.stroke_width "1"
# set SVG.fill "rgb(228, 241, 254)"
# set SVG.pointer_events "none"
t <- SVG.text
# set SVG.text_anchor "middle"
# set SVG.alignment_baseline "central"
# set SVG.x (show x)
# set SVG.y (show y)
# set SVG.font_size "30"
# set SVG.font_family "Palatino"
# set SVG.font_weight "bold"
# set text token
# set SVG.fill "black"
# set SVG.pointer_events "none"
SVG.g #+ [element hex, element circ, element t]
getToken index = --show (fst $ tileToAxial index) ++ " " ++
--show (snd $ tileToAxial index)
case getTile board index of
Paying _ tok -> show $ case tok of
Two -> 2 :: Int
Three -> 3
Four -> 4
Five -> 5
Six -> 6
Eight -> 8
Nine -> 9
Ten -> 10
Eleven -> 11
Twelve -> 12
Desert -> ""
background :: Game -> UI Element
background game@Game{..} = do
let height = 12 * hexSize
context <- SVG.svg
# set SVG.id "mainHex"
# set SVG.width (show height)
# set SVG.height (show height)
bg <- SVG.polygon
# set SVG.class_ "hex"
# set SVG.points (flatHexPoints (height `div` 2) (height `div` 2) (height `div` 2))
# set SVG.stroke "rgb(34, 49, 63)"
# set SVG.stroke_width "1"
# set SVG.fill "rgb(129,207,224)"
let g = SVG.g # set SVG.id "hexGroup" #+ makeHexGroup board
let harbors = foldr makeHabors [] cornerIndices
let elemList = element bg : (harbors ++ (g : [foreground game]))
return context #+ elemList
where
makeHabors cornerLoc acc =
case getCorner board cornerLoc of
(_, Just GenericHarbor) ->
let (x,y) = cornerToPixel cornerLoc
h = SVG.circle
# set SVG.r (show (hexSize/2))
# set SVG.cx (show x)
# set SVG.cy (show y)
# set SVG.fill "rgb(108, 122, 137)"
# set SVG.stroke "rgb(34, 49, 63)"
# set SVG.stroke_width "1"
in
h:acc
(_, Just (SpecialHarbor r)) ->
let (x,y) = cornerToPixel cornerLoc
h = SVG.circle
# set SVG.r (show (hexSize/2))
# set SVG.cx (show x)
# set SVG.cy (show y)
# set SVG.fill (getHarborColor r)
# set SVG.stroke "rgb(34, 49, 63)"
# set SVG.stroke_width "1"
in
h:acc
(_, Nothing) -> acc
getHarborColor r = case r of
Brick -> "rgb(179, 94, 30)"
Lumber -> "rgb(30, 130, 76)"
Ore -> "rgb(190, 144, 212)"
Grain -> "rgb(135, 211, 124)"
Wool -> "rgb(245, 215, 110)"
colorToRGB :: Color -> String
colorToRGB c = case c of
Blue -> "rgb(65, 131, 215)"
Red -> "rgb(217, 30, 24)"
Orange -> "rgb(248, 148, 6)"
White -> "rgb(236,236,236)"
cornerToPixel :: CornerLocation -> (Int, Int)
cornerToPixel cl =
let (q1, r1, t) = cornerToAxial cl
q = fromIntegral q1
r = fromIntegral r1
x = hexSize * (q + r/2.0) * sqrt 3
y = hexSize * 3.0/2.0 * r
x' = x + 6 * hexSize
y' = y + 6 * hexSize + (if t then negate else id) hexSize
in (round x', round y')
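-- | Pixel coordinates of the centre of a tile on the rendered board.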
hexToPixel :: TileLocation -> (Int, Int)
hexToPixel cl =
let (q1, r1) = tileToAxial cl
q = fromIntegral q1
r = fromIntegral r1
x = hexSize * (q + r/2.0) * sqrt 3
y = hexSize * 3.0/2.0 * r
x' = (x + 6 * hexSize)
y' = (y + 6 * hexSize) in
(round x', round y')
sendAction :: PlayerAction -> CatanMVars -> UI Game
sendAction action CatanMVars{..} = do
putMVar actionVar action
log "taking sendaction"
game@Game{..} <- takeMVar gameVar
log "took sendaction"
renderGame game
mapM_ log errorMessage
when (action == PlayKnight) (robberSequence game)
return game
disableClicking :: Board -> UI ()
disableClicking board = do
turnButtons True
resetBoard board
turnButtons :: Bool -> UI ()
turnButtons b = do
w <- askWindow
es <- getElementsByClassName w "button"
foldr (\button acc -> element button # set UI.enabled b >> acc) (return ()) es
robberSequence :: Game -> UI ()
robberSequence Game{..} = do
log "robber sequence"
w <- askWindow
let CatanMVars{..} = mvars
tiles <- getElementsByClassName w "tile"
turnButtons False
foldr (\tile acc -> do
on UI.hover tile $ \_ ->
element tile # set SVG.fill "white"
on UI.leave tile $ \_ -> do
index <- get UI.value tile
let color = getTileColor board (read index)
element tile # set SVG.fill color
on UI.click tile $ \_ -> do
index <- get UI.value tile
log "UI putting robber"
putMVar robberVar (read index)
log "UI taking game robber"
_ <- tryTakeMVar gameVar
g <- takeMVar gameVar
log "UI took game robber"
renderGame g
disableClicking board
liftIO $ threadDelay 10000
r <- takeMVar stealVar
case r of
[] -> return ()
ps -> let color = snd $ head ps in
putMVar colorVar color
log "UI taking extra game"
_ <- takeMVar gameVar
log "UI took extra game"
e <- getElementById w "roll"
_ <- element (fromJust e) # set text "Roll: 7"
return ()
acc) (return ()) tiles
handlePlayKnight :: Game -> CatanMVars -> UI ()
handlePlayKnight g CatanMVars{..} = do
log "putting playknight"
putMVar actionVar PlayKnight
log "put playknight"
robberSequence g
endTurn :: CatanMVars -> UI ()
endTurn m@CatanMVars{..} = do
w <- askWindow
log "UI taking game Action"
g@Game{..} <- sendAction EndTurn m
log "UI took game Action"
roll <- takeMVar rollVar
let roll7prompt = if roll == 7 then "\tPlace the robber on a new tile." else ""
let rollStr = "Roll: " ++ show roll ++ roll7prompt
rolle <- getElementById w "roll"
_ <- element (fromJust rolle) # set text rollStr
e <- getElementById w "player"
_ <- element (fromJust e) # set text ("Current Player: " ++ show currentPlayer)
liftIO $ threadDelay (400 * 1000)
when (roll == 7) $ robberSequence g
makeCorners :: (CornerLocation -> UI ()) -> UI ()
makeCorners onClick = do
w <- askWindow
e <- getElementById w "mainHex"
_ <- return (fromJust e) #+ map draw cornerIndices
turnButtons False
return ()
where draw l = do
let (x,y) = cornerToPixel l
corner <- SVG.circle
# set SVG.r (show (hexSize/2))
# set SVG.class_ "corner"
# set SVG.cx (show x)
# set SVG.cy (show y)
# set SVG.stroke_width "0"
# set SVG.fill "white"
# set SVG.fill_opacity "0.0"
# set UI.value (show l)
on UI.hover corner $ \_ ->
element corner # set SVG.fill_opacity "0.5"
on UI.leave corner $ \_ ->
element corner # set SVG.fill_opacity "0.0"
on UI.click corner $ \_ -> do
index <- get UI.value corner
deleteCorners
onClick (read index)
turnButtons True
element corner
deleteCorners :: UI ()
deleteCorners = do
w <- askWindow
es <- getElementsByClassName w "corner"
foldr (\x acc -> delete x >> acc) (return ()) es
renderGame :: Game -> UI ()
renderGame game@Game{..} = do
w <- askWindow
es <- getElementsByClassName w "render"
e <- getElementById w "mainHex"
re <- getElementById w "resources"
_ <- element (fromJust re) # set UI.text ("Resources: " ++ show (resources (getPlayer currentPlayer players)))
ke <- getElementById w "knights"
_ <- element (fromJust ke) # set UI.text ("Knights: " ++ show (knights (getPlayer currentPlayer players)))
_ <- return (fromJust e) #+ [foreground game]
cdiv <- getElementById w "devcardsdiv"
menu <- getElementById w "menu"
_ <- return (fromJust menu) #+ [drawCards game]
foldr (\x acc -> delete x >> acc) (return ()) (fromJust cdiv:es)
resetBoard :: Board -> UI ()
resetBoard board = do
w <- askWindow
es <- getElementsByClassName w "tile"
e <- getElementById w "hexGroup"
_ <- return (fromJust e) #+ makeHexGroup board
foldr (\x acc -> delete x >> acc) (return ()) es
instance MonadBase IO UI where
liftBase = liftIO
log :: Show a => a -> UI ()
log str = liftIO $ do putStr "[UI] "
print str
|
dylanmann/CurriersOfCatan
|
src/CatanGUI.hs
|
gpl-3.0
| 20,666 | 0 | 30 | 6,535 | 7,319 | 3,493 | 3,826 | 513 | 12 |
module Data.Record.StateFields
( IdField(..)
, StateField(..)
, idStateField
, StateTField(..)
, idStateTField
, ReaderField(..)
, idReaderField
, FieldPath(..)
, FieldState(..)
, modf
, enter
, enterT
, FieldReader(..)
, proj
, projT
, record
, (!/)
, (&/)
) where
import Data.Record.StateFields.Core
import Data.Record.StateFields.Templates
import Control.Monad.Reader
import Control.Monad.State
-- | A field descriptor with stateful effects.
data StateField a b = StateField
{ getFieldM :: (MonadState a m) => m b
, putFieldM :: (MonadState a m) => b -> m ()
}
-- | Make a StateField from an IdField.
idStateField :: IdField a b -> StateField a b
idStateField f =
StateField
{ getFieldM = gets $ getField f
, putFieldM = \b -> get >>= \a -> put $ putField f b a
}
-- | A field descriptor with stateful and other effects.
data StateTField m a b = StateTField
{ getFieldT :: StateT a m b
, putFieldT :: b -> StateT a m ()
}
-- | Make a StateTField from an IdField.
idStateTField :: (Monad m) => IdField a b -> StateTField m a b
idStateTField f =
StateTField
{ getFieldT = gets $ getField f
, putFieldT = \b -> get >>= \a -> put $ putField f b a
}
-- | A field descriptor which operates in an environment.
data ReaderField a b = ReaderField
{ askFieldM :: (MonadReader a m) => m b
}
-- | Make a ReaderField from an IdField.
idReaderField :: IdField a b -> ReaderField a b
idReaderField f =
ReaderField
{ askFieldM = asks $ getField f
}
infixl 9 //, !/, &/
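-- | Read a field from a record.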
(!/) :: a -> IdField a b -> b
(!/) = flip getField
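-- | Set a field of a record to the given value.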
(&/) :: a -> (IdField a b, b) -> a
(&/) rec (f, val) = putField f val rec
-- | The class of field descriptors which can be chained into paths.
class FieldPath f g h | f g -> h where
-- | Join two field descriptors into a compound.
-- '//' is left-associative with precedence level 9.
(//) :: f -> g -> h
instance FieldPath (IdField a b) (IdField b c) (IdField a c) where
ab // bc =
IdField
{ getField = getField bc . getField ab
, putField = \c a -> putField ab (putField bc c $ getField ab a) a
}
instance FieldPath (IdField a b) (StateField b c) (StateField a c) where
ab // bc = idStateField ab // bc
instance FieldPath (StateField a b) (IdField b c) (StateField a c) where
ab // bc = ab // idStateField bc
instance (Monad m) => FieldPath (IdField a b) (StateTField m b c) (StateTField m a c) where
ab // bc = (idStateTField ab :: StateTField m a b) // bc
instance (Monad m) => FieldPath (StateTField m a b) (IdField b c) (StateTField m a c) where
ab // bc = ab // (idStateTField bc :: StateTField m b c)
instance FieldPath (IdField a b) (ReaderField b c) (ReaderField a c) where
ab // bc = idReaderField ab // bc
instance FieldPath (ReaderField a b) (IdField b c) (ReaderField a c) where
ab // bc = ab // idReaderField bc
instance FieldPath (StateField a b) (StateField b c) (StateField a c) where
ab // bc =
StateField
{ getFieldM = enter ab $ getFieldM bc
, putFieldM = enter ab . putFieldM bc
}
instance (Monad m) =>
FieldPath (StateTField m a b) (StateTField m b c) (StateTField m a c) where
ab // bc =
StateTField
{ getFieldT = enterT ab $ getFieldT bc
, putFieldT = enterT ab . putFieldT bc
}
instance FieldPath (ReaderField a b) (ReaderField b c) (ReaderField a c) where
ab // bc =
ReaderField
{ askFieldM = proj ab $ askFieldM bc
}
-- | The class of field descriptors usable with a State monad.
class (MonadState a m) => FieldState f a m | f -> a where
getf :: f b -> m b
putf :: f b -> b -> m ()
instance (MonadState a m) => FieldState (IdField a) a m where
getf = gets . getField
putf f b = get >>= put . putField f b
instance (MonadState a m) => FieldState (StateField a) a m where
getf = getFieldM
putf = putFieldM
instance (Monad m) => FieldState (StateTField m a) a (StateT a m) where
getf = getFieldT
putf = putFieldT
-- | Modify the value of a field by applying a function.
modf :: (FieldState f a m, MonadState a m) => f b -> (b -> b) -> m ()
modf f g = getf f >>= putf f . g
-- | Run a stateful computation with a field as the state.
enter :: (FieldState f a m, MonadState a m) => f b -> State b x -> m x
enter f m = do
b <- getf f
let (x, b') = runState m b
putf f b'
return x
-- | Like 'enter', but allows the subcomputation to share the underlying
-- monad of the enclosing computation.
enterT :: (Monad m, FieldState f a (StateT a m)) => f b -> StateT b m x -> StateT a m x
enterT f m = do
b <- getf f
(x, b') <- lift $ runStateT m b
putf f b'
return x
-- | The class of field descriptors usable with a Reader monad.
class FieldReader f where
askf :: (MonadReader a m) => f a b -> m b
instance FieldReader IdField where
askf = asks . getField
instance FieldReader ReaderField where
askf = askFieldM
-- | Run a computation with a field as the environment.
proj :: (FieldReader f, MonadReader a m) => f a b -> Reader b x -> m x
proj f m = do
b <- askf f
let x = runReader m b
return x
-- | Like 'proj', but allows the subcomputation to share the underlying
-- monad of the enclosing computation.
projT :: (Monad m, FieldReader f) => f a b -> ReaderT b m x -> ReaderT a m x
projT f m = do
b <- askf f
x <- lift $ runReaderT m b
return x
|
ktvoelker/state-record
|
src/Data/Record/StateFields.hs
|
gpl-3.0
| 5,323 | 0 | 12 | 1,295 | 2,055 | 1,067 | 988 | -1 | -1 |
module Masque.Ejectors where
import Control.Monad.Error.Class
import Control.Monad.Trans.Either
import Data.Unique
import Masque.Monte
-- | Run an action, catching a single specified ejector. If caught, the
-- ejector's payload will be run through the given handler action.
catchEjector :: Unique -> Monte Obj -> (Obj -> Monte Obj) -> Monte Obj
catchEjector u action handler = catchError action $ \err ->
case err of
Ejecting u' obj | u == u' -> handler obj
_ -> left err
-- | Like Monte m`throw.eject(ej, problem)`.
throwEject :: Obj -> Obj -> Monte ()
throwEject ej problem = left $ case ej of
EjectorObj u -> Ejecting u problem
_ -> Exception problem
|
monte-language/masque
|
Masque/Ejectors.hs
|
gpl-3.0
| 723 | 0 | 13 | 178 | 186 | 95 | 91 | 14 | 2 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Importing of FR3D data. Both "basepairs" and "near interactions" are
-- currently supported. More parsers will come if required.
module Biobase.FR3D.Import where
import Control.Arrow
import Data.ByteString.Char8 as BS
import Data.Char
import Data.Iteratee as I
import Data.Iteratee.Char as I
import Data.Iteratee.IO as I
import Data.Iteratee.ListLike as I
import Data.List as L
import Data.Maybe
import qualified Data.Map as M
import System.FilePath.Find as F
import Biobase.Secondary
import Biobase.FR3D
-- | An Iteratee from a bytestring to one FR3D entry. Since each file contains
-- exactly one entry, this is no problem.
iFR3D :: (Monad m) => Iteratee ByteString m FR3D
iFR3D = joinI $ enumLinesBS f where
f = do
I.head -- fr3d header
I.head -- sequence header
cs' <- I.break ((/="#") . BS.take 1)
I.head -- basepairs header
xs <- stream2list -- and all basepairs
let cs = L.map (second (BS.drop 1) . BS.span isAlphaNum . BS.drop 2) $ cs'
return FR3D
{ pdbid = maybe "" (BS.take 4) $ listToMaybe xs
, chains = cs
, basepairs = {- L.map (fixSeqpos cs) . -} L.map bs2basepair $ xs
}
{-
- This would be for fixing sequence position information, but it seems that
- FR3D does not store this info consistently...
-
fixSeqpos :: [(ByteString,ByteString)] -> Basepair -> Basepair
fixSeqpos cs bp@Basepair{..} = bp{seqpos1 = seqpos1 - cl M.! chain1, seqpos2 = seqpos2 - cl M.! chain2} where
cl = M.fromList . snd . L.mapAccumL f 0 $ cs
f acc x = (acc + BS.length (snd x), (fst x, acc))
-}
-- | Helper function turning a bytestring line into a basepair entry
bs2basepair :: ByteString -> Basepair
bs2basepair s
| L.length ws /= 10 = error $ "can't parse line: " ++ unpack s
| otherwise = Basepair
{ interaction = threeChar . BS.unpack $ ws!!1
, nucleotide1 = BS.head $ ws!!2
, pdbnumber1 = maybe (-1) fst . readInt $ ws!!3
, chain1 = ws!!4
, seqpos1 = maybe (-1) (subtract 1 . fst) . readInt $ ws!!5
, nucleotide2 = BS.head $ ws!!6
, pdbnumber2 = maybe (-1) fst . readInt $ ws!!7
, chain2 = ws!!8
, seqpos2 = maybe (-1) (subtract 1 . fst) . readInt $ ws!!9
}
where ws = BS.words s
-- | Convenience function: given a directory name, extracts a list of all FR3D
-- entries.
fromDirSelect :: String -> FilePath -> IO [FR3D]
fromDirSelect select fp = do
fs <- F.find always (fileName ~~? select) fp
mapM (\f -> run =<< enumFile 8192 f iFR3D) fs
-- | This one select the "near interactions"
fromDirNear = fromDirSelect "*near_interactions_FR3D.txt"
-- | And this one the "basepairs" (this one you normally want).
fromDir = fromDirSelect "*basepairs_FR3D.txt"
|
choener/BiobaseFR3D
|
Biobase/FR3D/Import.hs
|
gpl-3.0
| 2,772 | 0 | 20 | 603 | 671 | 367 | 304 | 49 | 1 |
module Interface.Completed (startCompleted, startCompletedNum, completedUI, Completed) where
import Interface.Expand
import HTorrentPrelude
import Torrent.Env
import qualified Data.IntMap as IM
import qualified Graphics.UI.Threepenny as UI
import Graphics.UI.Threepenny.Core
import Reactive.Threepenny
type Completed = IntMap ByteString
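-- | Behavior tracking the map of completed pieces, together with the handler used to update it.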
startCompleted :: ReaderT TorrentEnv IO (Behavior Completed, Handler Completed)
startCompleted = do
(completedEvent, completedHandler) <- liftIO newEvent
completedStart <- viewTVarIO completed
completedBehavior <- liftIO (stepper completedStart completedEvent)
return (completedBehavior, completedHandler)
startCompletedNum :: ReaderT TorrentEnv IO (Behavior Int, Handler Int)
startCompletedNum = do
(numCompletedE, numCompletedH) <- liftIO newEvent
numCompletedStart <- viewTVar numCompleted
numCompletedB <- liftIO (stepper numCompletedStart numCompletedE)
return (numCompletedB, numCompletedH)
completedUI :: Behavior Completed -> UI Element
completedUI b = do
name <- string "Completed"
completedTable <- sink completedAttr b UI.table
expand name [completedTable]
where completedAttr = mkWriteAttr updateCompleted
updateCompleted :: Completed -> Element -> UI ()
updateCompleted c t = do
cells <- mapM f (IM.keys c)
void (UI.set UI.children cells (UI.element t))
where f i = UI.set text (show i) UI.td
|
ian-mi/hTorrent
|
Interface/Completed.hs
|
gpl-3.0
| 1,410 | 0 | 12 | 222 | 414 | 210 | 204 | 32 | 1 |
{-# LANGUAGE QuasiQuotes #-}
module Styles (mainCss) where
import Text.Lucius
render = undefined
mainCss' = [lucius|
.form-urlshort {
max-width: 550px;
padding: 15px;
margin: 0 auto;
margin-top: 25vh;
* {
margin: 15px 0 15px 0;
}
}
|]
mainCss = renderCss $ mainCss' render
|
nico-izo/hsUrlShort
|
src/Styles.hs
|
gpl-3.0
| 315 | 0 | 6 | 85 | 41 | 26 | 15 | 6 | 1 |
-- Copyright (C) 2017 Jonathan W. Armond
module Loco.Interpreter where
import Loco.Parser
import Loco.Eval
import Loco.Error
import Loco.Store
import Loco.AST
import Loco.Pretty
import Control.Monad
import Control.Monad.Except
import Data.List
import Data.List.Zipper
trace = False
traceShow = show
-- |Execute a program with a new store.
execProgramNewStore :: Program -> IO ()
execProgramNewStore prog = newStore >>= execProgram prog
-- |Execute a program using the provided store.
execProgram :: Program -> Store -> IO ()
execProgram [] _ = return ()
execProgram prog st = runIOEval $ execProg st progZip
where progZip = fromList prog
-- |Execute program. Uses a zipper to recurse down list.
execProg :: Store -> Zipper CommandLine -> IOLocoEval ()
execProg st progZip = liftIO $ unless (endp progZip) $ runIOEval exec
where
exec = do
let cmdline@(CommandLine linum stmt) = cursor progZip
-- Execute statement. If a jump is returned then execution should move
-- to jump target line.
when trace $ liftIO (printStore st >> (putStrLn . traceShow) cmdline)
jump <- evalSt st stmt
case jump of
Jump linum -> jumpToLine 0 linum
JumpNext linum -> jumpToLine 1 linum
Next -> execNextLine
where
-- Recursively execute next line.
execNextLine :: IOLocoEval ()
execNextLine = execProg st $ right progZip
-- Unpack zipper back to list and search for linenumber, optionally
-- skipping on @skip@ lines.
jumpToLine :: Int -> LineNumber -> IOLocoEval ()
jumpToLine skip n = do
let prog = toList progZip
case findIndex ((==n) . lineNum) prog of
Nothing -> (throwError $ InvalidLineError n)
-- Skip to the jump.
Just idx -> execProg st $ zipToIndex progZip (idx+skip)
lineNum :: CommandLine -> Integer
lineNum (CommandLine linum _) = linum
-- |zipToIndex moves the cursor to index i
zipToIndex :: Zipper a -> Int -> Zipper a
zipToIndex z i = iterate right (start z) !! i
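-- | Parse a single statement and evaluate it against a fresh store.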
runStatement :: String -> IO ()
runStatement s = runIOEval $ exec
where exec = do
st <- liftIO newStore
stmt <- liftIOEval $ runParseStatement s
evalSt1 st stmt
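-- | Parse a program, print its listing and run it against a new store.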
runProgram :: String -> IO ()
runProgram xs = runIOEval $ exec
where exec = do
prog <- liftIOEval $ runParseProgram xs
liftIO $ printProgram prog
liftIO $ prompt
liftIO $ execProgramNewStore prog
printProgram :: Program -> IO ()
printProgram prog = mapM_ (putStrLn . prettyShow) prog
runShowProgram :: String -> IO ()
runShowProgram xs = runIOEval $ exec
where exec = do
prog <- liftIOEval $ runParseProgram xs
liftIO $ showProgram prog
showProgram :: Program -> IO ()
showProgram prog = mapM_ (putStrLn . show) prog
prompt :: IO ()
prompt = putStrLn "Ready"
|
jarmond/locomotive-haskell
|
src/Loco/Interpreter.hs
|
gpl-3.0
| 2,904 | 0 | 17 | 752 | 825 | 408 | 417 | 65 | 4 |
{-# LANGUAGE NoOverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module GUI.RSCoin.TransactionsTab
( createTransactionsTab
, initTransactionsTab
) where
import Control.Exception (SomeException (..), catch)
import Control.Monad (void, when)
import Control.Monad.IO.Class (liftIO)
import qualified Data.IntMap.Strict as M
import qualified Data.Text as T
import Graphics.UI.Gtk (AttrOp ((:=)), on)
import qualified Graphics.UI.Gtk as G
import GUI.RSCoin.ErrorMessage (reportSimpleError)
import GUI.RSCoin.Glade (GladeMainWindow (..))
import GUI.RSCoin.GUIAcid (Contact (..), GUIState,
addTransaction, getContacts)
import GUI.RSCoin.MainWindow (TransactionsTab (..))
import qualified GUI.RSCoin.MainWindow as M
import GUI.RSCoin.WalletTab (updateWalletTab)
import qualified RSCoin.Core as C
import RSCoin.Timed (runRealModeUntrusted)
import qualified RSCoin.User as U
import Serokell.Util.Text (readFractional)
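-- | Collect the transactions tab widgets from the Glade main window.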
createTransactionsTab :: GladeMainWindow -> TransactionsTab
createTransactionsTab GladeMainWindow{..} =
TransactionsTab
gEntryPayTo
gButtonChooseContacts
gSpinButtonSendAmount
gButtonConfirmSend
gButtonClearSend
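-- | Set the send-amount adjustment and register click handlers for the transactions tab buttons.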
initTransactionsTab :: Maybe FilePath
-> U.RSCoinUserState
-> GUIState
-> M.MainWindow
-> IO ()
initTransactionsTab confPath st gst mw = do
let tr@TransactionsTab{..} = M.tabTransactions mw
sendAmountAdjustment <- G.adjustmentNew 0 0 99999999999 1 1 1
G.spinButtonSetAdjustment spinButtonSendAmount sendAmountAdjustment
void (buttonClearSend `on`
G.buttonActivated $ onClearButtonPressed tr)
void (buttonConfirmSend `on`
G.buttonActivated $ onSendButtonPressed confPath st gst mw)
void (buttonChooseContacts `on`
G.buttonActivated $ onChooseContactsButtonPressed gst mw)
onClearButtonPressed :: TransactionsTab -> IO ()
onClearButtonPressed TransactionsTab{..} = do
G.entrySetText entryPayTo ""
G.entrySetText spinButtonSendAmount ""
onSendButtonPressed :: Maybe FilePath
-> U.RSCoinUserState
-> GUIState
-> M.MainWindow
-> IO ()
onSendButtonPressed confPath st gst [email protected]{..} =
act `catch` handler
where
handler (e :: SomeException) =
reportSimpleError mainWindow $ "Exception has happened: " ++ show e
act = do
let TransactionsTab{..} = tabTransactions
sendAddressPre <- G.entryGetText entryPayTo
sendAmount <- readFractional . T.pack <$> G.entryGetText spinButtonSendAmount
let sendAddress = C.Address <$> C.constructPublicKey (T.pack sendAddressPre)
constructDialog sendAddress sendAmount
constructDialog Nothing _ =
reportSimpleError mainWindow "Couldn't read address, most probably wrong format."
constructDialog _ (Left _) =
reportSimpleError mainWindow "Wrong amount format -- should be positive integer."
constructDialog (Just _) (Right amount) | amount <= 0 = do
reportSimpleError mainWindow "Amount should be positive."
G.entrySetText (spinButtonSendAmount tabTransactions) ""
constructDialog (Just address) (Right amount) = do
userAmount <- runRealModeUntrusted C.userLoggerName confPath
(M.findWithDefault 0 0 <$> U.getUserTotalAmount False st)
if amount > C.getCoin userAmount
then do
reportSimpleError mainWindow $
"Amount exceeds your assets -- you have only " ++
show (C.getCoin userAmount) ++ " coins."
G.entrySetText (spinButtonSendAmount tabTransactions) $ show userAmount
else do
tr <- runRealModeUntrusted C.userLoggerName confPath $
U.submitTransactionFromAll st Nothing address $ C.Coin 0 amount
liftIO $ addTransaction gst (C.hash tr) (Just tr)
dialog <- G.messageDialogNew
(Just mainWindow)
[G.DialogDestroyWithParent]
G.MessageInfo
G.ButtonsOk
"Successfully formed and sent transaction."
void $ G.dialogRun dialog
G.widgetDestroy dialog
onClearButtonPressed tabTransactions
updateWalletTab confPath st gst mw
onChooseContactsButtonPressed :: GUIState -> M.MainWindow -> IO ()
onChooseContactsButtonPressed gst M.MainWindow{..} = do
contacts <- getContacts gst
if null contacts
then reportSimpleError mainWindow "Your contact list is empty -- nothing to choose from."
else do
dialog <- G.dialogNew
G.set dialog [ G.windowTitle := "Choose contact"]
void $ G.dialogAddButton dialog "Choose" G.ResponseOk
void $ G.dialogAddButton dialog "Cancel" G.ResponseCancel
upbox <- G.castToBox <$> G.dialogGetContentArea dialog
view <- G.treeViewNew
model <- setupModel view
G.boxPackStart upbox view G.PackGrow 10
G.widgetShowAll upbox
mapM_ (G.listStoreAppend model) contacts
response <- G.dialogRun dialog
when (response == G.ResponseOk) $ do
sel <- G.treeViewGetSelection view
selNum <- G.treeSelectionCountSelectedRows sel
rows <- G.treeSelectionGetSelectedRows sel
if selNum == 0
then reportSimpleError mainWindow "No contacts were selected."
else do
let row = head $ head rows
e = contactAddress $ contacts !! row
G.entrySetText (M.entryPayTo tabTransactions) e
G.widgetDestroy dialog
where
setupModel view = do
model <- G.listStoreNew ([] :: [Contact])
appendColumn view model True "Name" $
\c -> [G.cellText := contactName c]
appendColumn view model True "Address" $
\c -> [G.cellText := contactAddress c]
G.treeViewSetModel view model
return model
appendColumn view model expand title attributesSetter = do
column <- G.treeViewColumnNew
G.treeViewColumnSetTitle column title
G.treeViewColumnSetExpand column expand
renderer <- G.cellRendererTextNew
G.cellLayoutPackStart column renderer False
G.cellLayoutSetAttributes column renderer model attributesSetter
void $ G.treeViewAppendColumn view column
|
input-output-hk/rscoin-haskell
|
src/User/GUI/RSCoin/TransactionsTab.hs
|
gpl-3.0
| 6,675 | 0 | 20 | 1,925 | 1,597 | 789 | 808 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.ServiceControl.Services.Check
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Checks an operation with Google Service Control to decide whether the
-- given operation should proceed. It should be called before the operation
-- is executed. If feasible, the client should cache the check results and
-- reuse them for 60 seconds. In case of server errors, the client can rely
-- on the cached results for longer time. NOTE: the \`CheckRequest\` has
-- the size limit of 64KB. This method requires the
-- \`servicemanagement.services.check\` permission on the specified
-- service. For more information, see [Google Cloud
-- IAM](https:\/\/cloud.google.com\/iam).
--
-- /See:/ <https://cloud.google.com/service-control/ Google Service Control API Reference> for @servicecontrol.services.check@.
module Network.Google.Resource.ServiceControl.Services.Check
(
-- * REST Resource
ServicesCheckResource
-- * Creating a Request
, servicesCheck
, ServicesCheck
-- * Request Lenses
, scXgafv
, scUploadProtocol
, scPp
, scAccessToken
, scUploadType
, scPayload
, scBearerToken
, scServiceName
, scCallback
) where
import Network.Google.Prelude
import Network.Google.ServiceControl.Types
-- | A resource alias for @servicecontrol.services.check@ method which the
-- 'ServicesCheck' request conforms to.
type ServicesCheckResource =
"v1" :>
"services" :>
CaptureMode "serviceName" "check" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "pp" Bool :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "bearer_token" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] CheckRequest :>
Post '[JSON] CheckResponse
-- | Checks an operation with Google Service Control to decide whether the
-- given operation should proceed. It should be called before the operation
-- is executed. If feasible, the client should cache the check results and
-- reuse them for 60 seconds. In case of server errors, the client can rely
-- on the cached results for longer time. NOTE: the \`CheckRequest\` has
-- the size limit of 64KB. This method requires the
-- \`servicemanagement.services.check\` permission on the specified
-- service. For more information, see [Google Cloud
-- IAM](https:\/\/cloud.google.com\/iam).
--
-- /See:/ 'servicesCheck' smart constructor.
data ServicesCheck = ServicesCheck'
{ _scXgafv :: !(Maybe Xgafv)
, _scUploadProtocol :: !(Maybe Text)
, _scPp :: !Bool
, _scAccessToken :: !(Maybe Text)
, _scUploadType :: !(Maybe Text)
, _scPayload :: !CheckRequest
, _scBearerToken :: !(Maybe Text)
, _scServiceName :: !Text
, _scCallback :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ServicesCheck' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'scXgafv'
--
-- * 'scUploadProtocol'
--
-- * 'scPp'
--
-- * 'scAccessToken'
--
-- * 'scUploadType'
--
-- * 'scPayload'
--
-- * 'scBearerToken'
--
-- * 'scServiceName'
--
-- * 'scCallback'
servicesCheck
:: CheckRequest -- ^ 'scPayload'
-> Text -- ^ 'scServiceName'
-> ServicesCheck
servicesCheck pScPayload_ pScServiceName_ =
ServicesCheck'
{ _scXgafv = Nothing
, _scUploadProtocol = Nothing
, _scPp = True
, _scAccessToken = Nothing
, _scUploadType = Nothing
, _scPayload = pScPayload_
, _scBearerToken = Nothing
, _scServiceName = pScServiceName_
, _scCallback = Nothing
}
-- | V1 error format.
scXgafv :: Lens' ServicesCheck (Maybe Xgafv)
scXgafv = lens _scXgafv (\ s a -> s{_scXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
scUploadProtocol :: Lens' ServicesCheck (Maybe Text)
scUploadProtocol
= lens _scUploadProtocol
(\ s a -> s{_scUploadProtocol = a})
-- | Pretty-print response.
scPp :: Lens' ServicesCheck Bool
scPp = lens _scPp (\ s a -> s{_scPp = a})
-- | OAuth access token.
scAccessToken :: Lens' ServicesCheck (Maybe Text)
scAccessToken
= lens _scAccessToken
(\ s a -> s{_scAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
scUploadType :: Lens' ServicesCheck (Maybe Text)
scUploadType
= lens _scUploadType (\ s a -> s{_scUploadType = a})
-- | Multipart request metadata.
scPayload :: Lens' ServicesCheck CheckRequest
scPayload
= lens _scPayload (\ s a -> s{_scPayload = a})
-- | OAuth bearer token.
scBearerToken :: Lens' ServicesCheck (Maybe Text)
scBearerToken
= lens _scBearerToken
(\ s a -> s{_scBearerToken = a})
-- | The service name as specified in its service configuration. For example,
-- \`\"pubsub.googleapis.com\"\`. See google.api.Service for the definition
-- of a service name.
scServiceName :: Lens' ServicesCheck Text
scServiceName
= lens _scServiceName
(\ s a -> s{_scServiceName = a})
-- | JSONP
scCallback :: Lens' ServicesCheck (Maybe Text)
scCallback
= lens _scCallback (\ s a -> s{_scCallback = a})
instance GoogleRequest ServicesCheck where
type Rs ServicesCheck = CheckResponse
type Scopes ServicesCheck =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/servicecontrol"]
requestClient ServicesCheck'{..}
= go _scServiceName _scXgafv _scUploadProtocol
(Just _scPp)
_scAccessToken
_scUploadType
_scBearerToken
_scCallback
(Just AltJSON)
_scPayload
serviceControlService
where go
= buildClient (Proxy :: Proxy ServicesCheckResource)
mempty
|
rueshyna/gogol
|
gogol-servicecontrol/gen/Network/Google/Resource/ServiceControl/Services/Check.hs
|
mpl-2.0
| 6,766 | 0 | 19 | 1,614 | 955 | 561 | 394 | 133 | 1 |
{- ORMOLU_DISABLE -}
-- Implicit CAD. Copyright (C) 2011, Christopher Olah ([email protected])
-- Copyright (C) 2016, Julia Longtin ([email protected])
-- Released under the GNU AGPLV3+, see LICENSE
module Graphics.Implicit.Export.Render.Interpolate (interpolate) where
import Prelude((*), (>), (<), (/=), (+), (-), (/), (==), (&&), abs)
import Graphics.Implicit.Definitions (ℝ, Fastℕ, ℝ2)
import Linear (V2(V2))
default (Fastℕ, ℝ)
-- Consider a function f(x):
{-
| \ f(x)
| - \
|_______\________ x
|
\
-}
-- The purpose of interpolate is to find the value of x where f(x) crosses zero.
-- This should be accomplished cheaply and accurately.
-- We are given the constraint that x will be between a and b.
-- We are also given the values of f at a and b: aval and bval.
-- Additionally, we get f (continuous and differentiable almost everywhere),
-- and the resolution of the object (so that we can make decisions about
-- how precise we need to be).
-- Interpolate will also be called in cases where f(x) doesn't cross zero
-- (i.e. aval and bval are both positive or both negative); in those cases
-- the output is never used.
-- Clarification: if f(x) crosses zero, but is not forced to do so by the
-- intermediate value theorem, that case is beyond the scope of this
-- function.
-- If it doesn't cross zero, we don't actually care what answer we give,
-- just that it's cheap.
-- FIXME: accept resolution on multiple axes.
interpolate :: ℝ2 -> ℝ2 -> (ℝ -> ℝ) -> ℝ -> ℝ
interpolate (V2 a aval) (V2 _ bval) _ _ | aval*bval > 0 = a
-- The obvious:
interpolate (V2 a 0) _ _ _ = a
interpolate _ (V2 b 0) _ _ = b
-- It may seem, at first, that our task is trivial.
-- Just use linear interpolation!
-- Unfortunately, there's a nasty failure case
{- /
/
________#________/____
________________/
-}
-- This is really common for us, for example in cubes,
-- where another variable dominates.
-- It may even be the case that, because we are so close
-- to the side, it looks like we are really close to an
-- answer... And we just give it back.
-- So we need to detect this, and get free goodies while we're
-- at it (shrink the domain to guess within, from (a,b) to (a',b')).
-- :)
interpolate (V2 a aval) (V2 b bval) f _ =
-- Make sure aval > bval, then pass to interpolateLin
if aval > bval
then interpolateLin 0 (V2 a aval) (V2 b bval) f
else interpolateLin 0 (V2 b bval) (V2 a aval) f
-- Yay, linear interpolation!
-- Try the answer linear interpolation gives us...
-- (n is to cut us off if recursion goes too deep)
interpolateLin :: Fastℕ -> ℝ2 -> ℝ2 -> (ℝ -> ℝ) -> ℝ
interpolateLin n (V2 a aval) (V2 b bval) obj | aval /= bval =
let
-- Interpolate and evaluate
mid :: ℝ
mid = a + (b-a)*aval/(aval-bval)
midval = obj mid
-- Are we done?
in if midval == 0
then mid
--
else let
(a', a'val, b', b'val, improveRatio) =
if midval > 0
then (mid, midval, b, bval, midval/aval)
else (a, aval, mid, midval, midval/bval)
           -- Sometimes linear interpolation doesn't work,
           -- because one side is very close to zero and flat.
           -- We catch it because the interval won't shrink when
-- this is the case. To test this, we look at whether
-- the replaced point evaluates to substantially closer
-- to zero than the previous one.
in if improveRatio < 0.3 && n < 4
-- And we continue on.
then interpolateLin (n+1) (V2 a' a'val) (V2 b' b'val) obj
-- But if not, we switch to binary interpolate, which is
-- immune to this problem
else interpolateBin (n+1) (V2 a' a'val) (V2 b' b'val) obj
-- And a fallback:
interpolateLin _ (V2 a _) _ _ = a
-- Now for binary searching!
interpolateBin :: Fastℕ -> ℝ2 -> ℝ2 -> (ℝ -> ℝ) -> ℝ
-- The termination case:
interpolateBin 5 (V2 a aval) (V2 b bval) _ =
if abs aval < abs bval
then a
else b
-- Otherwise, have fun with mid!
interpolateBin n (V2 a aval) (V2 b bval) f =
let
mid :: ℝ
mid = (a+b)/2
midval = f mid
in if midval > 0
then interpolateBin (n+1) (V2 mid midval) (V2 b bval) f
else interpolateBin (n+1) (V2 a aval) (V2 mid midval) f
|
colah/ImplicitCAD
|
Graphics/Implicit/Export/Render/Interpolate.hs
|
agpl-3.0
| 4,334 | 3 | 15 | 1,100 | 909 | 517 | 392 | 43 | 4 |
module SubprimeFib.A282812Spec (main, spec) where
import Test.Hspec
import SubprimeFib.A282812 (a282812)
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "A282812" $
it "correctly computes the first 20 elements" $
take 20 (map a282812 [1..]) `shouldBe` expectedValue where
expectedValue = [239,239,239,239,2,239,347,239,239,347,239,239,3,239,347,239,239,239,347,239]
|
peterokagey/haskellOEIS
|
test/SubprimeFib/A282812Spec.hs
|
apache-2.0
| 393 | 0 | 10 | 59 | 160 | 95 | 65 | 10 | 1 |
{-# LANGUAGE FlexibleInstances #-}
module Camfort.ReprintSpec (spec) where
import Camfort.Functionality
import Camfort.Reprint
import Camfort.Specification.Units.Monad (LiteralsOpt(LitMixed))
import qualified Data.ByteString.Char8 as B
import qualified Language.Fortran.Util.Position as FU
import System.FilePath
import Test.Hspec
import Test.QuickCheck
spec :: Spec
spec =
describe "subtext function tests" $ do
it "(unit test) first line of sample text" $
subtext (1, 1) (1, 1) (2, 1) btext `shouldBe`
(B.pack (text !! 0 ++ "\n"), B.pack (unlines . tail $ text))
it "(unit test) second line of sample text" $
subtext (1, 1) (2, 1) (3, 1) btext `shouldBe`
(B.pack (text !! 1 ++ "\n"), B.pack (unlines . tail . tail $ text))
it "(unit test) third line of sample text" $
subtext (1, 1) (4, 1) (5, 1) btext `shouldBe`
(B.pack (text !! 3 ++ "\n"), B.empty)
it "(unit test) fourth line, middle, of sample text" $
subtext (1, 1) (4, 2) (4, 5) btext `shouldBe`
(B.pack "G H", B.pack " I J K L\n")
it "(unit test) relative test (third line)" $
subtext (3, 1) (5, 1) (6, 1) btext `shouldBe`
(B.pack " E F\n", B.pack " G H I J K L\n")
it "(unit test) relative test (third line fragment)" $
subtext (3, 1) (5, 1) (5, 4) btext `shouldBe`
(B.pack " E", B.pack " F\n G H I J K L\n")
it "zero-length span at start yields empty string" $ property $
\s -> subtext (0, 0) (0, 0) (0, 0) s == (B.empty, s)
it "zero-length span yields empty substring" $ property $
\(l,c) -> \s -> (fst $ subtext (l,c) (l,c) (l,c) s) == B.empty
{-
it "takeBounds is the same as old one" $ property $
\p -> takeBoundsOld (FU.initPosition, p) btext
== takeBounds (FU.initPosition, p) btext
it "takeBounds is the same as old one, with different start pos" $ property $
\p -> takeBoundsOld (FU.Position 0 2 2, unwrapPO p) btext
== takeBounds (FU.Position 0 2 2, unwrapPO p) btext
-}
it "takeBounds test 1" $
(fst $ takeBounds (FU.Position 0 2 2, FU.Position 0 5 2) btext)
`shouldBe` (B.pack "A B")
it "takeBounds test 2" $
(fst $ takeBounds (FU.Position 0 2 2, FU.Position 0 1 3) btext)
`shouldBe` (B.pack "A B C D\n")
it "takeBound test 3" $
(fst $ takeBounds (FU.Position 1 1 1, FU.Position 1 5 3) btext2)
`shouldBe` (B.pack $ unlines $ take 3 text2)
context "Integration test with synthesising a spec" $ do
runIO $ unitsSynth ("tests" </> "fixtures" </> "simple.f90") []
LitMixed False Nothing
("tests" </> "fixtures" </> "simple.f90.out") ATDefault
actual <- runIO $ readFile ("tests" </> "fixtures" </> "simple.f90.out")
expected <- runIO $ readFile ("tests" </> "fixtures" </> "simple.expected.f90")
it "Unit synth" $ actual `shouldBe` expected
----
data PlusOne a = PlusOne { _unwrapPO :: a } deriving Show
instance Arbitrary (PlusOne FU.Position) where
arbitrary = do
FU.Position offset col line <- arbitrary
let col' = if line == 1 then col+1 else col
return $ PlusOne $ FU.Position offset col' (line + 1)
instance Arbitrary FU.Position where
arbitrary = do
offset <- arbitrary `suchThat` (>0)
line <- arbitrary `suchThat` (\x -> x >= 1 && x <= (length text))
col <- choose (1, orOne $ length (text !! (line - 1)))
return $ FU.Position offset col line
orOne x | x == 0 = 1
| otherwise = x
-- Arbitrary ByteString
instance Arbitrary B.ByteString where
arbitrary = do
numLines <- choose (0, 3)
return . B.pack . concat $ take numLines text
btext = B.pack . unlines $ text
text = ["A B C D"
,""
," E F"
," G H I J K L"]
btext2 = B.pack . unlines $ text2
text2 = ["A B C D"
,"E F"
,"G H"
,"I J K L"]
{-
-- Given a lower-bound and upper-bound pair of FU.Positions, split the
-- incoming SourceText based on the distanceF between the FU.Position pairs
takeBoundsOld :: (FU.Position, FU.Position) -> SourceText -> (SourceText, SourceText)
takeBoundsOld (l, u) = takeBounds' ((ll, lc), (ul, uc)) B.empty
where (FU.Position _ lc ll) = l
(FU.Position _ uc ul) = u
takeBounds' ((ll, lc), (ul, uc)) tk inp =
if (ll == ul && lc == uc) || (ll > ul) then (B.reverse tk, inp)
else
case B.uncons inp of
Nothing -> (B.reverse tk, inp)
Just ('\n', ys) -> takeBounds' ((ll+1, 1), (ul, uc)) (B.cons '\n' tk) ys
Just (x, xs) -> takeBounds' ((ll, lc+1), (ul, uc)) (B.cons x tk) xs
-}
|
mrd/camfort
|
tests/Camfort/ReprintSpec.hs
|
apache-2.0
| 4,711 | 0 | 16 | 1,314 | 1,319 | 707 | 612 | 79 | 1 |
{-# LANGUAGE TupleSections, NamedFieldPuns, ScopedTypeVariables, RankNTypes,
GADTs #-}
{-| Filtering of jobs for the Ganeti job queue.
-}
{-
Copyright (C) 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.JQScheduler.Filtering
( applyingFilter
, jobFiltering
-- * For testing only
, matchPredicate
, matches
) where
import qualified Data.ByteString as BS
import Data.List
import Data.Maybe
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
import qualified Text.JSON as J
import Ganeti.BasicTypes
import Ganeti.Constants (opcodeReasonSrcRlib2, opcodeReasonAuthUser)
import Ganeti.Errors
import Ganeti.Lens hiding (chosen)
import Ganeti.JQScheduler.Types
import Ganeti.JQueue (QueuedJob(..))
import Ganeti.JQueue.Lens
import Ganeti.JSON
import Ganeti.Objects (FilterRule(..), FilterAction(..), FilterPredicate(..),
filterRuleOrder)
import Ganeti.OpCodes (OpCode)
import Ganeti.OpCodes.Lens
import Ganeti.Query.Language
import Ganeti.Query.Filter (evaluateFilterM, evaluateFilterJSON, Comparator,
FilterOp(..), toCompFun)
import Ganeti.SlotMap
import Ganeti.Types (JobId(..), ReasonElem)
-- | Accesses a field of the JSON representation of an `OpCode` using a dotted
-- accessor (like @"a.b.c"@).
accessOpCodeField :: OpCode -> String -> ErrorResult J.JSValue
accessOpCodeField opc s = case nestedAccessByKeyDotted s (J.showJSON opc) of
J.Ok x -> Ok x
J.Error e -> Bad . ParameterError $ e
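-- A hedged usage sketch (the field name below is illustrative, not taken from
-- this module):
-- > accessOpCodeField opc "debug_level"
-- looks the dotted key up in the JSON encoding of @opc@, giving @Ok@ with the
-- field's value, or @Bad@ with a 'ParameterError' when the path is missing.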
-- | All `OpCode`s of a job.
opCodesOf :: QueuedJob -> [OpCode]
opCodesOf job =
job ^.. qjOpsL . traverse . qoInputL . validOpCodeL . metaOpCodeL
-- | All `ReasonElem`s of a job.
reasonsOf :: QueuedJob -> [ReasonElem]
reasonsOf job = job ^.. qjOpsL . traverse . qoInputL . validOpCodeL
. metaParamsL . opReasonL . traverse
-- | The authenticated RAPI user who submitted the job. It is determined by
-- the last RAPI-sourced entry in the reason trail.
userOf :: QueuedJob -> String -> String
userOf job default_user =
foldl extractRapiUser default_user $ reasonsOf job
where extractRapiUser current_user (source, reason, _) =
if source == opcodeReasonSrcRlib2
then fromMaybe current_user (stripPrefix opcodeReasonAuthUser reason)
else current_user
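-- A hedged sketch: for a job whose reason trail ends with an entry
-- > (opcodeReasonSrcRlib2, opcodeReasonAuthUser ++ "alice", ts)
-- @userOf job "anonymous"@ evaluates to @"alice"@; without any RAPI-sourced
-- entry it falls back to the supplied default.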
-- | Like `evaluateFilterM`, but allowing only `Comparator` operations;
-- all other filter language operations are evaluated as `False`.
--
-- The passed function is supposed to return `Just True/False` depending
-- on whether the comparing operation succeeds or not, and `Nothing` if
-- the comparison itself is invalid (e.g. comparing to a field that doesn't
-- exist).
evaluateFilterComparator :: (Ord field)
=> Filter field
-> (Comparator -> field -> FilterValue -> Maybe Bool)
-> Bool
evaluateFilterComparator fil opFun =
fromMaybe False $
evaluateFilterM
(\filterOp -> case filterOp of
Comp cmp -> opFun (toCompFun cmp)
_ -> \_ _ -> Nothing -- non-comparisons (become False)
)
fil
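-- A hedged sketch (constructor names as in Ganeti.Query.Language): with
-- > evaluateFilterComparator (EQFilter "id" (NumericValue 42)) opFun
-- the callback @opFun@ receives the equality comparator together with the
-- field @"id"@ and the value, while non-comparison constructs such as
-- @TrueFilter@ make the whole evaluation come out 'False'.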
-- | Whether a `FilterPredicate` is true for a job.
matchPredicate :: QueuedJob
-> JobId -- ^ the watermark to compare against
-- if the predicate references it
-> FilterPredicate
-> Bool
matchPredicate job watermark predicate = case predicate of
FPJobId fil ->
let jid = qjId job
jidInt = fromIntegral (fromJobId jid)
in evaluateFilterComparator fil $ \comp field val -> case field of
"id" -> case val of
NumericValue i -> Just $ jidInt `comp` i
QuotedString "watermark" -> Just $ jid `comp` watermark
QuotedString _ -> Nothing
_ -> Nothing
FPOpCode fil ->
let opMatches opc = genericResult (const False) id $ do
jsonFilter <- traverse (accessOpCodeField opc) fil
evaluateFilterJSON jsonFilter
in any opMatches (opCodesOf job)
FPReason fil ->
let reasonMatches (source, reason, timestamp) =
evaluateFilterComparator fil $ \comp field val -> case field of
"source" -> Just $ QuotedString source `comp` val
"reason" -> Just $ QuotedString reason `comp` val
"timestamp" -> Just $ NumericValue timestamp `comp` val
_ -> Nothing
in any reasonMatches (reasonsOf job)
FPUser fil -> evaluateFilterComparator fil $ \comp field val -> case field of
"user" -> Just $ (QuotedString $ userOf job "") `comp` val
_ -> Nothing
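-- A hedged sketch: a predicate such as
-- > FPJobId (EQFilter "id" (QuotedString "watermark"))
-- matches exactly when the job id equals the rule's watermark, while
-- @FPUser (EQFilter "user" (QuotedString "alice"))@ matches jobs whose RAPI
-- reason trail identifies the user @"alice"@.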
-- | Whether all predicates of the filter rule are true for the job.
matches :: QueuedJob -> FilterRule -> Bool
matches job FilterRule{ frPredicates, frWatermark } =
all (matchPredicate job frWatermark) frPredicates
-- | Filters need to be processed in the order as given by the spec;
-- see `filterRuleOrder`.
orderFilters :: Set FilterRule -> [FilterRule]
orderFilters = sortBy filterRuleOrder . Set.toList
-- | Finds the first filter whose predicates all match the job and whose
-- action is not `Continue`. This is the /applying/ filter.
applyingFilter :: Set FilterRule -> QueuedJob -> Maybe FilterRule
applyingFilter filters job =
-- Skip over all `Continue`s, to the first filter that matches.
find ((Continue /=) . frAction)
. filter (matches job)
. orderFilters
$ filters
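-- A hedged sketch with hypothetical rule names: assuming both rules match the
-- job and @continueRule@ precedes @rejectRule@ in 'filterRuleOrder',
-- > applyingFilter (Set.fromList [continueRule, rejectRule]) job
-- skips the 'Continue' rule and returns @Just rejectRule@; if only 'Continue'
-- rules match, the result is 'Nothing'.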
-- | SlotMap for filter rule rate limiting, having `FilterRule` UUIDs as keys.
type RateLimitSlotMap = SlotMap BS.ByteString
-- We would prefer FilterRule here but that has no Ord instance (yet).
-- | State to be accumulated while traversing filters.
data FilterChainState = FilterChainState
{ rateLimitSlotMap :: RateLimitSlotMap -- ^ counts
} deriving (Eq, Ord, Show)
-- | Update a `FilterChainState` if the given `CountMap` fits into its
-- filtering `RateLimitSlotMap`.
tryFitSlots :: FilterChainState
-> CountMap BS.ByteString
-> Maybe FilterChainState
tryFitSlots st@FilterChainState{ rateLimitSlotMap = slotMap } countMap =
if slotMap `hasSlotsFor` countMap
then Just st{ rateLimitSlotMap = slotMap `occupySlots` countMap }
else Nothing
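-- A hedged numeric sketch: if the state's slot map holds @Slot 2 3@ for some
-- filter UUID (two of three slots taken), then
-- > tryFitSlots st (Map.fromList [(uuid, 1)])
-- succeeds and bumps that entry to @Slot 3 3@, whereas a count of 2 would
-- exceed the limit and yield 'Nothing'.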
-- | For a given job queue and set of filters, calculates how many rate
-- limiting filter slots are available and how many are taken by running jobs
-- in the queue.
queueRateLimitSlotMap :: Queue -> Set FilterRule -> RateLimitSlotMap
queueRateLimitSlotMap queue filters =
let -- Rate limiting slots for each filter, with 0 occupied count each
-- (limits only).
emptyFilterSlots =
Map.fromList
[ (uuid, Slot 0 n)
| FilterRule{ frUuid = uuid
, frAction = RateLimit n } <- Set.toList filters ]
-- How many rate limiting slots are taken by the jobs currently running
      -- in the queue (counts only).
-- A job takes a slot of a RateLimit filter if that filter is the first
-- one that matches for the job.
runningJobSlots = Map.fromListWith (+)
[ (frUuid, 1) | Just FilterRule{ frUuid, frAction = RateLimit _ } <-
map (applyingFilter filters . jJob)
$ qRunning queue ++ qManipulated queue ]
in -- Fill limits from above with counts from above.
emptyFilterSlots `occupySlots` runningJobSlots
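-- A hedged sketch: with a single @RateLimit 2@ filter and exactly one running
-- job to which it applies, the result is roughly
-- > Map.fromList [(frUuid, Slot 1 2)]
-- i.e. one of the two slots is already occupied.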
-- | Implements job filtering as specified in `doc/design-optables.rst`.
--
-- Importantly, the filter that *applies* is the first one of which all
-- predicates match; this is implemented in `applyingFilter`.
--
-- The initial `FilterChainState` is currently not cached across
-- `selectJobsToRun` invocations because the number of running jobs is
-- typically small (< 100).
jobFiltering :: Queue -> Set FilterRule -> [JobWithStat] -> [JobWithStat]
jobFiltering queue filters =
let
processFilters :: FilterChainState
-> JobWithStat
-> (FilterChainState, Maybe JobWithStat)
processFilters state job =
case applyingFilter filters (jJob job) of
Nothing -> (state, Just job) -- no filter applies, accept job
Just FilterRule{ frUuid, frAction } -> case frAction of
Accept -> (state, Just job)
Continue -> (state, Just job)
Pause -> (state, Nothing)
Reject -> (state, Nothing)
RateLimit _ -> -- A matching job takes 1 slot.
let jobSlots = Map.fromList [(frUuid, 1)]
in case tryFitSlots state jobSlots of
Nothing -> (state, Nothing)
Just state' -> (state', Just job)
in catMaybes . snd . mapAccumL processFilters FilterChainState
{ rateLimitSlotMap = queueRateLimitSlotMap queue filters
}
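-- A hedged usage sketch: a scheduler pass would call something like
-- > jobFiltering queue filters candidates
-- keeping only those candidates whose applying filter (if any) accepts them
-- and dropping rate-limited jobs once their filter's slots are exhausted.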
|
leshchevds/ganeti
|
src/Ganeti/JQScheduler/Filtering.hs
|
bsd-2-clause
| 10,127 | 0 | 21 | 2,514 | 1,763 | 958 | 805 | -1 | -1 |
module Main where
import Crypto.Classes
import Crypto.Hash.Ed2k
import qualified Data.ByteString as B
import Data.Word (Word8)
import System.Exit
testHash :: [Word8]
testHash = [83,211,145,194,240,9,199,230,55,34,205,252,35,114,246,228]
main :: IO ()
main = do
testData <- B.readFile "testData.bin"
let testHash' = B.unpack $ encode ((hash' testData) :: Ed2k)
if testHash' == testHash
then exitSuccess
else do
print testHash'
exitFailure
|
nullref/haskell-hash-ed2k
|
Benchmark/Strict.hs
|
bsd-2-clause
| 467 | 0 | 14 | 84 | 180 | 104 | 76 | 17 | 2 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QUndoView.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:14
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QUndoView (
QqUndoView(..)
,cleanIcon
,emptyLabel
,setCleanIcon
,setEmptyLabel
,setStack
,stack
,qUndoView_delete
,qUndoView_deleteLater
)
where
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Enums.Gui.QItemSelectionModel
import Qtc.Enums.Gui.QAbstractItemView
import Qtc.Enums.Gui.QPaintDevice
import Qtc.Enums.Core.Qt
import Qtc.Enums.Gui.QAbstractItemDelegate
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui
import Qtc.ClassTypes.Gui
instance QuserMethod (QUndoView ()) (()) (IO ()) where
userMethod qobj evid ()
= withObjectPtr qobj $ \cobj_qobj ->
qtc_QUndoView_userMethod cobj_qobj (toCInt evid)
foreign import ccall "qtc_QUndoView_userMethod" qtc_QUndoView_userMethod :: Ptr (TQUndoView a) -> CInt -> IO ()
instance QuserMethod (QUndoViewSc a) (()) (IO ()) where
userMethod qobj evid ()
= withObjectPtr qobj $ \cobj_qobj ->
qtc_QUndoView_userMethod cobj_qobj (toCInt evid)
instance QuserMethod (QUndoView ()) (QVariant ()) (IO (QVariant ())) where
userMethod qobj evid qvoj
= withObjectRefResult $
withObjectPtr qobj $ \cobj_qobj ->
withObjectPtr qvoj $ \cobj_qvoj ->
qtc_QUndoView_userMethodVariant cobj_qobj (toCInt evid) cobj_qvoj
foreign import ccall "qtc_QUndoView_userMethodVariant" qtc_QUndoView_userMethodVariant :: Ptr (TQUndoView a) -> CInt -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
instance QuserMethod (QUndoViewSc a) (QVariant ()) (IO (QVariant ())) where
userMethod qobj evid qvoj
= withObjectRefResult $
withObjectPtr qobj $ \cobj_qobj ->
withObjectPtr qvoj $ \cobj_qvoj ->
qtc_QUndoView_userMethodVariant cobj_qobj (toCInt evid) cobj_qvoj
class QqUndoView x1 where
qUndoView :: x1 -> IO (QUndoView ())
instance QqUndoView (()) where
qUndoView ()
= withQUndoViewResult $
qtc_QUndoView
foreign import ccall "qtc_QUndoView" qtc_QUndoView :: IO (Ptr (TQUndoView ()))
instance QqUndoView ((QWidget t1)) where
qUndoView (x1)
= withQUndoViewResult $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView1 cobj_x1
foreign import ccall "qtc_QUndoView1" qtc_QUndoView1 :: Ptr (TQWidget t1) -> IO (Ptr (TQUndoView ()))
instance QqUndoView ((QUndoStack t1)) where
qUndoView (x1)
= withQUndoViewResult $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView2 cobj_x1
foreign import ccall "qtc_QUndoView2" qtc_QUndoView2 :: Ptr (TQUndoStack t1) -> IO (Ptr (TQUndoView ()))
instance QqUndoView ((QUndoGroup t1)) where
qUndoView (x1)
= withQUndoViewResult $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView3 cobj_x1
foreign import ccall "qtc_QUndoView3" qtc_QUndoView3 :: Ptr (TQUndoGroup t1) -> IO (Ptr (TQUndoView ()))
instance QqUndoView ((QUndoGroup t1, QWidget t2)) where
qUndoView (x1, x2)
= withQUndoViewResult $
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QUndoView4 cobj_x1 cobj_x2
foreign import ccall "qtc_QUndoView4" qtc_QUndoView4 :: Ptr (TQUndoGroup t1) -> Ptr (TQWidget t2) -> IO (Ptr (TQUndoView ()))
instance QqUndoView ((QUndoStack t1, QWidget t2)) where
qUndoView (x1, x2)
= withQUndoViewResult $
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QUndoView5 cobj_x1 cobj_x2
foreign import ccall "qtc_QUndoView5" qtc_QUndoView5 :: Ptr (TQUndoStack t1) -> Ptr (TQWidget t2) -> IO (Ptr (TQUndoView ()))
cleanIcon :: QUndoView a -> (()) -> IO (QIcon ())
cleanIcon x0 ()
= withQIconResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_cleanIcon cobj_x0
foreign import ccall "qtc_QUndoView_cleanIcon" qtc_QUndoView_cleanIcon :: Ptr (TQUndoView a) -> IO (Ptr (TQIcon ()))
emptyLabel :: QUndoView a -> (()) -> IO (String)
emptyLabel x0 ()
= withStringResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_emptyLabel cobj_x0
foreign import ccall "qtc_QUndoView_emptyLabel" qtc_QUndoView_emptyLabel :: Ptr (TQUndoView a) -> IO (Ptr (TQString ()))
instance Qgroup (QUndoView a) (()) (IO (QUndoGroup ())) where
group x0 ()
= withQUndoGroupResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_group cobj_x0
foreign import ccall "qtc_QUndoView_group" qtc_QUndoView_group :: Ptr (TQUndoView a) -> IO (Ptr (TQUndoGroup ()))
setCleanIcon :: QUndoView a -> ((QIcon t1)) -> IO ()
setCleanIcon x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_setCleanIcon cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_setCleanIcon" qtc_QUndoView_setCleanIcon :: Ptr (TQUndoView a) -> Ptr (TQIcon t1) -> IO ()
setEmptyLabel :: QUndoView a -> ((String)) -> IO ()
setEmptyLabel x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QUndoView_setEmptyLabel cobj_x0 cstr_x1
foreign import ccall "qtc_QUndoView_setEmptyLabel" qtc_QUndoView_setEmptyLabel :: Ptr (TQUndoView a) -> CWString -> IO ()
instance QsetGroup (QUndoView a) ((QUndoGroup t1)) where
setGroup x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_setGroup cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_setGroup" qtc_QUndoView_setGroup :: Ptr (TQUndoView a) -> Ptr (TQUndoGroup t1) -> IO ()
setStack :: QUndoView a -> ((QUndoStack t1)) -> IO ()
setStack x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_setStack cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_setStack" qtc_QUndoView_setStack :: Ptr (TQUndoView a) -> Ptr (TQUndoStack t1) -> IO ()
stack :: QUndoView a -> (()) -> IO (QUndoStack ())
stack x0 ()
= withQUndoStackResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_stack cobj_x0
foreign import ccall "qtc_QUndoView_stack" qtc_QUndoView_stack :: Ptr (TQUndoView a) -> IO (Ptr (TQUndoStack ()))
qUndoView_delete :: QUndoView a -> IO ()
qUndoView_delete x0
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_delete cobj_x0
foreign import ccall "qtc_QUndoView_delete" qtc_QUndoView_delete :: Ptr (TQUndoView a) -> IO ()
qUndoView_deleteLater :: QUndoView a -> IO ()
qUndoView_deleteLater x0
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_deleteLater cobj_x0
foreign import ccall "qtc_QUndoView_deleteLater" qtc_QUndoView_deleteLater :: Ptr (TQUndoView a) -> IO ()
instance QqcontentsSize (QUndoView ()) (()) where
qcontentsSize x0 ()
= withQSizeResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_contentsSize cobj_x0
foreign import ccall "qtc_QUndoView_contentsSize" qtc_QUndoView_contentsSize :: Ptr (TQUndoView a) -> IO (Ptr (TQSize ()))
instance QqcontentsSize (QUndoViewSc a) (()) where
qcontentsSize x0 ()
= withQSizeResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_contentsSize cobj_x0
instance QcontentsSize (QUndoView ()) (()) where
contentsSize x0 ()
= withSizeResult $ \csize_ret_w csize_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_contentsSize_qth cobj_x0 csize_ret_w csize_ret_h
foreign import ccall "qtc_QUndoView_contentsSize_qth" qtc_QUndoView_contentsSize_qth :: Ptr (TQUndoView a) -> Ptr CInt -> Ptr CInt -> IO ()
instance QcontentsSize (QUndoViewSc a) (()) where
contentsSize x0 ()
= withSizeResult $ \csize_ret_w csize_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_contentsSize_qth cobj_x0 csize_ret_w csize_ret_h
instance QcurrentChanged (QUndoView ()) ((QModelIndex t1, QModelIndex t2)) where
currentChanged x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QUndoView_currentChanged cobj_x0 cobj_x1 cobj_x2
foreign import ccall "qtc_QUndoView_currentChanged" qtc_QUndoView_currentChanged :: Ptr (TQUndoView a) -> Ptr (TQModelIndex t1) -> Ptr (TQModelIndex t2) -> IO ()
instance QcurrentChanged (QUndoViewSc a) ((QModelIndex t1, QModelIndex t2)) where
currentChanged x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QUndoView_currentChanged cobj_x0 cobj_x1 cobj_x2
instance QdataChanged (QUndoView ()) ((QModelIndex t1, QModelIndex t2)) where
dataChanged x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QUndoView_dataChanged cobj_x0 cobj_x1 cobj_x2
foreign import ccall "qtc_QUndoView_dataChanged" qtc_QUndoView_dataChanged :: Ptr (TQUndoView a) -> Ptr (TQModelIndex t1) -> Ptr (TQModelIndex t2) -> IO ()
instance QdataChanged (QUndoViewSc a) ((QModelIndex t1, QModelIndex t2)) where
dataChanged x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QUndoView_dataChanged cobj_x0 cobj_x1 cobj_x2
instance QdoItemsLayout (QUndoView ()) (()) where
doItemsLayout x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_doItemsLayout_h cobj_x0
foreign import ccall "qtc_QUndoView_doItemsLayout_h" qtc_QUndoView_doItemsLayout_h :: Ptr (TQUndoView a) -> IO ()
instance QdoItemsLayout (QUndoViewSc a) (()) where
doItemsLayout x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_doItemsLayout_h cobj_x0
instance QdragLeaveEvent (QUndoView ()) ((QDragLeaveEvent t1)) where
dragLeaveEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_dragLeaveEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_dragLeaveEvent_h" qtc_QUndoView_dragLeaveEvent_h :: Ptr (TQUndoView a) -> Ptr (TQDragLeaveEvent t1) -> IO ()
instance QdragLeaveEvent (QUndoViewSc a) ((QDragLeaveEvent t1)) where
dragLeaveEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_dragLeaveEvent_h cobj_x0 cobj_x1
instance QdragMoveEvent (QUndoView ()) ((QDragMoveEvent t1)) where
dragMoveEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_dragMoveEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_dragMoveEvent_h" qtc_QUndoView_dragMoveEvent_h :: Ptr (TQUndoView a) -> Ptr (TQDragMoveEvent t1) -> IO ()
instance QdragMoveEvent (QUndoViewSc a) ((QDragMoveEvent t1)) where
dragMoveEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_dragMoveEvent_h cobj_x0 cobj_x1
instance QdropEvent (QUndoView ()) ((QDropEvent t1)) where
dropEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_dropEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_dropEvent_h" qtc_QUndoView_dropEvent_h :: Ptr (TQUndoView a) -> Ptr (TQDropEvent t1) -> IO ()
instance QdropEvent (QUndoViewSc a) ((QDropEvent t1)) where
dropEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_dropEvent_h cobj_x0 cobj_x1
instance Qevent (QUndoView ()) ((QEvent t1)) where
event x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_event_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_event_h" qtc_QUndoView_event_h :: Ptr (TQUndoView a) -> Ptr (TQEvent t1) -> IO CBool
instance Qevent (QUndoViewSc a) ((QEvent t1)) where
event x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_event_h cobj_x0 cobj_x1
instance QhorizontalOffset (QUndoView ()) (()) where
horizontalOffset x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_horizontalOffset cobj_x0
foreign import ccall "qtc_QUndoView_horizontalOffset" qtc_QUndoView_horizontalOffset :: Ptr (TQUndoView a) -> IO CInt
instance QhorizontalOffset (QUndoViewSc a) (()) where
horizontalOffset x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_horizontalOffset cobj_x0
instance QindexAt (QUndoView ()) ((Point)) where
indexAt x0 (x1)
= withQModelIndexResult $
withObjectPtr x0 $ \cobj_x0 ->
withCPoint x1 $ \cpoint_x1_x cpoint_x1_y ->
qtc_QUndoView_indexAt_qth_h cobj_x0 cpoint_x1_x cpoint_x1_y
foreign import ccall "qtc_QUndoView_indexAt_qth_h" qtc_QUndoView_indexAt_qth_h :: Ptr (TQUndoView a) -> CInt -> CInt -> IO (Ptr (TQModelIndex ()))
instance QindexAt (QUndoViewSc a) ((Point)) where
indexAt x0 (x1)
= withQModelIndexResult $
withObjectPtr x0 $ \cobj_x0 ->
withCPoint x1 $ \cpoint_x1_x cpoint_x1_y ->
qtc_QUndoView_indexAt_qth_h cobj_x0 cpoint_x1_x cpoint_x1_y
instance QqindexAt (QUndoView ()) ((QPoint t1)) where
qindexAt x0 (x1)
= withQModelIndexResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_indexAt_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_indexAt_h" qtc_QUndoView_indexAt_h :: Ptr (TQUndoView a) -> Ptr (TQPoint t1) -> IO (Ptr (TQModelIndex ()))
instance QqindexAt (QUndoViewSc a) ((QPoint t1)) where
qindexAt x0 (x1)
= withQModelIndexResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_indexAt_h cobj_x0 cobj_x1
instance QinternalDrag (QUndoView ()) ((DropActions)) where
internalDrag x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_internalDrag cobj_x0 (toCLong $ qFlags_toInt x1)
foreign import ccall "qtc_QUndoView_internalDrag" qtc_QUndoView_internalDrag :: Ptr (TQUndoView a) -> CLong -> IO ()
instance QinternalDrag (QUndoViewSc a) ((DropActions)) where
internalDrag x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_internalDrag cobj_x0 (toCLong $ qFlags_toInt x1)
instance QinternalDrop (QUndoView ()) ((QDropEvent t1)) where
internalDrop x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_internalDrop cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_internalDrop" qtc_QUndoView_internalDrop :: Ptr (TQUndoView a) -> Ptr (TQDropEvent t1) -> IO ()
instance QinternalDrop (QUndoViewSc a) ((QDropEvent t1)) where
internalDrop x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_internalDrop cobj_x0 cobj_x1
instance QisIndexHidden (QUndoView ()) ((QModelIndex t1)) where
isIndexHidden x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_isIndexHidden cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_isIndexHidden" qtc_QUndoView_isIndexHidden :: Ptr (TQUndoView a) -> Ptr (TQModelIndex t1) -> IO CBool
instance QisIndexHidden (QUndoViewSc a) ((QModelIndex t1)) where
isIndexHidden x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_isIndexHidden cobj_x0 cobj_x1
instance QmouseMoveEvent (QUndoView ()) ((QMouseEvent t1)) where
mouseMoveEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_mouseMoveEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_mouseMoveEvent_h" qtc_QUndoView_mouseMoveEvent_h :: Ptr (TQUndoView a) -> Ptr (TQMouseEvent t1) -> IO ()
instance QmouseMoveEvent (QUndoViewSc a) ((QMouseEvent t1)) where
mouseMoveEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_mouseMoveEvent_h cobj_x0 cobj_x1
instance QmouseReleaseEvent (QUndoView ()) ((QMouseEvent t1)) where
mouseReleaseEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_mouseReleaseEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_mouseReleaseEvent_h" qtc_QUndoView_mouseReleaseEvent_h :: Ptr (TQUndoView a) -> Ptr (TQMouseEvent t1) -> IO ()
instance QmouseReleaseEvent (QUndoViewSc a) ((QMouseEvent t1)) where
mouseReleaseEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_mouseReleaseEvent_h cobj_x0 cobj_x1
instance QmoveCursor (QUndoView ()) ((CursorAction, KeyboardModifiers)) (IO (QModelIndex ())) where
moveCursor x0 (x1, x2)
= withQModelIndexResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_moveCursor cobj_x0 (toCLong $ qEnum_toInt x1) (toCLong $ qFlags_toInt x2)
foreign import ccall "qtc_QUndoView_moveCursor" qtc_QUndoView_moveCursor :: Ptr (TQUndoView a) -> CLong -> CLong -> IO (Ptr (TQModelIndex ()))
instance QmoveCursor (QUndoViewSc a) ((CursorAction, KeyboardModifiers)) (IO (QModelIndex ())) where
moveCursor x0 (x1, x2)
= withQModelIndexResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_moveCursor cobj_x0 (toCLong $ qEnum_toInt x1) (toCLong $ qFlags_toInt x2)
instance QpaintEvent (QUndoView ()) ((QPaintEvent t1)) where
paintEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_paintEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_paintEvent_h" qtc_QUndoView_paintEvent_h :: Ptr (TQUndoView a) -> Ptr (TQPaintEvent t1) -> IO ()
instance QpaintEvent (QUndoViewSc a) ((QPaintEvent t1)) where
paintEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_paintEvent_h cobj_x0 cobj_x1
instance QqrectForIndex (QUndoView ()) ((QModelIndex t1)) where
qrectForIndex x0 (x1)
= withQRectResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_rectForIndex cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_rectForIndex" qtc_QUndoView_rectForIndex :: Ptr (TQUndoView a) -> Ptr (TQModelIndex t1) -> IO (Ptr (TQRect ()))
instance QqrectForIndex (QUndoViewSc a) ((QModelIndex t1)) where
qrectForIndex x0 (x1)
= withQRectResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_rectForIndex cobj_x0 cobj_x1
instance QrectForIndex (QUndoView ()) ((QModelIndex t1)) where
rectForIndex x0 (x1)
= withRectResult $ \crect_ret_x crect_ret_y crect_ret_w crect_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_rectForIndex_qth cobj_x0 cobj_x1 crect_ret_x crect_ret_y crect_ret_w crect_ret_h
foreign import ccall "qtc_QUndoView_rectForIndex_qth" qtc_QUndoView_rectForIndex_qth :: Ptr (TQUndoView a) -> Ptr (TQModelIndex t1) -> Ptr CInt -> Ptr CInt -> Ptr CInt -> Ptr CInt -> IO ()
instance QrectForIndex (QUndoViewSc a) ((QModelIndex t1)) where
rectForIndex x0 (x1)
= withRectResult $ \crect_ret_x crect_ret_y crect_ret_w crect_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_rectForIndex_qth cobj_x0 cobj_x1 crect_ret_x crect_ret_y crect_ret_w crect_ret_h
instance Qreset (QUndoView ()) (()) (IO ()) where
reset x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_reset_h cobj_x0
foreign import ccall "qtc_QUndoView_reset_h" qtc_QUndoView_reset_h :: Ptr (TQUndoView a) -> IO ()
instance Qreset (QUndoViewSc a) (()) (IO ()) where
reset x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_reset_h cobj_x0
instance QresizeContents (QUndoView ()) ((Int, Int)) where
resizeContents x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_resizeContents cobj_x0 (toCInt x1) (toCInt x2)
foreign import ccall "qtc_QUndoView_resizeContents" qtc_QUndoView_resizeContents :: Ptr (TQUndoView a) -> CInt -> CInt -> IO ()
instance QresizeContents (QUndoViewSc a) ((Int, Int)) where
resizeContents x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_resizeContents cobj_x0 (toCInt x1) (toCInt x2)
instance QresizeEvent (QUndoView ()) ((QResizeEvent t1)) where
resizeEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_resizeEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_resizeEvent_h" qtc_QUndoView_resizeEvent_h :: Ptr (TQUndoView a) -> Ptr (TQResizeEvent t1) -> IO ()
instance QresizeEvent (QUndoViewSc a) ((QResizeEvent t1)) where
resizeEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_resizeEvent_h cobj_x0 cobj_x1
instance QrowsAboutToBeRemoved (QUndoView ()) ((QModelIndex t1, Int, Int)) where
rowsAboutToBeRemoved x0 (x1, x2, x3)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_rowsAboutToBeRemoved cobj_x0 cobj_x1 (toCInt x2) (toCInt x3)
foreign import ccall "qtc_QUndoView_rowsAboutToBeRemoved" qtc_QUndoView_rowsAboutToBeRemoved :: Ptr (TQUndoView a) -> Ptr (TQModelIndex t1) -> CInt -> CInt -> IO ()
instance QrowsAboutToBeRemoved (QUndoViewSc a) ((QModelIndex t1, Int, Int)) where
rowsAboutToBeRemoved x0 (x1, x2, x3)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_rowsAboutToBeRemoved cobj_x0 cobj_x1 (toCInt x2) (toCInt x3)
instance QrowsInserted (QUndoView ()) ((QModelIndex t1, Int, Int)) where
rowsInserted x0 (x1, x2, x3)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_rowsInserted cobj_x0 cobj_x1 (toCInt x2) (toCInt x3)
foreign import ccall "qtc_QUndoView_rowsInserted" qtc_QUndoView_rowsInserted :: Ptr (TQUndoView a) -> Ptr (TQModelIndex t1) -> CInt -> CInt -> IO ()
instance QrowsInserted (QUndoViewSc a) ((QModelIndex t1, Int, Int)) where
rowsInserted x0 (x1, x2, x3)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_rowsInserted cobj_x0 cobj_x1 (toCInt x2) (toCInt x3)
instance QscrollContentsBy (QUndoView ()) ((Int, Int)) where
scrollContentsBy x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_scrollContentsBy_h cobj_x0 (toCInt x1) (toCInt x2)
foreign import ccall "qtc_QUndoView_scrollContentsBy_h" qtc_QUndoView_scrollContentsBy_h :: Ptr (TQUndoView a) -> CInt -> CInt -> IO ()
instance QscrollContentsBy (QUndoViewSc a) ((Int, Int)) where
scrollContentsBy x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_scrollContentsBy_h cobj_x0 (toCInt x1) (toCInt x2)
instance QscrollTo (QUndoView ()) ((QModelIndex t1, ScrollHint)) where
scrollTo x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_scrollTo_h cobj_x0 cobj_x1 (toCLong $ qEnum_toInt x2)
foreign import ccall "qtc_QUndoView_scrollTo_h" qtc_QUndoView_scrollTo_h :: Ptr (TQUndoView a) -> Ptr (TQModelIndex t1) -> CLong -> IO ()
instance QscrollTo (QUndoViewSc a) ((QModelIndex t1, ScrollHint)) where
scrollTo x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_scrollTo_h cobj_x0 cobj_x1 (toCLong $ qEnum_toInt x2)
instance QselectedIndexes (QUndoView ()) (()) where
selectedIndexes x0 ()
= withQListObjectRefResult $ \arr ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_selectedIndexes cobj_x0 arr
foreign import ccall "qtc_QUndoView_selectedIndexes" qtc_QUndoView_selectedIndexes :: Ptr (TQUndoView a) -> Ptr (Ptr (TQModelIndex ())) -> IO CInt
instance QselectedIndexes (QUndoViewSc a) (()) where
selectedIndexes x0 ()
= withQListObjectRefResult $ \arr ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_selectedIndexes cobj_x0 arr
instance QselectionChanged (QUndoView ()) ((QItemSelection t1, QItemSelection t2)) where
selectionChanged x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QUndoView_selectionChanged cobj_x0 cobj_x1 cobj_x2
foreign import ccall "qtc_QUndoView_selectionChanged" qtc_QUndoView_selectionChanged :: Ptr (TQUndoView a) -> Ptr (TQItemSelection t1) -> Ptr (TQItemSelection t2) -> IO ()
instance QselectionChanged (QUndoViewSc a) ((QItemSelection t1, QItemSelection t2)) where
selectionChanged x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QUndoView_selectionChanged cobj_x0 cobj_x1 cobj_x2
instance QsetPositionForIndex (QUndoView ()) ((Point, QModelIndex t2)) where
setPositionForIndex x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
withCPoint x1 $ \cpoint_x1_x cpoint_x1_y ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QUndoView_setPositionForIndex_qth cobj_x0 cpoint_x1_x cpoint_x1_y cobj_x2
foreign import ccall "qtc_QUndoView_setPositionForIndex_qth" qtc_QUndoView_setPositionForIndex_qth :: Ptr (TQUndoView a) -> CInt -> CInt -> Ptr (TQModelIndex t2) -> IO ()
instance QsetPositionForIndex (QUndoViewSc a) ((Point, QModelIndex t2)) where
setPositionForIndex x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
withCPoint x1 $ \cpoint_x1_x cpoint_x1_y ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QUndoView_setPositionForIndex_qth cobj_x0 cpoint_x1_x cpoint_x1_y cobj_x2
instance QqsetPositionForIndex (QUndoView ()) ((QPoint t1, QModelIndex t2)) where
qsetPositionForIndex x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QUndoView_setPositionForIndex cobj_x0 cobj_x1 cobj_x2
foreign import ccall "qtc_QUndoView_setPositionForIndex" qtc_QUndoView_setPositionForIndex :: Ptr (TQUndoView a) -> Ptr (TQPoint t1) -> Ptr (TQModelIndex t2) -> IO ()
instance QqsetPositionForIndex (QUndoViewSc a) ((QPoint t1, QModelIndex t2)) where
qsetPositionForIndex x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QUndoView_setPositionForIndex cobj_x0 cobj_x1 cobj_x2
instance QsetRootIndex (QUndoView ()) ((QModelIndex t1)) where
setRootIndex x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_setRootIndex_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_setRootIndex_h" qtc_QUndoView_setRootIndex_h :: Ptr (TQUndoView a) -> Ptr (TQModelIndex t1) -> IO ()
instance QsetRootIndex (QUndoViewSc a) ((QModelIndex t1)) where
setRootIndex x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_setRootIndex_h cobj_x0 cobj_x1
instance QqsetSelection (QUndoView ()) ((QRect t1, SelectionFlags)) where
qsetSelection x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_setSelection cobj_x0 cobj_x1 (toCLong $ qFlags_toInt x2)
foreign import ccall "qtc_QUndoView_setSelection" qtc_QUndoView_setSelection :: Ptr (TQUndoView a) -> Ptr (TQRect t1) -> CLong -> IO ()
instance QqsetSelection (QUndoViewSc a) ((QRect t1, SelectionFlags)) where
qsetSelection x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_setSelection cobj_x0 cobj_x1 (toCLong $ qFlags_toInt x2)
instance QsetSelection (QUndoView ()) ((Rect, SelectionFlags)) where
setSelection x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
withCRect x1 $ \crect_x1_x crect_x1_y crect_x1_w crect_x1_h ->
qtc_QUndoView_setSelection_qth cobj_x0 crect_x1_x crect_x1_y crect_x1_w crect_x1_h (toCLong $ qFlags_toInt x2)
foreign import ccall "qtc_QUndoView_setSelection_qth" qtc_QUndoView_setSelection_qth :: Ptr (TQUndoView a) -> CInt -> CInt -> CInt -> CInt -> CLong -> IO ()
instance QsetSelection (QUndoViewSc a) ((Rect, SelectionFlags)) where
setSelection x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
withCRect x1 $ \crect_x1_x crect_x1_y crect_x1_w crect_x1_h ->
qtc_QUndoView_setSelection_qth cobj_x0 crect_x1_x crect_x1_y crect_x1_w crect_x1_h (toCLong $ qFlags_toInt x2)
instance QsetSpacing (QUndoView ()) ((Int)) where
setSpacing x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_setSpacing cobj_x0 (toCInt x1)
foreign import ccall "qtc_QUndoView_setSpacing" qtc_QUndoView_setSpacing :: Ptr (TQUndoView a) -> CInt -> IO ()
instance QsetSpacing (QUndoViewSc a) ((Int)) where
setSpacing x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_setSpacing cobj_x0 (toCInt x1)
instance Qspacing (QUndoView ()) (()) where
spacing x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_spacing cobj_x0
foreign import ccall "qtc_QUndoView_spacing" qtc_QUndoView_spacing :: Ptr (TQUndoView a) -> IO CInt
instance Qspacing (QUndoViewSc a) (()) where
spacing x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_spacing cobj_x0
instance QstartDrag (QUndoView ()) ((DropActions)) where
startDrag x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_startDrag cobj_x0 (toCLong $ qFlags_toInt x1)
foreign import ccall "qtc_QUndoView_startDrag" qtc_QUndoView_startDrag :: Ptr (TQUndoView a) -> CLong -> IO ()
instance QstartDrag (QUndoViewSc a) ((DropActions)) where
startDrag x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_startDrag cobj_x0 (toCLong $ qFlags_toInt x1)
instance QtimerEvent (QUndoView ()) ((QTimerEvent t1)) where
timerEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_timerEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_timerEvent" qtc_QUndoView_timerEvent :: Ptr (TQUndoView a) -> Ptr (TQTimerEvent t1) -> IO ()
instance QtimerEvent (QUndoViewSc a) ((QTimerEvent t1)) where
timerEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_timerEvent cobj_x0 cobj_x1
instance QupdateGeometries (QUndoView ()) (()) where
updateGeometries x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_updateGeometries cobj_x0
foreign import ccall "qtc_QUndoView_updateGeometries" qtc_QUndoView_updateGeometries :: Ptr (TQUndoView a) -> IO ()
instance QupdateGeometries (QUndoViewSc a) (()) where
updateGeometries x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_updateGeometries cobj_x0
instance QverticalOffset (QUndoView ()) (()) where
verticalOffset x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_verticalOffset cobj_x0
foreign import ccall "qtc_QUndoView_verticalOffset" qtc_QUndoView_verticalOffset :: Ptr (TQUndoView a) -> IO CInt
instance QverticalOffset (QUndoViewSc a) (()) where
verticalOffset x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_verticalOffset cobj_x0
instance QviewOptions (QUndoView ()) (()) where
viewOptions x0 ()
= withQStyleOptionViewItemResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_viewOptions cobj_x0
foreign import ccall "qtc_QUndoView_viewOptions" qtc_QUndoView_viewOptions :: Ptr (TQUndoView a) -> IO (Ptr (TQStyleOptionViewItem ()))
instance QviewOptions (QUndoViewSc a) (()) where
viewOptions x0 ()
= withQStyleOptionViewItemResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_viewOptions cobj_x0
instance QqvisualRect (QUndoView ()) ((QModelIndex t1)) where
qvisualRect x0 (x1)
= withQRectResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_visualRect_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_visualRect_h" qtc_QUndoView_visualRect_h :: Ptr (TQUndoView a) -> Ptr (TQModelIndex t1) -> IO (Ptr (TQRect ()))
instance QqvisualRect (QUndoViewSc a) ((QModelIndex t1)) where
qvisualRect x0 (x1)
= withQRectResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_visualRect_h cobj_x0 cobj_x1
instance QvisualRect (QUndoView ()) ((QModelIndex t1)) where
visualRect x0 (x1)
= withRectResult $ \crect_ret_x crect_ret_y crect_ret_w crect_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_visualRect_qth_h cobj_x0 cobj_x1 crect_ret_x crect_ret_y crect_ret_w crect_ret_h
foreign import ccall "qtc_QUndoView_visualRect_qth_h" qtc_QUndoView_visualRect_qth_h :: Ptr (TQUndoView a) -> Ptr (TQModelIndex t1) -> Ptr CInt -> Ptr CInt -> Ptr CInt -> Ptr CInt -> IO ()
instance QvisualRect (QUndoViewSc a) ((QModelIndex t1)) where
visualRect x0 (x1)
= withRectResult $ \crect_ret_x crect_ret_y crect_ret_w crect_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_visualRect_qth_h cobj_x0 cobj_x1 crect_ret_x crect_ret_y crect_ret_w crect_ret_h
instance QvisualRegionForSelection (QUndoView ()) ((QItemSelection t1)) where
visualRegionForSelection x0 (x1)
= withQRegionResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_visualRegionForSelection cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_visualRegionForSelection" qtc_QUndoView_visualRegionForSelection :: Ptr (TQUndoView a) -> Ptr (TQItemSelection t1) -> IO (Ptr (TQRegion ()))
instance QvisualRegionForSelection (QUndoViewSc a) ((QItemSelection t1)) where
visualRegionForSelection x0 (x1)
= withQRegionResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_visualRegionForSelection cobj_x0 cobj_x1
instance QcloseEditor (QUndoView ()) ((QWidget t1, EndEditHint)) where
closeEditor x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_closeEditor cobj_x0 cobj_x1 (toCLong $ qEnum_toInt x2)
foreign import ccall "qtc_QUndoView_closeEditor" qtc_QUndoView_closeEditor :: Ptr (TQUndoView a) -> Ptr (TQWidget t1) -> CLong -> IO ()
instance QcloseEditor (QUndoViewSc a) ((QWidget t1, EndEditHint)) where
closeEditor x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_closeEditor cobj_x0 cobj_x1 (toCLong $ qEnum_toInt x2)
instance QcommitData (QUndoView ()) ((QWidget t1)) where
commitData x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_commitData cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_commitData" qtc_QUndoView_commitData :: Ptr (TQUndoView a) -> Ptr (TQWidget t1) -> IO ()
instance QcommitData (QUndoViewSc a) ((QWidget t1)) where
commitData x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_commitData cobj_x0 cobj_x1
instance QdirtyRegionOffset (QUndoView ()) (()) where
dirtyRegionOffset x0 ()
= withPointResult $ \cpoint_ret_x cpoint_ret_y ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_dirtyRegionOffset_qth cobj_x0 cpoint_ret_x cpoint_ret_y
foreign import ccall "qtc_QUndoView_dirtyRegionOffset_qth" qtc_QUndoView_dirtyRegionOffset_qth :: Ptr (TQUndoView a) -> Ptr CInt -> Ptr CInt -> IO ()
instance QdirtyRegionOffset (QUndoViewSc a) (()) where
dirtyRegionOffset x0 ()
= withPointResult $ \cpoint_ret_x cpoint_ret_y ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_dirtyRegionOffset_qth cobj_x0 cpoint_ret_x cpoint_ret_y
instance QqdirtyRegionOffset (QUndoView ()) (()) where
qdirtyRegionOffset x0 ()
= withQPointResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_dirtyRegionOffset cobj_x0
foreign import ccall "qtc_QUndoView_dirtyRegionOffset" qtc_QUndoView_dirtyRegionOffset :: Ptr (TQUndoView a) -> IO (Ptr (TQPoint ()))
instance QqdirtyRegionOffset (QUndoViewSc a) (()) where
qdirtyRegionOffset x0 ()
= withQPointResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_dirtyRegionOffset cobj_x0
instance QdoAutoScroll (QUndoView ()) (()) where
doAutoScroll x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_doAutoScroll cobj_x0
foreign import ccall "qtc_QUndoView_doAutoScroll" qtc_QUndoView_doAutoScroll :: Ptr (TQUndoView a) -> IO ()
instance QdoAutoScroll (QUndoViewSc a) (()) where
doAutoScroll x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_doAutoScroll cobj_x0
instance QdragEnterEvent (QUndoView ()) ((QDragEnterEvent t1)) where
dragEnterEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_dragEnterEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_dragEnterEvent_h" qtc_QUndoView_dragEnterEvent_h :: Ptr (TQUndoView a) -> Ptr (TQDragEnterEvent t1) -> IO ()
instance QdragEnterEvent (QUndoViewSc a) ((QDragEnterEvent t1)) where
dragEnterEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_dragEnterEvent_h cobj_x0 cobj_x1
instance QdropIndicatorPosition (QUndoView ()) (()) where
dropIndicatorPosition x0 ()
= withQEnumResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_dropIndicatorPosition cobj_x0
foreign import ccall "qtc_QUndoView_dropIndicatorPosition" qtc_QUndoView_dropIndicatorPosition :: Ptr (TQUndoView a) -> IO CLong
instance QdropIndicatorPosition (QUndoViewSc a) (()) where
dropIndicatorPosition x0 ()
= withQEnumResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_dropIndicatorPosition cobj_x0
instance Qedit (QUndoView ()) ((QModelIndex t1, EditTrigger, QEvent t3)) (IO (Bool)) where
edit x0 (x1, x2, x3)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QUndoView_edit cobj_x0 cobj_x1 (toCLong $ qEnum_toInt x2) cobj_x3
foreign import ccall "qtc_QUndoView_edit" qtc_QUndoView_edit :: Ptr (TQUndoView a) -> Ptr (TQModelIndex t1) -> CLong -> Ptr (TQEvent t3) -> IO CBool
instance Qedit (QUndoViewSc a) ((QModelIndex t1, EditTrigger, QEvent t3)) (IO (Bool)) where
edit x0 (x1, x2, x3)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QUndoView_edit cobj_x0 cobj_x1 (toCLong $ qEnum_toInt x2) cobj_x3
instance QeditorDestroyed (QUndoView ()) ((QObject t1)) where
editorDestroyed x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_editorDestroyed cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_editorDestroyed" qtc_QUndoView_editorDestroyed :: Ptr (TQUndoView a) -> Ptr (TQObject t1) -> IO ()
instance QeditorDestroyed (QUndoViewSc a) ((QObject t1)) where
editorDestroyed x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_editorDestroyed cobj_x0 cobj_x1
instance QexecuteDelayedItemsLayout (QUndoView ()) (()) where
executeDelayedItemsLayout x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_executeDelayedItemsLayout cobj_x0
foreign import ccall "qtc_QUndoView_executeDelayedItemsLayout" qtc_QUndoView_executeDelayedItemsLayout :: Ptr (TQUndoView a) -> IO ()
instance QexecuteDelayedItemsLayout (QUndoViewSc a) (()) where
executeDelayedItemsLayout x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_executeDelayedItemsLayout cobj_x0
instance QfocusInEvent (QUndoView ()) ((QFocusEvent t1)) where
focusInEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_focusInEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_focusInEvent_h" qtc_QUndoView_focusInEvent_h :: Ptr (TQUndoView a) -> Ptr (TQFocusEvent t1) -> IO ()
instance QfocusInEvent (QUndoViewSc a) ((QFocusEvent t1)) where
focusInEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_focusInEvent_h cobj_x0 cobj_x1
instance QfocusNextPrevChild (QUndoView ()) ((Bool)) where
focusNextPrevChild x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_focusNextPrevChild cobj_x0 (toCBool x1)
foreign import ccall "qtc_QUndoView_focusNextPrevChild" qtc_QUndoView_focusNextPrevChild :: Ptr (TQUndoView a) -> CBool -> IO CBool
instance QfocusNextPrevChild (QUndoViewSc a) ((Bool)) where
focusNextPrevChild x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_focusNextPrevChild cobj_x0 (toCBool x1)
instance QfocusOutEvent (QUndoView ()) ((QFocusEvent t1)) where
focusOutEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_focusOutEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_focusOutEvent_h" qtc_QUndoView_focusOutEvent_h :: Ptr (TQUndoView a) -> Ptr (TQFocusEvent t1) -> IO ()
instance QfocusOutEvent (QUndoViewSc a) ((QFocusEvent t1)) where
focusOutEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_focusOutEvent_h cobj_x0 cobj_x1
instance QhorizontalScrollbarAction (QUndoView ()) ((Int)) where
horizontalScrollbarAction x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_horizontalScrollbarAction cobj_x0 (toCInt x1)
foreign import ccall "qtc_QUndoView_horizontalScrollbarAction" qtc_QUndoView_horizontalScrollbarAction :: Ptr (TQUndoView a) -> CInt -> IO ()
instance QhorizontalScrollbarAction (QUndoViewSc a) ((Int)) where
horizontalScrollbarAction x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_horizontalScrollbarAction cobj_x0 (toCInt x1)
instance QhorizontalScrollbarValueChanged (QUndoView ()) ((Int)) where
horizontalScrollbarValueChanged x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_horizontalScrollbarValueChanged cobj_x0 (toCInt x1)
foreign import ccall "qtc_QUndoView_horizontalScrollbarValueChanged" qtc_QUndoView_horizontalScrollbarValueChanged :: Ptr (TQUndoView a) -> CInt -> IO ()
instance QhorizontalScrollbarValueChanged (QUndoViewSc a) ((Int)) where
horizontalScrollbarValueChanged x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_horizontalScrollbarValueChanged cobj_x0 (toCInt x1)
instance QhorizontalStepsPerItem (QUndoView ()) (()) where
horizontalStepsPerItem x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_horizontalStepsPerItem cobj_x0
foreign import ccall "qtc_QUndoView_horizontalStepsPerItem" qtc_QUndoView_horizontalStepsPerItem :: Ptr (TQUndoView a) -> IO CInt
instance QhorizontalStepsPerItem (QUndoViewSc a) (()) where
horizontalStepsPerItem x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_horizontalStepsPerItem cobj_x0
instance QinputMethodEvent (QUndoView ()) ((QInputMethodEvent t1)) where
inputMethodEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_inputMethodEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_inputMethodEvent" qtc_QUndoView_inputMethodEvent :: Ptr (TQUndoView a) -> Ptr (TQInputMethodEvent t1) -> IO ()
instance QinputMethodEvent (QUndoViewSc a) ((QInputMethodEvent t1)) where
inputMethodEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_inputMethodEvent cobj_x0 cobj_x1
instance QinputMethodQuery (QUndoView ()) ((InputMethodQuery)) where
inputMethodQuery x0 (x1)
= withQVariantResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_inputMethodQuery_h cobj_x0 (toCLong $ qEnum_toInt x1)
foreign import ccall "qtc_QUndoView_inputMethodQuery_h" qtc_QUndoView_inputMethodQuery_h :: Ptr (TQUndoView a) -> CLong -> IO (Ptr (TQVariant ()))
instance QinputMethodQuery (QUndoViewSc a) ((InputMethodQuery)) where
inputMethodQuery x0 (x1)
= withQVariantResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_inputMethodQuery_h cobj_x0 (toCLong $ qEnum_toInt x1)
instance QkeyPressEvent (QUndoView ()) ((QKeyEvent t1)) where
keyPressEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_keyPressEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_keyPressEvent_h" qtc_QUndoView_keyPressEvent_h :: Ptr (TQUndoView a) -> Ptr (TQKeyEvent t1) -> IO ()
instance QkeyPressEvent (QUndoViewSc a) ((QKeyEvent t1)) where
keyPressEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_keyPressEvent_h cobj_x0 cobj_x1
instance QkeyboardSearch (QUndoView ()) ((String)) where
keyboardSearch x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QUndoView_keyboardSearch_h cobj_x0 cstr_x1
foreign import ccall "qtc_QUndoView_keyboardSearch_h" qtc_QUndoView_keyboardSearch_h :: Ptr (TQUndoView a) -> CWString -> IO ()
instance QkeyboardSearch (QUndoViewSc a) ((String)) where
keyboardSearch x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QUndoView_keyboardSearch_h cobj_x0 cstr_x1
instance QmouseDoubleClickEvent (QUndoView ()) ((QMouseEvent t1)) where
mouseDoubleClickEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_mouseDoubleClickEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_mouseDoubleClickEvent_h" qtc_QUndoView_mouseDoubleClickEvent_h :: Ptr (TQUndoView a) -> Ptr (TQMouseEvent t1) -> IO ()
instance QmouseDoubleClickEvent (QUndoViewSc a) ((QMouseEvent t1)) where
mouseDoubleClickEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_mouseDoubleClickEvent_h cobj_x0 cobj_x1
instance QmousePressEvent (QUndoView ()) ((QMouseEvent t1)) where
mousePressEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_mousePressEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_mousePressEvent_h" qtc_QUndoView_mousePressEvent_h :: Ptr (TQUndoView a) -> Ptr (TQMouseEvent t1) -> IO ()
instance QmousePressEvent (QUndoViewSc a) ((QMouseEvent t1)) where
mousePressEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_mousePressEvent_h cobj_x0 cobj_x1
instance QscheduleDelayedItemsLayout (QUndoView ()) (()) where
scheduleDelayedItemsLayout x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_scheduleDelayedItemsLayout cobj_x0
foreign import ccall "qtc_QUndoView_scheduleDelayedItemsLayout" qtc_QUndoView_scheduleDelayedItemsLayout :: Ptr (TQUndoView a) -> IO ()
instance QscheduleDelayedItemsLayout (QUndoViewSc a) (()) where
scheduleDelayedItemsLayout x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_scheduleDelayedItemsLayout cobj_x0
instance QscrollDirtyRegion (QUndoView ()) ((Int, Int)) where
scrollDirtyRegion x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_scrollDirtyRegion cobj_x0 (toCInt x1) (toCInt x2)
foreign import ccall "qtc_QUndoView_scrollDirtyRegion" qtc_QUndoView_scrollDirtyRegion :: Ptr (TQUndoView a) -> CInt -> CInt -> IO ()
instance QscrollDirtyRegion (QUndoViewSc a) ((Int, Int)) where
scrollDirtyRegion x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_scrollDirtyRegion cobj_x0 (toCInt x1) (toCInt x2)
instance QselectAll (QUndoView ()) (()) where
selectAll x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_selectAll_h cobj_x0
foreign import ccall "qtc_QUndoView_selectAll_h" qtc_QUndoView_selectAll_h :: Ptr (TQUndoView a) -> IO ()
instance QselectAll (QUndoViewSc a) (()) where
selectAll x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_selectAll_h cobj_x0
instance QselectionCommand (QUndoView ()) ((QModelIndex t1)) where
selectionCommand x0 (x1)
= withQFlagsResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_selectionCommand cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_selectionCommand" qtc_QUndoView_selectionCommand :: Ptr (TQUndoView a) -> Ptr (TQModelIndex t1) -> IO CLong
instance QselectionCommand (QUndoViewSc a) ((QModelIndex t1)) where
selectionCommand x0 (x1)
= withQFlagsResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_selectionCommand cobj_x0 cobj_x1
instance QselectionCommand (QUndoView ()) ((QModelIndex t1, QEvent t2)) where
selectionCommand x0 (x1, x2)
= withQFlagsResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QUndoView_selectionCommand1 cobj_x0 cobj_x1 cobj_x2
foreign import ccall "qtc_QUndoView_selectionCommand1" qtc_QUndoView_selectionCommand1 :: Ptr (TQUndoView a) -> Ptr (TQModelIndex t1) -> Ptr (TQEvent t2) -> IO CLong
instance QselectionCommand (QUndoViewSc a) ((QModelIndex t1, QEvent t2)) where
selectionCommand x0 (x1, x2)
= withQFlagsResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QUndoView_selectionCommand1 cobj_x0 cobj_x1 cobj_x2
instance QsetDirtyRegion (QUndoView ()) ((QRegion t1)) where
setDirtyRegion x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_setDirtyRegion cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_setDirtyRegion" qtc_QUndoView_setDirtyRegion :: Ptr (TQUndoView a) -> Ptr (TQRegion t1) -> IO ()
instance QsetDirtyRegion (QUndoViewSc a) ((QRegion t1)) where
setDirtyRegion x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_setDirtyRegion cobj_x0 cobj_x1
instance QsetHorizontalStepsPerItem (QUndoView ()) ((Int)) where
setHorizontalStepsPerItem x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_setHorizontalStepsPerItem cobj_x0 (toCInt x1)
foreign import ccall "qtc_QUndoView_setHorizontalStepsPerItem" qtc_QUndoView_setHorizontalStepsPerItem :: Ptr (TQUndoView a) -> CInt -> IO ()
instance QsetHorizontalStepsPerItem (QUndoViewSc a) ((Int)) where
setHorizontalStepsPerItem x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_setHorizontalStepsPerItem cobj_x0 (toCInt x1)
instance QsetModel (QUndoView ()) ((QAbstractItemModel t1)) where
setModel x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_setModel_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_setModel_h" qtc_QUndoView_setModel_h :: Ptr (TQUndoView a) -> Ptr (TQAbstractItemModel t1) -> IO ()
instance QsetModel (QUndoViewSc a) ((QAbstractItemModel t1)) where
setModel x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_setModel_h cobj_x0 cobj_x1
instance QsetSelectionModel (QUndoView ()) ((QItemSelectionModel t1)) where
setSelectionModel x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_setSelectionModel_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_setSelectionModel_h" qtc_QUndoView_setSelectionModel_h :: Ptr (TQUndoView a) -> Ptr (TQItemSelectionModel t1) -> IO ()
instance QsetSelectionModel (QUndoViewSc a) ((QItemSelectionModel t1)) where
setSelectionModel x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_setSelectionModel_h cobj_x0 cobj_x1
instance QsetState (QUndoView ()) ((QAbstractItemViewState)) where
setState x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_setState cobj_x0 (toCLong $ qEnum_toInt x1)
foreign import ccall "qtc_QUndoView_setState" qtc_QUndoView_setState :: Ptr (TQUndoView a) -> CLong -> IO ()
instance QsetState (QUndoViewSc a) ((QAbstractItemViewState)) where
setState x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_setState cobj_x0 (toCLong $ qEnum_toInt x1)
instance QsetVerticalStepsPerItem (QUndoView ()) ((Int)) where
setVerticalStepsPerItem x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_setVerticalStepsPerItem cobj_x0 (toCInt x1)
foreign import ccall "qtc_QUndoView_setVerticalStepsPerItem" qtc_QUndoView_setVerticalStepsPerItem :: Ptr (TQUndoView a) -> CInt -> IO ()
instance QsetVerticalStepsPerItem (QUndoViewSc a) ((Int)) where
setVerticalStepsPerItem x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_setVerticalStepsPerItem cobj_x0 (toCInt x1)
instance QsizeHintForColumn (QUndoView ()) ((Int)) where
sizeHintForColumn x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_sizeHintForColumn_h cobj_x0 (toCInt x1)
foreign import ccall "qtc_QUndoView_sizeHintForColumn_h" qtc_QUndoView_sizeHintForColumn_h :: Ptr (TQUndoView a) -> CInt -> IO CInt
instance QsizeHintForColumn (QUndoViewSc a) ((Int)) where
sizeHintForColumn x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_sizeHintForColumn_h cobj_x0 (toCInt x1)
instance QsizeHintForRow (QUndoView ()) ((Int)) where
sizeHintForRow x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_sizeHintForRow_h cobj_x0 (toCInt x1)
foreign import ccall "qtc_QUndoView_sizeHintForRow_h" qtc_QUndoView_sizeHintForRow_h :: Ptr (TQUndoView a) -> CInt -> IO CInt
instance QsizeHintForRow (QUndoViewSc a) ((Int)) where
sizeHintForRow x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_sizeHintForRow_h cobj_x0 (toCInt x1)
instance QstartAutoScroll (QUndoView ()) (()) where
startAutoScroll x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_startAutoScroll cobj_x0
foreign import ccall "qtc_QUndoView_startAutoScroll" qtc_QUndoView_startAutoScroll :: Ptr (TQUndoView a) -> IO ()
instance QstartAutoScroll (QUndoViewSc a) (()) where
startAutoScroll x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_startAutoScroll cobj_x0
instance Qstate (QUndoView ()) (()) (IO (QAbstractItemViewState)) where
state x0 ()
= withQEnumResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_state cobj_x0
foreign import ccall "qtc_QUndoView_state" qtc_QUndoView_state :: Ptr (TQUndoView a) -> IO CLong
instance Qstate (QUndoViewSc a) (()) (IO (QAbstractItemViewState)) where
state x0 ()
= withQEnumResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_state cobj_x0
instance QstopAutoScroll (QUndoView ()) (()) where
stopAutoScroll x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_stopAutoScroll cobj_x0
foreign import ccall "qtc_QUndoView_stopAutoScroll" qtc_QUndoView_stopAutoScroll :: Ptr (TQUndoView a) -> IO ()
instance QstopAutoScroll (QUndoViewSc a) (()) where
stopAutoScroll x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_stopAutoScroll cobj_x0
instance QupdateEditorData (QUndoView ()) (()) where
updateEditorData x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_updateEditorData cobj_x0
foreign import ccall "qtc_QUndoView_updateEditorData" qtc_QUndoView_updateEditorData :: Ptr (TQUndoView a) -> IO ()
instance QupdateEditorData (QUndoViewSc a) (()) where
updateEditorData x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_updateEditorData cobj_x0
instance QupdateEditorGeometries (QUndoView ()) (()) where
updateEditorGeometries x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_updateEditorGeometries cobj_x0
foreign import ccall "qtc_QUndoView_updateEditorGeometries" qtc_QUndoView_updateEditorGeometries :: Ptr (TQUndoView a) -> IO ()
instance QupdateEditorGeometries (QUndoViewSc a) (()) where
updateEditorGeometries x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_updateEditorGeometries cobj_x0
instance QverticalScrollbarAction (QUndoView ()) ((Int)) where
verticalScrollbarAction x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_verticalScrollbarAction cobj_x0 (toCInt x1)
foreign import ccall "qtc_QUndoView_verticalScrollbarAction" qtc_QUndoView_verticalScrollbarAction :: Ptr (TQUndoView a) -> CInt -> IO ()
instance QverticalScrollbarAction (QUndoViewSc a) ((Int)) where
verticalScrollbarAction x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_verticalScrollbarAction cobj_x0 (toCInt x1)
instance QverticalScrollbarValueChanged (QUndoView ()) ((Int)) where
verticalScrollbarValueChanged x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_verticalScrollbarValueChanged cobj_x0 (toCInt x1)
foreign import ccall "qtc_QUndoView_verticalScrollbarValueChanged" qtc_QUndoView_verticalScrollbarValueChanged :: Ptr (TQUndoView a) -> CInt -> IO ()
instance QverticalScrollbarValueChanged (QUndoViewSc a) ((Int)) where
verticalScrollbarValueChanged x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_verticalScrollbarValueChanged cobj_x0 (toCInt x1)
instance QverticalStepsPerItem (QUndoView ()) (()) where
verticalStepsPerItem x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_verticalStepsPerItem cobj_x0
foreign import ccall "qtc_QUndoView_verticalStepsPerItem" qtc_QUndoView_verticalStepsPerItem :: Ptr (TQUndoView a) -> IO CInt
instance QverticalStepsPerItem (QUndoViewSc a) (()) where
verticalStepsPerItem x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_verticalStepsPerItem cobj_x0
instance QviewportEvent (QUndoView ()) ((QEvent t1)) where
viewportEvent x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_viewportEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_viewportEvent_h" qtc_QUndoView_viewportEvent_h :: Ptr (TQUndoView a) -> Ptr (TQEvent t1) -> IO CBool
instance QviewportEvent (QUndoViewSc a) ((QEvent t1)) where
viewportEvent x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_viewportEvent_h cobj_x0 cobj_x1
instance QcontextMenuEvent (QUndoView ()) ((QContextMenuEvent t1)) where
contextMenuEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_contextMenuEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_contextMenuEvent_h" qtc_QUndoView_contextMenuEvent_h :: Ptr (TQUndoView a) -> Ptr (TQContextMenuEvent t1) -> IO ()
instance QcontextMenuEvent (QUndoViewSc a) ((QContextMenuEvent t1)) where
contextMenuEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_contextMenuEvent_h cobj_x0 cobj_x1
instance QqminimumSizeHint (QUndoView ()) (()) where
qminimumSizeHint x0 ()
= withQSizeResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_minimumSizeHint_h cobj_x0
foreign import ccall "qtc_QUndoView_minimumSizeHint_h" qtc_QUndoView_minimumSizeHint_h :: Ptr (TQUndoView a) -> IO (Ptr (TQSize ()))
instance QqminimumSizeHint (QUndoViewSc a) (()) where
qminimumSizeHint x0 ()
= withQSizeResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_minimumSizeHint_h cobj_x0
instance QminimumSizeHint (QUndoView ()) (()) where
minimumSizeHint x0 ()
= withSizeResult $ \csize_ret_w csize_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_minimumSizeHint_qth_h cobj_x0 csize_ret_w csize_ret_h
foreign import ccall "qtc_QUndoView_minimumSizeHint_qth_h" qtc_QUndoView_minimumSizeHint_qth_h :: Ptr (TQUndoView a) -> Ptr CInt -> Ptr CInt -> IO ()
instance QminimumSizeHint (QUndoViewSc a) (()) where
minimumSizeHint x0 ()
= withSizeResult $ \csize_ret_w csize_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_minimumSizeHint_qth_h cobj_x0 csize_ret_w csize_ret_h
instance QsetViewportMargins (QUndoView ()) ((Int, Int, Int, Int)) where
setViewportMargins x0 (x1, x2, x3, x4)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_setViewportMargins cobj_x0 (toCInt x1) (toCInt x2) (toCInt x3) (toCInt x4)
foreign import ccall "qtc_QUndoView_setViewportMargins" qtc_QUndoView_setViewportMargins :: Ptr (TQUndoView a) -> CInt -> CInt -> CInt -> CInt -> IO ()
instance QsetViewportMargins (QUndoViewSc a) ((Int, Int, Int, Int)) where
setViewportMargins x0 (x1, x2, x3, x4)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_setViewportMargins cobj_x0 (toCInt x1) (toCInt x2) (toCInt x3) (toCInt x4)
instance QsetupViewport (QUndoView ()) ((QWidget t1)) where
setupViewport x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_setupViewport cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_setupViewport" qtc_QUndoView_setupViewport :: Ptr (TQUndoView a) -> Ptr (TQWidget t1) -> IO ()
instance QsetupViewport (QUndoViewSc a) ((QWidget t1)) where
setupViewport x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_setupViewport cobj_x0 cobj_x1
instance QqsizeHint (QUndoView ()) (()) where
qsizeHint x0 ()
= withQSizeResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_sizeHint_h cobj_x0
foreign import ccall "qtc_QUndoView_sizeHint_h" qtc_QUndoView_sizeHint_h :: Ptr (TQUndoView a) -> IO (Ptr (TQSize ()))
instance QqsizeHint (QUndoViewSc a) (()) where
qsizeHint x0 ()
= withQSizeResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_sizeHint_h cobj_x0
instance QsizeHint (QUndoView ()) (()) where
sizeHint x0 ()
= withSizeResult $ \csize_ret_w csize_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_sizeHint_qth_h cobj_x0 csize_ret_w csize_ret_h
foreign import ccall "qtc_QUndoView_sizeHint_qth_h" qtc_QUndoView_sizeHint_qth_h :: Ptr (TQUndoView a) -> Ptr CInt -> Ptr CInt -> IO ()
instance QsizeHint (QUndoViewSc a) (()) where
sizeHint x0 ()
= withSizeResult $ \csize_ret_w csize_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_sizeHint_qth_h cobj_x0 csize_ret_w csize_ret_h
instance QwheelEvent (QUndoView ()) ((QWheelEvent t1)) where
wheelEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_wheelEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_wheelEvent_h" qtc_QUndoView_wheelEvent_h :: Ptr (TQUndoView a) -> Ptr (TQWheelEvent t1) -> IO ()
instance QwheelEvent (QUndoViewSc a) ((QWheelEvent t1)) where
wheelEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_wheelEvent_h cobj_x0 cobj_x1
instance QchangeEvent (QUndoView ()) ((QEvent t1)) where
changeEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_changeEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_changeEvent_h" qtc_QUndoView_changeEvent_h :: Ptr (TQUndoView a) -> Ptr (TQEvent t1) -> IO ()
instance QchangeEvent (QUndoViewSc a) ((QEvent t1)) where
changeEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_changeEvent_h cobj_x0 cobj_x1
instance QdrawFrame (QUndoView ()) ((QPainter t1)) where
drawFrame x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_drawFrame cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_drawFrame" qtc_QUndoView_drawFrame :: Ptr (TQUndoView a) -> Ptr (TQPainter t1) -> IO ()
instance QdrawFrame (QUndoViewSc a) ((QPainter t1)) where
drawFrame x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_drawFrame cobj_x0 cobj_x1
instance QactionEvent (QUndoView ()) ((QActionEvent t1)) where
actionEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_actionEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_actionEvent_h" qtc_QUndoView_actionEvent_h :: Ptr (TQUndoView a) -> Ptr (TQActionEvent t1) -> IO ()
instance QactionEvent (QUndoViewSc a) ((QActionEvent t1)) where
actionEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_actionEvent_h cobj_x0 cobj_x1
instance QaddAction (QUndoView ()) ((QAction t1)) (IO ()) where
addAction x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_addAction cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_addAction" qtc_QUndoView_addAction :: Ptr (TQUndoView a) -> Ptr (TQAction t1) -> IO ()
instance QaddAction (QUndoViewSc a) ((QAction t1)) (IO ()) where
addAction x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_addAction cobj_x0 cobj_x1
instance QcloseEvent (QUndoView ()) ((QCloseEvent t1)) where
closeEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_closeEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_closeEvent_h" qtc_QUndoView_closeEvent_h :: Ptr (TQUndoView a) -> Ptr (TQCloseEvent t1) -> IO ()
instance QcloseEvent (QUndoViewSc a) ((QCloseEvent t1)) where
closeEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_closeEvent_h cobj_x0 cobj_x1
instance Qcreate (QUndoView ()) (()) (IO ()) where
create x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_create cobj_x0
foreign import ccall "qtc_QUndoView_create" qtc_QUndoView_create :: Ptr (TQUndoView a) -> IO ()
instance Qcreate (QUndoViewSc a) (()) (IO ()) where
create x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_create cobj_x0
instance Qcreate (QUndoView ()) ((QVoid t1)) (IO ()) where
create x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_create1 cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_create1" qtc_QUndoView_create1 :: Ptr (TQUndoView a) -> Ptr (TQVoid t1) -> IO ()
instance Qcreate (QUndoViewSc a) ((QVoid t1)) (IO ()) where
create x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_create1 cobj_x0 cobj_x1
instance Qcreate (QUndoView ()) ((QVoid t1, Bool)) (IO ()) where
create x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_create2 cobj_x0 cobj_x1 (toCBool x2)
foreign import ccall "qtc_QUndoView_create2" qtc_QUndoView_create2 :: Ptr (TQUndoView a) -> Ptr (TQVoid t1) -> CBool -> IO ()
instance Qcreate (QUndoViewSc a) ((QVoid t1, Bool)) (IO ()) where
create x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_create2 cobj_x0 cobj_x1 (toCBool x2)
instance Qcreate (QUndoView ()) ((QVoid t1, Bool, Bool)) (IO ()) where
create x0 (x1, x2, x3)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_create3 cobj_x0 cobj_x1 (toCBool x2) (toCBool x3)
foreign import ccall "qtc_QUndoView_create3" qtc_QUndoView_create3 :: Ptr (TQUndoView a) -> Ptr (TQVoid t1) -> CBool -> CBool -> IO ()
instance Qcreate (QUndoViewSc a) ((QVoid t1, Bool, Bool)) (IO ()) where
create x0 (x1, x2, x3)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_create3 cobj_x0 cobj_x1 (toCBool x2) (toCBool x3)
instance Qdestroy (QUndoView ()) (()) where
destroy x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_destroy cobj_x0
foreign import ccall "qtc_QUndoView_destroy" qtc_QUndoView_destroy :: Ptr (TQUndoView a) -> IO ()
instance Qdestroy (QUndoViewSc a) (()) where
destroy x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_destroy cobj_x0
instance Qdestroy (QUndoView ()) ((Bool)) where
destroy x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_destroy1 cobj_x0 (toCBool x1)
foreign import ccall "qtc_QUndoView_destroy1" qtc_QUndoView_destroy1 :: Ptr (TQUndoView a) -> CBool -> IO ()
instance Qdestroy (QUndoViewSc a) ((Bool)) where
destroy x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_destroy1 cobj_x0 (toCBool x1)
instance Qdestroy (QUndoView ()) ((Bool, Bool)) where
destroy x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_destroy2 cobj_x0 (toCBool x1) (toCBool x2)
foreign import ccall "qtc_QUndoView_destroy2" qtc_QUndoView_destroy2 :: Ptr (TQUndoView a) -> CBool -> CBool -> IO ()
instance Qdestroy (QUndoViewSc a) ((Bool, Bool)) where
destroy x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_destroy2 cobj_x0 (toCBool x1) (toCBool x2)
instance QdevType (QUndoView ()) (()) where
devType x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_devType_h cobj_x0
foreign import ccall "qtc_QUndoView_devType_h" qtc_QUndoView_devType_h :: Ptr (TQUndoView a) -> IO CInt
instance QdevType (QUndoViewSc a) (()) where
devType x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_devType_h cobj_x0
instance QenabledChange (QUndoView ()) ((Bool)) where
enabledChange x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_enabledChange cobj_x0 (toCBool x1)
foreign import ccall "qtc_QUndoView_enabledChange" qtc_QUndoView_enabledChange :: Ptr (TQUndoView a) -> CBool -> IO ()
instance QenabledChange (QUndoViewSc a) ((Bool)) where
enabledChange x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_enabledChange cobj_x0 (toCBool x1)
instance QenterEvent (QUndoView ()) ((QEvent t1)) where
enterEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_enterEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_enterEvent_h" qtc_QUndoView_enterEvent_h :: Ptr (TQUndoView a) -> Ptr (TQEvent t1) -> IO ()
instance QenterEvent (QUndoViewSc a) ((QEvent t1)) where
enterEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_enterEvent_h cobj_x0 cobj_x1
instance QfocusNextChild (QUndoView ()) (()) where
focusNextChild x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_focusNextChild cobj_x0
foreign import ccall "qtc_QUndoView_focusNextChild" qtc_QUndoView_focusNextChild :: Ptr (TQUndoView a) -> IO CBool
instance QfocusNextChild (QUndoViewSc a) (()) where
focusNextChild x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_focusNextChild cobj_x0
instance QfocusPreviousChild (QUndoView ()) (()) where
focusPreviousChild x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_focusPreviousChild cobj_x0
foreign import ccall "qtc_QUndoView_focusPreviousChild" qtc_QUndoView_focusPreviousChild :: Ptr (TQUndoView a) -> IO CBool
instance QfocusPreviousChild (QUndoViewSc a) (()) where
focusPreviousChild x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_focusPreviousChild cobj_x0
instance QfontChange (QUndoView ()) ((QFont t1)) where
fontChange x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_fontChange cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_fontChange" qtc_QUndoView_fontChange :: Ptr (TQUndoView a) -> Ptr (TQFont t1) -> IO ()
instance QfontChange (QUndoViewSc a) ((QFont t1)) where
fontChange x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_fontChange cobj_x0 cobj_x1
instance QheightForWidth (QUndoView ()) ((Int)) where
heightForWidth x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_heightForWidth_h cobj_x0 (toCInt x1)
foreign import ccall "qtc_QUndoView_heightForWidth_h" qtc_QUndoView_heightForWidth_h :: Ptr (TQUndoView a) -> CInt -> IO CInt
instance QheightForWidth (QUndoViewSc a) ((Int)) where
heightForWidth x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_heightForWidth_h cobj_x0 (toCInt x1)
instance QhideEvent (QUndoView ()) ((QHideEvent t1)) where
hideEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_hideEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_hideEvent_h" qtc_QUndoView_hideEvent_h :: Ptr (TQUndoView a) -> Ptr (TQHideEvent t1) -> IO ()
instance QhideEvent (QUndoViewSc a) ((QHideEvent t1)) where
hideEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_hideEvent_h cobj_x0 cobj_x1
instance QkeyReleaseEvent (QUndoView ()) ((QKeyEvent t1)) where
keyReleaseEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_keyReleaseEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_keyReleaseEvent_h" qtc_QUndoView_keyReleaseEvent_h :: Ptr (TQUndoView a) -> Ptr (TQKeyEvent t1) -> IO ()
instance QkeyReleaseEvent (QUndoViewSc a) ((QKeyEvent t1)) where
keyReleaseEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_keyReleaseEvent_h cobj_x0 cobj_x1
instance QlanguageChange (QUndoView ()) (()) where
languageChange x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_languageChange cobj_x0
foreign import ccall "qtc_QUndoView_languageChange" qtc_QUndoView_languageChange :: Ptr (TQUndoView a) -> IO ()
instance QlanguageChange (QUndoViewSc a) (()) where
languageChange x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_languageChange cobj_x0
instance QleaveEvent (QUndoView ()) ((QEvent t1)) where
leaveEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_leaveEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_leaveEvent_h" qtc_QUndoView_leaveEvent_h :: Ptr (TQUndoView a) -> Ptr (TQEvent t1) -> IO ()
instance QleaveEvent (QUndoViewSc a) ((QEvent t1)) where
leaveEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_leaveEvent_h cobj_x0 cobj_x1
instance Qmetric (QUndoView ()) ((PaintDeviceMetric)) where
metric x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_metric cobj_x0 (toCLong $ qEnum_toInt x1)
foreign import ccall "qtc_QUndoView_metric" qtc_QUndoView_metric :: Ptr (TQUndoView a) -> CLong -> IO CInt
instance Qmetric (QUndoViewSc a) ((PaintDeviceMetric)) where
metric x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_metric cobj_x0 (toCLong $ qEnum_toInt x1)
instance Qmove (QUndoView ()) ((Int, Int)) where
move x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_move1 cobj_x0 (toCInt x1) (toCInt x2)
foreign import ccall "qtc_QUndoView_move1" qtc_QUndoView_move1 :: Ptr (TQUndoView a) -> CInt -> CInt -> IO ()
instance Qmove (QUndoViewSc a) ((Int, Int)) where
move x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_move1 cobj_x0 (toCInt x1) (toCInt x2)
instance Qmove (QUndoView ()) ((Point)) where
move x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCPoint x1 $ \cpoint_x1_x cpoint_x1_y ->
qtc_QUndoView_move_qth cobj_x0 cpoint_x1_x cpoint_x1_y
foreign import ccall "qtc_QUndoView_move_qth" qtc_QUndoView_move_qth :: Ptr (TQUndoView a) -> CInt -> CInt -> IO ()
instance Qmove (QUndoViewSc a) ((Point)) where
move x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCPoint x1 $ \cpoint_x1_x cpoint_x1_y ->
qtc_QUndoView_move_qth cobj_x0 cpoint_x1_x cpoint_x1_y
instance Qqmove (QUndoView ()) ((QPoint t1)) where
qmove x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_move cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_move" qtc_QUndoView_move :: Ptr (TQUndoView a) -> Ptr (TQPoint t1) -> IO ()
instance Qqmove (QUndoViewSc a) ((QPoint t1)) where
qmove x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_move cobj_x0 cobj_x1
instance QmoveEvent (QUndoView ()) ((QMoveEvent t1)) where
moveEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_moveEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_moveEvent_h" qtc_QUndoView_moveEvent_h :: Ptr (TQUndoView a) -> Ptr (TQMoveEvent t1) -> IO ()
instance QmoveEvent (QUndoViewSc a) ((QMoveEvent t1)) where
moveEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_moveEvent_h cobj_x0 cobj_x1
instance QpaintEngine (QUndoView ()) (()) where
paintEngine x0 ()
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_paintEngine_h cobj_x0
foreign import ccall "qtc_QUndoView_paintEngine_h" qtc_QUndoView_paintEngine_h :: Ptr (TQUndoView a) -> IO (Ptr (TQPaintEngine ()))
instance QpaintEngine (QUndoViewSc a) (()) where
paintEngine x0 ()
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_paintEngine_h cobj_x0
instance QpaletteChange (QUndoView ()) ((QPalette t1)) where
paletteChange x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_paletteChange cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_paletteChange" qtc_QUndoView_paletteChange :: Ptr (TQUndoView a) -> Ptr (TQPalette t1) -> IO ()
instance QpaletteChange (QUndoViewSc a) ((QPalette t1)) where
paletteChange x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_paletteChange cobj_x0 cobj_x1
instance Qrepaint (QUndoView ()) (()) where
repaint x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_repaint cobj_x0
foreign import ccall "qtc_QUndoView_repaint" qtc_QUndoView_repaint :: Ptr (TQUndoView a) -> IO ()
instance Qrepaint (QUndoViewSc a) (()) where
repaint x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_repaint cobj_x0
instance Qrepaint (QUndoView ()) ((Int, Int, Int, Int)) where
repaint x0 (x1, x2, x3, x4)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_repaint2 cobj_x0 (toCInt x1) (toCInt x2) (toCInt x3) (toCInt x4)
foreign import ccall "qtc_QUndoView_repaint2" qtc_QUndoView_repaint2 :: Ptr (TQUndoView a) -> CInt -> CInt -> CInt -> CInt -> IO ()
instance Qrepaint (QUndoViewSc a) ((Int, Int, Int, Int)) where
repaint x0 (x1, x2, x3, x4)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_repaint2 cobj_x0 (toCInt x1) (toCInt x2) (toCInt x3) (toCInt x4)
instance Qrepaint (QUndoView ()) ((QRegion t1)) where
repaint x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_repaint1 cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_repaint1" qtc_QUndoView_repaint1 :: Ptr (TQUndoView a) -> Ptr (TQRegion t1) -> IO ()
instance Qrepaint (QUndoViewSc a) ((QRegion t1)) where
repaint x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_repaint1 cobj_x0 cobj_x1
instance QresetInputContext (QUndoView ()) (()) where
resetInputContext x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_resetInputContext cobj_x0
foreign import ccall "qtc_QUndoView_resetInputContext" qtc_QUndoView_resetInputContext :: Ptr (TQUndoView a) -> IO ()
instance QresetInputContext (QUndoViewSc a) (()) where
resetInputContext x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_resetInputContext cobj_x0
instance Qresize (QUndoView ()) ((Int, Int)) (IO ()) where
resize x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_resize1 cobj_x0 (toCInt x1) (toCInt x2)
foreign import ccall "qtc_QUndoView_resize1" qtc_QUndoView_resize1 :: Ptr (TQUndoView a) -> CInt -> CInt -> IO ()
instance Qresize (QUndoViewSc a) ((Int, Int)) (IO ()) where
resize x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_resize1 cobj_x0 (toCInt x1) (toCInt x2)
instance Qqresize (QUndoView ()) ((QSize t1)) where
qresize x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_resize cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_resize" qtc_QUndoView_resize :: Ptr (TQUndoView a) -> Ptr (TQSize t1) -> IO ()
instance Qqresize (QUndoViewSc a) ((QSize t1)) where
qresize x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_resize cobj_x0 cobj_x1
instance Qresize (QUndoView ()) ((Size)) (IO ()) where
resize x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCSize x1 $ \csize_x1_w csize_x1_h ->
qtc_QUndoView_resize_qth cobj_x0 csize_x1_w csize_x1_h
foreign import ccall "qtc_QUndoView_resize_qth" qtc_QUndoView_resize_qth :: Ptr (TQUndoView a) -> CInt -> CInt -> IO ()
instance Qresize (QUndoViewSc a) ((Size)) (IO ()) where
resize x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCSize x1 $ \csize_x1_w csize_x1_h ->
qtc_QUndoView_resize_qth cobj_x0 csize_x1_w csize_x1_h
instance QsetGeometry (QUndoView ()) ((Int, Int, Int, Int)) where
setGeometry x0 (x1, x2, x3, x4)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_setGeometry1 cobj_x0 (toCInt x1) (toCInt x2) (toCInt x3) (toCInt x4)
foreign import ccall "qtc_QUndoView_setGeometry1" qtc_QUndoView_setGeometry1 :: Ptr (TQUndoView a) -> CInt -> CInt -> CInt -> CInt -> IO ()
instance QsetGeometry (QUndoViewSc a) ((Int, Int, Int, Int)) where
setGeometry x0 (x1, x2, x3, x4)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_setGeometry1 cobj_x0 (toCInt x1) (toCInt x2) (toCInt x3) (toCInt x4)
instance QqsetGeometry (QUndoView ()) ((QRect t1)) where
qsetGeometry x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_setGeometry cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_setGeometry" qtc_QUndoView_setGeometry :: Ptr (TQUndoView a) -> Ptr (TQRect t1) -> IO ()
instance QqsetGeometry (QUndoViewSc a) ((QRect t1)) where
qsetGeometry x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_setGeometry cobj_x0 cobj_x1
instance QsetGeometry (QUndoView ()) ((Rect)) where
setGeometry x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCRect x1 $ \crect_x1_x crect_x1_y crect_x1_w crect_x1_h ->
qtc_QUndoView_setGeometry_qth cobj_x0 crect_x1_x crect_x1_y crect_x1_w crect_x1_h
foreign import ccall "qtc_QUndoView_setGeometry_qth" qtc_QUndoView_setGeometry_qth :: Ptr (TQUndoView a) -> CInt -> CInt -> CInt -> CInt -> IO ()
instance QsetGeometry (QUndoViewSc a) ((Rect)) where
setGeometry x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCRect x1 $ \crect_x1_x crect_x1_y crect_x1_w crect_x1_h ->
qtc_QUndoView_setGeometry_qth cobj_x0 crect_x1_x crect_x1_y crect_x1_w crect_x1_h
instance QsetMouseTracking (QUndoView ()) ((Bool)) where
setMouseTracking x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_setMouseTracking cobj_x0 (toCBool x1)
foreign import ccall "qtc_QUndoView_setMouseTracking" qtc_QUndoView_setMouseTracking :: Ptr (TQUndoView a) -> CBool -> IO ()
instance QsetMouseTracking (QUndoViewSc a) ((Bool)) where
setMouseTracking x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_setMouseTracking cobj_x0 (toCBool x1)
instance QsetVisible (QUndoView ()) ((Bool)) where
setVisible x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_setVisible_h cobj_x0 (toCBool x1)
foreign import ccall "qtc_QUndoView_setVisible_h" qtc_QUndoView_setVisible_h :: Ptr (TQUndoView a) -> CBool -> IO ()
instance QsetVisible (QUndoViewSc a) ((Bool)) where
setVisible x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_setVisible_h cobj_x0 (toCBool x1)
instance QshowEvent (QUndoView ()) ((QShowEvent t1)) where
showEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_showEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_showEvent_h" qtc_QUndoView_showEvent_h :: Ptr (TQUndoView a) -> Ptr (TQShowEvent t1) -> IO ()
instance QshowEvent (QUndoViewSc a) ((QShowEvent t1)) where
showEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_showEvent_h cobj_x0 cobj_x1
instance QtabletEvent (QUndoView ()) ((QTabletEvent t1)) where
tabletEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_tabletEvent_h cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_tabletEvent_h" qtc_QUndoView_tabletEvent_h :: Ptr (TQUndoView a) -> Ptr (TQTabletEvent t1) -> IO ()
instance QtabletEvent (QUndoViewSc a) ((QTabletEvent t1)) where
tabletEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_tabletEvent_h cobj_x0 cobj_x1
instance QupdateMicroFocus (QUndoView ()) (()) where
updateMicroFocus x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_updateMicroFocus cobj_x0
foreign import ccall "qtc_QUndoView_updateMicroFocus" qtc_QUndoView_updateMicroFocus :: Ptr (TQUndoView a) -> IO ()
instance QupdateMicroFocus (QUndoViewSc a) (()) where
updateMicroFocus x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_updateMicroFocus cobj_x0
instance QwindowActivationChange (QUndoView ()) ((Bool)) where
windowActivationChange x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_windowActivationChange cobj_x0 (toCBool x1)
foreign import ccall "qtc_QUndoView_windowActivationChange" qtc_QUndoView_windowActivationChange :: Ptr (TQUndoView a) -> CBool -> IO ()
instance QwindowActivationChange (QUndoViewSc a) ((Bool)) where
windowActivationChange x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_windowActivationChange cobj_x0 (toCBool x1)
instance QchildEvent (QUndoView ()) ((QChildEvent t1)) where
childEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_childEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_childEvent" qtc_QUndoView_childEvent :: Ptr (TQUndoView a) -> Ptr (TQChildEvent t1) -> IO ()
instance QchildEvent (QUndoViewSc a) ((QChildEvent t1)) where
childEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_childEvent cobj_x0 cobj_x1
instance QconnectNotify (QUndoView ()) ((String)) where
connectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QUndoView_connectNotify cobj_x0 cstr_x1
foreign import ccall "qtc_QUndoView_connectNotify" qtc_QUndoView_connectNotify :: Ptr (TQUndoView a) -> CWString -> IO ()
instance QconnectNotify (QUndoViewSc a) ((String)) where
connectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QUndoView_connectNotify cobj_x0 cstr_x1
instance QcustomEvent (QUndoView ()) ((QEvent t1)) where
customEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_customEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoView_customEvent" qtc_QUndoView_customEvent :: Ptr (TQUndoView a) -> Ptr (TQEvent t1) -> IO ()
instance QcustomEvent (QUndoViewSc a) ((QEvent t1)) where
customEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoView_customEvent cobj_x0 cobj_x1
instance QdisconnectNotify (QUndoView ()) ((String)) where
disconnectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QUndoView_disconnectNotify cobj_x0 cstr_x1
foreign import ccall "qtc_QUndoView_disconnectNotify" qtc_QUndoView_disconnectNotify :: Ptr (TQUndoView a) -> CWString -> IO ()
instance QdisconnectNotify (QUndoViewSc a) ((String)) where
disconnectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QUndoView_disconnectNotify cobj_x0 cstr_x1
instance QeventFilter (QUndoView ()) ((QObject t1, QEvent t2)) where
eventFilter x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QUndoView_eventFilter_h cobj_x0 cobj_x1 cobj_x2
foreign import ccall "qtc_QUndoView_eventFilter_h" qtc_QUndoView_eventFilter_h :: Ptr (TQUndoView a) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO CBool
instance QeventFilter (QUndoViewSc a) ((QObject t1, QEvent t2)) where
eventFilter x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QUndoView_eventFilter_h cobj_x0 cobj_x1 cobj_x2
instance Qreceivers (QUndoView ()) ((String)) where
receivers x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QUndoView_receivers cobj_x0 cstr_x1
foreign import ccall "qtc_QUndoView_receivers" qtc_QUndoView_receivers :: Ptr (TQUndoView a) -> CWString -> IO CInt
instance Qreceivers (QUndoViewSc a) ((String)) where
receivers x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QUndoView_receivers cobj_x0 cstr_x1
instance Qsender (QUndoView ()) (()) where
sender x0 ()
= withQObjectResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_sender cobj_x0
foreign import ccall "qtc_QUndoView_sender" qtc_QUndoView_sender :: Ptr (TQUndoView a) -> IO (Ptr (TQObject ()))
instance Qsender (QUndoViewSc a) (()) where
sender x0 ()
= withQObjectResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoView_sender cobj_x0
|
uduki/hsQt
|
Qtc/Gui/QUndoView.hs
|
bsd-2-clause
| 87,687 | 0 | 15 | 14,183 | 28,636 | 14,519 | 14,117 | -1 | -1 |
{-# LANGUAGE FlexibleInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Hooks.FadeWindows
-- Description : A more flexible and general compositing interface than FadeInactive.
-- Copyright : Brandon S Allbery KF8NH <[email protected]>
-- License : BSD
--
-- Maintainer : Brandon S Allbery KF8NH
-- Stability : unstable
-- Portability : unportable
--
-- A more flexible and general compositing interface than FadeInactive.
-- Windows can be selected and opacity specified by means of FadeHooks,
-- which are very similar to ManageHooks and use the same machinery.
--
-----------------------------------------------------------------------------
module XMonad.Hooks.FadeWindows (-- * Usage
-- $usage
-- * The 'logHook' for window fading
fadeWindowsLogHook
-- * The 'FadeHook'
,FadeHook
,Opacity
,idFadeHook
-- * Predefined 'FadeHook's
,opaque
,solid
,transparent
,invisible
,transparency
,translucence
,fadeBy
,opacity
,fadeTo
-- * 'handleEventHook' for mapped/unmapped windows
,fadeWindowsEventHook
-- * 'doF' for simple hooks
,doS
-- * Useful 'Query's for 'FadeHook's
,isFloating
,isUnfocused
) where
import XMonad.Core
import XMonad.Prelude
import XMonad.ManageHook (liftX)
import qualified XMonad.StackSet as W
import XMonad.Hooks.FadeInactive (setOpacity
,isUnfocused
)
import Control.Monad.Reader (ask
,asks)
import Control.Monad.State (gets)
import qualified Data.Map as M
import Graphics.X11.Xlib.Extras (Event(..))
-- $usage
-- To use this module, make sure your @xmonad@ core supports generalized
-- 'ManageHook's (check the type of 'idHook'; if it's @ManageHook@ then
-- your @xmonad@ is too old) and then add @fadeWindowsLogHook@ to your
-- 'logHook' and @fadeWindowsEventHook@ to your 'handleEventHook':
--
-- > , logHook = fadeWindowsLogHook myFadeHook
-- > , handleEventHook = fadeWindowsEventHook
-- > {- ... -}
-- >
-- > myFadeHook = composeAll [ opaque
-- > , isUnfocused --> transparency 0.2
-- > ]
--
-- The above is like FadeInactive with a fade value of 0.2.
--
-- 'FadeHook's do not accumulate; instead, they compose from right to
-- left like 'ManageHook's, so in the above example @myFadeHook@ will
-- render unfocused windows at 4/5 opacity and the focused window as
-- opaque. This means that, in particular, the order in the above
-- example is important.
--
-- The 'opaque' hook above is optional, by the way, as any unmatched
-- window will be opaque by default. If you want to make all windows a
-- bit transparent by default, you can replace 'opaque' with something
-- like
--
-- > transparency 0.93
--
-- at the top of @myFadeHook@.
--
-- This module is best used with "XMonad.Hooks.MoreManageHelpers", which
-- exports a number of Queries that can be used in either @ManageHook@
-- or @FadeHook@.
--
-- Note that you need a compositing manager such as @xcompmgr@,
-- @dcompmgr@, or @cairo-compmgr@ for window fading to work. If you
-- aren't running a compositing manager, the opacity will be recorded
-- but won't take effect until a compositing manager is started.
--
-- For more detailed instructions on editing the 'logHook' see:
--
-- "XMonad.Doc.Extending#The_log_hook_and_external_status_bars"
--
-- For more detailed instructions on editing the 'handleEventHook',
-- see:
--
-- "XMonad.Doc.Extending#Editing_the_event_hook"
-- (which sadly doesn't exist at the time of writing...)
--
-- /WARNING:/ This module is very good at triggering bugs in
-- compositing managers. Symptoms range from windows not being
-- repainted until the compositing manager is restarted or the
-- window is unmapped and remapped, to the machine becoming sluggish
-- until the compositing manager is restarted (at which point a
-- popup/dialog will suddenly appear; apparently it's getting into
-- a tight loop trying to fade the popup in). I find it useful to
-- have a key binding to restart the compositing manager; for example,
--
--
-- > main = xmonad $ def {
-- >           {- ... -}
-- >           }
-- >           `additionalKeysP`
-- >           [ ("M-S-4", spawn "killall xcompmgr; sleep 1; xcompmgr -cCfF &")
-- >           {- ... -}
-- >           ]
--
-- (See "XMonad.Util.EZConfig" for 'additionalKeysP'.)
-- A window opacity to be carried in a Query. OEmpty is sort of a hack
-- to make it obey the monoid laws.
data Opacity = Opacity Rational | OEmpty
instance Semigroup Opacity where
r <> OEmpty = r
_ <> r = r
instance Monoid Opacity where
mempty = OEmpty
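-- Composition is right-biased: in @a <> b@ the right-hand opacity wins
-- unless it is 'OEmpty', so the rightmost matching hook in a 'composeAll'
-- chain determines the final opacity.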
-- | A FadeHook is similar to a ManageHook, but records window opacity.
type FadeHook = Query Opacity
-- | Render a window fully opaque.
opaque :: FadeHook
opaque = doS (Opacity 1)
-- | Render a window fully transparent.
transparent :: FadeHook
transparent = doS (Opacity 0)
-- | Specify a window's transparency.
transparency :: Rational -- ^ The window's transparency as a fraction.
-- @transparency 1@ is the same as 'transparent',
-- whereas @transparency 0@ is the same as 'opaque'.
-> FadeHook
transparency = doS . Opacity . (1-) . clampRatio
-- | Specify a window's opacity; this is the inverse of 'transparency'.
opacity :: Rational -- ^ The opacity of a window as a fraction.
-- @opacity 1@ is the same as 'opaque',
-- whereas @opacity 0@ is the same as 'transparent'.
-> FadeHook
opacity = doS . Opacity . clampRatio
fadeTo, translucence, fadeBy :: Rational -> FadeHook
-- | An alias for 'transparency'.
fadeTo = transparency
-- | An alias for 'transparency'.
translucence = transparency
-- | An alias for 'opacity'.
fadeBy = opacity
invisible, solid :: FadeHook
-- | An alias for 'transparent'.
invisible = transparent
-- | An alias for 'opaque'.
solid = opaque
-- | Like 'doF', but usable with 'ManageHook'-like hooks that
-- aren't 'Query' wrapped around transforming functions ('Endo').
doS :: Monoid m => m -> Query m
doS = return
-- | The identity 'FadeHook', which renders windows 'opaque'.
idFadeHook :: FadeHook
idFadeHook = opaque
-- | A Query to determine if a window is floating.
isFloating :: Query Bool
isFloating = ask >>= \w -> liftX . gets $ M.member w . W.floating . windowset
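-- An illustrative fade hook (not part of this module) combining the
-- queries above with the predefined hooks; the opacity values are
-- arbitrary:
--
-- > myFadeHook = composeAll [ opaque
-- >                         , isUnfocused --> transparency 0.2
-- >                         , isFloating  --> opacity 0.9
-- >                         ]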
-- boring windows can't be seen outside of a layout, so we watch messages with
-- a dummy LayoutModifier and stow them in a persistent bucket. this is not
-- entirely reliable given that boringAuto still isn't observable; we just hope
-- those aren't visible and won't be affected anyway.
-- @@@ punted for now, will be a separate module. it's still slimy, though
-- | A 'logHook' to fade windows under control of a 'FadeHook', which is
-- similar to but not identical to 'ManageHook'.
fadeWindowsLogHook :: FadeHook -> X ()
fadeWindowsLogHook h = withWindowSet $ \s -> do
let visibleWins = (W.integrate' . W.stack . W.workspace . W.current $ s) ++
concatMap (W.integrate' . W.stack . W.workspace) (W.visible s)
forM_ visibleWins $ \w -> do
o <- userCodeDef (Opacity 1) (runQuery h w)
setOpacity w $ case o of
OEmpty -> 1
Opacity r -> r
-- | A 'handleEventHook' to handle fading and unfading of newly mapped
-- or unmapped windows; this avoids problems with layouts such as
-- "XMonad.Layout.Full" or "XMonad.Layout.Tabbed". This hook may
-- also be useful with "XMonad.Hooks.FadeInactive".
fadeWindowsEventHook :: Event -> X All
fadeWindowsEventHook MapNotifyEvent{} =
-- we need to run the fadeWindowsLogHook. only one way...
asks config >>= logHook >> return (All True)
fadeWindowsEventHook _ = return (All True)
-- A utility to clamp opacity fractions to the closed range [0,1].
clampRatio :: Rational -> Rational
clampRatio r | r >= 0 && r <= 1 = r
| r < 0 = 0
| otherwise = 1
|
xmonad/xmonad-contrib
|
XMonad/Hooks/FadeWindows.hs
|
bsd-3-clause
| 9,223 | 0 | 18 | 2,967 | 846 | 519 | 327 | 77 | 2 |
{-# LANGUAGE DefaultSignatures, FlexibleContexts #-}
-- |
-- Module: Data.Aeson.Types.Class
-- Copyright: (c) 2011-2015 Bryan O'Sullivan
-- (c) 2011 MailRank, Inc.
-- License: Apache
-- Maintainer: Bryan O'Sullivan <[email protected]>
-- Stability: experimental
-- Portability: portable
--
-- Types for working with JSON data.
module Data.Aeson.Types.Class
(
-- * Core JSON classes
FromJSON(..)
, ToJSON(..)
-- * Generic JSON classes
, GFromJSON(..)
, GToJSON(..)
, genericToJSON
, genericToEncoding
, genericParseJSON
-- * Object key-value pairs
, KeyValue(..)
) where
import Data.Aeson.Types.Internal
import Data.Text (Text)
import GHC.Generics (Generic, Rep, from, to)
import qualified Data.Aeson.Encode.Builder as E
-- | Class of generic representation types ('Rep') that can be converted to
-- JSON.
class GToJSON f where
-- | This method (applied to 'defaultOptions') is used as the
-- default generic implementation of 'toJSON'.
gToJSON :: Options -> f a -> Value
-- | This method (applied to 'defaultOptions') can be used as the
-- default generic implementation of 'toEncoding'.
gToEncoding :: Options -> f a -> Encoding
-- | Class of generic representation types ('Rep') that can be converted from JSON.
class GFromJSON f where
-- | This method (applied to 'defaultOptions') is used as the
-- default generic implementation of 'parseJSON'.
gParseJSON :: Options -> Value -> Parser (f a)
-- | A configurable generic JSON creator. This function applied to
-- 'defaultOptions' is used as the default for 'toJSON' when the type
-- is an instance of 'Generic'.
genericToJSON :: (Generic a, GToJSON (Rep a)) => Options -> a -> Value
genericToJSON opts = gToJSON opts . from
-- | A configurable generic JSON encoder. This function applied to
-- 'defaultOptions' is used as the default for 'toEncoding' when the type
-- is an instance of 'Generic'.
genericToEncoding :: (Generic a, GToJSON (Rep a)) => Options -> a -> Encoding
genericToEncoding opts = gToEncoding opts . from
-- | A configurable generic JSON decoder. This function applied to
-- 'defaultOptions' is used as the default for 'parseJSON' when the
-- type is an instance of 'Generic'.
genericParseJSON :: (Generic a, GFromJSON (Rep a)) => Options -> Value -> Parser a
genericParseJSON opts = fmap to . gParseJSON opts
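-- An illustrative use of the three generic helpers above (not part of the
-- original module); the @Person@ type and the 'fieldLabelModifier' choice
-- are assumptions made purely for the example (requires @DeriveGeneric@):
--
-- > data Person = Person { _name :: String, _age :: Int } deriving Generic
-- >
-- > instance ToJSON Person where
-- >   toJSON     = genericToJSON     defaultOptions { fieldLabelModifier = drop 1 }
-- >   toEncoding = genericToEncoding defaultOptions { fieldLabelModifier = drop 1 }
-- >
-- > instance FromJSON Person where
-- >   parseJSON = genericParseJSON defaultOptions { fieldLabelModifier = drop 1 }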
-- | A type that can be converted to JSON.
--
-- An example type and instance:
--
-- @{-\# LANGUAGE OverloadedStrings #-}
--
-- data Coord = Coord { x :: Double, y :: Double }
--
-- instance ToJSON Coord where
-- toJSON (Coord x y) = 'object' [\"x\" '.=' x, \"y\" '.=' y]
-- @
--
-- Note the use of the @OverloadedStrings@ language extension which enables
-- 'Text' values to be written as string literals.
--
-- Instead of manually writing your 'ToJSON' instance, there are two options
-- to do it automatically:
--
-- * "Data.Aeson.TH" provides Template Haskell functions which will derive an
-- instance at compile time. The generated instance is optimized for your type
-- so will probably be more efficient than the following option:
--
-- * The compiler can provide default generic implementations for 'toJSON' and
-- 'toEncoding'.
--
-- To use the second, simply add a @deriving 'Generic'@ clause to your
-- datatype and declare a 'ToJSON' instance for your datatype without giving
-- definitions for 'toJSON' or 'toEncoding'.
--
-- For example, the previous example can be simplified to just:
--
-- @{-\# LANGUAGE DeriveGeneric \#-}
--
-- import GHC.Generics
--
-- data Coord = Coord { x :: Double, y :: Double } deriving Generic
--
-- instance ToJSON Coord
-- @
--
-- Note that, instead of using @DefaultSignatures@, it's also possible
-- to parameterize the generic encoding using 'genericToJSON' applied
-- to your encoding/decoding 'Options':
--
-- @
-- instance ToJSON Coord where
-- toJSON = 'genericToJSON' 'defaultOptions'
-- toEncoding = 'genericToEncoding' 'defaultOptions'
-- @
class ToJSON a where
toJSON :: a -> Value
default toJSON :: (Generic a, GToJSON (Rep a)) => a -> Value
toJSON = genericToJSON defaultOptions
toEncoding :: a -> Encoding
toEncoding = Encoding . E.encodeToBuilder . toJSON
{-# INLINE toEncoding #-}
-- | A type that can be converted from JSON, with the possibility of
-- failure.
--
-- When writing an instance, use 'empty', 'mzero', or 'fail' to make a
-- conversion fail, e.g. if an 'Object' is missing a required key, or
-- the value is of the wrong type.
--
-- An example type and instance:
--
-- @
-- {-\# LANGUAGE OverloadedStrings #-}
--
-- data Coord = Coord { x :: Double, y :: Double }
--
-- instance FromJSON Coord where
-- parseJSON ('Object' v) = Coord '<$>'
-- v '.:' \"x\" '<*>'
-- v '.:' \"y\"
--
-- \-- A non-'Object' value is of the wrong type, so use 'mzero' to fail.
-- parseJSON _ = 'mzero'
-- @
--
-- Note the use of the @OverloadedStrings@ language extension which enables
-- 'Text' values to be written as string literals.
--
-- Instead of manually writing your 'FromJSON' instance, there are two options
-- to do it automatically:
--
-- * "Data.Aeson.TH" provides Template Haskell functions which will derive an
-- instance at compile time. The generated instance is optimized for your type
-- so will probably be more efficient than the following option:
--
-- * The compiler can provide a default generic implementation for
-- 'parseJSON'.
--
-- To use the second, simply add a @deriving 'Generic'@ clause to your
-- datatype and declare a 'FromJSON' instance for your datatype without giving
-- a definition for 'parseJSON'.
--
-- For example, the previous example can be simplified to just:
--
-- @
-- {-\# LANGUAGE DeriveGeneric \#-}
--
-- import GHC.Generics
--
-- data Coord = Coord { x :: Double, y :: Double } deriving Generic
--
-- instance FromJSON Coord
-- @
--
-- Note that, instead of using @DefaultSignatures@, it's also possible
-- to parameterize the generic decoding using 'genericParseJSON' applied
-- to your encoding/decoding 'Options':
--
-- @
-- instance FromJSON Coord where
-- parseJSON = 'genericParseJSON' 'defaultOptions'
-- @
class FromJSON a where
parseJSON :: Value -> Parser a
default parseJSON :: (Generic a, GFromJSON (Rep a)) => Value -> Parser a
parseJSON = genericParseJSON defaultOptions
-- | A key-value pair for a JSON object.
class KeyValue t where
(.=) :: ToJSON v => Text -> v -> t
infixr 8 .=
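-- An illustrative use of '.=' (not part of the original module); 'object'
-- and the relevant 'KeyValue' instance are assumed to come from
-- "Data.Aeson" (requires @OverloadedStrings@ for the literal keys):
--
-- > coordJSON :: Value
-- > coordJSON = object [ "x" .= (1.5 :: Double), "y" .= (2.0 :: Double) ]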
|
nurpax/aeson
|
Data/Aeson/Types/Class.hs
|
bsd-3-clause
| 6,569 | 0 | 11 | 1,322 | 650 | 418 | 232 | 40 | 1 |