| code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| stringlengths 5 to 1.03M | stringlengths 5 to 90 | stringlengths 4 to 158 | stringclasses 15 values | int64 5 to 1.03M | int64 0 to 53.9k | int64 2 to 4.17k | int64 0 to 365k | int64 3 to 317k | int64 1 to 171k | int64 1 to 146k | int64 -1 to 37.3k | int64 -1 to 1.31k |
{-
******************************************************************************
* JSHOP *
* *
* Module: AST *
* Purpose: JavaScript Abstract Syntax Tree *
* Authors: Nick Brunt, Henrik Nilsson *
* *
* Based on the HMTC equivalent *
* Copyright (c) Henrik Nilsson, 2006 - 2011 *
* http://www.cs.nott.ac.uk/~nhn/ *
* *
* Revisions for JavaScript *
* Copyright (c) Nick Brunt, 2011 - 2012 *
* *
******************************************************************************
-}
-- | JavaScript Abstract Syntax Tree. Representation of JavaScript programs
-- after parsing.
module AST where
{-
AST (..), -- Not abstract. Instances: HasSrcPos.
Command (..), -- Not abstract. Instances: HasSrcPos.
Expression (..), -- Not abstract. Instances: HasSrcPos.
Declaration (..), -- Not abstract. Instances: HasSrcPos.
TypeDenoter (..) -- Not abstract. Instances: HasSrcPos.
-}
-- JSHOP module imports
--import Name
--import SrcPos
-- Note on Naming Conventions for Constructors and Field Labels
--
-- In Haskell, two (or more) datatypes that are in scope simultaneously
-- must not have any constructors or field labels in common. However,
-- different constructors of the same type may have common field names,
-- provided the fields all have the same type. This is very different
-- from records in languages like Pascal or C, and from objects in OO
-- languages like Java, where sharing names across different records or
-- objects is both possible and common.
--
-- To avoid name clashes, while still making it possible to use similar
-- names for similar things in different type declarations, some simple
-- naming conventions have been adopted:
--
-- * Constructors get a prefix which is an abbreviation of the name of
-- the data type. E.g. for 'Command', the prefix is 'Cmd', and a
-- typical constructor name is 'CmdAssign', and for 'TypeDenoter',
-- the prefix is 'TD'.
--
-- * Field names that are common to one or more constructors get the
-- same prefix as the constructor, but in lower-case.
--
-- * Field names that are specific to a constructor get a lower-case
-- prefix that is an abbreviation of the constructor. E.g. the
-- prefix for 'CmdAssign' is 'ca', and one of its fields is 'caVar'.
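--
-- As a small illustration (a sketch only, mirroring the commented-out
-- 'Command' record further below):
--
--   data Command
--     = CmdAssign { caVar :: PrimaryExpr, caVal :: PrimaryExpr }
--     | CmdCall   { ccProc :: PrimaryExpr, ccArgs :: [PrimaryExpr] }
--
-- Both constructors carry the 'Cmd' prefix of their type 'Command', while
-- the constructor-specific fields use the 'ca' and 'cc' prefixes.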
-- | Abstract syntax for the syntactic category Program
--data AST = AST { astCmd :: Command }
data Program
= Program [Source]
deriving Show
data Source
= Statement Statement
| SFuncDecl FuncDecl
deriving Show
data FuncDecl
= FuncDecl (Maybe String) [String] [Source]
deriving Show
data Statement
= EmptyStmt
| IfStmt IfStmt
| IterativeStmt IterativeStmt
| ExprStmt Expression
| Block [Statement]
| VarStmt [VarDecl]
| TryStmt TryStmt
| ContinueStmt (Maybe String)
| BreakStmt (Maybe String)
| ReturnStmt (Maybe Expression)
| WithStmt Expression Statement
| LabelledStmt String Statement
| Switch Switch
| ThrowExpr Expression
deriving Show
data IfStmt
= IfElse Expression Statement Statement
| If Expression Statement
-- | If2 Expression
-- | If3
deriving Show
data IterativeStmt
= DoWhile Statement Expression
| While Expression Statement
| For (Maybe Expression) (Maybe Expression) (Maybe Expression) Statement
| ForVar [VarDecl] (Maybe Expression) (Maybe Expression) Statement
| ForIn [VarDecl] Expression Statement
-- | It2 Expression
deriving Show
data TryStmt
= TryBC [Statement] [Catch]
| TryBF [Statement] [Statement]
| TryBCF [Statement] [Catch] [Statement]
deriving Show
data Catch
= Catch String [Statement]
| CatchIf String [Statement] Expression
deriving Show
data Switch
= SSwitch Expression CaseBlock
deriving Show
data CaseBlock
= CaseBlock [CaseClause] [DefaultClause] [CaseClause]
deriving Show
data CaseClause
= CaseClause Expression [Statement]
deriving Show
data DefaultClause
= DefaultClause [Statement]
deriving Show
data Expression
= Assignment Assignment
deriving Show
data VarDecl
= VarDecl String (Maybe Assignment)
deriving Show
-- | Abstract syntax for the syntactic category Assignment
data Assignment
= CondExpr CondExpr
| Assign LeftExpr AssignOp Assignment
| AssignFuncDecl FuncDecl
deriving Show
data LeftExpr
= NewExpr NewExpr
| CallExpr CallExpr
deriving Show
data AssignOp
= AssignNormal
| AssignOpMult
| AssignOpDiv
| AssignOpMod
| AssignOpPlus
| AssignOpMinus
deriving Show
data CondExpr
= LogOr LogOr
| CondIf LogOr Assignment Assignment
deriving Show
data NewExpr
= MemberExpr MemberExpr
| NewNewExpr NewExpr
deriving Show
data CallExpr
= CallMember MemberExpr [Assignment]
| CallCall CallExpr [Assignment]
| CallSquare CallExpr Expression
| CallDot CallExpr String
deriving Show
data MemberExpr
= MemExpression PrimaryExpr
| ArrayExpr MemberExpr Expression
| MemberNew MemberExpr [Assignment]
| MemberCall MemberExpr String
deriving Show
-- | Abstract syntax for the syntactic category PrimaryExpr
data PrimaryExpr
-- | Literal integer
= ExpLitInt Integer
-- | Literal strings
| ExpLitStr String
-- | Identifier
| ExpId String
-- | Bracketed expression
| ExpBrackExp Expression
-- | This (current object)
| ExpThis
-- | Regular PrimaryExpr
| ExpRegex String
-- | Arrays
| ExpArray ArrayLit
-- | Objects
| ExpObject [(PropName, Assignment)]
deriving Show
-- | Abstract syntax for the syntactic category Array Literal
data ArrayLit
-- | Simple array
= ArraySimp [Assignment]
deriving Show
data PropName
= PropNameId String
| PropNameStr String
| PropNameInt Integer
deriving Show
data LogOr
= LogAnd LogAnd
| LOLogOr LogOr LogAnd
deriving Show
data LogAnd
= BitOR BitOR
| LALogAnd LogAnd BitOR
deriving Show
data BitOR
= BitXOR BitXOR
| BOBitOR BitOR BitXOR
deriving Show
data BitXOR
= BitAnd BitAnd
| BXBitXOR BitXOR BitAnd
deriving Show
data BitAnd
= EqualExpr EqualExpr
| BABitAnd BitAnd EqualExpr
deriving Show
data EqualExpr
= RelExpr RelExpr
| Equal EqualExpr RelExpr
| NotEqual EqualExpr RelExpr
| EqualTo EqualExpr RelExpr
| NotEqualTo EqualExpr RelExpr
deriving Show
data RelExpr
= ShiftExpr ShiftExpr
| LessThan RelExpr ShiftExpr
| GreaterThan RelExpr ShiftExpr
| LessEqual RelExpr ShiftExpr
| GreaterEqual RelExpr ShiftExpr
| InstanceOf RelExpr ShiftExpr
| InObject RelExpr ShiftExpr
deriving Show
data ShiftExpr
= AddExpr AddExpr
| ShiftLeft ShiftExpr AddExpr
| ShiftRight ShiftExpr AddExpr
| ShiftRight2 ShiftExpr AddExpr
deriving Show
data AddExpr
= MultExpr MultExpr
| Plus AddExpr MultExpr
| Minus AddExpr MultExpr
deriving Show
data MultExpr
= UnaryExpr UnaryExpr
| Times MultExpr UnaryExpr
| Div MultExpr UnaryExpr
| Mod MultExpr UnaryExpr
deriving Show
data UnaryExpr
= PostFix PostFix
| Delete UnaryExpr
| Void UnaryExpr
| TypeOf UnaryExpr
| PlusPlus UnaryExpr
| MinusMinus UnaryExpr
| UnaryPlus UnaryExpr
| UnaryMinus UnaryExpr
| Not UnaryExpr
| BitNot UnaryExpr
deriving Show
data PostFix
= LeftExpr LeftExpr
| PostInc LeftExpr
| PostDec LeftExpr
deriving Show
{-
instance HasSrcPos AST where
srcPos = cmdSrcPos . astCmd
-}
-- | Abstract syntax for the syntactic category Command
-- For generality, the variable being assigned to, the procedure being
-- called, and the function being applied (currently only operators) are
-- represented by expressions as opposed to just an identifier (for
-- variables, procedures, and functions) or an operator. Consider
-- assignment to an array element, for example, where the LHS (e.g. x[i])
-- really is an expression that gets evaluated to a memory reference
-- (sink). Also, this arrangement facilitates error reporting, as a
-- variable expression has an associated source position, whereas names,
-- currently represented by strings, have not.
{-
data Command
-- | Assignment
= CmdAssign {
caVar :: PrimaryExpr, -- ^ Assigned variable
caVal :: PrimaryExpr, -- ^ Right-hand-side expression
cmdSrcPos :: SrcPos
}
-- | Procedure call
| CmdCall {
ccProc :: PrimaryExpr, -- ^ Called procedure
ccArgs :: [PrimaryExpr], -- ^ Arguments
cmdSrcPos :: SrcPos
}
-- | Command sequence (block)
| CmdSeq {
csCmds :: [Command], -- ^ Commands
cmdSrcPos :: SrcPos
}
{- Original version
-- | Conditional command
| CmdIf {
ciCond :: PrimaryExpr, -- ^ Condition
ciThen :: Command, -- ^ Then-branch
ciElse :: Command, -- ^ Else-branch
cmdSrcPos :: SrcPos
}
-}
-- Extended version
| CmdIf {
ciCondThens :: [(PrimaryExpr, [Command])], -- ^ Conditional branches
ciElse :: [Command], -- ^ Optional else-branch
cmdSrcPos :: SrcPos
}
-- | While-loop
| CmdWhile {
cwCond :: PrimaryExpr, -- ^ Loop-condition
cwBody :: Command, -- ^ Loop-body
cmdSrcPos :: SrcPos
}
-- | Repeat-loop
{- | CmdRepeat {
crBody :: Command, -- ^ Loop-body
crCond :: PrimaryExpr, -- ^ Loop-condition
cmdSrcPos :: SrcPos
}
-}
{-
-- | Let-command
| CmdLet {
clDecls :: [Declaration], -- ^ Declarations
clBody :: Command, -- ^ Let-body
cmdSrcPos :: SrcPos
}
-}
-}
{-
instance HasSrcPos Command where
srcPos = cmdSrcPos
-}
{-
-- | Variable reference
| ExpVar {
evVar :: Name, -- ^ Name of referenced variable
expSrcPos :: SrcPos
}
-- | Function or n-ary operator application
| ExpApp {
eaFun :: PrimaryExpr, -- ^ Applied function or operator
eaArgs :: [PrimaryExpr], -- ^ Arguments
expSrcPos :: SrcPos
}
-- | Conditional expression
| ExpCond {
ecCond :: PrimaryExpr, -- ^ Condition
ecTrue :: PrimaryExpr, -- ^ Value if condition true
ecFalse :: PrimaryExpr, -- ^ Value if condition false
expSrcPos :: SrcPos
}
-}
{-
instance HasSrcPos PrimaryExpr where
srcPos = expSrcPos
-}
-- | Abstract syntax for the syntactic category Declaration
--data Declaration
{-
{-
-- | Constant declaration
= DeclConst {
dcConst :: Name, -- ^ Name of defined constant
dcType :: TypeDenoter, -- ^ Type of defined constant
dcVal :: PrimaryExpr, -- ^ Value of defined constant
declSrcPos :: SrcPos
}
-}
-- | Variable declaration
= DeclVar {
dvVar :: Name, -- ^ Name of declared variable
dvType :: TypeDenoter, -- ^ Type of declared variable
dvMbVal :: Maybe PrimaryExpr, -- ^ Initial value of declared
-- variable, if any
declSrcPos :: SrcPos
}
-}
{-
instance HasSrcPos Declaration where
srcPos = declSrcPos
-}
-- | Abstract syntax for the syntactic category TypeDenoter
-- Right now, the only types are simple base types like Integer and Bool.
-- If MiniTriangle were extended to allow users to express e.g. function
-- types, then this data declaration would have to be extended.
--data TypeDenoter
{-
-- | Base Type
= TDBaseType {
tdbtName :: Name, -- ^ Name of the base type
tdSrcPos :: SrcPos
}
-}
{-
instance HasSrcPos TypeDenoter where
srcPos = tdSrcPos
-} | nbrunt/JSHOP | src/old/ver2/AST.hs | mit | 13,397 | 20 | 8 | 4,720 | 1,190 | 724 | 466 | 190 | 0 |
module Temp where
import Data.List
fy x = ix^2 + ix
where ix = floor x
pascalTri x = x:pascalTri ([1] ++ m ++ [1])
where
m = zipWith (+) (init x) (tail x)
-- 1.a
null' [] = True
null' _ = False
-- separator
take' n (x:xs)
| n == 0 = []
| n > 0 = [x] ++ take' (n-1) xs
take' _ [] = []
takeDropWhile f coll = iter coll []
where
iter [] temp = ((reverse temp),[])
iter (x:xs) temp
| f x = iter xs (x:temp)
| otherwise = ((reverse temp),x:xs)
--
group' [] = []
group' (x:xs) = (x:h) : (group' s)
where
(h,s) = takeDropWhile (x==) xs
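-- For example (assuming GHCi):
-- takeDropWhile (< 3) [1,2,3,4,1] == ([1,2],[3,4,1])
-- group' [1,1,2,3,3,3] == [[1,1],[2],[3,3,3]]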
---
unicorn x y = nub $ x ++ y
union'' [] [] = []
union'' (x:xs) [] = (x:xs)
union'' [] (y:ys) = (y:ys)
union'' (x:xs) (y:ys)
| y == temp y (x:xs) = union'' (x:xs) ys
| y /= temp y ((x:xs)) = union'' ((x:xs ++ [y])) ys
where temp y [] = 0
temp y (x:xs)
| y == x = y
| y /= x = temp y xs
| skadinyo/conc | haskell/temp.hs | epl-1.0 | 908 | 0 | 12 | 283 | 620 | 319 | 301 | 31 | 2 |
-- Copyright 2013 Gushcha Anton
-- This file is part of PowerCom.
--
-- PowerCom is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- PowerCom is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with PowerCom. If not, see <http://www.gnu.org/licenses/>.
module Channel.Frame (
Frame(..)
, FrameClass(..)
, prop_toByteString
) where
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.UTF8 as UTF
import Data.Functor
import Data.Word
import Data.Binary.Strict.Get
import Data.Binary.Put
import Control.Monad
import Control.Applicative
import Test.QuickCheck
class (Eq a) => FrameClass a where
toByteString :: a -> BS.ByteString
fromByteString :: BS.ByteString -> (Either String a, BS.ByteString)
frameType :: Frame -> Word8
frameType frame = case frame of
InformationFrame _ _ -> 0x00
DataPartFrame _ -> 0x01
LinkFrame _ -> 0x02
UnlinkFrame _ -> 0x03
AckFrame -> 0x04
RetFrame -> 0x05
OptionFrame _ -> 0x06
Upcheck -> 0x07
data Frame = InformationFrame String Word32
| DataPartFrame String
| OptionFrame [(String, String)]
| LinkFrame String
| UnlinkFrame String
| AckFrame
| RetFrame
| Upcheck
deriving (Show, Eq)
instance Arbitrary Frame where
arbitrary = oneof [ InformationFrame <$> (arbitrary :: Gen String) <*> (arbitrary :: Gen Word32)
, DataPartFrame <$> (arbitrary :: Gen String)
, LinkFrame <$> (arbitrary :: Gen String)
, UnlinkFrame <$> (arbitrary :: Gen String)
, return AckFrame
, return RetFrame
, OptionFrame <$> (arbitrary :: Gen [(String, String)])
, return Upcheck]
shrink (OptionFrame os) = [OptionFrame nos | nos <- shrink os]
shrink _ = []
-- TODO: Move to binary class instead of custom
{-instance Binary Frame where
put = put . toByteString
get = do
(res, _) <- liftM fromByteString
case res of
Right frame -> return frame
Left err -> error err-}
int2word :: Int -> Word32
int2word = fromInteger . toInteger
word2int :: Word32 -> Int
word2int = fromInteger . toInteger
instance FrameClass Frame where
toByteString frame = BS.concat . BL.toChunks $ runPut $ case frame of
InformationFrame u n -> putBounded $ putMarkedString u >> putWord32be n
DataPartFrame s -> putBounded $ putMarkedString s
LinkFrame u -> putBounded $ putMarkedString u
UnlinkFrame u -> putBounded $ putMarkedString u
AckFrame -> putShort
RetFrame -> putShort
OptionFrame os -> putBounded $ putListLength os >> putOptions os
Upcheck -> putShort
where
putBegin = putWord8 (frameType frame)
putShort = putBegin
putListLength = putWord32be . int2word . length
putBSLength = putWord32be . int2word . BS.length
putMarkedString s = let bs = UTF.fromString s in putBSLength bs >> putByteString bs
putBounded m = putBegin >> m
putOptions = mapM_ (\(key,value) -> putMarkedString key >> putMarkedString value)
fromByteString = runGet parseFrame
where
parseFrame :: Get Frame
parseFrame = do
frameTypeId <- getWord8
case frameTypeId of
0x00 -> return InformationFrame `ap` parseMarkedString `ap` getWord32be
0x01 -> return DataPartFrame `ap` parseMarkedString
0x02 -> return LinkFrame `ap` parseMarkedString
0x03 -> return UnlinkFrame `ap` parseMarkedString
0x04 -> return AckFrame
0x05 -> return RetFrame
0x06 -> return OptionFrame `ap` parseKeyValue
0x07 -> return Upcheck
_ -> fail "Unknown frame type!"
parseMarkedString = do
len <- getWord32be
body <- getByteString $ word2int len
return $ UTF.toString body
parseKeyValue :: Get [(String, String)]
parseKeyValue = do
pairsCount <- getWord32be
mapM parsePair [1..pairsCount]
where
parsePair :: a -> Get (String, String)
parsePair _ = do
keyCount <- getWord32be
key <- getByteString $ word2int keyCount
valueCount <- getWord32be
value <- getByteString $ word2int valueCount
return (UTF.toString key, UTF.toString value)
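-- A quick sanity check of the encoding (the tag byte comes from 'frameType'):
-- toByteString AckFrame == BS.pack [0x04]
-- fst (fromByteString (BS.pack [0x04])) == Right AckFrame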
-- Testing
prop_toByteString :: Frame -> Bool
prop_toByteString f = case fst $ fromByteString $ toByteString f of
Left _ -> False
Right v -> v == f | NCrashed/PowerCom | src/powercom/Channel/Frame.hs | gpl-3.0 | 6,323 | 0 | 15 | 2,670 | 1,176 | 618 | 558 | 101 | 8 |
{-# LANGUAGE ScopedTypeVariables, NoMonomorphismRestriction, RecordWildCards #-}
module Main where
import Control.Applicative
import Control.Monad
import Control.Monad.Error
import Control.Monad.Reader
import Control.Monad.State
import Data.Conduit
import qualified Data.Conduit.List as CL
import qualified Data.Traversable as T
import qualified Data.HashMap.Lazy as HM
import Data.Maybe
import System.Directory
import System.Environment
import System.FilePath ((</>))
import System.IO
import System.Log.Logger
--
import HEP.Automation.EventChain.Driver
import HEP.Automation.EventChain.File
import HEP.Automation.EventChain.LHEConn
import HEP.Automation.EventChain.Type.Skeleton
import HEP.Automation.EventChain.Type.Spec
import HEP.Automation.EventChain.Type.Process
import HEP.Automation.EventChain.SpecDSL
import HEP.Automation.EventChain.Simulator
import HEP.Automation.EventChain.Process
import HEP.Automation.EventChain.Process.Generator
import HEP.Automation.EventGeneration.Config
import HEP.Automation.EventGeneration.Type
import HEP.Automation.EventGeneration.Work
import HEP.Automation.MadGraph.Model.ADMXQLD111degen
import HEP.Automation.MadGraph.Run
import HEP.Automation.MadGraph.SetupType
import HEP.Automation.MadGraph.Type
import HEP.Parser.LHE.Type
import HEP.Parser.LHE.Sanitizer.Type
import HEP.Storage.WebDAV
--
import qualified Paths_madgraph_auto as PMadGraph
import qualified Paths_madgraph_auto_model as PModel
jets = [1,2,3,4,-1,-2,-3,-4,21]
leptons = [11,13,-11,-13]
lepplusneut = [11,12,13,14,-11,-12,-13,-14]
adms = [9000201,-9000201,9000202,-9000202]
sup = [1000002,-1000002]
sdownR = [2000001,-2000001]
p_sdownR :: DDecay
p_sdownR = d (sdownR, [t lepplusneut, t jets, t adms])
p_2sq_oo_2l2j2x :: DCross
p_2sq_oo_2l2j2x = x (t proton, t proton, [p_sdownR, p_sdownR])
idx_2sq_oo_2l2j2x :: CrossID ProcSmplIdx
idx_2sq_oo_2l2j2x = mkCrossIDIdx (mkDICross p_2sq_oo_2l2j2x)
map_2sq_oo_2l2j2x :: ProcSpecMap
map_2sq_oo_2l2j2x =
HM.fromList [(Nothing , MGProc [] [ "p p > dr dr~ QED=0"
, "p p > dr dr QED=0"
, "p p > dr~ dr~ QED=0"])
,(Just (3,-2000001,[]), MGProc [] [ "dr~ > u~ e+ sxxp~"
, "dr~ > d~ ve~ sxxp~" ])
,(Just (3,2000001,[]) , MGProc [] [ "dr > u e- sxxp"
, "dr > d ve sxxp" ])
,(Just (4,-2000001,[]), MGProc [] [ "dr~ > u~ e+ sxxp~ "
, "dr~ > d~ ve~ sxxp~ " ])
,(Just (4,2000001,[]) , MGProc [] [ "dr > u e- sxxp "
, "dr > d ve sxxp " ])
]
modelparam mgl msq msl mneut = ADMXQLD111degenParam mgl msq msl mneut
-- |
mgrunsetup :: Int -> RunSetup
mgrunsetup n =
RS { numevent = n
, machine = LHC8 ATLAS
, rgrun = Auto
, rgscale = 200.0
, match = NoMatch
, cut = NoCut
, pythia = RunPYTHIA
, lhesanitizer = LHESanitize (Replace [(9000201,1000022),(-9000201,1000022)])
, pgs = RunPGS (AntiKTJet 0.4,NoTau)
, uploadhep = NoUploadHEP
, setnum = 1
}
worksets = [ (mgl,msq,50000,50000, 10000) | mgl <- [100,200..2000], msq <- [100,200..2000] ]
main :: IO ()
main = do
args <- getArgs
let fp = args !! 0
n1 = read (args !! 1) :: Int
n2 = read (args !! 2) :: Int
-- fp <- (!! 0) <$> getArgs
updateGlobalLogger "MadGraphAuto" (setLevel DEBUG)
-- print (length worksets)
mapM_ (scanwork fp) (drop (n1-1) . take n2 $ worksets )
scanwork :: FilePath -> (Double,Double,Double,Double,Int) -> IO ()
scanwork fp (mgl,msq,msl,mneut,n) = do
homedir <- getHomeDirectory
getConfig fp >>=
maybe (return ()) (\ec -> do
let ssetup = evgen_scriptsetup ec
whost = evgen_webdavroot ec
pkey = evgen_privatekeyfile ec
pswd = evgen_passwordstore ec
Just cr <- getCredential pkey pswd
let wdavcfg = WebDAVConfig { webdav_credential = cr
, webdav_baseurl = whost }
param = modelparam mgl msq msl mneut
mgrs = mgrunsetup n
evchainGen ADMXQLD111degen
ssetup
("Work20130627_2sq_oo","2sq_oo_2l2j2x")
param
map_2sq_oo_2l2j2x p_2sq_oo_2l2j2x
mgrs
let wsetup' = getWorkSetupCombined ADMXQLD111degen ssetup param ("Work20130627_2sq_oo","2sq_oo_2l2j2x") mgrs
wsetup = wsetup' { ws_storage = WebDAVRemoteDir "montecarlo/admproject/XQLDdegen/8TeV/scan_2sq_oo_2l2j2x"}
putStrLn "phase2work start"
phase2work wsetup
putStrLn "phase3work start"
phase3work wdavcfg wsetup
)
phase2work :: WorkSetup ADMXQLD111degen -> IO ()
phase2work wsetup = do
r <- flip runReaderT wsetup . runErrorT $ do
ws <- ask
let (ssetup,psetup,param,rsetup) =
((,,,) <$> ws_ssetup <*> ws_psetup <*> ws_param <*> ws_rsetup) ws
cardPrepare
case (lhesanitizer rsetup,pythia rsetup) of
(NoLHESanitize,_) -> return ()
(LHESanitize pid, RunPYTHIA) -> do
sanitizeLHE
runPYTHIA
runPGS
runClean
(LHESanitize pid, NoPYTHIA) -> do
sanitizeLHE
cleanHepFiles
print r
return ()
-- |
phase3work :: WebDAVConfig -> WorkSetup ADMXQLD111degen -> IO ()
phase3work wdav wsetup = do
uploadEventFull NoUploadHEP wdav wsetup
return ()
| wavewave/lhc-analysis-collection | exe/2013-06-27-XQLD-2sq-oo.hs | gpl-3.0 | 5,789 | 0 | 20 | 1,710 | 1,556 | 883 | 673 | 136 | 3 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Database where
import Database.Persist.TH
import Data.Text (Text)
import Data.Word (Word8, Word64)
import Data.Time.Clock (UTCTime)
import Data.ByteString (ByteString)
share [mkPersist sqlSettings { mpsGenerateLenses = True }, mkMigrate "fileIndex"] [persistLowerCase|
File
name Text
location Text
size Word64
accessTime UTCTime
modTime UTCTime
userPer Word8
groupPer Word8
otherPer Word8
UniquePath location name
deriving Show
Job
file FileId
ctx ByteString
consumed Int
deriving Show
Hash
file FileId
hash Text
deriving Show
|]
| steffenomak/file-indexer | src/Database.hs | gpl-3.0 | 894 | 0 | 9 | 220 | 93 | 59 | 34 | 14 | 0 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DeriveFunctor #-}
-- | Module for managing closures
module Closure where
import LambdaCalculus
import Data.List
import Text.PrettyPrint.ANSI.Leijen hiding ((<$>))
newtype Environment a = Environment [Variable a] deriving (Show, Monoid, Functor, Eq)
instance Pretty a => Pretty (Environment a) where
pretty (Environment xs) = cat $ punctuate comma $ fmap pretty xs
-- | Adds a variable to an environment
addEnv :: Variable a -> Environment a -> Environment a
addEnv x (Environment xs) = Environment (x:xs)
-- | Expression carrying closures (recursive, since closures can themselves contain expressions with closures)
data ExprEval = ExprEval ExprEnv deriving (Show, Eq)
type ExprEnv = Expr (Environment (Maybe ExprEval))
instance Pretty ExprEval where
pretty (ExprEval a) = pretty a
toEvalType :: Expr (a, Environment a) -> ExprEnv
toEvalType = fmap (fmap (const Nothing) . snd)
-- | Updates a value in the environment
updateEnvValue :: [Char] -> t -> Environment t -> Environment t
updateEnvValue n v (Environment env) = Environment (replace env)
where
replace [] = []
replace (x:xs)
|n == getName x = fmap (const v) x : xs
|otherwise = x : replace xs
-- | Looks up a value in the environment
getEnvValue :: String -> Environment b -> Maybe b
getEnvValue n (Environment env) = getValue <$> find (\v -> getName v == n) env
includeEnv :: Functor f => t1 -> f t -> f (t, t1)
includeEnv env = fmap (\x -> (x, env))
-- | Attaches an environment to an expression
closure :: Environment a -> Expr a -> Expr (a, Environment a)
closure env (LC (Var a)) = LC $ Var $ includeEnv env a
closure env (LC (Apply expr1 expr2)) = LC $ Apply (closure env expr1) (closure env expr2)
closure env (LC (Lambda x expr)) = LC $ Lambda (includeEnv env x) (closure (addEnv x env) expr)
closure _ (Constant l t) = Constant l (t, mempty)
closure env (Fix n expr) = Fix n (closure env expr)
closure env (If c a b) = If (closure env c) (closure env a) (closure env b)
-- | Attaches an environment to an expression, starting from an empty environment
addClosure :: Expr a -> Expr (a, Environment a)
addClosure = closure mempty
toVar :: String -> Named ()
toVar x = Named x () | bruno-cadorette/Baeta-Compiler | src/Closure.hs | gpl-3.0 | 2,324 | 0 | 11 | 487 | 866 | 439 | 427 | 38 | 2 |
----------------------------------------------------------------------------------
-- |
-- Module : Tct.Method.Bounds.Violations.Find
-- Copyright : (c) Martin Avanzini <[email protected]>,
-- Georg Moser <[email protected]>,
-- Andreas Schnabl <[email protected]>
-- License : LGPL (see COPYING)
-- Maintainer : Martin Avanzini <[email protected]>,
-- Andreas Schnabl <[email protected]>
-- Stability : unstable
-- Portability : unportable
--
-- This module implements search for compatibility violations, as employed in
-- the bound processor.
----------------------------------------------------------------------------------
module Tct.Method.Bounds.Violations.Find where
import qualified Data.Set as Set
import qualified Data.Map as Map
import Data.Maybe (fromMaybe)
import Control.Monad (foldM, liftM, filterM)
import Termlib.Utils
import qualified Termlib.Rule as R
import Termlib.Term (Term(..), root, variables)
import qualified Termlib.Term as T
import Termlib.Rule (Strictness(..))
import qualified Termlib.Variable as Var
import Tct.Method.Bounds.Automata
decidingLabel :: Enrichment -> Strictness -> WeakBoundedness -> Label -> R.Rule -> LTerm -> Label
decidingLabel e str wb ml (R.Rule lhs rhs) = case e of {Top -> mtop; Match -> case str of {StrictRule -> mmatch; WeakRule -> mmatchrt}; Roof -> mroof}
where mtop (F (_,l) _) = l
mtop (S _) = error "Bounds.decidingLabel.mtop: cannot determine label from state"
mmatch (F (_,l) ts) = foldl min l [ mmatch ti | ti <- ts, isFun ti]
where isFun (F _ _) = True
isFun _ = False
mmatch (S _) = error "Bounds.decidingLabel.mmatch: cannot determine label from state"
mmatchrt t@(F (_,l) _) = if rtApplicable then applyMaxLabel (l - 1) else applyMaxLabel (mmatch t)
where rtApplicable = (T.size lhs >= T.size rhs) && equalLabels t
equalLabels (F (_,l') ts) = l == l' && all equalLabels ts
equalLabels (S _) = True
applyMaxLabel lab = case wb of
WeakMayExceedBound -> lab
WeakMayNotExceedBound -> min (ml - 1) lab
mmatchrt (S _) = error "Bounds.decidingLabel.mmatchrt: cannot determine label from state"
mroof llhs = mroof' lhs llhs
where mroof' (Fun _ ts) (F (_,l) lts) = foldl min l [ mroof' ti lti | (ti,lti) <- zip ts lts, isRoof ti]
mroof' s t = error $ "Bounds.decidingLabel.mroof': called with strange arguments " ++ show (pprint s) ++ " and " ++ show (pprint t)
isRoof (Var _) = False
isRoof u = rvars `Set.isSubsetOf` variables u
rvars = variables rhs
reachableFromLifted :: Automaton -> Term -> Set.Set State -> Set.Set (LTerm, State)
reachableFromLifted a t qs = runMemoAction reachableFromLiftedM
where t' = identifyVars t
reachableFromLiftedM = foldM (\ r q ->
do lterms <- reachLiftS (t',q)
return $ r `Set.union` Set.map (\ lt -> (lt,q)) lterms)
Set.empty $ Set.toList qs
reachLiftS (Var _, q) = return $ Set.singleton $ S q
reachLiftS s@(Fun f ts, q) = memo (s,q) $
(foldM (\ lterms (l,args) ->
Set.union lterms `liftM` labeledSubterms (f,l) (zip ts args))
Set.empty [(l, args) | (l,argss) <- bstepUL a f q , args <- Set.toList argss])
labeledSubterms fl subproblems = do ltis <- mapM reachLiftS subproblems
return $ Set.fromList $ [F fl lts | lts <- listProduct $ map Set.toList ltis]
identifyVars (Var _) = Var (Var.canonical 0)
identifyVars (Fun f ts) = Fun f $ map identifyVars ts
reaches :: Automaton -> LTerm -> State -> MemoAction (LTerm,State) Bool Bool
reaches _ (S p) q | p == q = return $ True
| otherwise = return $ False
reaches a (F fl lts) q2 = foldM f False (Set.toList $ bstep a fl q2)
where f True _ = return True
f False arg = foldM g True (zip lts arg)
g False _ = return False
g True (lti,qi) = memo (lti,qi) $ reaches a lti qi
findViolations :: Automaton -> Enrichment -> Strictness -> WeakBoundedness -> Label -> R.Rule -> Set.Set (LTerm, State)
findViolations a e str wb ml rule = Set.fromList $ runMemoAction $ filterM (\ (lt, q) -> not `liftM` candidateRejected lt q) candidates
where candidates = snub [ (labeledRhs labeledLhs, q) | (labeledLhs, q) <- Set.toList $ reachableFromLifted a l qs]
candidateRejected lt q = ifM (reaches a lt q) (return True) (isTrivialEpsilon lt q)
isTrivialEpsilon (F _ _) _ = return False
isTrivialEpsilon (S p) q = return (a == insert (Epsilon p q) a)
rt = case root l of Right f' -> f'; Left _ -> error "Bounds.violations: Encountered variable on left-hand side"
qs = Set.unions [qs' | (_,_,qs') <- rulesDefiningUL a rt]
l = R.lhs rule
r = R.rhs rule
labeledRhs labeledLhs = computeRhs (subst l labeledLhs) r
where newLabel = (decidingLabel e str wb ml rule labeledLhs + 1)
subst (Var v) (S s) = Map.singleton v s
subst (Fun _ ts) (F _ lts) = Map.unions [subst ti lti | (ti,lti) <- zip ts lts]
subst _ _ = error "Bounds.violations: labeled term does not match left-hand side"
computeRhs s (Var v) = S $ fromMaybe (error "Variables of right-hand side not included in variables of left-hand side") (Map.lookup v s)
computeRhs s (Fun f ts) = F (f,newLabel) [computeRhs s ti | ti <- ts]
| mzini/TcT | source/Tct/Method/Bounds/Violations/Find.hs | gpl-3.0 | 6,279 | 0 | 17 | 2,130 | 1,932 | 1,007 | 925 | 75 | 13 |
-- Too slow. Turns out there's too much overhead when compiling this to
-- JavaScript with Haste.
import Control.Concurrent.MVar
import Control.Applicative
import Control.Monad
import Data.Functor
import Data.List
import Data.Ord
import Haste
import Haste.Ajax
import Haste.Graphics.Canvas
sz = 4; lim = sz * 28
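-- Note: zLayer computes one dense layer (each neuron's bias plus the dot
-- product of the inputs with that neuron's weights), feed folds the input
-- through all layers with a ReLU (max 0) activation, and think scales the
-- 0..255 pixel values down by 256 before picking the index of the output
-- neuron with the highest activation.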
zLayer as (bs, wvs) = zipWith (+) bs $ sum . zipWith (*) as <$> wvs
feed = foldl' (((max 0 <$>) . ) . zLayer)
think ns brain = let xs = ((/ 256) . fromIntegral) <$> ns in
fst . maximumBy (comparing snd) . zip [0..] $ feed xs brain
main = withElems ["canvas", "message", "clearButton", "goButton"] $ \elems -> do
textRequest GET "85.txt" [] $ go elems
darken (xs, x, y) = let (as, b:bs) = splitAt (x + 28*y) xs in
(as ++ (min 255 (b + 255):bs), x, y)
go [cElem, message, clearButton, goButton] (Just braintxt) = do
void $ setProp message "innerHTML" $ "version qux"
Just canvas <- getCanvas cElem
xVar <- newMVar (replicate 784 0, 0, 0)
penVar <- newMVar False
let
brain = read braintxt :: [([Float], [[Float]])]
box n (y, x) = let m = 255 - n in color (RGB m m m) $ fill $ rect (fromIntegral (x*sz), fromIntegral (y*sz)) (fromIntegral (x*sz + sz), fromIntegral (y*sz + sz))
update = do
(xs, _, _) <- readMVar xVar
render canvas $ zipWithM box xs $ (,) <$> [0..27] <*> [0..27]
return ()
penCheck = do
pen <- readMVar penVar
when pen $ do
(xs, x, y) <- takeMVar xVar
putMVar xVar $ darken (xs, x, y)
setTimeout 50 $ penCheck
guess = do
(xs, _, _) <- readMVar xVar
setTimeout 1 $ void $ setProp message "innerHTML" $ show $ think xs brain
_ <- cElem `onEvent` OnMouseDown $ \_ (x, y) -> do
orig@(xs, _, _) <- takeMVar xVar
if x < lim && y < lim then do
putMVar xVar $ darken (xs, x `div` sz, y `div` sz)
swapMVar penVar True
update
else putMVar xVar orig
_ <- cElem `onEvent` OnMouseUp $ \_ _ -> void $ swapMVar penVar False
_ <- cElem `onEvent` OnMouseOut $ void $ swapMVar penVar False
_ <- cElem `onEvent` OnMouseMove $ \(x, y) -> do
pen <- readMVar penVar
when (pen && x < lim && y < lim) $ do
(xs, _, _) <- takeMVar xVar
putMVar xVar $ darken (xs, x `div` sz, y `div` sz)
update
_ <- clearButton `onEvent` OnClick $ \_ _ ->
swapMVar xVar (replicate 784 0, 0, 0) >> update
_ <- goButton `onEvent` OnClick $ \_ _ -> guess
penCheck
| blynn/morans | tooslow.hs | gpl-3.0 | 2,436 | 0 | 18 | 623 | 1,160 | 604 | 556 | 58 | 2 |
-- Run-length encoding of a list. Use the result of problem P09 to implement the
-- so-called run-length encoding data compression method. Consecutive duplicates
-- of elements are encoded as lists (N E) where N is the number of duplicates of
-- the element E.
----- From Ex09
pack :: (Eq a) => [a] -> [[a]]
computeTail :: (Eq a) => a -> [a] -> Int -> (Int, [a])
computeTail a [] n = (n, [])
computeTail a (x : xs) n = if a == x
then computeTail a xs (n + 1)
else (n, (x : xs))
pack [] = []
pack (x : xs) = (replicate n x) : (pack tailNoDup)
where (n, tailNoDup) = computeTail x xs 1
----- Ex10
encode :: (Eq a) => [a] -> [(Int, a)]
encode l = map (\ l -> (length l, head l)) (pack l)
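-- For example (assuming GHCi):
-- encode "aaaabccaadeeee" == [(4,'a'),(1,'b'),(2,'c'),(2,'a'),(1,'d'),(4,'e')]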
| dannywillems/99-problems | haskell/p10.hs | gpl-3.0 | 760 | 0 | 10 | 221 | 287 | 160 | 127 | 11 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Test.Gophermap (gophermapTests) where
import Control.Monad (forM_)
import Data.Attoparsec.ByteString (parseOnly)
import qualified Data.ByteString as B
import Data.Either
import Network.Gopher (GopherFileType (..))
import Network.Gopher.Util (stripNewline)
import Network.Gopher.Util.Gophermap
import System.FilePath.Posix.ByteString (RawFilePath)
import Test.Tasty
import Test.Tasty.HUnit
withFileContents :: FilePath -> (IO B.ByteString -> TestTree) -> TestTree
withFileContents path = withResource (B.readFile path) (const (pure ()))
gophermapTests :: TestTree
gophermapTests = testGroup "gophermap tests"
[ withFileContents "test/data/pygopherd.gophermap" checkPygopherd
, withFileContents "test/data/bucktooth.gophermap" checkBucktooth
, generalGophermapParsing
]
checkPygopherd :: IO B.ByteString -> TestTree
checkPygopherd file = testCase "pygopherd example gophermap" $
file >>= assertEqual "" (Right expectedPygopherd) . parseOnly parseGophermap
infoLine :: B.ByteString -> GophermapEntry
infoLine b = GophermapEntry InfoLine b Nothing Nothing Nothing
absDir :: B.ByteString -> RawFilePath -> B.ByteString -> GophermapEntry
absDir n p s =
GophermapEntry Directory n (Just (GophermapAbsolute p)) (Just s) $ Just 70
expectedPygopherd :: Gophermap
expectedPygopherd =
[ infoLine "Welcome to Pygopherd! You can place your documents"
, infoLine "in /var/gopher for future use. You can remove the gophermap"
, infoLine "file there to get rid of this message, or you can edit it to"
, infoLine "use other things. (You'll need to do at least one of these"
, infoLine "two things in order to get your own data to show up!)"
, infoLine ""
, infoLine "Some links to get you started:"
, infoLine ""
, absDir "Pygopherd Home" "/devel/gopher/pygopherd" "gopher.quux.org"
, absDir "Quux.Org Mega Server" "/" "gopher.quux.org"
, absDir "The Gopher Project" "/Software/Gopher" "gopher.quux.org"
, absDir "Traditional UMN Home Gopher" "/" "gopher.tc.umn.edu"
, infoLine ""
, infoLine "Welcome to the world of Gopher and enjoy!"
]
checkBucktooth :: IO B.ByteString -> TestTree
checkBucktooth file = testCase "bucktooth example gophermap" $ do
parseResult <- parseOnly parseGophermap <$> file
assertBool "no parse failure" $ isRight parseResult
-- check if we can distinguish between text/infolines and
-- gophermap lines which have no path
assertEqual "overbite link is parsed correctly" [expectedOverbiteEntry]
. filter (\(GophermapEntry _ n _ _ _) -> n == "/overbite")
$ fromRight [] parseResult
assertEqual "correct length" 95 . length $ fromRight [] parseResult
expectedOverbiteEntry :: GophermapEntry
expectedOverbiteEntry =
GophermapEntry Directory "/overbite" Nothing Nothing Nothing
generalGophermapParsing :: TestTree
generalGophermapParsing = testGroup "gophermap entry test cases" $
let lineEqual :: B.ByteString -> GophermapEntry -> Assertion
lineEqual b e = assertEqual (show b) (Right [e]) $
parseOnly parseGophermap b
infoLines =
[ "1. beginning with valid file type\n"
, "just some usual text.\n"
, "ends with end of input"
, "i'm blue"
, "0"
, "empty ones need to be terminated by a new line\n"
, "\n"
, "otherwise parsing doesn't make sense anymore"
, "DOS-style newlines are also allowed\r\n"
]
menuEntry t name path =
GophermapEntry t name (Just path) Nothing Nothing
menuLines =
[ ("1/somedir\t", GophermapEntry Directory "/somedir" Nothing Nothing Nothing)
, ("0file\tfile.txt\n", menuEntry File "file" (GophermapRelative "file.txt"))
, ("ggif\t/pic.gif", menuEntry GifFile "gif" (GophermapAbsolute "/pic.gif"))
, ("hcode\tURL:https://code.sterni.lv\n", menuEntry Html "code" (GophermapUrl "URL:https://code.sterni.lv"))
, ("1foo\tfoo\tsterni.lv", GophermapEntry Directory "foo" (Just $ GophermapRelative "foo") (Just "sterni.lv") Nothing)
, ("Ibar\t/bar.png\tsterni.lv\t7070\n", GophermapEntry ImageFile "bar" (Just $ GophermapAbsolute "/bar.png") (Just "sterni.lv") (Just 7070))
, ("imanual info line\t", infoLine "manual info line")
]
in [ testCase "info lines" $ forM_ infoLines (\l -> lineEqual l $ infoLine (stripNewline l))
, testCase "menu entries" $ forM_ menuLines (uncurry lineEqual) ]
| sternenseemann/spacecookie | test/Test/Gophermap.hs | gpl-3.0 | 4,425 | 0 | 16 | 814 | 989 | 517 | 472 | 81 | 1 |
{-# LANGUAGE Arrows #-}
module FRP.Chimera.Misc.YampaSeqTest where
import Data.Maybe
import Debug.Trace
import FRP.Yampa
import FRP.Yampa.InternalCore
import FRP.Chimera.Simulation.SeqIteration
import FRP.Chimera.Simulation.ParIteration
type TestInput = Int
type TestOutput = Double
testRunSF :: IO ()
testRunSF = do
let sf = simpleSF 42
let i = 1 :: TestInput
let (sf', o) = runAndFreezeSF sf i 2.0
putStrLn $ show o
let i' = outputToNewInput i o
let (sf'', o') = runAndFreezeSF sf i' 2.0
putStrLn $ show o'
return ()
------------------------------------------------------------------------------------------------------------------------
-- PAR Test
------------------------------------------------------------------------------------------------------------------------
testParEmbed:: IO ()
testParEmbed = do
let oos = embed (runParSF sfs testParCallback) (is, sts)
-- putStrLn $ show (length oos)
let os = (last oos)
mapM (putStrLn . show) os
return ()
where
n = 3
sfs = map simpleSF [0..n-1]
is = [0..n-1]
steps = 3
dt = 1.0
sts = replicate steps (dt, Nothing)
testParCallback :: [TestInput]
-> [TestOutput]
-> [SF TestInput TestOutput]
-> ([SF TestInput TestOutput], [TestInput])
testParCallback oldIns newOuts allSfs = (allSfs, newIns)
where
newIns = testParCallback' oldIns newOuts
testParCallback' :: [TestInput] -> [TestOutput] -> [TestInput]
testParCallback' [] [] = []
testParCallback' (i:is) (o:os) = newIn : testParCallback' is os
where
newIn = outputToNewInput i o
------------------------------------------------------------------------------------------------------------------------
------------------------------------------------------------------------------------------------------------------------
-- SEQ Test
------------------------------------------------------------------------------------------------------------------------
testSeqEmbed:: IO ()
testSeqEmbed = do
let oos = embed (runSeqSF sfs testSeqCallback testIterCallback) (is, sts)
-- putStrLn $ show (length oos)
let os = (last oos)
mapM (putStrLn . show) os
return ()
where
n = 3
sfs = map simpleSF [0..n-1]
is = [0..n-1]
steps = 2
dt = 1.0
sts = replicate steps (dt, Nothing)
testIterCallback :: [TestOutput] -> ([SF TestInput TestOutput], [TestInput])
testIterCallback allOuts = ([], [])
-- NOTE: this callback feeds in all the inputs and the current working triple: SF, Input and Output.
-- It allows changing the inputs of future SFs and may return the SF; if it doesn't return an SF, it is deleted from the system.
testSeqCallback :: [TestInput] -- the existing inputs
-> (SF TestInput TestOutput, TestInput, TestOutput) -- the current working triple
-> ([TestInput],
Maybe (SF TestInput TestOutput, TestInput) ) -- optionally returns a sf-continuation for the current, can return new signal-functions and changed testinputs
testSeqCallback allIns (sf, oldIn, newOut) = (allIs', maySfIn)
where
allIs' = map (\i' -> i' + (truncate $ realToFrac newOut)) allIns -- distribute the current output to the new inputs
newIn = outputToNewInput oldIn newOut
maySfIn = Just (sf, newIn)
------------------------------------------------------------------------------------------------------------------------
outputToNewInput :: TestInput -> TestOutput -> TestInput
outputToNewInput oldIn newOut = truncate $ realToFrac newOut
simpleSF :: Int -> SF TestInput TestOutput
simpleSF off = proc i -> do
t <- time -< 0
let i' = trace ("time = " ++ (show t)) (i + off)
returnA -< fromInteger $ toInteger i' | thalerjonathan/phd | coding/papers/FrABS/Haskell/YampaTests/YampaSeqTest.hs | gpl-3.0 | 3,790 | 1 | 15 | 754 | 957 | 512 | 445 | 71 | 2 |
module Bluesnap.Test.Parser where
import Test.Tasty.HUnit
import Bluesnap.API.Parser
import Bluesnap.API.Response as Response
import Bluesnap.API.Request as Request
testXMLParser p fname = do
content <- readFile fname
let result = parse p content
case result of
Left err -> assertFailure err
Right _ -> return ()
testOrderResponseParser = testXMLParser Response.elementOrder
testOrderRequestParser = testXMLParser Request.elementOrder
testBatchOrderReqParser = testXMLParser Request.elementBatch_order
testBatchOrderRspParser = testXMLParser Response.elementBatch_order
testFulfillmentRspParser = testXMLParser Response.elementFulfillment
testOrderHistoryRspParser = testXMLParser Response.elementOrder_history
testSubscriptionReqParser = testXMLParser Request.elementSubscription
testSubscriptionRspParser = testXMLParser Response.elementSubscription
testSubscriptionChargeReqParser = testXMLParser Request.elementSubscription_charge
testSubscriptionChargeRspParser = testXMLParser Response.elementSubscription_charge
testShopperSubscriptionsRspParser = testXMLParser Response.elementShopper_subscriptions
testShoppingContextReqParser = testXMLParser Request.elementShopping_context
testShoppingContextRspParser = testXMLParser Response.elementShopping_context
testItemPriceRspParser = testXMLParser Response.elementItem_price
testProductReqParser = testXMLParser Request.elementProduct
testProductRspParser = testXMLParser Response.elementProduct
testCatalogSkuReqParser = testXMLParser Request.elementCatalog_sku
testCatalogSkuRspParser = testXMLParser Response.elementCatalog_sku
testCustomParameterReqParser = testXMLParser Request.elementCustom_parameter
testCustomParameterRspParser = testXMLParser Response.elementCustom_parameter
testParamDecryptionReqParser = testXMLParser Request.elementParam_decryption
testParamDecryptionRspParser = testXMLParser Response.elementParam_decryption
testParamEncryptionReqParser = testXMLParser Request.elementParam_encryption
testParamEncryptionRspParser = testXMLParser Response.elementParam_encryption
testCouponInfoRspParer = testXMLParser Response.elementCoupon_info
testPriceRspParser = testXMLParser Response.elementPrice
testWebAuthenticationRspParser = testXMLParser Response.elementWeb_authentication
| andorp/hs-bluesnap | test/Bluesnap/Test/Parser.hs | gpl-3.0 | 2,289 | 0 | 11 | 185 | 372 | 189 | 183 | 38 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Spanner.Projects.Instances.BackupOperations.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists the backup long-running operations in the given instance. A backup
-- operation has a name of the form
-- \`projects\/\/instances\/\/backups\/\/operations\/\`. The long-running
-- operation metadata field type \`metadata.type_url\` describes the type
-- of the metadata. Operations returned include those that have
-- completed\/failed\/canceled within the last 7 days, and pending
-- operations. Operations returned are ordered by
-- \`operation.metadata.value.progress.start_time\` in descending order
-- starting from the most recently started operation.
--
-- /See:/ <https://cloud.google.com/spanner/ Cloud Spanner API Reference> for @spanner.projects.instances.backupOperations.list@.
module Network.Google.Resource.Spanner.Projects.Instances.BackupOperations.List
(
-- * REST Resource
ProjectsInstancesBackupOperationsListResource
-- * Creating a Request
, projectsInstancesBackupOperationsList
, ProjectsInstancesBackupOperationsList
-- * Request Lenses
, piboliParent
, piboliXgafv
, piboliUploadProtocol
, piboliAccessToken
, piboliUploadType
, piboliFilter
, piboliPageToken
, piboliPageSize
, piboliCallback
) where
import Network.Google.Prelude
import Network.Google.Spanner.Types
-- | A resource alias for @spanner.projects.instances.backupOperations.list@ method which the
-- 'ProjectsInstancesBackupOperationsList' request conforms to.
type ProjectsInstancesBackupOperationsListResource =
"v1" :>
Capture "parent" Text :>
"backupOperations" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "filter" Text :>
QueryParam "pageToken" Text :>
QueryParam "pageSize" (Textual Int32) :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListBackupOperationsResponse
-- | Lists the backup long-running operations in the given instance. A backup
-- operation has a name of the form
-- \`projects\/\/instances\/\/backups\/\/operations\/\`. The long-running
-- operation metadata field type \`metadata.type_url\` describes the type
-- of the metadata. Operations returned include those that have
-- completed\/failed\/canceled within the last 7 days, and pending
-- operations. Operations returned are ordered by
-- \`operation.metadata.value.progress.start_time\` in descending order
-- starting from the most recently started operation.
--
-- /See:/ 'projectsInstancesBackupOperationsList' smart constructor.
data ProjectsInstancesBackupOperationsList =
ProjectsInstancesBackupOperationsList'
{ _piboliParent :: !Text
, _piboliXgafv :: !(Maybe Xgafv)
, _piboliUploadProtocol :: !(Maybe Text)
, _piboliAccessToken :: !(Maybe Text)
, _piboliUploadType :: !(Maybe Text)
, _piboliFilter :: !(Maybe Text)
, _piboliPageToken :: !(Maybe Text)
, _piboliPageSize :: !(Maybe (Textual Int32))
, _piboliCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsInstancesBackupOperationsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'piboliParent'
--
-- * 'piboliXgafv'
--
-- * 'piboliUploadProtocol'
--
-- * 'piboliAccessToken'
--
-- * 'piboliUploadType'
--
-- * 'piboliFilter'
--
-- * 'piboliPageToken'
--
-- * 'piboliPageSize'
--
-- * 'piboliCallback'
projectsInstancesBackupOperationsList
:: Text -- ^ 'piboliParent'
-> ProjectsInstancesBackupOperationsList
projectsInstancesBackupOperationsList pPiboliParent_ =
ProjectsInstancesBackupOperationsList'
{ _piboliParent = pPiboliParent_
, _piboliXgafv = Nothing
, _piboliUploadProtocol = Nothing
, _piboliAccessToken = Nothing
, _piboliUploadType = Nothing
, _piboliFilter = Nothing
, _piboliPageToken = Nothing
, _piboliPageSize = Nothing
, _piboliCallback = Nothing
}
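-- A hypothetical usage sketch (the parent value is made up; this assumes
-- the lens setters re-exported via 'Network.Google.Prelude'):
--
-- > projectsInstancesBackupOperationsList "projects/my-proj/instances/my-inst"
-- >   & piboliFilter ?~ "metadata.database:prod"
-- >   & piboliPageSize ?~ 100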
-- | Required. The instance of the backup operations. Values are of the form
-- \`projects\/\/instances\/\`.
piboliParent :: Lens' ProjectsInstancesBackupOperationsList Text
piboliParent
= lens _piboliParent (\ s a -> s{_piboliParent = a})
-- | V1 error format.
piboliXgafv :: Lens' ProjectsInstancesBackupOperationsList (Maybe Xgafv)
piboliXgafv
= lens _piboliXgafv (\ s a -> s{_piboliXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
piboliUploadProtocol :: Lens' ProjectsInstancesBackupOperationsList (Maybe Text)
piboliUploadProtocol
= lens _piboliUploadProtocol
(\ s a -> s{_piboliUploadProtocol = a})
-- | OAuth access token.
piboliAccessToken :: Lens' ProjectsInstancesBackupOperationsList (Maybe Text)
piboliAccessToken
= lens _piboliAccessToken
(\ s a -> s{_piboliAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
piboliUploadType :: Lens' ProjectsInstancesBackupOperationsList (Maybe Text)
piboliUploadType
= lens _piboliUploadType
(\ s a -> s{_piboliUploadType = a})
-- | An expression that filters the list of returned backup operations. A
-- filter expression consists of a field name, a comparison operator, and a
-- value for filtering. The value must be a string, a number, or a boolean.
-- The comparison operator must be one of: \`\<\`, \`>\`, \`\<=\`, \`>=\`,
-- \`!=\`, \`=\`, or \`:\`. Colon \`:\` is the contains operator. Filter
-- rules are not case sensitive. The following fields in the operation are
-- eligible for filtering: * \`name\` - The name of the long-running
-- operation * \`done\` - False if the operation is in progress, else true.
-- * \`metadata.\'type\` - the type of metadata. For example, the type
-- string for CreateBackupMetadata is
-- \`type.googleapis.com\/google.spanner.admin.database.v1.CreateBackupMetadata\`.
-- * \`metadata.\` - any field in metadata.value. * \`error\` - Error
-- associated with the long-running operation. * \`response.\'type\` - the
-- type of response. * \`response.\` - any field in response.value. You can
-- combine multiple expressions by enclosing each expression in
-- parentheses. By default, expressions are combined with AND logic, but
-- you can specify AND, OR, and NOT logic explicitly. Here are a few
-- examples: * \`done:true\` - The operation is complete. *
-- \`metadata.database:prod\` - The database the backup was taken from has
-- a name containing the string \"prod\". *
-- \`(metadata.\'type=type.googleapis.com\/google.spanner.admin.database.v1.CreateBackupMetadata)
-- AND\` \\ \`(metadata.name:howl) AND\` \\ \`(metadata.progress.start_time
-- \< \\\"2018-03-28T14:50:00Z\\\") AND\` \\ \`(error:*)\` - Returns
-- operations where: * The operation\'s metadata type is
-- CreateBackupMetadata. * The backup name contains the string \"howl\". *
-- The operation started before 2018-03-28T14:50:00Z. * The operation
-- resulted in an error.
piboliFilter :: Lens' ProjectsInstancesBackupOperationsList (Maybe Text)
piboliFilter
= lens _piboliFilter (\ s a -> s{_piboliFilter = a})
-- | If non-empty, \`page_token\` should contain a next_page_token from a
-- previous ListBackupOperationsResponse to the same \`parent\` and with
-- the same \`filter\`.
piboliPageToken :: Lens' ProjectsInstancesBackupOperationsList (Maybe Text)
piboliPageToken
= lens _piboliPageToken
(\ s a -> s{_piboliPageToken = a})
-- | Number of operations to be returned in the response. If 0 or less,
-- defaults to the server\'s maximum allowed page size.
piboliPageSize :: Lens' ProjectsInstancesBackupOperationsList (Maybe Int32)
piboliPageSize
= lens _piboliPageSize
(\ s a -> s{_piboliPageSize = a})
. mapping _Coerce
-- | JSONP
piboliCallback :: Lens' ProjectsInstancesBackupOperationsList (Maybe Text)
piboliCallback
= lens _piboliCallback
(\ s a -> s{_piboliCallback = a})
instance GoogleRequest
ProjectsInstancesBackupOperationsList
where
type Rs ProjectsInstancesBackupOperationsList =
ListBackupOperationsResponse
type Scopes ProjectsInstancesBackupOperationsList =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/spanner.admin"]
requestClient
ProjectsInstancesBackupOperationsList'{..}
= go _piboliParent _piboliXgafv _piboliUploadProtocol
_piboliAccessToken
_piboliUploadType
_piboliFilter
_piboliPageToken
_piboliPageSize
_piboliCallback
(Just AltJSON)
spannerService
where go
= buildClient
(Proxy ::
Proxy ProjectsInstancesBackupOperationsListResource)
mempty
| brendanhay/gogol | gogol-spanner/gen/Network/Google/Resource/Spanner/Projects/Instances/BackupOperations/List.hs | mpl-2.0 | 9,801 | 0 | 19 | 1,924 | 1,008 | 601 | 407 | 143 | 1 |
-- | Qualification Round Africa 2010 B
-- https://code.google.com/codejam/contest/351101/dashboard#s=p1
module ReverseWords where
-- constant imports
import Text.ParserCombinators.Parsec
import Text.Parsec
import Control.Exception (bracket)
import System.IO (openFile, hClose, hGetContents, hPutStrLn, IOMode(ReadMode), stderr)
import Debug.Trace (trace)
-- variable imports
import Data.List (sort)
-- variable Data
data TestCase = TestCase
[String] -- ^ list of words
deriving (Show, Eq, Ord)
-- variable implementation
solveCase :: TestCase -> String
solveCase c@(TestCase ws) = unwords $ reverse ws
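-- For example (assuming GHCi):
-- solveCase (TestCase ["this", "is", "not", "a", "palindrome"]) == "palindrome a not is this"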
-- Parser (variable part)
parseSingleCase = do
words <- parseWords
eol
return $ TestCase words
--
-- constant part
--
-- Parsing (constant part)
-- | First number is number of test cases
data TestInput = TestInput
Int -- ^ number of 'TestCase's
[TestCase]
deriving (Show, Ord, Eq)
eol :: GenParser Char st Char
eol = char '\n'
parseInt :: GenParser Char st Int
parseInt = (read :: String -> Int) <$> many1 digit
parseInts :: GenParser Char st [Int]
parseInts = parseInt `sepBy` (char ' ')
parseWords :: GenParser Char st [String]
parseWords = parseWord `sepBy` (char ' ')
parseWord :: GenParser Char st String
parseWord = many1 (oneOf ['a'..'z'])
parseTestCases = do
numCases <- parseInt
eol
cases <- count numCases parseSingleCase
return $ TestInput numCases cases
parseCases :: String -> Either ParseError TestInput
parseCases contents = parse parseTestCases "(stdin)" contents
-- main
runOnContent :: String -> IO ()
runOnContent content = do
let parsed = parseCases content
case parsed of
Right (TestInput _ cases) -> mapM_ putStrLn (output (solveCases cases))
Left err -> hPutStrLn stderr $ show err
where
solveCases xs = map solveCase xs
consCase n s = "Case #" ++ (show n) ++ ": " ++ s
output xs = zipWith consCase [1..] xs
-- | command line implementation
run = do
cs <- getContents
runOnContent cs
main = run
| dirkz/google-code-jam-haskell | practice/src/ReverseWords.hs | mpl-2.0 | 2,052 | 0 | 14 | 428 | 599 | 317 | 282 | 50 | 2 |
module Main where
import GitHub.Hook
import Data.Aeson (decode)
import Data.ByteString.Lazy.Char8 (pack)
import Data.Maybe
import System.Exit
import System.Directory
parsePayload :: FilePath -> IO Bool
parsePayload "." = return True
parsePayload ".." = return True
parsePayload file = do
contents <- readFile $ "./tests/fixtures/" ++ file
case (decode (pack contents) :: Maybe Payload) of
Nothing -> putStrLn ("Failed to parse " ++ file) >> return False
otherwise -> return True
main :: IO ()
main = do
files <- getDirectoryContents "./tests/fixtures/"
results <- mapM parsePayload files
if and results
then exitWith ExitSuccess
else exitWith (ExitFailure 1)
| wereHamster/haskell-github-hook | tests/Tests.hs | unlicense | 722 | 0 | 13 | 157 | 223 | 112 | 111 | 22 | 2 |
module Chapter17 where
import Data.List (elemIndex)
added :: Maybe Integer
added = (+3) <$> (lookup 3 $ zip [1, 2, 3] [4, 5, 6])
y :: Maybe Integer
y = lookup 3 $ zip [1, 2, 3] [4, 5, 6]
z :: Maybe Integer
z = lookup 2 $ zip [1, 2, 3] [4, 5, 6]
tupled :: Maybe (Integer, Integer)
tupled = (,) <$> y <*> z
x' :: Maybe Int
x' = elemIndex 3 [1, 2, 3, 4, 5]
y' :: Maybe Int
y' = elemIndex 4 [1, 2, 3, 4, 5]
max' :: Int -> Int -> Int
max' = max
maxed :: Maybe Int
maxed = max' <$> x' <*> y'
xs = [1, 2, 3]
ys = [4, 5, 6]
x'' :: Maybe Integer
x'' = lookup 3 $ zip xs ys
y'' :: Maybe Integer
y'' = lookup 2 $ zip xs ys
summed :: Maybe Integer
summed = fmap sum $ (,) <$> x'' <*> y''
| prt2121/haskell-practice | ch6-11-17-25/src/Chapter17.hs | apache-2.0 | 689 | 0 | 9 | 176 | 398 | 222 | 176 | 26 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Kernel.GPU.Hogbom ( cleanPrepare, cleanKernel ) where
import Control.Monad
import Data.Word
import Foreign.Marshal.Alloc
import Foreign.Storable ( sizeOf, peek )
import Foreign.C.Types
import Data
import Kernel.GPU.Common as CUDA
import Vector
type Peak = (Int, Int, Double)
cleanPrepare :: CleanPar -> Image -> IO Image
cleanPrepare _ psf = do
-- Transfer PSF to GPU
psfv <- toDeviceVector (imgData psf)
let psf' = psf{ imgData = psfv }
-- TODO: Cache peak?
return psf'
cleanKernel :: CleanPar -> Image -> Image -> IO (Image, Image)
cleanKernel cleanp dirty psf = do
-- Check configuration - the peak find kernel requires a quadratic
-- grid without gaps.
let width = gridWidth (imgPar dirty)
when (width /= gridPitch (imgPar dirty) || width /= gridHeight (imgPar dirty)) $
fail "Cleaning kernel assumes quadratic grid without internal padding!"
-- Furthermore, the reduction requires the field size to be a power of 2.
let powers = map (2^) [1..32 :: Int]
when (width `notElem` powers) $
fail "Cleaning kernel requires a grid size that is a power of two!"
-- Transfer images, if required
dirtyv <- toDeviceVector (imgData dirty)
let dirty' = dirty{ imgData = dirtyv }
-- Allocate model
modelv <- allocCVector (2 * imageSize (imgPar dirty))
let model = Image (imgPar dirty) 0 modelv
-- Find peak in PSF
(psfx, psfy, psfv) <- findPeak psf
-- Minor cleaning loop
let loop res 0 = return (res, model)
loop res fuel = do
-- Find peak in residual
(resx, resy, resv) <- findPeak res
-- Below threshold?
if abs resv < cleanThreshold cleanp then do
return (res, model)
else do
-- Subtract PSF
let mval = cleanGain cleanp * resv / psfv
res' <- subtractImg res psf (resx - psfx, resy - psfy) mval
-- Update model, loop
let ix = resx + resy * width
mpx <- peekVector modelv ix
pokeVector modelv ix (mpx + mval)
loop res' (fuel-1)
loop dirty' (cleanIter cleanp)
foreign import ccall unsafe findPeak_init :: IO CInt
foreign import ccall unsafe "&" findPeak_512_e2 :: Fun
-- | Number of blocks of a certain size required to cover data of a given size
blockCount :: Int -> Int -> Int
blockCount datSize blkSize = (datSize + blkSize - 1) `div` blkSize
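-- For example: blockCount 1000 256 == 4, i.e. four 256-thread blocks cover
-- 1000 elements.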
-- | Finds the position with the highest intensity in the image
findPeak :: Image -> IO Peak
findPeak img = do
-- Get data
let DeviceVector _ imgp = imgData img
-- Set up reduction kernel
nothingIfOk . toEnum . fromIntegral =<< findPeak_init
sync
-- Run
let width = gridWidth $ imgPar img
placeSize = sizeOf (undefined :: CULong) + sizeOf (undefined :: Double)
blocks = blockCount width 1024
CUDA.allocaArray (placeSize * blocks) $ \(workArea :: DevicePtr Word8) -> do
launchKernel findPeak_512_e2
(blocks, 1, 1) (512, 1, 1) (512 * fromIntegral placeSize) Nothing $
mapArgs imgp workArea (width * width)
sync
-- Load final result value(s) from start of work area
let peekd p = alloca (\p' -> peekArray 1 p p' >> peek p')
pos <- peekd $ castDevPtr workArea :: IO Word64
val <- peekd $ castDevPtr (workArea `plusDevPtr` sizeOf (undefined :: CULong))
return (fromIntegral (pos `mod` fromIntegral width),
fromIntegral (pos `div` fromIntegral width),
val)
foreign import ccall unsafe "&" subtract_psf_kernel :: Fun
-- | Subtract two images from each other at an offset and
-- multiplier. The first image parameter is the one getting updated.
subtractImg :: Image -> Image -> (Int, Int) -> Double -> IO Image
subtractImg res psf (x,y) gain = do
-- Calculate data movement distance and amount
resv@(DeviceVector _ resp) <- toDeviceVector (imgData res)
let DeviceVector _ psfp = imgData psf
width = gridWidth (imgPar res)
diff = x + y * width
resp' = resp -- `plusDevPtr` max 0 diff
psfp' = psfp -- `plusDevPtr` max 0 (-diff)
stopx = width - abs x
stopy = width - abs y
blockDim = 16
-- Run the kernel
launchKernel subtract_psf_kernel
(blockCount stopx blockDim, blockCount stopy blockDim, 1) (blockDim, blockDim, 1) 0 Nothing $
mapArgs resp' psfp' gain diff stopx stopy width
sync
-- Done
return res{imgData=resv}
| SKA-ScienceDataProcessor/RC | MS4/dna-programs/Kernel/GPU/Hogbom.hs | apache-2.0 | 4,411 | 0 | 20 | 1,102 | 1,293 | 668 | 625 | 82 | 3 |
module Main where
import Network.Libre.TLS.FFI.Internal
main = putStrLn "hello"
| cartazio/libressl-hs | tests/hunit.hs | bsd-2-clause | 81 | 0 | 5 | 10 | 20 | 13 | 7 | 3 | 1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QMatrix.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:31
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Core.QMatrix (
QqMatrix(..)
,QqMatrix_nf(..)
,det
,inverted
,isIdentity
,isInvertible
,m11
,m12
,m21
,m22
,Qqmap(..), Qqqmap(..)
,QmapRect(..), QqmapRect(..)
,qmapToPolygon, mapToPolygon
,qMatrix_delete
)
where
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui
import Qtc.ClassTypes.Gui
class QqMatrix x1 where
qMatrix :: x1 -> IO (QMatrix ())
instance QqMatrix (()) where
qMatrix ()
= withQMatrixResult $
qtc_QMatrix
foreign import ccall "qtc_QMatrix" qtc_QMatrix :: IO (Ptr (TQMatrix ()))
instance QqMatrix ((QMatrix t1)) where
qMatrix (x1)
= withQMatrixResult $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix1 cobj_x1
foreign import ccall "qtc_QMatrix1" qtc_QMatrix1 :: Ptr (TQMatrix t1) -> IO (Ptr (TQMatrix ()))
instance QqMatrix ((Double, Double, Double, Double, Double, Double)) where
qMatrix (x1, x2, x3, x4, x5, x6)
= withQMatrixResult $
qtc_QMatrix2 (toCDouble x1) (toCDouble x2) (toCDouble x3) (toCDouble x4) (toCDouble x5) (toCDouble x6)
foreign import ccall "qtc_QMatrix2" qtc_QMatrix2 :: CDouble -> CDouble -> CDouble -> CDouble -> CDouble -> CDouble -> IO (Ptr (TQMatrix ()))
class QqMatrix_nf x1 where
qMatrix_nf :: x1 -> IO (QMatrix ())
instance QqMatrix_nf (()) where
qMatrix_nf ()
= withObjectRefResult $
qtc_QMatrix
instance QqMatrix_nf ((QMatrix t1)) where
qMatrix_nf (x1)
= withObjectRefResult $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix1 cobj_x1
instance QqMatrix_nf ((Double, Double, Double, Double, Double, Double)) where
qMatrix_nf (x1, x2, x3, x4, x5, x6)
= withObjectRefResult $
qtc_QMatrix2 (toCDouble x1) (toCDouble x2) (toCDouble x3) (toCDouble x4) (toCDouble x5) (toCDouble x6)
det :: QMatrix a -> (()) -> IO (Double)
det x0 ()
= withDoubleResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_det cobj_x0
foreign import ccall "qtc_QMatrix_det" qtc_QMatrix_det :: Ptr (TQMatrix a) -> IO CDouble
instance Qqdx (QMatrix a) (()) (IO (Double)) where
qdx x0 ()
= withDoubleResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_dx cobj_x0
foreign import ccall "qtc_QMatrix_dx" qtc_QMatrix_dx :: Ptr (TQMatrix a) -> IO CDouble
instance Qqdy (QMatrix a) (()) (IO (Double)) where
qdy x0 ()
= withDoubleResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_dy cobj_x0
foreign import ccall "qtc_QMatrix_dy" qtc_QMatrix_dy :: Ptr (TQMatrix a) -> IO CDouble
inverted :: QMatrix a -> (()) -> IO (QMatrix ())
inverted x0 ()
= withQMatrixResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_inverted cobj_x0
foreign import ccall "qtc_QMatrix_inverted" qtc_QMatrix_inverted :: Ptr (TQMatrix a) -> IO (Ptr (TQMatrix ()))
isIdentity :: QMatrix a -> (()) -> IO (Bool)
isIdentity x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_isIdentity cobj_x0
foreign import ccall "qtc_QMatrix_isIdentity" qtc_QMatrix_isIdentity :: Ptr (TQMatrix a) -> IO CBool
isInvertible :: QMatrix a -> (()) -> IO (Bool)
isInvertible x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_isInvertible cobj_x0
foreign import ccall "qtc_QMatrix_isInvertible" qtc_QMatrix_isInvertible :: Ptr (TQMatrix a) -> IO CBool
m11 :: QMatrix a -> (()) -> IO (Double)
m11 x0 ()
= withDoubleResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_m11 cobj_x0
foreign import ccall "qtc_QMatrix_m11" qtc_QMatrix_m11 :: Ptr (TQMatrix a) -> IO CDouble
m12 :: QMatrix a -> (()) -> IO (Double)
m12 x0 ()
= withDoubleResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_m12 cobj_x0
foreign import ccall "qtc_QMatrix_m12" qtc_QMatrix_m12 :: Ptr (TQMatrix a) -> IO CDouble
m21 :: QMatrix a -> (()) -> IO (Double)
m21 x0 ()
= withDoubleResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_m21 cobj_x0
foreign import ccall "qtc_QMatrix_m21" qtc_QMatrix_m21 :: Ptr (TQMatrix a) -> IO CDouble
m22 :: QMatrix a -> (()) -> IO (Double)
m22 x0 ()
= withDoubleResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_m22 cobj_x0
foreign import ccall "qtc_QMatrix_m22" qtc_QMatrix_m22 :: Ptr (TQMatrix a) -> IO CDouble
class Qqmap x1 xr where
qmap :: QMatrix a -> x1 -> xr
class Qqqmap x1 xr where
qqmap :: QMatrix a -> x1 -> xr
instance Qqmap ((Line)) (IO (Line)) where
qmap x0 (x1)
= withLineResult $ \cline_ret_x1 cline_ret_y1 cline_ret_x2 cline_ret_y2 ->
withObjectPtr x0 $ \cobj_x0 ->
withCLine x1 $ \cline_x1_x1 cline_x1_y1 cline_x1_x2 cline_x1_y2 ->
qtc_QMatrix_map7_qth cobj_x0 cline_x1_x1 cline_x1_y1 cline_x1_x2 cline_x1_y2 cline_ret_x1 cline_ret_y1 cline_ret_x2 cline_ret_y2
foreign import ccall "qtc_QMatrix_map7_qth" qtc_QMatrix_map7_qth :: Ptr (TQMatrix a) -> CInt -> CInt -> CInt -> CInt -> Ptr CInt -> Ptr CInt -> Ptr CInt -> Ptr CInt -> IO ()
instance Qqmap ((LineF)) (IO (LineF)) where
qmap x0 (x1)
= withLineFResult $ \clinef_ret_x1 clinef_ret_y1 clinef_ret_x2 clinef_ret_y2 ->
withObjectPtr x0 $ \cobj_x0 ->
withCLineF x1 $ \clinef_x1_x1 clinef_x1_y1 clinef_x1_x2 clinef_x1_y2 ->
qtc_QMatrix_map6_qth cobj_x0 clinef_x1_x1 clinef_x1_y1 clinef_x1_x2 clinef_x1_y2 clinef_ret_x1 clinef_ret_y1 clinef_ret_x2 clinef_ret_y2
foreign import ccall "qtc_QMatrix_map6_qth" qtc_QMatrix_map6_qth :: Ptr (TQMatrix a) -> CDouble -> CDouble -> CDouble -> CDouble -> Ptr CDouble -> Ptr CDouble -> Ptr CDouble -> Ptr CDouble -> IO ()
instance Qqmap ((Point)) (IO (Point)) where
qmap x0 (x1)
= withPointResult $ \cpoint_ret_x cpoint_ret_y ->
withObjectPtr x0 $ \cobj_x0 ->
withCPoint x1 $ \cpoint_x1_x cpoint_x1_y ->
qtc_QMatrix_map4_qth cobj_x0 cpoint_x1_x cpoint_x1_y cpoint_ret_x cpoint_ret_y
foreign import ccall "qtc_QMatrix_map4_qth" qtc_QMatrix_map4_qth :: Ptr (TQMatrix a) -> CInt -> CInt -> Ptr CInt -> Ptr CInt -> IO ()
instance Qqmap ((PointF)) (IO (PointF)) where
qmap x0 (x1)
= withPointFResult $ \cpointf_ret_x cpointf_ret_y ->
withObjectPtr x0 $ \cobj_x0 ->
withCPointF x1 $ \cpointf_x1_x cpointf_x1_y ->
qtc_QMatrix_map3_qth cobj_x0 cpointf_x1_x cpointf_x1_y cpointf_ret_x cpointf_ret_y
foreign import ccall "qtc_QMatrix_map3_qth" qtc_QMatrix_map3_qth :: Ptr (TQMatrix a) -> CDouble -> CDouble -> Ptr CDouble -> Ptr CDouble -> IO ()
instance Qqqmap ((QLine t1)) (IO (QLine ())) where
qqmap x0 (x1)
= withQLineResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix_map7 cobj_x0 cobj_x1
foreign import ccall "qtc_QMatrix_map7" qtc_QMatrix_map7 :: Ptr (TQMatrix a) -> Ptr (TQLine t1) -> IO (Ptr (TQLine ()))
instance Qqqmap ((QLineF t1)) (IO (QLineF ())) where
qqmap x0 (x1)
= withQLineFResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix_map6 cobj_x0 cobj_x1
foreign import ccall "qtc_QMatrix_map6" qtc_QMatrix_map6 :: Ptr (TQMatrix a) -> Ptr (TQLineF t1) -> IO (Ptr (TQLineF ()))
instance Qqmap ((QPainterPath t1)) (IO (QPainterPath ())) where
qmap x0 (x1)
= withQPainterPathResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix_map5 cobj_x0 cobj_x1
foreign import ccall "qtc_QMatrix_map5" qtc_QMatrix_map5 :: Ptr (TQMatrix a) -> Ptr (TQPainterPath t1) -> IO (Ptr (TQPainterPath ()))
instance Qqqmap ((QPoint t1)) (IO (QPoint ())) where
qqmap x0 (x1)
= withQPointResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix_map4 cobj_x0 cobj_x1
foreign import ccall "qtc_QMatrix_map4" qtc_QMatrix_map4 :: Ptr (TQMatrix a) -> Ptr (TQPoint t1) -> IO (Ptr (TQPoint ()))
instance Qqqmap ((QPointF t1)) (IO (QPointF ())) where
qqmap x0 (x1)
= withQPointFResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix_map3 cobj_x0 cobj_x1
foreign import ccall "qtc_QMatrix_map3" qtc_QMatrix_map3 :: Ptr (TQMatrix a) -> Ptr (TQPointF t1) -> IO (Ptr (TQPointF ()))
instance Qqmap ((QPolygon t1)) (IO (QPolygon ())) where
qmap x0 (x1)
= withQPolygonResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix_map2 cobj_x0 cobj_x1
foreign import ccall "qtc_QMatrix_map2" qtc_QMatrix_map2 :: Ptr (TQMatrix a) -> Ptr (TQPolygon t1) -> IO (Ptr (TQPolygon ()))
instance Qqmap ((QPolygonF t1)) (IO (QPolygonF ())) where
qmap x0 (x1)
= withQPolygonFResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix_map1 cobj_x0 cobj_x1
foreign import ccall "qtc_QMatrix_map1" qtc_QMatrix_map1 :: Ptr (TQMatrix a) -> Ptr (TQPolygonF t1) -> IO (Ptr (TQPolygonF ()))
instance Qqmap ((QRegion t1)) (IO (QRegion ())) where
qmap x0 (x1)
= withQRegionResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix_map cobj_x0 cobj_x1
foreign import ccall "qtc_QMatrix_map" qtc_QMatrix_map :: Ptr (TQMatrix a) -> Ptr (TQRegion t1) -> IO (Ptr (TQRegion ()))
class QmapRect x1 xr where
mapRect :: QMatrix a -> x1 -> xr
class QqmapRect x1 xr where
qmapRect :: QMatrix a -> x1 -> xr
instance QqmapRect ((QRect t1)) (IO (QRect ())) where
qmapRect x0 (x1)
= withQRectResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix_mapRect cobj_x0 cobj_x1
foreign import ccall "qtc_QMatrix_mapRect" qtc_QMatrix_mapRect :: Ptr (TQMatrix a) -> Ptr (TQRect t1) -> IO (Ptr (TQRect ()))
instance QqmapRect ((QRectF t1)) (IO (QRectF ())) where
qmapRect x0 (x1)
= withQRectFResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix_mapRect1 cobj_x0 cobj_x1
foreign import ccall "qtc_QMatrix_mapRect1" qtc_QMatrix_mapRect1 :: Ptr (TQMatrix a) -> Ptr (TQRectF t1) -> IO (Ptr (TQRectF ()))
instance QmapRect ((Rect)) (IO (Rect)) where
mapRect x0 (x1)
= withRectResult $ \crect_ret_x crect_ret_y crect_ret_w crect_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
withCRect x1 $ \crect_x1_x crect_x1_y crect_x1_w crect_x1_h ->
qtc_QMatrix_mapRect_qth cobj_x0 crect_x1_x crect_x1_y crect_x1_w crect_x1_h crect_ret_x crect_ret_y crect_ret_w crect_ret_h
foreign import ccall "qtc_QMatrix_mapRect_qth" qtc_QMatrix_mapRect_qth :: Ptr (TQMatrix a) -> CInt -> CInt -> CInt -> CInt -> Ptr CInt -> Ptr CInt -> Ptr CInt -> Ptr CInt -> IO ()
instance QmapRect ((RectF)) (IO (RectF)) where
mapRect x0 (x1)
= withRectFResult $ \crectf_ret_x crectf_ret_y crectf_ret_w crectf_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
withCRectF x1 $ \crectf_x1_x crectf_x1_y crectf_x1_w crectf_x1_h ->
qtc_QMatrix_mapRect1_qth cobj_x0 crectf_x1_x crectf_x1_y crectf_x1_w crectf_x1_h crectf_ret_x crectf_ret_y crectf_ret_w crectf_ret_h
foreign import ccall "qtc_QMatrix_mapRect1_qth" qtc_QMatrix_mapRect1_qth :: Ptr (TQMatrix a) -> CDouble -> CDouble -> CDouble -> CDouble -> Ptr CDouble -> Ptr CDouble -> Ptr CDouble -> Ptr CDouble -> IO ()
qmapToPolygon :: QMatrix a -> ((QRect t1)) -> IO (QPolygon ())
qmapToPolygon x0 (x1)
= withQPolygonResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix_mapToPolygon cobj_x0 cobj_x1
foreign import ccall "qtc_QMatrix_mapToPolygon" qtc_QMatrix_mapToPolygon :: Ptr (TQMatrix a) -> Ptr (TQRect t1) -> IO (Ptr (TQPolygon ()))
mapToPolygon :: QMatrix a -> ((Rect)) -> IO (QPolygon ())
mapToPolygon x0 (x1)
= withQPolygonResult $
withObjectPtr x0 $ \cobj_x0 ->
withCRect x1 $ \crect_x1_x crect_x1_y crect_x1_w crect_x1_h ->
qtc_QMatrix_mapToPolygon_qth cobj_x0 crect_x1_x crect_x1_y crect_x1_w crect_x1_h
foreign import ccall "qtc_QMatrix_mapToPolygon_qth" qtc_QMatrix_mapToPolygon_qth :: Ptr (TQMatrix a) -> CInt -> CInt -> CInt -> CInt -> IO (Ptr (TQPolygon ()))
instance Qreset (QMatrix a) (()) (IO ()) where
reset x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_reset cobj_x0
foreign import ccall "qtc_QMatrix_reset" qtc_QMatrix_reset :: Ptr (TQMatrix a) -> IO ()
instance QsetMatrix (QMatrix a) ((Double, Double, Double, Double, Double, Double)) where
setMatrix x0 (x1, x2, x3, x4, x5, x6)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_setMatrix cobj_x0 (toCDouble x1) (toCDouble x2) (toCDouble x3) (toCDouble x4) (toCDouble x5) (toCDouble x6)
foreign import ccall "qtc_QMatrix_setMatrix" qtc_QMatrix_setMatrix :: Ptr (TQMatrix a) -> CDouble -> CDouble -> CDouble -> CDouble -> CDouble -> CDouble -> IO ()
qMatrix_delete :: QMatrix a -> IO ()
qMatrix_delete x0
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_delete cobj_x0
foreign import ccall "qtc_QMatrix_delete" qtc_QMatrix_delete :: Ptr (TQMatrix a) -> IO ()
| uduki/hsQt | Qtc/Core/QMatrix.hs | bsd-2-clause | 13,057 | 0 | 17 | 2,346 | 4,631 | 2,362 | 2,269 | -1 | -1 |
{-# LANGUAGE RecordWildCards #-}
module NLP.Nerf2.Delta.Ref
( delta
, delta'
) where
import Data.List (foldl')
import NLP.Nerf2.Types
import NLP.Nerf2.Forest.Set
import NLP.Nerf2.Forest.Phi
import qualified NLP.Nerf2.Env as Env
delta :: Env.InSent e => e -> N -> Pos -> LogReal
delta env x i = sumForests env $ forestSetB env x i
delta' :: Env.InSent e => e -> N -> Pos -> LogReal
delta' env x i = sumForests env $ forestSetB' env x i
sumForests :: Env.InSent e => e -> [Forest] -> LogReal
sumForests env = foldl' (+) 0 . map (phiForest env)
| kawu/nerf-proto | src/NLP/Nerf2/Delta/Ref.hs | bsd-2-clause | 548 | 0 | 8 | 100 | 213 | 116 | 97 | 15 | 1 |
{-# LANGUAGE DataKinds,UnboxedTuples,MagicHash,TemplateHaskell,RankNTypes,TupleSections #-}
module HLearn.Data.SpaceTree.Algorithms.NearestNeighbor
(
-- * data types
Neighbor (..)
, ValidNeighbor (..)
, NeighborList (..)
, nlSingleton
, getknnL
, nlMaxDist
, Param_k
, _k
-- * functions
, findAllNeighbors
, findAllNeighbors'
, findNeighborList
, findEpsilonNeighborListWith
)
where
import qualified Prelude as P
import Data.Strict.Tuple (Pair(..))
import SubHask
import SubHask.Algebra.Container
import SubHask.Compatibility.Containers
import SubHask.Compatibility.Vector
import SubHask.Compatibility.Vector.Lebesgue
import SubHask.Monad
import SubHask.TemplateHaskell.Deriving
import HLearn.Data.SpaceTree
import Data.Params
-------------------------------------------------------------------------------
data Neighbor dp = Neighbor
{ neighbor :: !dp
, neighborDistance :: !(Scalar dp)
-- { neighbor :: {-#UNPACK#-}!(L2 UnboxedVector Float)
-- , neighborDistance :: {-#UNPACK#-}!Float
}
type instance Scalar (Neighbor dp) = Bool
type instance Logic (Neighbor dp) = Bool
type ValidNeighbor dp =
( Metric dp
, Bounded (Scalar dp)
, CanError (Scalar dp)
, Logic dp~Bool
-- , dp ~ (L2 UnboxedVector Float)
)
deriving instance (Read dp, Read (Scalar dp)) => Read (Neighbor dp)
deriving instance (Show dp, Show (Scalar dp)) => Show (Neighbor dp)
instance Eq (Scalar dp) => Eq_ (Neighbor dp) where
{-# INLINE (==) #-}
a == b = neighborDistance a == neighborDistance b
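  -- Note: equality is on the stored distance only; the data points themselves
  -- are never compared.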
-- instance Ord (Scalar dp) => Ord (Neighbor dp) where
-- compare a b = compare (neighborDistance a) (neighborDistance b)
instance (NFData dp, NFData (Scalar dp)) => NFData (Neighbor dp) where
rnf (Neighbor _ _) = ()
-- rnf n = deepseq (neighbor n) $ rnf (neighborDistance n)
------------------------------------------------------------------------------
data NeighborList (k :: Config Nat) dp
= NL_Nil
| NL_Cons {-#UNPACK#-}!(Neighbor dp) !(NeighborList k dp)
-- | NL_Err
mkParams ''NeighborList
-- | update the distances in the NeighborList based on a new data point
resetNL :: ValidNeighbor dp => dp -> NeighborList k dp -> NeighborList k dp
resetNL p NL_Nil = NL_Nil
resetNL p (NL_Cons (Neighbor q _) nl)
= NL_Cons (Neighbor q $ distance p q) $ resetNL p nl
type instance Logic (NeighborList k dp) = Bool
deriving instance (Read dp, Read (Scalar dp)) => Read (NeighborList k dp)
deriving instance (Show dp, Show (Scalar dp)) => Show (NeighborList k dp)
instance (NFData dp, NFData (Scalar dp)) => NFData (NeighborList k dp) where
rnf NL_Nil = ()
-- rnf NL_Err = ()
rnf (NL_Cons n ns) = ()
-- rnf (NL_Cons n ns) = deepseq n $ rnf ns
instance (ValidNeighbor dp, Eq_ dp) => Eq_ (NeighborList k dp) where
(NL_Cons x xs) == (NL_Cons y ys) = x==y && xs==ys
NL_Nil == NL_Nil = True
-- NL_Err == NL_Err = True
_ == _ = False
property_orderedNeighborList :: (Logic dp~Bool, Metric dp) => NeighborList k dp -> Bool
property_orderedNeighborList NL_Nil = True
property_orderedNeighborList (NL_Cons n NL_Nil) = True
property_orderedNeighborList (NL_Cons n (NL_Cons n2 ns)) = if neighborDistance n < neighborDistance n2
then property_orderedNeighborList (NL_Cons n2 ns)
else False
{-# INLINE nlSingleton #-}
nlSingleton ::
( ValidNeighbor dp
) => Neighbor dp -> NeighborList k dp
nlSingleton !n = NL_Cons n NL_Nil
-- {-# INLINE mkNeighborList #-}
-- mkNeighborList ::
-- ( ValidNeighbor dp
-- ) => dp -> Scalar dp -> NeighborList k dp
-- mkNeighborList !dp !dist = NL_Cons (Neighbor dp dist) NL_Nil
{-# INLINE getknnL #-}
getknnL ::
( ValidNeighbor dp
) => NeighborList k dp -> [Neighbor dp]
getknnL NL_Nil = []
getknnL (NL_Cons n ns) = n:getknnL ns
-- getknnL NL_Err = error "getknnL: NL_Err"
{-# INLINE nlMaxDist #-}
nlMaxDist ::
( ValidNeighbor dp
) => NeighborList k dp -> Scalar dp
nlMaxDist !nl = go nl
where
go (NL_Cons n NL_Nil) = neighborDistance n
go (NL_Cons n ns) = go ns
go NL_Nil = maxBound
-- go NL_Err = maxBound
instance CanError (NeighborList k dp) where
{-# INLINE errorVal #-}
errorVal = NL_Nil
-- errorVal = NL_Err
{-# INLINE isError #-}
isError NL_Nil = True
-- isError NL_Err = True
isError _ = False
instance
-- ( KnownNat k
( ViewParam Param_k (NeighborList k dp)
, Metric dp
, Eq dp
, ValidNeighbor dp
) => Monoid (NeighborList k dp)
where
{-# INLINE zero #-}
zero = NL_Nil
instance
( ViewParam Param_k (NeighborList k dp)
, Metric dp
, Eq dp
, ValidNeighbor dp
) => Semigroup (NeighborList k dp)
where
{-# INLINE (+) #-}
-- nl1 + NL_Err = nl1
-- NL_Err + nl2 = nl2
nl1 + NL_Nil = nl1
NL_Nil + nl2 = nl2
nl1 + nl2 = {-# SCC notNiL #-} ret
where
ret = go nl1 nl2 (viewParam _k nl1)
go _ _ 0 = NL_Nil
go (NL_Cons n1 ns1) (NL_Cons n2 ns2) k = if neighborDistance n1 > neighborDistance n2
then NL_Cons n2 $ go (NL_Cons n1 ns1) ns2 (k-1)
else NL_Cons n1 $ go ns1 (NL_Cons n2 ns2) (k-1)
go NL_Nil (NL_Cons n2 ns2) k = NL_Cons n2 $ go NL_Nil ns2 (k-1)
go (NL_Cons n1 ns1) NL_Nil k = NL_Cons n1 $ go ns1 NL_Nil (k-1)
go NL_Nil NL_Nil k = NL_Nil
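    -- For intuition: merging two lists whose neighbor distances are [1,3] and
    -- [2,4] under k = 3 yields distances [1,2,3]; both inputs are assumed to be
    -- sorted by distance, and only the k closest entries survive.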
{-# INLINE nlAddNeighbor #-}
nlAddNeighbor :: forall k dp.
( ViewParam Param_k (NeighborList k dp)
, ValidNeighbor dp
) => NeighborList k dp -> Neighbor dp -> NeighborList k dp
-- nlAddNeighbor NL_Nil n' = NL_Cons n' NL_Nil
-- nlAddNeighbor (NL_Cons n NL_Nil) n' = if neighborDistance n' > neighborDistance n
-- then NL_Cons n' NL_Nil
-- else NL_Cons n NL_Nil
nlAddNeighbor !nl !n = {-# SCC nlAddNeighbor #-} nl + NL_Cons n NL_Nil
-- mappend (NeighborList (x:.xs) ) (NeighborList (y:.ys) ) = {-# SCC mappend_NeighborList #-} case k of
-- 1 -> if x < y then NeighborList (x:.Strict.Nil) else NeighborList (y:.Strict.Nil)
-- otherwise -> NeighborList $ Strict.take k $ interleave (x:.xs) (y:.ys)
-- where
-- k=fromIntegral $ natVal (Proxy :: Proxy k)
--
-- interleave !xs Strict.Nil = xs
-- interleave Strict.Nil !ys = ys
-- interleave (x:.xs) (y:.ys) = case compare x y of
-- LT -> x:.(interleave xs (y:.ys))
-- GT -> y:.(interleave (x:.xs) ys)
-- EQ -> if neighbor x == neighbor y
-- then x:.interleave xs ys
-- else x:.(y:.(interleave xs ys))
-------------------------------------------------------------------------------
-- single tree
{-# INLINABLE findNeighborList #-}
findNeighborList ::
-- ( KnownNat k
( ViewParam Param_k (NeighborList k dp)
, SpaceTree t dp
, Eq dp
, Floating (Scalar dp)
, CanError (Scalar dp)
, ValidNeighbor dp
) => t dp -> dp -> NeighborList k dp
findNeighborList !t !query = findEpsilonNeighborListWith zero zero t query
{-# INLINABLE findEpsilonNeighborListWith #-}
findEpsilonNeighborListWith ::
-- ( KnownNat k
( ViewParam Param_k (NeighborList k dp)
, SpaceTree t dp
, Eq dp
, Floating (Scalar dp)
, CanError (Scalar dp)
, ValidNeighbor dp
) => NeighborList k dp -> Scalar dp -> t dp -> dp -> NeighborList k dp
findEpsilonNeighborListWith !knn !epsilon !t !query =
{-# SCC findEpsilonNeighborListWith #-}
-- prunefoldC (knn_catadp smudge query) knn t
-- prunefoldB_CanError_sort query (knn_catadp smudge query) (knn_cata_dist smudge query) knn t
prunefoldB_CanError (knn_catadp smudge query) (knn_cata smudge query) knn t
-- prunefoldD (knn_catadp smudge query) (knn_cata2 smudge query) knn t
where
smudge = 1/(1+epsilon)
{-# INLINABLE knn_catadp #-}
-- {-# INLINE knn_catadp #-}
knn_catadp :: forall k dp.
-- ( KnownNat k
( ViewParam Param_k (NeighborList k dp)
, Metric dp
, Eq dp
, CanError (Scalar dp)
, ValidNeighbor dp
) => Scalar dp -> dp -> dp -> NeighborList k dp -> NeighborList k dp
knn_catadp !smudge !query !dp !knn = {-# SCC knn_catadp #-}
-- dist==0 is equivalent to query==dp,
    -- but we have to calculate dist anyway, so it's faster
if dist==0 || dist>bound
-- if dist==0 || isError dist
then knn
else nlAddNeighbor knn $ Neighbor dp dist
where
dist = distanceUB dp query bound
bound = smudge*nlMaxDist knn
-- dist = isFartherThanWithDistanceCanError dp query
-- $ nlMaxDist knn * smudge
-- {-# INLINABLE knn_cata #-}
{-# INLINE knn_cata #-}
knn_cata :: forall k t dp.
( ViewParam Param_k (NeighborList k dp)
, SpaceTree t dp
, Floating (Scalar dp)
, Eq dp
, CanError (Scalar dp)
, ValidNeighbor dp
) => Scalar dp -> dp -> t dp -> NeighborList k dp -> NeighborList k dp
knn_cata !smudge !query !t !knn = {-# SCC knn_cata #-}
if dist==0
then if isError knn
then nlSingleton $ Neighbor (stNode t) maxBound
else knn
else if isError dist
then errorVal
else nlAddNeighbor knn $ Neighbor (stNode t) dist
where
dist = stIsMinDistanceDpFartherThanWithDistanceCanError t query
$ nlMaxDist knn * smudge
{-# INLINABLE prunefoldB_CanError_sort #-}
prunefoldB_CanError_sort ::
( SpaceTree t a
, ValidNeighbor a
, b ~ NeighborList k a
, ClassicalLogic a
, CanError (Scalar a)
, Bounded (Scalar a)
) =>
a -> (a -> b -> b) -> (Scalar a -> t a -> b -> b) -> b -> t a -> b
prunefoldB_CanError_sort !query !f1 !f2 !b !t = {-# SCC prunefoldB_CanError_sort #-}
go ( distance (stNode t) query :!: t ) b
where
go !( dist :!: t ) !b = if isError res
then b
else foldr' go b'' children'
where
res = f2 dist t b
b'' = foldr' f1 res (stLeaves t)
children'
= {-# SCC children' #-} qsortHalf (\( d1 :!: _ ) ( d2 :!: _ ) -> compare d2 d1)
$ map (\x -> ( stIsMinDistanceDpFartherThanWithDistanceCanError x query maxdist
:!: x ))
-- $ map (\x -> ( distanceUB (stNode x) query (lambda t+maxdist), x ))
-- $ map (\x -> ( distance (stNode x) query , x ))
$ toList
$ stChildren t
maxdist = nlMaxDist b''
-- | This is a version of quicksort that only descends on its lower half.
-- That is, it only "approximately" sorts a list.
-- It is modified from http://en.literateprograms.org/Quicksort_%28Haskell%29
{-# INLINABLE qsortHalf #-}
qsortHalf :: (a -> a -> Ordering) -> [a] -> [a]
qsortHalf !cmp !x = {-# SCC qsortHalf #-} go x []
where
go [] !y = y
go [x] !y = x:y
go (x:xs) !y = part xs [] [x] []
where
part [] !l !e !g = go l (e ++ g ++ y)
part (z:zs) !l !e !g = case cmp z x of
GT -> part zs l e (z:g)
LT -> part zs (z:l) e g
EQ -> part zs l (z:e) g
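-- For instance: qsortHalf compare [2,5,9,1] evaluates to [1,2,9,5]; the minimum
-- ends up in front, while the elements above each pivot are appended unsorted.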
{-# INLINABLE knn_cata_dist #-}
knn_cata_dist :: forall k t dp.
( ViewParam Param_k (NeighborList k dp)
, SpaceTree t dp
, Floating (Scalar dp)
, Eq dp
, CanError (Scalar dp)
, ValidNeighbor dp
) => Scalar dp -> dp -> Scalar dp -> t dp -> NeighborList k dp -> NeighborList k dp
knn_cata_dist !smudge !query !dist !t !knn = {-# SCC knn_cata #-}
if dist==0
then if isError knn
then nlSingleton $ Neighbor (stNode t) maxBound
else knn
-- else if dist - lambda t > nlMaxDist knn * smudge -- isError dist
else if isError dist
then errorVal
else nlAddNeighbor knn $ Neighbor (stNode t) dist
-- where
-- dist = stIsMinDistanceDpFartherThanWithDistanceCanError t query
-- $ nlMaxDist knn * smudge
---------------------------------------
{-# INLINABLE findAllNeighbors #-}
findAllNeighbors :: forall k dp t.
( ViewParam Param_k (NeighborList k dp)
, SpaceTree t dp
, NFData (Scalar dp)
, NFData dp
, Floating (Scalar dp)
, CanError (Scalar dp)
, ValidNeighbor dp
) => Scalar dp -> t dp -> [dp] -> Seq (dp,NeighborList k dp)
findAllNeighbors epsilon rtree qs = reduce $ map
(\dp -> singleton (dp, findEpsilonNeighborListWith zero epsilon rtree dp))
qs
{-# INLINABLE findAllNeighbors' #-}
findAllNeighbors' :: forall k dp t.
( ViewParam Param_k (NeighborList k dp)
, SpaceTree t dp
, NFData (Scalar dp)
, NFData dp
, Floating (Scalar dp)
, CanError (Scalar dp)
, ValidNeighbor dp
) => Scalar dp -> t dp -> [dp] -> Seq (Labeled' dp (NeighborList k dp))
findAllNeighbors' epsilon rtree qs = fromList $ map
(\dp -> mkLabeled' dp $ findEpsilonNeighborListWith zero epsilon rtree dp)
qs
-- findAllNeighbors' epsilon rtree qs = reduce $ map
-- (\dp -> singleton $ mkLabeled' dp $ findEpsilonNeighborListWith zero epsilon rtree dp)
-- qs
mkLabeled' :: x -> y -> Labeled' x y
mkLabeled' x y = Labeled' x y
| ehlemur/HLearn | src/HLearn/Data/SpaceTree/Algorithms/NearestNeighbor.hs | bsd-3-clause | 13,421 | 0 | 17 | 3,820 | 3,487 | 1,806 | 1,681 | -1 | -1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ExistentialQuantification #-}
module TestModule where
-- Product
data MyProd = MyProd Bool Int
type MyProdAlias = MyProd
-- Strict product
data MyStrict = MyStrict !Bool !Int
-- Polymorphic
data MyPoly a = MyPoly a
type MyPolyAlias = MyPoly Int
-- Regular datatype
data List a = Nil | Cons a (List a)
-- Mutual recursive datatypes
data MutRecA a = MRANill a | MutRecA (MutRecB a)
data MutRecB b = MRBNill b | MutRecB (MutRecA b)
-- Nested datatype
data Perfect a = Perfect (Perfect (a,a))
-- Existential
data Exist = forall a. Exist a
-- GADTs
data Expr a where
I :: Int -> Expr Int
B :: Bool -> Expr Bool
Add :: Expr Int -> Expr Int -> Expr Int
Mul :: Expr Int -> Expr Int -> Expr Int
Eq :: Expr Int -> Expr Int -> Expr Bool
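-- For example, a well-typed term of the GADT above:
-- Add (I 1) (Mul (I 2) (I 3)) :: Expr Int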
-- Newtype
newtype Foo = Foo Int
data Tree = Empty | Leaf Int | Node Tree Tree Tree
data TTree a = Tip Int | Branch (TTree a) a (TTree a)
data Toeplitz a = Toeplitz a [(a,a)]
data Comp f g a = C (f (g a))
data HFix f a = Hln (f (HFix f) a)
| norm2782/DGG | examples/testmodule.hs | bsd-3-clause | 1,083 | 0 | 10 | 265 | 389 | 220 | 169 | 30 | 0 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE IncoherentInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE UndecidableInstances #-}
module Language.Rsc.Pretty.Types (
) where
import qualified Data.HashMap.Strict as HM
import qualified Data.Map.Strict as M
import Language.Fixpoint.Misc (intersperse)
import qualified Language.Fixpoint.Types as F
import Language.Rsc.Pretty.Common
import Language.Rsc.Program
import Language.Rsc.Typecheck.Subst
import Language.Rsc.Typecheck.Types
import Language.Rsc.Types
import Text.PrettyPrint.HughesPJ
angles p = langle <> p <> rangle
langle = char '<'
rangle = char '>'
instance PP Bool where
pp True = text "True"
pp False = text "False"
instance PP () where
pp _ = text ""
instance PP a => PP (Maybe a) where
pp = maybe (text "Nothing") pp
instance PP Char where
pp = char
instance (F.Reftable r, PP r) => PP (RTypeQ q r) where
pp (TPrim c r) = F.ppTy r $ pp c
pp (TVar α r) = F.ppTy r $ pp α
pp (TOr [] _) = pp (TPrim TBot ())
pp (TOr (t:ts) r) = F.ppTy r $ sep $ pp t : map ((text "+" <+>) . pp) ts
pp (TAnd ts) = vcat [text "/\\" <+> pp t | t <- ts]
pp t@TRef{} | mutRelated t = ppMut t
pp (TRef t r) = F.ppTy r (pp t)
pp (TObj m ms r) = parens (pp (toType m)) <+> ppBody ms r
where
ppBody ms r = F.ppTy r (hsep [lbrace, nest 2 (pp ms), rbrace])
pp (TClass t) = text "class" <+> pp t
pp (TMod t) = text "module" <+> pp t
pp t@(TAll _ _) = ppArgs angles comma αs <> pp t' where (αs, t') = bkAll t
pp (TFun xts t _) = ppArgs parens comma xts <+> text "=>" <+> pp t
pp (TExp e) = pprint e
ppMut t@TRef{} | isUQ t = pp "UQ"
| isIM t = pp "IM"
| isAF t = pp "AF"
| isRO t = pp "RO"
| isMU t = pp "MU"
ppMut (TVar v _ ) = pp v
ppMut _ = pp "MUT???"
instance (F.Reftable r, PP r) => PP (TypeMembersQ q r) where
pp (TM ms sms cs cts sidx nidx)
= ppMem ms $+$
ppSMem sms $+$
ppCall cs $+$
ppCtor cts $+$
ppSIdx sidx $+$
ppNIdx nidx
ppMem = sep . map (\(_, f) -> pp f <> semi) . F.toListSEnv
ppSMem = sep . map (\(_, f) -> pp "static" <+> pp f <> semi) . F.toListSEnv
ppCall optT | Just t <- optT = pp t <> semi | otherwise = empty
ppCtor optT | Just t <- optT = pp "new" <+> pp t <> semi | otherwise = empty
ppSIdx (Just t) = pp "[x: string]:" <+> pp t <> semi
ppSIdx _ = empty
ppNIdx (Just t) = pp "[x: number]:" <+> pp t <> semi
ppNIdx _ = empty
instance PPR r => PP (TypeMemberQ q r) where
pp (FI s o m t) = parens (pp (toType m)) <+> pp s <> pp o <> colon <+> pp t
pp (MI s o mts) = vcat (map (\(m,t) -> char '@' <> pp (toType m) <+> pp s <> pp o <> pp t) mts)
instance PP Optionality where
pp Opt = text "?"
pp Req = empty
instance (F.Reftable r, PP r) => PP (TGenQ q r) where
pp (Gen x []) = pp x
pp (Gen x (m:ts)) = pp x <> angles (intersperse comma (ppMut m : map pp ts))
instance (F.Reftable r, PP r) => PP (BTGenQ q r) where
pp (BGen x []) = pp x
pp (BGen x ts) = pp x <> ppArgs angles comma ts
instance PP TVar where
pp = pprint . F.symbol
instance (F.Reftable r, PP r) => PP (BTVarQ q r) where
pp (BTV v _ (Just t)) = pprint v <+> text "<:" <+> pp t
pp (BTV v _ _ ) = pprint v
instance PP TPrim where
pp TString = text "string"
pp (TStrLit s) = text "\"" <> text s <> text "\""
pp TNumber = text "number"
pp TReal = text "real"
pp TBoolean = text "boolean"
pp TBV32 = text "bitvector32"
pp TVoid = text "void"
pp TUndefined = text "undefined"
pp TNull = text "null"
pp TBot = text "_|_"
pp TTop = text "Top"
pp TAny = text "any"
pp TFPBool = text "_bool_"
instance (PP r, F.Reftable r) => PP (BindQ q r) where
pp (B x o t) = pp x <> pp o <> colon <+> pp t
instance (PP s, PP t) => PP (M.Map s t) where
pp m = vcat $ pp <$> M.toList m
instance PP Locality where
pp Exported = text "Exported"
pp Local = text "Local"
instance PP Assignability where
pp Ambient = text "Ambient"
pp RdOnly = text "ReadOnly"
pp WriteLocal = text "WriteLocal"
pp ForeignLocal = text "ForeignLocal"
pp WriteGlobal = text "WriteGlobal"
pp ReturnVar = text "ReturnVar"
instance (PP r, F.Reftable r) => PP (TypeDeclQ q r) where
pp (TD s p m) = pp s $+$ text "pred" <+> pp p $+$
lbrace $+$ nest 4 (pp m) $+$ rbrace
instance (PP r, F.Reftable r) => PP (TypeSigQ q r) where
pp (TS k n h) = pp k <+> pp n <+> ppHeritage h
instance PP TypeDeclKind where
pp InterfaceTDK = text "interface"
pp ClassTDK = text "class"
ppHeritage (es,is) = ppExtends es <+> ppImplements is
ppExtends [] = text ""
ppExtends (n:_) = text "extends" <+> pp n
ppImplements [] = text ""
ppImplements ts = text "implements" <+> intersperse comma (pp <$> ts)
instance PP EnumDef where
pp (EnumDef n m) = pp n <+> braces (pp m)
instance PP IContext where
pp (IC x) = text "Context: " <+> pp x
instance (PP a, PP s, PP t) => PP (Alias a s t) where
pp (Alias n αs xs body) = text "alias" <+> pp n <> withnull angles comma αs <>
withnull brackets comma xs <+> text "=" <+> pp body
where
withnull _ _ [] = empty
withnull s p xs = s $ intersperse p (map pp xs)
instance (PP r, F.Reftable r) => PP (Rsc a r) where
pp pgm@(Rsc {code = (Src s) }) = extras
$+$ text "\n// CODE"
$+$ pp s
where
extras = -- text "\nCONSTANTS" $+$ nest 4 (pp (consts pgm)) $+$
text "\nPREDICATE ALIASES" $+$ nest 4 (pp (pAlias pgm))
$+$ text "\nTYPE ALIASES" $+$ nest 4 (pp (tAlias pgm))
-- $+$ text "\nQUALIFIERS" $+$ nest 4 (vcat (F.toFix <$> take 3 (pQuals pgm)))
-- $+$ text "\nQUALIFIERS" $+$ nest 4 (vcat (F.toFix <$> pQuals pgm))
-- $+$ text "..."
$+$ text "\nINVARIANTS" $+$ nest 4 (vcat (pp <$> invts pgm))
instance (F.Reftable r, PP r) => PP (RSubst r) where
pp (Su m) | HM.null m = text "empty"
| HM.size m < 10 = intersperse comma $ (ppBind <$>) $ HM.toList m
| otherwise = vcat $ (ppBind <$>) $ HM.toList m
ppBind (x, t) = pp x <+> text ":=" <+> pp t
-- | PP Fixpoint
instance PP F.Sort where
pp = pprint
instance PP (F.SortedReft) where
pp (F.RR s b) = braces (pp s <+> text "|" <+> pp b)
instance PP F.Reft where
pp = pprint
instance PP (F.SubC c) where
pp s = parens (pp (F.slhs s)) <+> text " => " <+> pp (F.srhs s)
| UCSD-PL/RefScript | src/Language/Rsc/Pretty/Types.hs | bsd-3-clause | 7,069 | 0 | 17 | 2,228 | 3,082 | 1,504 | 1,578 | 158 | 1 |
{-|
Module : Control.Lens.Extra
Description : Extra utility functions for working with lenses.
Copyright : (c) Henry J. Wylde, 2016
License : BSD3
Maintainer : [email protected]
Extra utility functions for working with lenses.
-}
{-# LANGUAGE Rank2Types #-}
module Control.Lens.Extra (
module Control.Lens,
-- * Folds
is, isn't, hasuse, hasn'tuse,
-- * Traversals
filteredBy,
) where
import Control.Lens hiding (isn't, filteredBy)
import Control.Monad.State
import Data.Monoid
-- | The counter-part to 'isn't', but more general as it takes a 'Getting' instead.
--
-- @'is' = 'has'@
is :: Getting Any s a -> s -> Bool
is = has
-- | A re-write of 'Control.Lens.Prism.isn't' to be more general by taking a 'Getting' instead.
--
-- @'isn't' = 'hasn't'@
isn't :: Getting All s a -> s -> Bool
isn't = hasn't
-- | Check to see if this 'Fold' or 'Traversal' matches 1 or more entries in the current state.
--
-- @'hasuse' = 'gets' . 'has'@
hasuse :: MonadState s m => Getting Any s a -> m Bool
hasuse = gets . has
-- | Check to see if this 'Fold' or 'Traversal' has no matches in the current state.
--
-- @'hasn'tuse' = 'gets' . 'hasn't'@
hasn'tuse :: MonadState s m => Getting All s a -> m Bool
hasn'tuse = gets . hasn't
-- | A companion to 'filtered' that, rather than using a predicate, filters on the given lens for
-- matches.
filteredBy :: Eq b => Lens' a b -> b -> Traversal' a a
filteredBy lens value = filtered ((value ==) . view lens)
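-- A usage sketch (hypothetical data; the standard 'traverse', '_1' and '^..'
-- combinators are assumed in scope via Control.Lens):
--
-- >>> [(1,"a"),(2,"b"),(1,"c")] ^.. traverse . filteredBy _1 1
-- [(1,"a"),(1,"c")]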
| hjwylde/werewolf | src/Control/Lens/Extra.hs | bsd-3-clause | 1,491 | 0 | 8 | 314 | 255 | 145 | 110 | 18 | 1 |
module ETCS.SDM.Intern where
import Control.Lens hiding ((*~), _2)
import ETCS.SDM.Helper
import ETCS.SDM.Types
import Numeric.Units.Dimensional.TF.Prelude
import Prelude ()
validConvertion :: (HasConvertingBreakingModelInput i f, RealFloat f) => i -> Bool
validConvertion i =
let v = i ^. bmiMaxVelocity
bp = i ^. bmiBreakingPercentage
l = i ^. bmiTrainLength
lmax = case (i ^. bmiBreakPosition) of
PassangerTrainP -> 900 *~ meter
FreightTrainG -> 1500 *~ meter
FreightTrainP -> 1500 *~ meter
in (0 *~ kmh <= v) && (v <= 200 *~ kmh) &&
(30 *~ one < bp ) && (bp <= 250 *~ one) &&
(0 *~ meter < l) && (l <= lmax)
breakingModelConverter'
:: (HasConvertingBreakingModelInput i f, RealFloat f, Floating f) => i ->
ConvertedBreakingModel f
breakingModelConverter' i =
let (ea, sa) = basicDeceleration $ i ^. bmiBreakingPercentage
bpos = i ^. bmiBreakPosition
l = i ^. bmiTrainLength
in ConvertedBreakingModel {
_cbmBreakingModelInput = i ^. convertingBreakingModelInput,
_cbmBreakingModel =
BreakingModelBase (ea, sa, t_brake_emergency_cm bpos l
, t_brake_service_cm bpos l)
}
basicDeceleration :: (RealFloat f, Floating f) =>
BreakingPercentage f -> (A_Break f, A_Break f)
basicDeceleration lambda =
let l0_emergency = lambda
l0_service = min (135.0 *~ one) lambda
ad_n l0 v =
let vlim = v_lim l0
(a3_n, a2_n, a1_n, a0_n) = a_n_ms n
n = nfromV vlim v
in if (v <= vlim) then (ad_0 l0)
else a3_n * (l0 ** (3 *~ one)) + a2_n * (l0 ** (2 *~ one)) +
a1_n * l0 + a0_n
in ( ad_n l0_emergency, ad_n l0_service )
t_brake_service_cm :: (RealFloat f, Floating f) =>
BreakPosition -> Length f -> Velocity f -> Time f
t_brake_service_cm = t_brake_cm t_brake_basic_sb
t_brake_emergency_cm :: (RealFloat f, Floating f) =>
BreakPosition -> Length f -> Velocity f -> Time f
t_brake_emergency_cm = t_brake_cm t_brake_basic_eb
ad_0 :: (RealFloat f, Floating f) => BreakingPercentage f -> Acceleration f
ad_0 l0 = a * l0 + b
where a = 0.0075 *~ ms2
b = 0.076 *~ ms2
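-- A worked example for 'ad_0' above (hypothetical numbers): for a braking
-- percentage of 100, ad_0 = 0.0075 * 100 + 0.076 = 0.826 m/s^2.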
v_lim :: (RealFloat f, Floating f) => BreakingPercentage f -> Velocity f
v_lim l0 = ((l0 ** y)) * x
where x = 16.85 *~ kmh
y = 0.428 *~ one
a_n_ms :: (RealFloat f, Floating f) => Int -> ( Acceleration f, Acceleration f
, Acceleration f, Acceleration f)
a_n_ms 1 = ((-6.30e-7) *~ ms2, 6.10e-5 *~ ms2, 4.72e-3 *~ ms2, 0.0663 *~ ms2)
a_n_ms 2 = ( 2.73e-7 *~ ms2, (-4.54e-6) *~ ms2, 5.13e-3 *~ ms2, 0.1300 *~ ms2)
a_n_ms 3 = ( 5.58e-8 *~ ms2, (-6.76e-6) *~ ms2, 5.81e-3 *~ ms2, 0.0479 *~ ms2)
a_n_ms 4 = ( 3.00e-8 *~ ms2, (-3.85e-6) *~ ms2, 5.52e-3 *~ ms2, 0.0480 *~ ms2)
a_n_ms 5 = ( 3.23e-9 *~ ms2, 1.66e-6 *~ ms2, 5.06e-3 *~ ms2, 0.0559 *~ ms2)
a_n_ms _ = error "a_n_ms called for undefined n"
nfromV :: (RealFloat f, Floating f) => Velocity f -> Velocity f -> Int
nfromV vlim v
| ((vlim < v) && (v <= 100 *~ kmh) &&
(vlim <= 100 *~ kmh)) = 1
| ((vlim < v) && (v <= 120 *~ kmh) &&
(100 *~ kmh < vlim) && (vlim <= 120 *~ kmh)) = 2
| ((100 *~ kmh < v) && (v <= 120 *~ kmh) &&
(vlim <= 100 *~ kmh)) = 2
| ((vlim < v) && (v <= 150 *~ kmh) &&
(120 *~ kmh < vlim) && (vlim <= 150 *~ kmh)) = 3
| ((120 *~ kmh < v) && (v <= 150 *~ kmh) &&
(vlim <= 120 *~ kmh)) = 3
| ((vlim < v) && (v <= 180 *~ kmh) &&
(150 *~ kmh < vlim) && (vlim <= 180 *~ kmh)) = 4
| ((150 *~ kmh < v) && (v <= 180 *~ kmh) &&
(vlim <= 150 *~ kmh)) = 4
| ((vlim < v) && (vlim > 180 *~ kmh)) = 5
| ((180 *~ kmh < v) && (vlim <= 180 *~ kmh)) = 5
| otherwise = error "nfromV: undefined range"
t_brake_basic :: (RealFloat f, Floating f) =>
(Length f -> Length f) ->
Length f -> Time f -> Time f -> Time f -> Time f
t_brake_basic fl l' a b c = a + (b * l) + (c * (l ** _2))
where l = (fl l') / (100 *~ meter)
t_brake_basic_eb :: (RealFloat f, Floating f) => BreakPosition -> Length f -> Time f
t_brake_basic_eb PassangerTrainP l =
t_brake_basic (max (400 *~ meter))
l (2.30 *~ second) (0 *~ second) (0.17 *~ second)
t_brake_basic_eb FreightTrainP l
| l <= 900 *~ meter =
t_brake_basic (max (400 *~ meter))
l (2.30 *~ second) (0 *~ second) (0.17 *~ second)
| otherwise =
t_brake_basic (max (400 *~ meter))
l ((-0.4) *~ second) (1.6 *~ second) (0.03 *~ second)
t_brake_basic_eb FreightTrainG l
| l <= 900 *~ meter =
t_brake_basic id
l (12.0 *~ second) (0 *~ second) (0.05 *~ second)
| otherwise =
t_brake_basic id
l ((-0.4) *~ second) (1.6 *~ second) (0.03 *~ second)
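-- A worked example (hypothetical): for a 400 m passenger train (P position),
-- the emergency build-up time is 2.30 + 0 * 4 + 0.17 * 4^2 = 5.02 s, since the
-- length enters in multiples of 100 m after the 400 m clamp.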
t_brake_basic_sb :: (RealFloat f, Floating f) => BreakPosition -> Length f -> Time f
t_brake_basic_sb PassangerTrainP l =
t_brake_basic id
l (3.00 *~ second) (1.5 *~ second) (0.1 *~ second)
t_brake_basic_sb FreightTrainP l
| l <= 900 *~ meter =
t_brake_basic id
l (3 *~ second) (2.77 *~ second) (0 *~ second)
| otherwise =
t_brake_basic id
l (10.5 *~ second) (0.32 *~ second) (0.18 *~ second)
t_brake_basic_sb FreightTrainG l
| l <= 900 *~ meter =
t_brake_basic (max (400 *~ meter))
l (3 *~ second) (2.77 *~ second) (0 *~ second)
| otherwise =
t_brake_basic (max (400 *~ meter))
l (10.5 *~ second) (0.32 *~ second) (0.18 *~ second)
t_brake_cm :: (RealFloat f, Floating f) =>
(BreakPosition -> Length f -> Time f) ->
BreakPosition -> Length f -> Velocity f -> Time f
t_brake_cm f bp l v = t_brake_cm' f v bp l
t_brake_cm' :: (RealFloat f, Floating f) =>
(BreakPosition -> Length f -> Time f) ->
Velocity f -> BreakPosition -> Length f -> Time f
t_brake_cm' f v_target
| (v_target == 0 *~ kmh) = f
| (v_target > 0 *~ kmh) = (\bp l -> f bp l * (kto bp))
| otherwise = error $ "t_brake_cm undefined for v_target < 0 m/s"
kto :: (RealFloat f, Floating f) => BreakPosition -> Dimensionless f
kto FreightTrainG = 0.16 *~ one
kto FreightTrainP = 0.20 *~ one
kto PassangerTrainP = 0.20 *~one
| open-etcs/openetcs-sdm | src/ETCS/SDM/Intern.hs | bsd-3-clause | 6,268 | 0 | 21 | 1,785 | 2,656 | 1,384 | 1,272 | 140 | 3 |
{- Data/Singletons/Util.hs
(c) Richard Eisenberg 2012
[email protected]
This file contains helper functions internal to the singletons package.
Users of the package should not need to consult this file.
-}
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
module Data.Singletons.Util where
import Language.Haskell.TH
import Language.Haskell.TH.Syntax
import Data.Char
import Data.Maybe
import Data.Data
import Data.List
import Control.Monad
import Control.Monad.Writer
import qualified Data.Map as Map
import Data.Generics
-- reify a declaration, warning the user about splices if the reify fails
reifyWithWarning :: Name -> Q Info
reifyWithWarning name = recover
(fail $ "Looking up " ++ (show name) ++ " in the list of available " ++
"declarations failed.\nThis lookup fails if the declaration " ++
"referenced was made in same Template\nHaskell splice as the use " ++
"of the declaration. If this is the case, put\nthe reference to " ++
"the declaration in a new splice.")
(reify name)
-- check if a string is the name of a tuple
isTupleString :: String -> Bool
isTupleString s =
(length s > 1) &&
(head s == '(') &&
(last s == ')') &&
((length (takeWhile (== ',') (tail s))) == ((length s) - 2))
-- check if a name is a tuple name
isTupleName :: Name -> Bool
isTupleName = isTupleString . nameBase
-- extract the degree of a tuple
tupleDegree :: String -> Int
tupleDegree "()" = 0
tupleDegree s = length s - 1
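-- For example: isTupleString "(,,)" == True and tupleDegree "(,,)" == 3,
-- i.e. the name of the 3-tuple constructor.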
-- reduce the four cases of a 'Con' to just two: monomorphic and polymorphic
-- and convert 'StrictType' to 'Type'
ctorCases :: (Name -> [Type] -> a) -> ([TyVarBndr] -> Cxt -> Con -> a) -> Con -> a
ctorCases genFun forallFun ctor = case ctor of
NormalC name stypes -> genFun name (map snd stypes)
RecC name vstypes -> genFun name (map (\(_,_,ty) -> ty) vstypes)
InfixC (_,ty1) name (_,ty2) -> genFun name [ty1, ty2]
ForallC [] [] ctor' -> ctorCases genFun forallFun ctor'
ForallC tvbs cx ctor' -> forallFun tvbs cx ctor'
-- reduce the four cases of a 'Con' to just 1: a polymorphic Con is treated
-- as a monomorphic one
ctor1Case :: (Name -> [Type] -> a) -> Con -> a
ctor1Case mono = ctorCases mono (\_ _ ctor -> ctor1Case mono ctor)
-- extract the name and number of arguments to a constructor
extractNameArgs :: Con -> (Name, Int)
extractNameArgs = ctor1Case (\name tys -> (name, length tys))
-- reinterpret a name. This is useful when a Name has an associated
-- namespace that we wish to forget
reinterpret :: Name -> Name
reinterpret = mkName . nameBase
-- is an identifier uppercase?
isUpcase :: Name -> Bool
isUpcase n = let first = head (nameBase n) in isUpper first || first == ':'
-- make an identifier uppercase
upcase :: Name -> Name
upcase n =
let str = nameBase n
first = head str in
if isLetter first
then mkName ((toUpper first) : tail str)
else mkName (':' : str)
-- make an identifier lowercase
locase :: Name -> Name
locase n =
let str = nameBase n
first = head str in
if isLetter first
then mkName ((toLower first) : tail str)
else mkName (tail str) -- remove the ":"
-- put an uppercase prefix on a name. Takes two prefixes: one for identifiers
-- and one for symbols
prefixUCName :: String -> String -> Name -> Name
prefixUCName pre tyPre n = case (nameBase n) of
(':' : rest) -> mkName (tyPre ++ rest)
alpha -> mkName (pre ++ alpha)
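-- For example (with arbitrary prefixes): prefixUCName "Sing" ":%" (mkName "Just")
-- yields the name SingJust, while prefixUCName "Sing" ":%" (mkName ":+") yields :%+.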
-- put a lowercase prefix on a name. Takes two prefixes: one for identifiers
-- and one for symbols
prefixLCName :: String -> String -> Name -> Name
prefixLCName pre tyPre n =
let str = nameBase n
first = head str in
if isLetter first
then mkName (pre ++ str)
else mkName (tyPre ++ str)
-- extract the name from a TyVarBndr
extractTvbName :: TyVarBndr -> Name
extractTvbName (PlainTV n) = n
extractTvbName (KindedTV n _) = n
-- extract the kind from a TyVarBndr. Returns '*' by default.
extractTvbKind :: TyVarBndr -> Kind
extractTvbKind (PlainTV _) = StarT -- FIXME: This seems wrong.
extractTvbKind (KindedTV _ k) = k
-- apply a type to a list of types
foldType :: Type -> [Type] -> Type
foldType = foldl AppT
-- apply an expression to a list of expressions
foldExp :: Exp -> [Exp] -> Exp
foldExp = foldl AppE
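-- For example: foldType (ConT ''Maybe) [ConT ''Int] is
-- AppT (ConT ''Maybe) (ConT ''Int), i.e. the TH representation of (Maybe Int);
-- foldExp behaves analogously for expressions.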
-- is a kind a variable?
isVarK :: Kind -> Bool
isVarK (VarT _) = True
isVarK _ = False
-- a monad transformer for writing a monoid alongside returning a Q
type QWithAux m = WriterT m Q
-- run a computation with an auxiliary monoid, discarding the monoid result
evalWithoutAux :: QWithAux m a -> Q a
evalWithoutAux = liftM fst . runWriterT
-- run a computation with an auxiliary monoid, returning only the monoid result
evalForAux :: QWithAux m a -> Q m
evalForAux = execWriterT
-- run a computation with an auxiliary monoid, return both the result
-- of the computation and the monoid result
evalForPair :: QWithAux m a -> Q (a, m)
evalForPair = runWriterT
-- in a computation with an auxiliary map, add a binding to the map
addBinding :: Ord k => k -> v -> QWithAux (Map.Map k v) ()
addBinding k v = tell (Map.singleton k v)
-- in a computation with an auxiliary list, add an element to the list
addElement :: elt -> QWithAux [elt] ()
addElement elt = tell [elt]
-- does a TH structure contain a name?
containsName :: Data a => Name -> a -> Bool
containsName n = everything (||) (mkQ False (== n))
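-- For example: containsName (mkName "x") (VarE (mkName "x")) == True, since the
-- generic query matches any embedded Name equal to the one given.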
| jonsterling/singletons | Data/Singletons/Util.hs | bsd-3-clause | 5,340 | 0 | 14 | 1,100 | 1,400 | 744 | 656 | 97 | 5 |
module Data.Minecraft.Snapshot15w40b
( module Data.Minecraft.Snapshot15w40b.Protocol
, module Data.Minecraft.Snapshot15w40b.Version
) where
import Data.Minecraft.Snapshot15w40b.Protocol
import Data.Minecraft.Snapshot15w40b.Version
| oldmanmike/hs-minecraft-protocol | src/Data/Minecraft/Snapshot15w40b.hs | bsd-3-clause | 238 | 0 | 5 | 21 | 39 | 28 | 11 | 5 | 0 |
{-# LANGUAGE CPP, GeneralizedNewtypeDeriving, FlexibleInstances, MultiParamTypeClasses, UndecidableInstances, TypeFamilies, DeriveDataTypeable #-}
module Web.Scotty.Internal.Types where
import Blaze.ByteString.Builder (Builder)
import Control.Applicative
import qualified Control.Exception as E
import Control.Monad.Base (MonadBase, liftBase, liftBaseDefault)
import Control.Monad.Error.Class
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Trans.Control (MonadBaseControl, StM, liftBaseWith, restoreM, ComposeSt, defaultLiftBaseWith, defaultRestoreM, MonadTransControl, StT, liftWith, restoreT)
import Control.Monad.Trans.Except
import qualified Data.ByteString as BS
import Data.ByteString.Lazy.Char8 (ByteString)
import Data.Default.Class (Default, def)
#if !(MIN_VERSION_base(4,8,0))
import Data.Monoid (mempty)
#endif
import Data.String (IsString(..))
import Data.Text.Lazy (Text, pack)
import Data.Typeable (Typeable)
import Network.HTTP.Types
import Network.Wai hiding (Middleware, Application)
import qualified Network.Wai as Wai
import Network.Wai.Handler.Warp (Settings, defaultSettings, setFdCacheDuration)
import Network.Wai.Parse (FileInfo)
--------------------- Options -----------------------
data Options = Options { verbose :: Int -- ^ 0 = silent, 1(def) = startup banner
, settings :: Settings -- ^ Warp 'Settings'
-- Note: to work around an issue in warp,
-- the default FD cache duration is set to 0
-- so changes to static files are always picked
-- up. This likely has performance implications,
-- so you may want to modify this for production
-- servers using `setFdCacheDuration`.
}
instance Default Options where
def = Options 1 (setFdCacheDuration 0 defaultSettings)
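-- A sketch of overriding the defaults (using the warp setters imported above):
--
-- > def { verbose = 0, settings = setFdCacheDuration 10 defaultSettings }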
----- Transformer Aware Applications/Middleware -----
type Middleware m = Application m -> Application m
type Application m = Request -> m Response
--------------- Scotty Applications -----------------
data ScottyState e m =
ScottyState { middlewares :: [Wai.Middleware]
, routes :: [Middleware m]
, handler :: ErrorHandler e m
}
instance Default (ScottyState e m) where
def = ScottyState [] [] Nothing
addMiddleware :: Wai.Middleware -> ScottyState e m -> ScottyState e m
addMiddleware m s@(ScottyState {middlewares = ms}) = s { middlewares = m:ms }
addRoute :: Middleware m -> ScottyState e m -> ScottyState e m
addRoute r s@(ScottyState {routes = rs}) = s { routes = r:rs }
addHandler :: ErrorHandler e m -> ScottyState e m -> ScottyState e m
addHandler h s = s { handler = h }
newtype ScottyT e m a = ScottyT { runS :: State (ScottyState e m) a }
deriving ( Functor, Applicative, Monad )
------------------ Scotty Errors --------------------
data ActionError e = Redirect Text
| Next
| ActionError e
-- | In order to use a custom exception type (aside from 'Text'), you must
-- define an instance of 'ScottyError' for that type.
class ScottyError e where
stringError :: String -> e
showError :: e -> Text
instance ScottyError Text where
stringError = pack
showError = id
instance ScottyError e => ScottyError (ActionError e) where
stringError = ActionError . stringError
showError (Redirect url) = url
showError Next = pack "Next"
showError (ActionError e) = showError e
type ErrorHandler e m = Maybe (e -> ActionT e m ())
------------------ Scotty Actions -------------------
type Param = (Text, Text)
type File = (Text, FileInfo ByteString)
data ActionEnv = Env { getReq :: Request
, getParams :: [Param]
, getBody :: IO ByteString
, getBodyChunk :: IO BS.ByteString
, getFiles :: [File]
}
data RequestBodyState = BodyUntouched
| BodyCached ByteString [BS.ByteString] -- whole body, chunks left to stream
| BodyCorrupted
data BodyPartiallyStreamed = BodyPartiallyStreamed deriving (Show, Typeable)
instance E.Exception BodyPartiallyStreamed
data Content = ContentBuilder Builder
| ContentFile FilePath
| ContentStream StreamingBody
data ScottyResponse = SR { srStatus :: Status
, srHeaders :: ResponseHeaders
, srContent :: Content
}
instance Default ScottyResponse where
def = SR status200 [] (ContentBuilder mempty)
newtype ActionT e m a = ActionT { runAM :: ExceptT (ActionError e) (ReaderT ActionEnv (StateT ScottyResponse m)) a }
deriving ( Functor, Applicative, MonadIO )
instance (Monad m, ScottyError e) => Monad (ActionT e m) where
return = ActionT . return
ActionT m >>= k = ActionT (m >>= runAM . k)
fail = ActionT . throwError . stringError
instance ( Monad m, ScottyError e
#if !(MIN_VERSION_base(4,8,0))
, Functor m
#endif
) => Alternative (ActionT e m) where
empty = mzero
(<|>) = mplus
instance (Monad m, ScottyError e) => MonadPlus (ActionT e m) where
mzero = ActionT . ExceptT . return $ Left Next
ActionT m `mplus` ActionT n = ActionT . ExceptT $ do
a <- runExceptT m
case a of
Left _ -> runExceptT n
Right r -> return $ Right r
instance MonadTrans (ActionT e) where
lift = ActionT . lift . lift . lift
instance (ScottyError e, Monad m) => MonadError (ActionError e) (ActionT e m) where
throwError = ActionT . throwError
catchError (ActionT m) f = ActionT (catchError m (runAM . f))
instance (MonadBase b m, ScottyError e) => MonadBase b (ActionT e m) where
liftBase = liftBaseDefault
instance MonadTransControl (ActionT e) where
type StT (ActionT e) a = StT (StateT ScottyResponse) (StT (ReaderT ActionEnv) (StT (ExceptT (ActionError e)) a))
liftWith = \f ->
ActionT $ liftWith $ \run ->
liftWith $ \run' ->
liftWith $ \run'' ->
f $ run'' . run' . run . runAM
restoreT = ActionT . restoreT . restoreT . restoreT
instance (ScottyError e, MonadBaseControl b m) => MonadBaseControl b (ActionT e m) where
type StM (ActionT e m) a = ComposeSt (ActionT e) m a
liftBaseWith = defaultLiftBaseWith
restoreM = defaultRestoreM
------------------ Scotty Routes --------------------
data RoutePattern = Capture Text
| Literal Text
| Function (Request -> Maybe [Param])
instance IsString RoutePattern where
fromString = Capture . pack
| beni55/scotty | Web/Scotty/Internal/Types.hs | bsd-3-clause | 7,087 | 0 | 16 | 2,082 | 1,755 | 980 | 775 | 120 | 1 |
-----------------------------------------------------------------------------
--
-- Machine-dependent assembly language
--
-- (c) The University of Glasgow 1993-2004
--
-----------------------------------------------------------------------------
{-# OPTIONS -fno-warn-tabs #-}
-- The above warning suppression flag is a temporary kludge.
-- While working on this module you are encouraged to remove it and
-- detab the module (please do the detabbing in a separate patch). See
-- http://hackage.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces
-- for details
#include "HsVersions.h"
#include "nativeGen/NCG.h"
module SPARC.Instr (
RI(..),
riZero,
fpRelEA,
moveSp,
isUnconditionalJump,
Instr(..),
maxSpillSlots
)
where
import SPARC.Stack
import SPARC.Imm
import SPARC.AddrMode
import SPARC.Cond
import SPARC.Regs
import SPARC.RegPlate
import SPARC.Base
import TargetReg
import Instruction
import RegClass
import Reg
import Size
import CLabel
import BlockId
import OldCmm
import FastString
import FastBool
import Outputable
import Platform
-- | Register or immediate
data RI
= RIReg Reg
| RIImm Imm
-- | Check if a RI represents a zero value.
-- - a literal zero
-- - register %g0, which is always zero.
--
riZero :: RI -> Bool
riZero (RIImm (ImmInt 0)) = True
riZero (RIImm (ImmInteger 0)) = True
riZero (RIReg (RegReal (RealRegSingle 0))) = True
riZero _ = False
-- | Calculate the effective address which would be used by the
-- corresponding fpRel sequence.
fpRelEA :: Int -> Reg -> Instr
fpRelEA n dst
= ADD False False fp (RIImm (ImmInt (n * wordLength))) dst
-- | Code to shift the stack pointer by n words.
moveSp :: Int -> Instr
moveSp n
= ADD False False sp (RIImm (ImmInt (n * wordLength))) sp
-- | An instruction that will cause the one after it never to be executed
isUnconditionalJump :: Instr -> Bool
isUnconditionalJump ii
= case ii of
CALL{} -> True
JMP{} -> True
JMP_TBL{} -> True
BI ALWAYS _ _ -> True
BF ALWAYS _ _ -> True
_ -> False
-- | instance for sparc instruction set
instance Instruction Instr where
regUsageOfInstr = sparc_regUsageOfInstr
patchRegsOfInstr = sparc_patchRegsOfInstr
isJumpishInstr = sparc_isJumpishInstr
jumpDestsOfInstr = sparc_jumpDestsOfInstr
patchJumpInstr = sparc_patchJumpInstr
mkSpillInstr = sparc_mkSpillInstr
mkLoadInstr = sparc_mkLoadInstr
takeDeltaInstr = sparc_takeDeltaInstr
isMetaInstr = sparc_isMetaInstr
mkRegRegMoveInstr = sparc_mkRegRegMoveInstr
takeRegRegMoveInstr = sparc_takeRegRegMoveInstr
mkJumpInstr = sparc_mkJumpInstr
-- | SPARC instruction set.
-- Not complete. This is only the ones we need.
--
data Instr
-- meta ops --------------------------------------------------
-- comment pseudo-op
= COMMENT FastString
-- some static data spat out during code generation.
-- Will be extracted before pretty-printing.
| LDATA Section CmmStatics
-- Start a new basic block. Useful during codegen, removed later.
-- Preceding instruction should be a jump, as per the invariants
-- for a BasicBlock (see Cmm).
| NEWBLOCK BlockId
-- specify current stack offset for benefit of subsequent passes.
| DELTA Int
-- real instrs -----------------------------------------------
-- Loads and stores.
| LD Size AddrMode Reg -- size, src, dst
| ST Size Reg AddrMode -- size, src, dst
-- Int Arithmetic.
-- x: add/sub with carry bit.
-- In SPARC V9 addx and friends were renamed addc.
--
-- cc: modify condition codes
--
| ADD Bool Bool Reg RI Reg -- x?, cc?, src1, src2, dst
| SUB Bool Bool Reg RI Reg -- x?, cc?, src1, src2, dst
| UMUL Bool Reg RI Reg -- cc?, src1, src2, dst
| SMUL Bool Reg RI Reg -- cc?, src1, src2, dst
-- The SPARC divide instructions perform 64bit by 32bit division
-- The Y register is xored into the first operand.
-- On _some implementations_ the Y register is overwritten by
-- the remainder, so we have to make sure it is 0 each time.
-- dst <- ((Y `shiftL` 32) `or` src1) `div` src2
| UDIV Bool Reg RI Reg -- cc?, src1, src2, dst
| SDIV Bool Reg RI Reg -- cc?, src1, src2, dst
| RDY Reg -- move contents of Y register to reg
| WRY Reg Reg -- Y <- src1 `xor` src2
-- Logic operations.
| AND Bool Reg RI Reg -- cc?, src1, src2, dst
| ANDN Bool Reg RI Reg -- cc?, src1, src2, dst
| OR Bool Reg RI Reg -- cc?, src1, src2, dst
| ORN Bool Reg RI Reg -- cc?, src1, src2, dst
| XOR Bool Reg RI Reg -- cc?, src1, src2, dst
| XNOR Bool Reg RI Reg -- cc?, src1, src2, dst
| SLL Reg RI Reg -- src1, src2, dst
| SRL Reg RI Reg -- src1, src2, dst
| SRA Reg RI Reg -- src1, src2, dst
-- Load immediates.
| SETHI Imm Reg -- src, dst
-- Do nothing.
-- Implemented by the assembler as SETHI 0, %g0, but worth an alias
| NOP
-- Float Arithmetic.
-- Note that we cheat by treating F{ABS,MOV,NEG} of doubles as single
-- instructions right up until we spit them out.
--
| FABS Size Reg Reg -- src dst
| FADD Size Reg Reg Reg -- src1, src2, dst
| FCMP Bool Size Reg Reg -- exception?, src1, src2, dst
| FDIV Size Reg Reg Reg -- src1, src2, dst
| FMOV Size Reg Reg -- src, dst
| FMUL Size Reg Reg Reg -- src1, src2, dst
| FNEG Size Reg Reg -- src, dst
| FSQRT Size Reg Reg -- src, dst
| FSUB Size Reg Reg Reg -- src1, src2, dst
| FxTOy Size Size Reg Reg -- src, dst
-- Jumping around.
| BI Cond Bool BlockId -- cond, annul?, target
| BF Cond Bool BlockId -- cond, annul?, target
| JMP AddrMode -- target
-- With a tabled jump we know all the possible destinations.
-- We also need this info so we can work out what regs are live across the jump.
--
| JMP_TBL AddrMode [Maybe BlockId] CLabel
| CALL (Either Imm Reg) Int Bool -- target, args, terminal
-- | regUsage returns the sets of src and destination registers used
-- by a particular instruction. Machine registers that are
-- pre-allocated to stgRegs are filtered out, because they are
-- uninteresting from a register allocation standpoint. (We wouldn't
-- want them to end up on the free list!) As far as we are concerned,
-- the fixed registers simply don't exist (for allocation purposes,
-- anyway).
-- regUsage doesn't need to do any trickery for jumps and such. Just
-- state precisely the regs read and written by that insn. The
-- consequences of control flow transfers, as far as register
-- allocation goes, are taken care of by the register allocator.
--
sparc_regUsageOfInstr :: Platform -> Instr -> RegUsage
sparc_regUsageOfInstr _ instr
= case instr of
LD _ addr reg -> usage (regAddr addr, [reg])
ST _ reg addr -> usage (reg : regAddr addr, [])
ADD _ _ r1 ar r2 -> usage (r1 : regRI ar, [r2])
SUB _ _ r1 ar r2 -> usage (r1 : regRI ar, [r2])
UMUL _ r1 ar r2 -> usage (r1 : regRI ar, [r2])
SMUL _ r1 ar r2 -> usage (r1 : regRI ar, [r2])
UDIV _ r1 ar r2 -> usage (r1 : regRI ar, [r2])
SDIV _ r1 ar r2 -> usage (r1 : regRI ar, [r2])
RDY rd -> usage ([], [rd])
WRY r1 r2 -> usage ([r1, r2], [])
AND _ r1 ar r2 -> usage (r1 : regRI ar, [r2])
ANDN _ r1 ar r2 -> usage (r1 : regRI ar, [r2])
OR _ r1 ar r2 -> usage (r1 : regRI ar, [r2])
ORN _ r1 ar r2 -> usage (r1 : regRI ar, [r2])
XOR _ r1 ar r2 -> usage (r1 : regRI ar, [r2])
XNOR _ r1 ar r2 -> usage (r1 : regRI ar, [r2])
SLL r1 ar r2 -> usage (r1 : regRI ar, [r2])
SRL r1 ar r2 -> usage (r1 : regRI ar, [r2])
SRA r1 ar r2 -> usage (r1 : regRI ar, [r2])
SETHI _ reg -> usage ([], [reg])
FABS _ r1 r2 -> usage ([r1], [r2])
FADD _ r1 r2 r3 -> usage ([r1, r2], [r3])
FCMP _ _ r1 r2 -> usage ([r1, r2], [])
FDIV _ r1 r2 r3 -> usage ([r1, r2], [r3])
FMOV _ r1 r2 -> usage ([r1], [r2])
FMUL _ r1 r2 r3 -> usage ([r1, r2], [r3])
FNEG _ r1 r2 -> usage ([r1], [r2])
FSQRT _ r1 r2 -> usage ([r1], [r2])
FSUB _ r1 r2 r3 -> usage ([r1, r2], [r3])
FxTOy _ _ r1 r2 -> usage ([r1], [r2])
JMP addr -> usage (regAddr addr, [])
JMP_TBL addr _ _ -> usage (regAddr addr, [])
CALL (Left _ ) _ True -> noUsage
CALL (Left _ ) n False -> usage (argRegs n, callClobberedRegs)
CALL (Right reg) _ True -> usage ([reg], [])
CALL (Right reg) n False -> usage (reg : (argRegs n), callClobberedRegs)
_ -> noUsage
where
usage (src, dst)
= RU (filter interesting src) (filter interesting dst)
regAddr (AddrRegReg r1 r2) = [r1, r2]
regAddr (AddrRegImm r1 _) = [r1]
regRI (RIReg r) = [r]
regRI _ = []
-- | Interesting regs are virtuals, or ones that are allocatable
-- by the register allocator.
interesting :: Reg -> Bool
interesting reg
= case reg of
RegVirtual _ -> True
RegReal (RealRegSingle r1) -> isFastTrue (freeReg r1)
RegReal (RealRegPair r1 _) -> isFastTrue (freeReg r1)
-- | Apply a given mapping to all the register references in this instruction.
sparc_patchRegsOfInstr :: Instr -> (Reg -> Reg) -> Instr
sparc_patchRegsOfInstr instr env = case instr of
LD sz addr reg -> LD sz (fixAddr addr) (env reg)
ST sz reg addr -> ST sz (env reg) (fixAddr addr)
ADD x cc r1 ar r2 -> ADD x cc (env r1) (fixRI ar) (env r2)
SUB x cc r1 ar r2 -> SUB x cc (env r1) (fixRI ar) (env r2)
UMUL cc r1 ar r2 -> UMUL cc (env r1) (fixRI ar) (env r2)
SMUL cc r1 ar r2 -> SMUL cc (env r1) (fixRI ar) (env r2)
UDIV cc r1 ar r2 -> UDIV cc (env r1) (fixRI ar) (env r2)
SDIV cc r1 ar r2 -> SDIV cc (env r1) (fixRI ar) (env r2)
RDY rd -> RDY (env rd)
WRY r1 r2 -> WRY (env r1) (env r2)
AND b r1 ar r2 -> AND b (env r1) (fixRI ar) (env r2)
ANDN b r1 ar r2 -> ANDN b (env r1) (fixRI ar) (env r2)
OR b r1 ar r2 -> OR b (env r1) (fixRI ar) (env r2)
ORN b r1 ar r2 -> ORN b (env r1) (fixRI ar) (env r2)
XOR b r1 ar r2 -> XOR b (env r1) (fixRI ar) (env r2)
XNOR b r1 ar r2 -> XNOR b (env r1) (fixRI ar) (env r2)
SLL r1 ar r2 -> SLL (env r1) (fixRI ar) (env r2)
SRL r1 ar r2 -> SRL (env r1) (fixRI ar) (env r2)
SRA r1 ar r2 -> SRA (env r1) (fixRI ar) (env r2)
SETHI imm reg -> SETHI imm (env reg)
FABS s r1 r2 -> FABS s (env r1) (env r2)
FADD s r1 r2 r3 -> FADD s (env r1) (env r2) (env r3)
FCMP e s r1 r2 -> FCMP e s (env r1) (env r2)
FDIV s r1 r2 r3 -> FDIV s (env r1) (env r2) (env r3)
FMOV s r1 r2 -> FMOV s (env r1) (env r2)
FMUL s r1 r2 r3 -> FMUL s (env r1) (env r2) (env r3)
FNEG s r1 r2 -> FNEG s (env r1) (env r2)
FSQRT s r1 r2 -> FSQRT s (env r1) (env r2)
FSUB s r1 r2 r3 -> FSUB s (env r1) (env r2) (env r3)
FxTOy s1 s2 r1 r2 -> FxTOy s1 s2 (env r1) (env r2)
JMP addr -> JMP (fixAddr addr)
JMP_TBL addr ids l -> JMP_TBL (fixAddr addr) ids l
CALL (Left i) n t -> CALL (Left i) n t
CALL (Right r) n t -> CALL (Right (env r)) n t
_ -> instr
where
fixAddr (AddrRegReg r1 r2) = AddrRegReg (env r1) (env r2)
fixAddr (AddrRegImm r1 i) = AddrRegImm (env r1) i
fixRI (RIReg r) = RIReg (env r)
fixRI other = other
--------------------------------------------------------------------------------
sparc_isJumpishInstr :: Instr -> Bool
sparc_isJumpishInstr instr
= case instr of
BI{} -> True
BF{} -> True
JMP{} -> True
JMP_TBL{} -> True
CALL{} -> True
_ -> False
sparc_jumpDestsOfInstr :: Instr -> [BlockId]
sparc_jumpDestsOfInstr insn
= case insn of
BI _ _ id -> [id]
BF _ _ id -> [id]
JMP_TBL _ ids _ -> [id | Just id <- ids]
_ -> []
sparc_patchJumpInstr :: Instr -> (BlockId -> BlockId) -> Instr
sparc_patchJumpInstr insn patchF
= case insn of
BI cc annul id -> BI cc annul (patchF id)
BF cc annul id -> BF cc annul (patchF id)
JMP_TBL n ids l -> JMP_TBL n (map (fmap patchF) ids) l
_ -> insn
--------------------------------------------------------------------------------
-- | Make a spill instruction.
-- On SPARC we spill below the frame pointer, leaving 2 words per spill slot.
sparc_mkSpillInstr
:: Platform
-> Reg -- ^ register to spill
-> Int -- ^ current stack delta
-> Int -- ^ spill slot to use
-> Instr
sparc_mkSpillInstr platform reg _ slot
= let off = spillSlotToOffset slot
off_w = 1 + (off `div` 4)
sz = case targetClassOfReg platform reg of
RcInteger -> II32
RcFloat -> FF32
RcDouble -> FF64
_ -> panic "sparc_mkSpillInstr"
in ST sz reg (fpRel (negate off_w))
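-- Worked example of the offset arithmetic above (added for exposition; assumes
-- the 4-byte word size implied by the `div` 4): a spill slot whose byte offset
-- is 8 gives off_w = 1 + (8 `div` 4) = 3, so the store targets
-- fpRel (negate 3), i.e. three words below the frame pointer.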
-- | Make a spill reload instruction.
sparc_mkLoadInstr
:: Platform
-> Reg -- ^ register to load into
-> Int -- ^ current stack delta
-> Int -- ^ spill slot to use
-> Instr
sparc_mkLoadInstr platform reg _ slot
= let off = spillSlotToOffset slot
off_w = 1 + (off `div` 4)
sz = case targetClassOfReg platform reg of
RcInteger -> II32
RcFloat -> FF32
RcDouble -> FF64
_ -> panic "sparc_mkLoadInstr"
in LD sz (fpRel (- off_w)) reg
--------------------------------------------------------------------------------
-- | See if this instruction is telling us the current C stack delta
sparc_takeDeltaInstr
:: Instr
-> Maybe Int
sparc_takeDeltaInstr instr
= case instr of
DELTA i -> Just i
_ -> Nothing
sparc_isMetaInstr
:: Instr
-> Bool
sparc_isMetaInstr instr
= case instr of
COMMENT{} -> True
LDATA{} -> True
NEWBLOCK{} -> True
DELTA{} -> True
_ -> False
-- | Make a reg-reg move instruction.
-- On SPARC v8 there are no instructions to move directly between
-- floating point and integer regs. If we need to do that then we
-- have to go via memory.
--
sparc_mkRegRegMoveInstr
:: Platform
-> Reg
-> Reg
-> Instr
sparc_mkRegRegMoveInstr platform src dst
| srcClass <- targetClassOfReg platform src
, dstClass <- targetClassOfReg platform dst
, srcClass == dstClass
= case srcClass of
RcInteger -> ADD False False src (RIReg g0) dst
RcDouble -> FMOV FF64 src dst
RcFloat -> FMOV FF32 src dst
_ -> panic "sparc_mkRegRegMoveInstr"
| otherwise
= panic "SPARC.Instr.mkRegRegMoveInstr: classes of src and dest not the same"
-- | Check whether an instruction represents a reg-reg move.
-- The register allocator attempts to eliminate reg->reg moves whenever it can,
-- by assigning the src and dest temporaries to the same real register.
--
sparc_takeRegRegMoveInstr :: Instr -> Maybe (Reg,Reg)
sparc_takeRegRegMoveInstr instr
= case instr of
ADD False False src (RIReg src2) dst
| g0 == src2 -> Just (src, dst)
FMOV FF64 src dst -> Just (src, dst)
FMOV FF32 src dst -> Just (src, dst)
_ -> Nothing
-- | Make an unconditional branch instruction.
sparc_mkJumpInstr
:: BlockId
-> [Instr]
sparc_mkJumpInstr id
= [BI ALWAYS False id
, NOP] -- fill the branch delay slot.
| nomeata/ghc | compiler/nativeGen/SPARC/Instr.hs | bsd-3-clause | 15,521 | 447 | 15 | 4,353 | 4,968 | 2,612 | 2,356 | 298 | 39 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
module Jenkins where
import Control.Lens hiding (deep) -- lens
import Control.Monad.IO.Class
import Control.Monad.Trans.Control
import Data.Aeson.Lens -- lens-aeson
import Data.Map (empty)
import Data.Text hiding (empty)
import Jenkins.Rest (Jenkins, (-?-), (-=-)) -- libjenkins
import qualified Jenkins.Rest as JR
import Text.Hamlet.XML
import Text.XML
import Prelude hiding (unwords)
{-
data BuildPlan = BuildPlan {
vcsInfo :: Maybe VCSRoot,
buildWith :: Text,
extraParams :: [Text]
} deriving(Eq, Show)
-- | Information about a VCS root.
class VCSInfo v where
vcsTool :: v -> Text
vcsRootUrl :: v -> Text
data VCSRoot = VCSGit VCSInfoGit | VCSSvn VCSInfoSvn deriving (Eq, Show)
data VCSInfoGit = VCSInfoGit {
gitRepoUrl :: Text
} deriving (Eq, Show)
instance VCSInfo VCSInfoGit where
vcsTool _ = "git"
vcsRootUrl = gitRepoUrl
data VCSInfoSvn = VCSInfoSvn {
svnRepoUrl :: Text
} deriving (Eq, Show)
instance VCSInfo VCSInfoSvn where
vcsTool _ = "svn"
vcsRootUrl = svnRepoUrl
-- | The jenkins master (hardcoded for now)
master :: JR.Master
master = JR.defaultMaster &
JR.url .~ "http://192.168.59.103:8080"
-- | Test configuration, describes a job with a single build step that
-- echoes test
testConfig :: Element
testConfig = Element "project" empty [xml|
<actions>
<description>
<keepDependencies>
false
<properties>
<scm class="hudson.scm.NullSCM">
<canRoam>true
<disabled> false
<blockBuildWhenDownstreamBuilding>false
<blockBuildWhenUpstreamBuilding>false
<triggers>
<concurrentBuild>
false
<builders>
<hudson.tasks.Shell>
<command>
echo "test"
<publishers>
<buildWrappers>
|]
testPlan = BuildPlan {
vcsInfo = Just $
VCSGit $
VCSInfoGit "https://github.com/wayofthepie/github-maven-example",
buildWith = "mvn",
extraParams = ["clean", "install"]
}
gitPluginVersion = "[email protected]"
-- |
-- To be accurate about plugin values, the api should be queried at
-- http://(jenkins)/pluginManager/api/json?depth=1.
--
-- TODO: Create a converter for plans to xml.
plan2Cfg :: BuildPlan -> Element
plan2Cfg b = Element "project" empty [xml|
<actions>
<description>
<keepDependencies>
false
<properties>
<scm class=hudson.plugins.git.GitSCM>
<configVersion>
2
<userRemoteConfigs>
<hudson.plugins.git.UserRemoteConfig>
<url>#{repoUrl b}
<branches>
<hudson.plugins.git.BranchSpec>
<name>
*/master
<doGenerateSubmoduleConfigurations>
false
<submoduleCfg class="list">
<extensions>
<canRoam>
true
<disabled>
false
<blockBuildWhenDownstreamBuilding>
false
<blockBuildWhenUpstreamBuilding>
false
<triggers>
<concurrentBuild>
false
<builders>
<hudson.tasks.Shell>
<command>#{buildWith b} #{unwords $ extraParams b}
<publishers>
<buildWrappers>
|]
-- | Create a job named __n__ with the config __c__
createJob :: ( MonadBaseControl IO m, MonadIO m ) => Text -> Element -> m ( JR.Result () )
createJob n c =
JR.run (JR.defaultMaster &
JR.url .~ ("http://192.168.59.103:8080/")) $
JR.postXml ("createItem" -?- "name" -=- n) $ e2bs c
where
e2bs xml = renderLBS def $ Document (Prologue [] Nothing []) xml []
-}
| wayofthepie/riverd | src/lib/Jenkins.hs | bsd-3-clause | 3,688 | 0 | 5 | 987 | 111 | 77 | 34 | 15 | 0 |
-----------------------------------------------------------------------------
-- |
-- Module : A
-- Copyright : (c) 2008 - 2010 Universiteit Utrecht
-- License : BSD3
--
-- Maintainer : [email protected]
--
-- An example type representation.
-----------------------------------------------------------------------------
-- {-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverlappingInstances #-}
{-# LANGUAGE UndecidableInstances #-}
module A where
import Prelude hiding (Read, Show)
import qualified Prelude as P (Read, Show)
import Data.Generics (Data, Typeable)
import Control.Applicative (Alternative, pure)
import Generics.EMGM.Base
import Generics.EMGM.Functions.Collect
import Generics.EMGM.Functions.Everywhere
import Generics.EMGM.Functions.Meta
data A a
= A1 a
| A2 Integer (A a)
| A3 { unA3 :: Double }
| A4 { unA4a :: A a, unA4b :: Int }
| A5 { unA5a :: Char, unA5b :: A a, unA5c :: a }
| A a :^: Float
| (:<>:) { unA7a :: A a, unA7b :: A a }
deriving (P.Show, P.Read, Eq, Ord, Data, Typeable)
infixr 6 :^:
infixl 5 :<>:
type AS a
{- A1 -} = a
{- A2 -} :+: Integer :*: A a
{- A3 -} :+: Double
{- A4 -} :+: A a :*: Int
{- A5 -} :+: Char :*: A a :*: a
{- :^: -} :+: A a :*: Float
{- :<>: -} :+: A a :*: A a
fromA :: A a -> AS a
fromA t = case t of
A1 x1 -> L x1
A2 x1 x2 -> R (L (x1 :*: x2))
A3 x1 -> R (R (L x1))
A4 x1 x2 -> R (R (R (L (x1 :*: x2))))
A5 x1 x2 x3 -> R (R (R (R (L (x1 :*: x2 :*: x3)))))
x1 :^: x2 -> R (R (R (R (R (L (x1 :*: x2))))))
x1 :<>: x2 -> R (R (R (R (R (R (x1 :*: x2))))))
toA :: AS a -> A a
toA s = case s of
L x1 -> A1 x1
R (L (x1 :*: x2)) -> A2 x1 x2
R (R (L x1)) -> A3 x1
R (R (R (L (x1 :*: x2)))) -> A4 x1 x2
R (R (R (R (L (x1 :*: x2 :*: x3))))) -> A5 x1 x2 x3
R (R (R (R (R (L (x1 :*: x2)))))) -> x1 :^: x2
R (R (R (R (R (R (x1 :*: x2)))))) -> x1 :<>: x2
epA :: EP (A a) (AS a)
epA = EP fromA toA
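-- Illustrative round trip (added for exposition; not part of the original test
-- module): converting a value to its structural representation and back is the
-- identity.
_roundTripA :: Bool
_roundTripA = toA (fromA v) == v
  where
    v = A2 42 (A1 'x')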
instance HasEP (A a) (AS a) where
epOf _ = epA
conA1 = ConDescr "A1" 1 False Prefix
conA2 = ConDescr "A2" 2 False Prefix
conA3 = ConDescr "A3" 1 True Prefix
conA4 = ConDescr "A4" 2 True Prefix
conA5 = ConDescr "A5" 3 True Prefix
conA6 = ConDescr ":^:" 2 False (Infix RightAssoc 6)
conA7 = ConDescr ":<>:" 2 True (Infix LeftAssoc 5)
lblUnA3 = LblDescr "unA3"
lblUnA4a = LblDescr "unA4a"
lblUnA4b = LblDescr "unA4b"
lblUnA5a = LblDescr "unA5a"
lblUnA5b = LblDescr "unA5b"
lblUnA5c = LblDescr "unA5c"
lblUnA7a = LblDescr "unA7a"
lblUnA7b = LblDescr "unA7b"
instance (Generic g, Rep g a, Rep g Char, Rep g Double, Rep g Float, Rep g Integer, Rep g Int) => Rep g (A a) where
rep = rtype epA
$ rcon conA1 rep
`rsum` rcon conA2 (rep `rprod` rep)
`rsum` rcon conA3 (rlbl lblUnA3 rep)
`rsum` rcon conA4 (rlbl lblUnA4a rep `rprod` rlbl lblUnA4b rep)
`rsum` rcon conA5 (rlbl lblUnA5a rep `rprod` rlbl lblUnA5b rep `rprod` rlbl lblUnA5c rep)
`rsum` rcon conA6 (rep `rprod` rep)
`rsum` rcon conA7 (rlbl lblUnA7a rep `rprod` rlbl lblUnA7b rep)
instance (Generic g) => FRep g A where
frep ra = rtype epA
$ rcon conA1 ra
`rsum` rcon conA2 (rinteger `rprod` frep ra)
`rsum` rcon conA3 (rlbl lblUnA3 rdouble)
`rsum` rcon conA4 (rlbl lblUnA4a (frep ra) `rprod` rlbl lblUnA4b rint)
`rsum` rcon conA5 (rlbl lblUnA5a rchar `rprod` rlbl lblUnA5b (frep ra) `rprod` rlbl lblUnA5c ra)
`rsum` rcon conA6 (frep ra `rprod` rfloat)
`rsum` rcon conA7 (rlbl lblUnA7a (frep ra) `rprod` rlbl lblUnA7b (frep ra))
instance (Generic2 g) => FRep2 g A where
frep2 ra = rtype2 epA epA
$ rcon2 conA1 ra
`rsum2` rcon2 conA2 (rinteger2 `rprod2` frep2 ra)
`rsum2` rcon2 conA3 (rlbl2 lblUnA3 rdouble2)
`rsum2` rcon2 conA4 (rlbl2 lblUnA4a (frep2 ra) `rprod2` rlbl2 lblUnA4b rint2)
`rsum2` rcon2 conA5 (rlbl2 lblUnA5a rchar2 `rprod2` rlbl2 lblUnA5b (frep2 ra) `rprod2` rlbl2 lblUnA5c ra)
`rsum2` rcon2 conA6 (frep2 ra `rprod2` rfloat2)
`rsum2` rcon2 conA7 (rlbl2 lblUnA7a (frep2 ra) `rprod2` rlbl2 lblUnA7b (frep2 ra))
instance (Generic3 g) => FRep3 g A where
frep3 ra = rtype3 epA epA epA
$ rcon3 conA1 ra
`rsum3` rcon3 conA2 (rinteger3 `rprod3` frep3 ra)
`rsum3` rcon3 conA3 (rlbl3 lblUnA3 rdouble3)
`rsum3` rcon3 conA4 (rlbl3 lblUnA4a (frep3 ra) `rprod3` rlbl3 lblUnA4b rint3)
`rsum3` rcon3 conA5 (rlbl3 lblUnA5a rchar3 `rprod3` rlbl3 lblUnA5b (frep3 ra) `rprod3` rlbl3 lblUnA5c ra)
`rsum3` rcon3 conA6 (frep3 ra `rprod3` rfloat3)
`rsum3` rcon3 conA7 (rlbl3 lblUnA7a (frep3 ra) `rprod3` rlbl3 lblUnA7b (frep3 ra))
instance (Alternative f) => Rep (Collect f (A a)) (A a) where
rep = Collect pure
instance (Rep (Everywhere (A a)) a) => Rep (Everywhere (A a)) (A a) where
rep = Everywhere app
where
app f x =
case x of
A1 x1 -> f (A1 (selEverywhere rep f x1))
A2 x1 x2 -> f (A2 (selEverywhere rep f x1) (selEverywhere rep f x2))
A3 x1 -> f (A3 (selEverywhere rep f x1))
A4 x1 x2 -> f (A4 (selEverywhere rep f x1) (selEverywhere rep f x2))
A5 x1 x2 x3 -> f (A5 (selEverywhere rep f x1) (selEverywhere rep f x2) (selEverywhere rep f x3))
x1 :^: x2 -> f (selEverywhere rep f x1 :^: selEverywhere rep f x2)
x1 :<>: x2 -> f (selEverywhere rep f x1 :<>: selEverywhere rep f x2)
instance Rep (Everywhere' (A a)) (A a) where
rep = Everywhere' ($)
v1 = A1 (5 :: Int)
v2 = A2 37 v1
v3 = A3 9999.9999 :: A Float
v4 = A4 v3 79
v5 = A5 'a' v4 5.0
v6 = v5 :^: 0.12345
v7 = v6 :<>: v6
| spl/emgm | tests/A.hs | bsd-3-clause | 5,933 | 6 | 20 | 1,574 | 2,584 | 1,346 | 1,238 | 131 | 7 |
{-# OPTIONS -fno-warn-unused-imports #-}
#include "HsConfigure.h"
-- #hide
module Data.Time.Calendar.Days
(
-- * Days
Day(..),addDays,diffDays
) where
import Control.DeepSeq
import Data.Ix
import Data.Typeable
#if LANGUAGE_Rank2Types
import Data.Data
#endif
-- | The Modified Julian Day is a standard count of days, with zero being the day 1858-11-17.
--
-- For the 'Read' instance of 'Day',
-- import "Data.Time" or "Data.Time.Format".
newtype Day = ModifiedJulianDay {toModifiedJulianDay :: Integer} deriving (Eq,Ord
#if LANGUAGE_DeriveDataTypeable
#if LANGUAGE_Rank2Types
,Data, Typeable
#endif
#endif
)
instance NFData Day where
rnf (ModifiedJulianDay a) = rnf a
-- necessary because H98 doesn't have "cunning newtype" derivation
instance Enum Day where
succ (ModifiedJulianDay a) = ModifiedJulianDay (succ a)
pred (ModifiedJulianDay a) = ModifiedJulianDay (pred a)
toEnum = ModifiedJulianDay . toEnum
fromEnum (ModifiedJulianDay a) = fromEnum a
enumFrom (ModifiedJulianDay a) = fmap ModifiedJulianDay (enumFrom a)
enumFromThen (ModifiedJulianDay a) (ModifiedJulianDay b) = fmap ModifiedJulianDay (enumFromThen a b)
enumFromTo (ModifiedJulianDay a) (ModifiedJulianDay b) = fmap ModifiedJulianDay (enumFromTo a b)
enumFromThenTo (ModifiedJulianDay a) (ModifiedJulianDay b) (ModifiedJulianDay c) = fmap ModifiedJulianDay (enumFromThenTo a b c)
-- necessary because H98 doesn't have "cunning newtype" derivation
instance Ix Day where
range (ModifiedJulianDay a,ModifiedJulianDay b) = fmap ModifiedJulianDay (range (a,b))
index (ModifiedJulianDay a,ModifiedJulianDay b) (ModifiedJulianDay c) = index (a,b) c
inRange (ModifiedJulianDay a,ModifiedJulianDay b) (ModifiedJulianDay c) = inRange (a,b) c
rangeSize (ModifiedJulianDay a,ModifiedJulianDay b) = rangeSize (a,b)
addDays :: Integer -> Day -> Day
addDays n (ModifiedJulianDay a) = ModifiedJulianDay (a + n)
diffDays :: Day -> Day -> Integer
diffDays (ModifiedJulianDay a) (ModifiedJulianDay b) = a - b
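-- Illustrative usage (added for exposition; not part of the original module):
-- day arithmetic is plain arithmetic on the Modified Julian Day count, so this
-- evaluates to 7.
_exampleDiff :: Integer
_exampleDiff = diffDays (addDays 7 (ModifiedJulianDay 0)) (ModifiedJulianDay 0)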
| bergmark/time | lib/Data/Time/Calendar/Days.hs | bsd-3-clause | 2,031 | 0 | 9 | 325 | 566 | 300 | 266 | 28 | 1 |
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE OverloadedStrings, RecordWildCards #-}
module HW05 where
import Data.ByteString.Lazy (ByteString)
import Data.Map.Strict (Map)
import System.Environment (getArgs)
import Data.Word8 (Word8)
import Data.List
import Data.Ord
import Data.Maybe
import qualified Data.ByteString.Lazy as BS
import qualified Data.Map.Strict as Map
import qualified Data.Bits as Bits
-- import qualified Data.ByteString.Lazy.Char8 as C
import Parser
-- Exercise 1 -----------------------------------------
getSecret :: FilePath -> FilePath -> IO ByteString
getSecret f1 f2 = do
rf1 <- BS.readFile f1
rf2 <- BS.readFile f2
return (BS.filter (/=0) (BS.pack $ BS.zipWith (Bits.xor) rf1 rf2))
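-- Note on getSecret above (added for exposition): XORing the two files
-- byte-by-byte yields 0 wherever they agree, so filtering out the zeros keeps
-- only the positions where they differ -- which, per the exercise, are the
-- bytes of the secret key.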
-- Exercise 2 -----------------------------------------
decryptWithKey :: ByteString -> FilePath -> IO ()
decryptWithKey k f = do
rf <- BS.readFile $ f ++ ".enc"
BS.writeFile f $ BS.pack $ go k rf
where go :: ByteString -> ByteString -> [Word8]
go k' b = zipWith (Bits.xor) (cycle $ BS.unpack k') (BS.unpack b)
-- Exercise 3 -----------------------------------------
parseFile :: FromJSON a => FilePath -> IO (Maybe a)
parseFile f = do
rf <- BS.readFile f
return $ decode rf
-- Exercise 4 -----------------------------------------
getBadTs :: FilePath -> FilePath -> IO (Maybe [Transaction])
getBadTs vp tp = do
mvs <- parseFile vp :: IO (Maybe [TId])
mts <- parseFile tp
case (mvs, mts) of
(Just vs, Just ts) -> return $ Just $ filter(\ t -> elem (tid t) vs) ts
(_, _) -> return Nothing
-- case mvs of
-- Nothing -> return mts
-- Just vs -> do
-- case mts of
-- Nothing -> return Nothing
-- Just ts -> return $ Just $ filter(\t -> elem (tid t) vs) ts
test :: IO (Map String Integer)
test = do
t <- (getBadTs "victims.json" "transactions.json")
return $ getFlow $ fromJust t
-- Exercise 5 -----------------------------------------
getFlow :: [Transaction] -> Map String Integer
getFlow ts' = go ts' Map.empty
where go :: [Transaction] -> Map String Integer -> Map String Integer
go [] m = m
go (t:ts) m = let u1 = ((to t), (amount t))
u2 = ((from t), negate (amount t))
m' = Map.insertWith (+) (fst u1) (snd u1) m
m'' = Map.insertWith (+) (fst u2) (snd u2) m'
in go ts m''
-- Exercise 6 -----------------------------------------
getCriminal :: Map String Integer -> String
getCriminal m = fst $ maximumBy (comparing $ snd) (Map.toList m)
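-- Tiny illustration (added for exposition; not part of the original exercise):
-- the account with the largest flow is reported, so this evaluates to "bob".
_criminalExample :: String
_criminalExample = getCriminal (Map.fromList [("alice", 10), ("bob", 42)])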
-- Exercise 7 -----------------------------------------
undoTs :: Map String Integer -> [TId] -> [Transaction]
undoTs m ts = makeT ts (payers m) (payees m)
where payers :: Map String Integer -> [(String, Integer)]
payers m' = reverse $ sort' $ filter (\ x -> (snd x) > 0) (Map.toList m')
payees :: Map String Integer -> [(String, Integer)]
payees m'' = sort' $ filter (\ x -> (snd x) < 0) (Map.toList m'')
sort' :: [(String, Integer)] -> [(String, Integer)]
sort' ls = sortBy (comparing $ snd) ls
makeT :: [TId] -> [(String, Integer)] -> [(String, Integer)] -> [Transaction]
makeT ts [] [] = []
makeT (t:ts) (p1:p1s) (p2:p2s) = let am = if (snd p1) > (abs $ snd p2) then (abs $ snd p2) else snd p1
nt = Transaction {from = (fst p1), to = (fst p2), amount = am, tid = t}
p1s' = if (snd p1) == am then p1s else (fst p1, (snd p1 - am)) : p1s
p2s' = if (abs $ snd p2) == am then p2s else (fst p2, (snd p2 + am)) : p2s
in (nt : makeT ts p1s' p2s')
-- Exercise 8 -----------------------------------------
writeJSON :: ToJSON a => FilePath -> a -> IO ()
writeJSON fp ts = BS.writeFile fp $ encode ts
-- Exercise 9 -----------------------------------------
doEverything :: FilePath -> FilePath -> FilePath -> FilePath -> FilePath
-> FilePath -> IO String
doEverything dog1 dog2 trans vict fids out = do
key <- getSecret dog1 dog2
decryptWithKey key vict
mts <- getBadTs vict trans
case mts of
Nothing -> error "No Transactions"
Just ts -> do
mids <- parseFile fids
case mids of
Nothing -> error "No ids"
Just ids -> do
let flow = getFlow ts
writeJSON out (undoTs flow ids)
return (getCriminal flow)
main' :: IO ()
main' = do
args <- getArgs
crim <-
case args of
dog1:dog2:trans:vict:ids:out:_ ->
doEverything dog1 dog2 trans vict ids out
_ -> doEverything "dog-original.jpg"
"dog.jpg"
"transactions.json"
"victims.json"
"new-ids.json"
"new-transactions.json"
putStrLn crim
| ImsungChoi/haskell-test | src/HW05.hs | bsd-3-clause | 4,930 | 0 | 20 | 1,431 | 1,702 | 881 | 821 | 98 | 5 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DeriveFunctor #-}
module FreshNames where
import Prelude hiding ((<))
newtype PolyFreshNames a = FreshNames { unFreshNames :: a }
deriving (Show, Eq, Ord, Functor)
type FreshNames = PolyFreshNames Int
class FreshName a where
getFreshName :: PolyFreshNames a -> (a, PolyFreshNames a)
defaultNames :: PolyFreshNames a
getNames :: Int -> PolyFreshNames a -> ([a], PolyFreshNames a)
instance FreshName Int where
getFreshName (FreshNames x) = (x, FreshNames $ succ x)
defaultNames = FreshNames 0
getNames n (FreshNames x) =
let (xs, h:_) = splitAt n $ enumFrom x
in (xs, FreshNames h)
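-- Illustrative usage (added for exposition; not part of the original module):
-- drawing three names from the default Int supply yields 0, 1 and 2, leaving
-- the supply positioned at 3:
-- getNames 3 defaultNames == ([0,1,2], FreshNames {unFreshNames = 3})
_freshExample :: ([Int], FreshNames)
_freshExample = getNames 3 defaultNames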
| kajigor/uKanren_transformations | src/FreshNames.hs | bsd-3-clause | 723 | 0 | 11 | 151 | 226 | 123 | 103 | 18 | 0 |
-- |
-- Module : Database.Monarch
-- Copyright : 2013 Noriyuki OHKAWA
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : unknown
--
-- Provide TokyoTyrant monadic access interface.
--
module Database.Monarch
(
Monarch, MonarchT
, Connection, ConnectionPool
, withMonarchConn
, withMonarchPool
, runMonarchConn
, runMonarchPool
, ExtOption(..), RestoreOption(..), MiscOption(..)
, Code(..)
, MonadMonarch(..)
) where
import Database.Monarch.Types hiding (sendLBS, recvLBS)
import Database.Monarch.Action ()
| notogawa/monarch | src/Database/Monarch.hs | bsd-3-clause | 617 | 0 | 5 | 132 | 102 | 73 | 29 | 13 | 0 |
{-# LANGUAGE TemplateHaskell #-}
-- | Generate AST types, functions and instances for tuples.
module Database.DSH.Frontend.TupleTypes
( -- * Generate tuple types, functions and instances
mkQAInstances
, mkTAInstances
, mkTupleConstructors
, mkTupleAccessors
, mkTupElemType
, mkTupElemCompile
, mkReifyInstances
, mkTranslateTupleTerm
, mkTranslateType
, mkViewInstances
, mkTupleAstComponents
-- * Helper functions
, innerConst
, outerConst
, tupAccName
, mkTupElemTerm
, mkTupConstTerm
, tupTyConstName
) where
import Data.List
import Text.Printf
import Language.Haskell.TH
import Database.DSH.Common.Impossible
import Database.DSH.Common.TH
import Database.DSH.Common.Nat
import qualified Database.DSH.Common.Type as T
import qualified Database.DSH.CL.Primitives as CP
import qualified Database.DSH.CL.Lang as CL
--------------------------------------------------------------------------------
-- Tuple Accessors
-- | Generate all constructors for a given tuple width.
mkTupElemCons :: Name -> Name -> Int -> Q [Con]
mkTupElemCons aTyVar bTyVar width = do
boundTyVars <- mapM (\i -> newName $ printf "t%d" i) [1..width-1]
mapM (mkTupElemCon aTyVar bTyVar boundTyVars width) [1..width]
mkTupType :: Int -> Int -> [Name] -> Name -> Type
mkTupType elemIdx width boundTyVars bTyVar =
let elemTys = map VarT $ take (elemIdx - 1) boundTyVars
++ [bTyVar]
++ drop (elemIdx - 1) boundTyVars
in foldl' AppT (TupleT width) elemTys
mkTupElemCon :: Name -> Name -> [Name] -> Int -> Int -> Q Con
mkTupElemCon aTyVar bTyVar boundTyVars width elemIdx = do
let binders = map PlainTV boundTyVars
let tupTy = mkTupType elemIdx width boundTyVars bTyVar
let con = tupAccName width elemIdx
let ctx = [equalConstrTy (VarT aTyVar) tupTy]
return $ ForallC binders ctx (NormalC con [])
-- | Generate the complete type of tuple acccessors for all tuple
-- widths.
--
-- @
-- data TupElem a b where
-- Tup2_1 :: TupElem (a, b) a
-- Tup2_2 :: TupElem (a, b) b
-- Tup3_1 :: TupElem (a, b, c) a
-- Tup3_2 :: TupElem (a, b, c) b
-- Tup3_3 :: TupElem (a, b, c) c
-- ...
-- @
--
-- Due to the lack of support for proper GADT syntax in TH, we have
-- to work with explicit universal quantification:
--
-- @
-- data TupElem a b =
-- | forall d. a ~ (b, d) => Tup2_1
-- | forall d. a ~ (d, b) => Tup2_2
--
-- | forall d e. a ~ (b, d, e) => Tup3_1
-- | forall d e. a ~ (d, b, e) => Tup3_2
-- | forall d e. a ~ (d, e, b) => Tup3_3
-- ...
-- @
mkTupElemType :: Int -> Q [Dec]
mkTupElemType maxWidth = do
let tyName = mkName "TupElem"
aTyVar <- newName "a"
bTyVar <- newName "b"
let tyVars = map PlainTV [aTyVar, bTyVar]
cons <- concat <$> mapM (mkTupElemCons aTyVar bTyVar) [2..maxWidth]
return $ [DataD [] tyName tyVars Nothing cons []]
--------------------------------------------------------------------------------
-- Translation of tuple accessors to CL
-- TupElem a b -> Exp a -> Compile CL.Expr
-- \te e ->
-- case te of
--      Tup{2}_{1} -> CP.tupElem (intIndex 1) <$> translate e
--      Tup{2}_{k} -> CP.tupElem (intIndex k) <$> translate e
--      Tup{3}_{1} -> CP.tupElem (intIndex 1) <$> translate e
--      ...
--      Tup{n}_{j} -> CP.tupElem (intIndex j) <$> translate e
-- FIXME mkTupElemCompile does not depend on 'translate'
-- anymore. Therefore, we could inject a regular global binding for
-- the function instead of a lambda.
mkCompileMatch :: (Name, Int) -> Q Match
mkCompileMatch (con, elemIdx) = do
let idxLit = return $ LitE $ IntegerL $ fromIntegral elemIdx
bodyExp <- [| CP.tupElem (intIndex $idxLit) |]
let body = NormalB $ bodyExp
return $ Match (ConP con []) body []
mkTupElemCompile :: Int -> Q Exp
mkTupElemCompile maxWidth = do
let cons = concat [ [ (tupAccName width idx, idx)
| idx <- [1..width]
]
| width <- [2..maxWidth]
]
opName <- newName "te"
matches <- mapM mkCompileMatch cons
let lamBody = CaseE (VarE opName) matches
return $ LamE [VarP opName] lamBody
--------------------------------------------------------------------------------
-- Reify instances for tuple types
reifyType :: Name -> Exp
reifyType tyName = AppE (VarE $ mkName "reify") (SigE (VarE 'undefined) (VarT tyName))
mkReifyFun :: [Name] -> Dec
mkReifyFun tyNames =
let argTys = map reifyType tyNames
body = AppE (ConE $ mkName "TupleT")
$ foldl' AppE (ConE $ tupTyConstName "" $ length tyNames) argTys
in FunD (mkName "reify") [Clause [WildP] (NormalB body) []]
mkReifyInstance :: Int -> Dec
mkReifyInstance width =
let tyNames = map (\i -> mkName $ "t" ++ show i) [1..width]
instTy = AppT (ConT $ mkName "Reify") $ tupleType $ map VarT tyNames
reifyCxt = map (\tyName -> nameTyApp (mkName "Reify") (VarT tyName)) tyNames
in InstanceD Nothing reifyCxt instTy [mkReifyFun tyNames]
mkReifyInstances :: Int -> Q [Dec]
mkReifyInstances maxWidth = return $ map mkReifyInstance [2..maxWidth]
--------------------------------------------------------------------------------
-- QA instances for tuple types
mkToExp :: Int -> [Name] -> Dec
mkToExp width elemNames =
let toExpVar = VarE $ mkName "toExp"
elemArgs = map (\n -> AppE toExpVar (VarE n)) elemNames
body = NormalB $ AppE (ConE $ outerConst "")
$ foldl' AppE (ConE $ innerConst "" width) elemArgs
tupClause = Clause [TupP $ map VarP elemNames] body []
in FunD (mkName "toExp") [tupClause]
mkFrExp :: Int -> [Name] -> Q Dec
mkFrExp width elemNames = do
impossibleExpr <- [| error $(litE $ StringL $ printf "frExp %d" width) |]
let tupPattern = ConP (outerConst "")
[ConP (innerConst "" width) (map VarP elemNames) ]
tupleExpr = TupE $ map (\n -> AppE (VarE $ mkName "frExp") (VarE n))
elemNames
tupleClause = Clause [tupPattern] (NormalB tupleExpr) []
impossibleClause = Clause [WildP] (NormalB impossibleExpr) []
return $ FunD (mkName "frExp") [tupleClause, impossibleClause]
mkRep :: Int -> [Name] -> Type -> Dec
mkRep width tyNames tupTyPat =
let resTy = foldl' AppT (TupleT width)
$ map (AppT $ ConT $ mkName "Rep")
$ map VarT tyNames
in TySynInstD (mkName "Rep") (TySynEqn [tupTyPat] resTy)
mkQAInstance :: Int -> Q Dec
mkQAInstance width = do
let tyNames = map (\i -> mkName $ "t" ++ show i) [1..width]
tupTy = tupleType $ map VarT tyNames
instTy = AppT (ConT $ mkName "QA") tupTy
qaCxt = map (\tyName -> nameTyApp (mkName "QA") (VarT tyName)) tyNames
rep = mkRep width tyNames tupTy
toExp = mkToExp width tyNames
frExp <- mkFrExp width tyNames
return $ InstanceD Nothing qaCxt instTy [rep, toExp, frExp]
-- | Generate QA instances for tuple types according to the following template:
--
-- @
-- instance (QA t1, ..., QA tn) => QA (t1, ..., tn) where
-- type Rep (t1, ..., tn) = (Rep t1, ..., Rep tn)
-- toExp (v1, ..., vn) = TupleConstE (Tuple<n>E (toExp v1) ... (toExp vn))
--   frExp (TupleConstE (Tuple<n>E v1 ... vn)) = (frExp v1, ..., frExp vn)
-- frExp _ = $impossible
-- @
mkQAInstances :: Int -> Q [Dec]
mkQAInstances maxWidth = mapM mkQAInstance [2..maxWidth]
--------------------------------------------------------------------------------
-- TA instances for tuple types
mkTAInstance :: Int -> Dec
mkTAInstance width =
let tyNames = map (\i -> mkName $ "t" ++ show i) [1..width]
tupTy = foldl' AppT (TupleT width) $ map VarT tyNames
instTy = AppT (ConT $ mkName "TA") tupTy
taCxt = map (\tyName -> nameTyApp (mkName "BasicType") (VarT tyName)) tyNames
in InstanceD Nothing taCxt instTy []
-- | Generate TA instances for tuple types according to the following template:
--
-- @
-- instance (BasicType t1, ..., BasicType tn) => TA (t1, ..., tn) where
-- @
mkTAInstances :: Int -> Q [Dec]
mkTAInstances maxWidth = return $ map mkTAInstance [2..maxWidth]
--------------------------------------------------------------------------------
-- Smart constructors for tuple values
tupConName :: Int -> Name
tupConName width = mkName $ printf "tup%d" width
mkArrowTy :: Type -> Type -> Type
mkArrowTy domTy coDomTy = AppT (AppT ArrowT domTy) coDomTy
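-- Small check of the helper above (added for exposition; not part of the
-- original module): mkArrowTy builds the Template Haskell representation of a
-- function arrow, so folding it to the right yields curried constructor types.
_arrowTyExample :: Bool
_arrowTyExample =
    mkArrowTy (ConT ''Int) (ConT ''Bool)
        == AppT (AppT ArrowT (ConT ''Int)) (ConT ''Bool)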
mkTupleConstructor :: Int -> [Dec]
mkTupleConstructor width =
let tyNames = map (\i -> mkName $ "t" ++ show i) [1..width]
-- Type stuff
tupTy = AppT (ConT qName) $ foldl' AppT (TupleT width) $ map VarT tyNames
elemTys = map (AppT (ConT qName)) $ map VarT tyNames
arrowTy = foldr mkArrowTy tupTy elemTys
qaConstr = map (\n -> nameTyApp (mkName "QA") (VarT n)) tyNames
funTy = ForallT (map PlainTV tyNames) qaConstr arrowTy
-- Term stuff
qPats = map (\n -> ConP qName [VarP n]) tyNames
tupConApp = foldl' AppE (ConE $ innerConst "" width) $ map VarE tyNames
bodyExp = AppE (ConE qName) (AppE (ConE $ outerConst "") tupConApp)
sig = SigD (tupConName width) funTy
body = FunD (tupConName width) [Clause qPats (NormalB bodyExp) []]
in [sig, body]
-- | Construct smart constructors for tuple types according to the
-- following template.
--
-- @
-- tup<n> :: (QA t1, ...,QA tn) => Q t1 -> ... -> Q tn -> Q (t1, ..., tn)
-- tup<n> (Q v1) ... (Q vn)= Q (TupleConstE (Tuple<n>E v1 ... vn))
-- @
mkTupleConstructors :: Int -> Q [Dec]
mkTupleConstructors maxWidth = return $ concatMap mkTupleConstructor [2..maxWidth]
--------------------------------------------------------------------------------
-- Tuple accessors
mkTupleAccessor :: Int -> Int -> Q [Dec]
mkTupleAccessor width idx = do
-- Construct the function type
fieldTyName <- newName "a"
otherFieldTyNames <- mapM (\i -> newName $ printf "b%d" i) [1..width-1]
let elemTyNames = take (idx - 1) otherFieldTyNames
++ [fieldTyName]
++ drop (idx - 1) otherFieldTyNames
elemTyVars = map VarT elemTyNames
qaCxt = map (\tyName -> nameTyApp (mkName "QA") (VarT tyName)) elemTyNames
tupTy = AppT (ConT qName) $ foldl' AppT (TupleT width) elemTyVars
fieldTy = AppT (ConT qName) (VarT fieldTyName)
arrowTy = mkArrowTy tupTy fieldTy
funTy = ForallT (map PlainTV elemTyNames) qaCxt arrowTy
funSig = SigD (tupAccFunName width idx) funTy
-- Construct the function equation
exprName <- newName "e"
funBody <- appE (conE qName) $ mkTupElemTerm width idx (VarE exprName)
let qPat = ConP qName [VarP exprName]
funDef = FunD (tupAccFunName width idx) [Clause [qPat] (NormalB funBody) []]
return [funSig, funDef]
-- | Construct field accessor functions for tuple types.
--
-- @
-- tup<n>_<i> :: (QA t1, ..., QA tn) => Q (t1, ..., tn) -> Q ti
-- tup<n>_<i> (Q e) = Q (AppE (TupElem Tup<n>_<i>) e)
-- @
mkTupleAccessors :: Int -> Q [Dec]
mkTupleAccessors maxWidth = concat <$> sequence [ mkTupleAccessor width idx
| width <- [2..maxWidth]
, idx <- [1..width]
]
--------------------------------------------------------------------------------
-- Translation function for tuple constructors in terms
{-
\t -> case t of
Tuple2E a b -> do
a' <- translate a
b' <- translate b
return $ CL.MkTuple (T.TupleT $ map T.typeOf [a', b']) [a', b']
Tuple3E a b c -> ...
-}
mkTransBind :: Name -> Name -> Stmt
mkTransBind argName resName =
BindS (VarP resName) (AppE (VarE $ mkName "translate") (VarE argName))
-- | Generate the translation case for a particular tuple value
-- constructor.
mkTranslateTermMatch :: Int -> Q Match
mkTranslateTermMatch width = do
let names = map (\c -> [c]) $ take width ['a' .. 'z']
subTermNames = map mkName names
transTermNames = map (mkName . (++ "'")) names
transBinds = zipWith mkTransBind subTermNames transTermNames
transTerms = listE $ map varE transTermNames
conStmt <- NoBindS <$>
[| return $ CL.MkTuple (T.TupleT $ map T.typeOf $transTerms) $transTerms |]
let matchBody = DoE $ transBinds ++ [conStmt]
matchPat = ConP (innerConst "" width) (map VarP subTermNames)
return $ Match matchPat (NormalB matchBody) []
-- | Generate the lambda expression that translates frontend tuple
-- value constructors into CL tuple constructors.
mkTranslateTupleTerm :: Int -> Q Exp
mkTranslateTupleTerm maxWidth = do
lamArgName <- newName "tupleConst"
matches <- mapM mkTranslateTermMatch [2..maxWidth]
let lamBody = CaseE (VarE lamArgName) matches
return $ LamE [VarP lamArgName] lamBody
--------------------------------------------------------------------------------
-- Translation function for tuple types
{-
\t -> case t of
Tuple3T t1 t2 t3 -> T.TupleT [translateType t1, translateType t2, translateType t3]
-}
mkTranslateTypeMatch :: Int -> Q Match
mkTranslateTypeMatch width = do
let subTyNames = map mkName $ map (\c -> [c]) $ take width ['a' .. 'z']
matchPat = ConP (tupTyConstName "" width) (map VarP subTyNames)
transElemTys = ListE $ map (\n -> AppE (VarE $ mkName "translateType") (VarE n)) subTyNames
let matchBody = AppE (ConE 'T.TupleT) transElemTys
return $ Match matchPat (NormalB matchBody) []
mkTranslateType :: Int -> Q Exp
mkTranslateType maxWidth = do
lamArgName <- newName "typeConst"
matches <- mapM mkTranslateTypeMatch [2..maxWidth]
let lamBody = CaseE (VarE lamArgName) matches
return $ LamE [VarP lamArgName] lamBody
--------------------------------------------------------------------------------
-- View instances
{-
instance (QA a,QA b,QA c) => View (Q (a,b,c)) where
type ToView (Q (a,b,c)) = (Q a,Q b,Q c)
view (Q e) = ( Q (AppE (TupElem Tup3_1) e)
, Q (AppE (TupElem Tup3_2) e)
, Q (AppE (TupElem Tup3_3) e)
)
-}
mkToView :: [Name] -> Type -> Dec
mkToView names tupTyPat =
let qTupPat = AppT (ConT qName) tupTyPat
resTupTy = tupleType $ map (\n -> AppT (ConT qName) (VarT n)) names
in TySynInstD (mkName "ToView") (TySynEqn [qTupPat] resTupTy)
mkViewFun :: Int -> Q Dec
mkViewFun width = do
expName <- newName "e"
let expVar = VarE expName
qPat = ConP qName [VarP expName]
viewBodyExp <- TupE <$> mapM (\idx -> appE (conE qName) $ mkTupElemTerm width idx expVar)
[1..width]
let viewClause = Clause [qPat] (NormalB viewBodyExp) []
return $ FunD (mkName "view") [viewClause]
mkViewInstance :: Int -> Q Dec
mkViewInstance width = do
let names = map (\i -> mkName $ "t" ++ show i) [1..width]
tupTy = tupleType $ map VarT names
instTy = AppT (ConT $ mkName "View") (AppT (ConT qName) tupTy)
viewCxt = map (\n -> nameTyApp (mkName "QA") (VarT n)) names
toViewDec = mkToView names tupTy
viewDec <- mkViewFun width
return $ InstanceD Nothing viewCxt instTy [toViewDec, viewDec]
mkViewInstances :: Int -> Q [Dec]
mkViewInstances maxWidth = mapM mkViewInstance [2..maxWidth]
--------------------------------------------------------------------------------
-- Generate the 'TupleConst' type
tupElemTyName :: Int -> Q Name
tupElemTyName i = newName $ printf "t%d" i
-- | Generate a single constructor for one of the tuple AST types ('TupleConst' or 'TupleType').
mkTupleCons :: Name -> (Int -> Name) -> (Type -> Type) -> Int -> Q Con
mkTupleCons tupTyName conName elemTyCons width = do
tupElemTyNames <- mapM tupElemTyName [1..width]
let tyVarBinders = map PlainTV tupElemTyNames
-- (t1, ..., t<n>)
tupTy = foldl' AppT (TupleT width)
$ map VarT tupElemTyNames
-- a ~ (t1, ..., t<n>)
tupConstraint = equalConstrTy (VarT tupTyName) tupTy
-- Reify t1, ..., Reify t<n>
reifyConstraints = map (\n -> nameTyApp (mkName "Reify") (VarT n)) tupElemTyNames
constraints = tupConstraint : reifyConstraints
let -- '(Exp/Type t1) ... (Exp/Type t<n>)'
elemTys = [ (strict, elemTyCons (VarT t))
| t <- tupElemTyNames
]
return $ ForallC tyVarBinders constraints
$ NormalC (conName width) elemTys
where
strict = Bang NoSourceUnpackedness SourceStrict
-- | Generate the types for AST type and term tuple constructors: 'TupleConst' and
-- 'TupleType'. The first parameter is the name of the type. The second parameter
-- is the type constructor for element fields and the third parameter generates
-- the constructor name for a given tuple width.
--
-- @
-- data TupleConst a where
-- Tuple<n>E :: (Reify t1, ..., Reify t<n>) => Exp t1
-- -> ...
-- -> Exp t<n>
-- -> TupleConst (t1, ..., t<n>)
-- @
--
-- Because TH does not directly support GADT syntax, we have to
-- emulate it using explicit universal quantification:
--
-- @
-- data TupleConst a =
-- forall t1, ..., t<n>. a ~ (t1, ..., t<n>),
-- Reify t1,
-- ...
-- Reify t<n> =>
-- Exp t1 -> ... -> Exp t<n>
-- @
mkTupleASTTy :: Name -> (Type -> Type) -> (Int -> Name) -> Int -> Q [Dec]
mkTupleASTTy tyName elemTyCons conName maxWidth = do
tupTyName <- newName "a"
cons <- mapM (mkTupleCons tupTyName conName elemTyCons) [2..maxWidth]
return [DataD [] tyName [PlainTV tupTyName] Nothing cons []]
-- | Generate the 'TupleConst' AST type for tuple term construction
mkAstTupleConst :: Int -> Q [Dec]
mkAstTupleConst maxWidth =
mkTupleASTTy (mkName "TupleConst") expCon (innerConst "") maxWidth
where
expCon = AppT $ ConT $ mkName "Exp"
-- | Generate the 'TupleType' AST type for tuple type construction
mkAstTupleType :: Int -> Q [Dec]
mkAstTupleType maxWidth =
mkTupleASTTy (mkName "TupleType") expCon (tupTyConstName "") maxWidth
where
expCon = AppT $ ConT $ mkName "Type"
mkTupleAstComponents :: Int -> Q [Dec]
mkTupleAstComponents maxWidth = (++) <$> mkAstTupleConst maxWidth <*> mkAstTupleType maxWidth
--------------------------------------------------------------------------------
-- Helper functions
-- | The name of the constructor that constructs a tuple construction
-- term.
outerConst :: String -> Name
outerConst "" = mkName "TupleConstE"
outerConst m = mkName $ printf "%s.TupleConstE" m
-- | The name of the constructor for a given tuple width.
innerConst :: String -> Int -> Name
innerConst "" width = mkName $ printf "Tuple%dE" width
innerConst m width = mkName $ printf "%s.Tuple%dE" m width
-- | The name of a tuple access constructor for a given tuple width
-- and element index.
tupAccName :: Int -> Int -> Name
tupAccName width elemIdx = mkName $ printf "Tup%d_%d" width elemIdx
-- | The name of a tuple access function for a given tuple width and element
-- index.
tupAccFunName :: Int -> Int -> Name
tupAccFunName width elemIdx = mkName $ printf "tup%d_%d" width elemIdx
-- | The name of the tuple type constructor for a given tuple width.
tupTyConstName :: String -> Int -> Name
tupTyConstName "" width = mkName $ printf "Tuple%dT" width
tupTyConstName m width = mkName $ printf "%s.Tuple%dT" m width
-- | Build a TH tuple type from a list of element types.
tupleType :: [Type] -> Type
tupleType elemTypes = foldl' AppT (TupleT width) elemTypes
where
width = length elemTypes
qName :: Name
qName = mkName "Q"
-- | Construct a DSH term that accesses a specified tuple element.
mkTupElemTerm :: Int -> Int -> Exp -> Q Exp
mkTupElemTerm width idx arg = do
let ta = ConE $ tupAccName width idx
return $ AppE (AppE (ConE $ mkName "AppE") (AppE (ConE $ mkName "TupElem") ta)) arg
-- | From a list of operand terms, construct a DSH tuple term.
mkTupConstTerm :: [Exp] -> Q Exp
mkTupConstTerm ts
| length ts <= 16 = return $ AppE (ConE $ mkName "TupleConstE")
$ foldl' AppE (ConE $ innerConst "" $ length ts) ts
| otherwise = impossible
| ulricha/dsh | src/Database/DSH/Frontend/TupleTypes.hs | bsd-3-clause | 20,752 | 0 | 17 | 5,380 | 5,178 | 2,664 | 2,514 | 291 | 1 |
import Control.Monad.Logger
import Data.ByteString.Char8 (pack)
import Meadowstalk.Application
import Network.Wai.Handler.Warp
import System.Environment
-------------------------------------------------------------------------------
main :: IO ()
main = do
port <- read <$> getEnv "PORT"
connstr <- pack <$> getEnv "DB"
app <- makeApplication connstr
runSettings (setPort port defaultSettings) app
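-- Illustrative invocation (added for exposition; the binary name and the exact
-- connection-string format are assumptions that depend on the build target and
-- database backend):
--
--   PORT=3000 DB="host=localhost dbname=meadowstalk" ./meadowstalk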
| HalfWayMan/meadowstalk | src/Main.hs | bsd-3-clause | 415 | 1 | 9 | 57 | 108 | 54 | 54 | 11 | 1 |
{-|
Most data types are defined here to avoid import cycles.
Here is an overview of the hledger data model:
> Journal -- a journal is read from one or more data files. It contains..
> [Transaction] -- journal transactions (aka entries), which have date, cleared status, code, description and..
> [Posting] -- multiple account postings, which have account name and amount
> [MarketPrice] -- historical market prices for commodities
>
> Ledger -- a ledger is derived from a journal, by applying a filter specification and doing some further processing. It contains..
> Journal -- a filtered copy of the original journal, containing only the transactions and postings we are interested in
> [Account] -- all accounts, in tree order beginning with a "root" account", with their balances and sub/parent accounts
For more detailed documentation on each type, see the corresponding modules.
-}
-- {-# LANGUAGE DeriveAnyClass #-} -- https://hackage.haskell.org/package/deepseq-1.4.4.0/docs/Control-DeepSeq.html#v:rnf
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StandaloneDeriving #-}
module Hledger.Data.Types (
module Hledger.Data.Types,
#if MIN_VERSION_time(1,11,0)
Year
#endif
)
where
import GHC.Generics (Generic)
import Data.Decimal (Decimal, DecimalRaw(..))
import Data.Default (Default(..))
import Data.Functor (($>))
import Data.List (intercalate)
--XXX https://hackage.haskell.org/package/containers/docs/Data-Map.html
--Note: You should use Data.Map.Strict instead of this module if:
--You will eventually need all the values stored.
--The stored values don't represent large virtual data structures to be lazily computed.
import qualified Data.Map as M
import Data.Ord (comparing)
import Data.Text (Text)
import Data.Time.Calendar (Day)
import Data.Time.Clock.POSIX (POSIXTime)
import Data.Time.LocalTime (LocalTime)
import Data.Word (Word8)
import Text.Blaze (ToMarkup(..))
import Text.Megaparsec (SourcePos)
import Hledger.Utils.Regex
-- synonyms for various date-related scalars
#if MIN_VERSION_time(1,11,0)
import Data.Time.Calendar (Year)
#else
type Year = Integer
#endif
type Month = Int -- 1-12
type Quarter = Int -- 1-4
type YearWeek = Int -- 1-52
type MonthWeek = Int -- 1-5
type YearDay = Int -- 1-366
type MonthDay = Int -- 1-31
type WeekDay = Int -- 1-7
-- | A possibly incomplete year-month-day date provided by the user, to be
-- interpreted as either a date or a date span depending on context. Missing
-- parts "on the left" will be filled from the provided reference date, e.g. if
-- the year and month are missing, the reference date's year and month are used.
-- Missing parts "on the right" are assumed, when interpreting as a date, to be
-- 1, (e.g. if the year and month are present but the day is missing, it means
-- first day of that month); or when interpreting as a date span, to be a
-- wildcard (so it would mean all days of that month). See the `smartdate`
-- parser for more examples.
--
-- Or, one of the standard periods and an offset relative to the reference date:
-- (last|this|next) (day|week|month|quarter|year), where "this" means the period
-- containing the reference date.
data SmartDate
= SmartAssumeStart Year (Maybe (Month, Maybe MonthDay))
| SmartFromReference (Maybe Month) MonthDay
| SmartMonth Month
| SmartRelative Integer SmartInterval
deriving (Show)
data SmartInterval = Day | Week | Month | Quarter | Year deriving (Show)
data WhichDate = PrimaryDate | SecondaryDate deriving (Eq,Show)
data DateSpan = DateSpan (Maybe Day) (Maybe Day) deriving (Eq,Ord,Generic)
instance Default DateSpan where def = DateSpan Nothing Nothing
-- Typical report periods (spans of time), both finite and open-ended.
-- A higher-level abstraction than DateSpan.
data Period =
DayPeriod Day
| WeekPeriod Day
| MonthPeriod Year Month
| QuarterPeriod Year Quarter
| YearPeriod Year
| PeriodBetween Day Day
| PeriodFrom Day
| PeriodTo Day
| PeriodAll
deriving (Eq,Ord,Show,Generic)
instance Default Period where def = PeriodAll
---- Typical report period/subperiod durations, from a day to a year.
--data Duration =
-- DayLong
-- WeekLong
-- MonthLong
-- QuarterLong
-- YearLong
-- deriving (Eq,Ord,Show,Generic)
-- Ways in which a period can be divided into subperiods.
data Interval =
NoInterval
| Days Int
| Weeks Int
| Months Int
| Quarters Int
| Years Int
| DayOfMonth Int
| WeekdayOfMonth Int Int
| DaysOfWeek [Int]
| DayOfYear Int Int -- Month, Day
-- WeekOfYear Int
-- MonthOfYear Int
-- QuarterOfYear Int
deriving (Eq,Show,Ord,Generic)
instance Default Interval where def = NoInterval
type Payee = Text
type AccountName = Text
data AccountType =
Asset
| Liability
| Equity
| Revenue
| Expense
| Cash -- ^ a subtype of Asset - liquid assets to show in cashflow report
| Conversion -- ^ a subtype of Equity - account in which to generate conversion postings for transaction prices
deriving (Eq,Ord,Generic)
instance Show AccountType where
show Asset = "A"
show Liability = "L"
show Equity = "E"
show Revenue = "R"
show Expense = "X"
show Cash = "C"
show Conversion = "V"
-- | Check whether the first argument is a subtype of the second: either equal
-- or one of the defined subtypes.
isAccountSubtypeOf :: AccountType -> AccountType -> Bool
isAccountSubtypeOf Asset Asset = True
isAccountSubtypeOf Liability Liability = True
isAccountSubtypeOf Equity Equity = True
isAccountSubtypeOf Revenue Revenue = True
isAccountSubtypeOf Expense Expense = True
isAccountSubtypeOf Cash Cash = True
isAccountSubtypeOf Cash Asset = True
isAccountSubtypeOf Conversion Conversion = True
isAccountSubtypeOf Conversion Equity = True
isAccountSubtypeOf _ _ = False
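-- Illustrative checks (added for exposition; not part of the original module):
-- Cash is a subtype of Asset and Conversion of Equity, but not the other way
-- around.
_subtypeExamples :: [Bool]
_subtypeExamples =
  [ Cash       `isAccountSubtypeOf` Asset   -- True
  , Conversion `isAccountSubtypeOf` Equity  -- True
  , Asset      `isAccountSubtypeOf` Cash    -- False
  ]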
-- not worth the trouble, letters defined in accountdirectivep for now
--instance Read AccountType
-- where
-- readsPrec _ ('A' : xs) = [(Asset, xs)]
-- readsPrec _ ('L' : xs) = [(Liability, xs)]
-- readsPrec _ ('E' : xs) = [(Equity, xs)]
-- readsPrec _ ('R' : xs) = [(Revenue, xs)]
-- readsPrec _ ('X' : xs) = [(Expense, xs)]
-- readsPrec _ _ = []
data AccountAlias = BasicAlias AccountName AccountName
| RegexAlias Regexp Replacement
deriving (Eq, Read, Show, Ord, Generic)
data Side = L | R deriving (Eq,Show,Read,Ord,Generic)
-- | One of the decimal marks we support: either period or comma.
type DecimalMark = Char
isDecimalMark :: Char -> Bool
isDecimalMark c = c == '.' || c == ','
-- | The basic numeric type used in amounts.
type Quantity = Decimal
-- The following is for hledger-web, and requires blaze-markup.
-- Doing it here avoids needing a matching flag on the hledger-web package.
instance ToMarkup Quantity
where
toMarkup = toMarkup . show
deriving instance Generic (DecimalRaw a)
-- | An amount's per-unit or total cost/selling price in another
-- commodity, as recorded in the journal entry eg with @ or @@.
-- Docs call this "transaction price". The amount is always positive.
data AmountPrice = UnitPrice !Amount | TotalPrice !Amount
deriving (Eq,Ord,Generic,Show)
-- | Display style for an amount.
data AmountStyle = AmountStyle {
ascommodityside :: !Side, -- ^ does the symbol appear on the left or the right ?
ascommodityspaced :: !Bool, -- ^ space between symbol and quantity ?
asprecision :: !AmountPrecision, -- ^ number of digits displayed after the decimal point
asdecimalpoint :: !(Maybe Char), -- ^ character used as decimal point: period or comma. Nothing means "unspecified, use default"
asdigitgroups :: !(Maybe DigitGroupStyle) -- ^ style for displaying digit groups, if any
} deriving (Eq,Ord,Read,Generic)
instance Show AmountStyle where
show AmountStyle{..} = concat
[ "AmountStylePP \""
, show ascommodityside
, show ascommodityspaced
, show asprecision
, show asdecimalpoint
, show asdigitgroups
, "..\""
]
-- | The "display precision" for a hledger amount, by which we mean
-- the number of decimal digits to display to the right of the decimal mark.
-- This can be from 0 to 255 digits (the maximum supported by the Decimal library),
-- or NaturalPrecision meaning "show all significant decimal digits".
data AmountPrecision = Precision !Word8 | NaturalPrecision deriving (Eq,Ord,Read,Show,Generic)
-- | A style for displaying digit groups in the integer part of a
-- floating point number. It consists of the character used to
-- separate groups (comma or period, whichever is not used as decimal
-- point), and the size of each group, starting with the one nearest
-- the decimal point. The last group size is assumed to repeat. Eg,
-- comma between thousands is DigitGroups ',' [3].
data DigitGroupStyle = DigitGroups !Char ![Word8]
deriving (Eq,Ord,Read,Show,Generic)
type CommoditySymbol = Text
data Commodity = Commodity {
csymbol :: CommoditySymbol,
cformat :: Maybe AmountStyle
} deriving (Show,Eq,Generic) --,Ord)
data Amount = Amount {
acommodity :: !CommoditySymbol, -- commodity symbol, or special value "AUTO"
aquantity :: !Quantity, -- numeric quantity, or zero in case of "AUTO"
astyle :: !AmountStyle,
aprice :: !(Maybe AmountPrice) -- ^ the (fixed, transaction-specific) price for this amount, if any
} deriving (Eq,Ord,Generic,Show)
newtype MixedAmount = Mixed (M.Map MixedAmountKey Amount) deriving (Generic,Show)
instance Eq MixedAmount where a == b = maCompare a b == EQ
instance Ord MixedAmount where compare = maCompare
-- | Compare two MixedAmounts, substituting 0 for the quantity of any missing
-- commodities in either.
maCompare :: MixedAmount -> MixedAmount -> Ordering
maCompare (Mixed a) (Mixed b) = go (M.toList a) (M.toList b)
where
go xss@((kx,x):xs) yss@((ky,y):ys) = case compare kx ky of
EQ -> compareQuantities (Just x) (Just y) <> go xs ys
LT -> compareQuantities (Just x) Nothing <> go xs yss
GT -> compareQuantities Nothing (Just y) <> go xss ys
go ((_,x):xs) [] = compareQuantities (Just x) Nothing <> go xs []
go [] ((_,y):ys) = compareQuantities Nothing (Just y) <> go [] ys
go [] [] = EQ
compareQuantities = comparing (maybe 0 aquantity) <> comparing (maybe 0 totalprice)
totalprice x = case aprice x of
Just (TotalPrice p) -> aquantity p
_ -> 0
-- | Stores the CommoditySymbol of the Amount, along with the CommoditySymbol of
-- the price, and its unit price if being used.
data MixedAmountKey
= MixedAmountKeyNoPrice !CommoditySymbol
| MixedAmountKeyTotalPrice !CommoditySymbol !CommoditySymbol
| MixedAmountKeyUnitPrice !CommoditySymbol !CommoditySymbol !Quantity
deriving (Eq,Generic,Show)
-- | We don't auto-derive the Ord instance because it would give an undesired ordering.
-- We want the keys to be sorted lexicographically:
-- (1) By the primary commodity of the amount.
-- (2) By the commodity of the price, with no price being first.
-- (3) By the unit price, from most negative to most positive, with total prices
-- before unit prices.
-- For example, we would like the ordering to give
-- MixedAmountKeyNoPrice "X" < MixedAmountKeyTotalPrice "X" "Z" < MixedAmountKeyNoPrice "Y"
instance Ord MixedAmountKey where
compare = comparing commodity <> comparing pCommodity <> comparing pPrice
where
commodity (MixedAmountKeyNoPrice c) = c
commodity (MixedAmountKeyTotalPrice c _) = c
commodity (MixedAmountKeyUnitPrice c _ _) = c
pCommodity (MixedAmountKeyNoPrice _) = Nothing
pCommodity (MixedAmountKeyTotalPrice _ pc) = Just pc
pCommodity (MixedAmountKeyUnitPrice _ pc _) = Just pc
pPrice (MixedAmountKeyNoPrice _) = Nothing
pPrice (MixedAmountKeyTotalPrice _ _) = Nothing
pPrice (MixedAmountKeyUnitPrice _ _ q) = Just q
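-- A hedged example (added for illustration, not part of hledger): a property
-- restating the documented ordering with "X", "Y" and "Z" generalised. For
-- any commodities x, y and any price commodity z, the priceless key for x
-- sorts before the total-price key for x, and keys for different commodities
-- are ordered by commodity alone.
_mixedAmountKeyOrderExample :: CommoditySymbol -> CommoditySymbol -> CommoditySymbol -> Bool
_mixedAmountKeyOrderExample x y z =
     MixedAmountKeyNoPrice x < MixedAmountKeyTotalPrice x z
  && (MixedAmountKeyTotalPrice x z < MixedAmountKeyNoPrice y) == (x < y)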
data PostingType = RegularPosting | VirtualPosting | BalancedVirtualPosting
deriving (Eq,Show,Generic)
type TagName = Text
type TagValue = Text
type Tag = (TagName, TagValue) -- ^ A tag name and (possibly empty) value.
type DateTag = (TagName, Day)
-- | The status of a transaction or posting, recorded with a status mark
-- (nothing, !, or *). What these mean is ultimately user defined.
data Status = Unmarked | Pending | Cleared
deriving (Eq,Ord,Bounded,Enum,Generic)
instance Show Status where -- custom show.. bad idea.. don't do it..
show Unmarked = ""
show Pending = "!"
show Cleared = "*"
-- | A balance assertion is a declaration about an account's expected balance
-- at a certain point (posting date and parse order). Balance assertions provide
-- additional error checking and readability to a journal file.
--
-- The 'BalanceAssertion' type is also used to represent balance assignments,
-- which instruct hledger what an account's balance should become at a certain
-- point.
--
-- Different kinds of balance assertions are discussed eg on #290.
-- Variables include:
--
-- - which postings are to be summed (real/virtual; unmarked/pending/cleared; this account/this account including subs)
--
-- - which commodities within the balance are to be checked
--
-- - whether to do a partial or a total check (disallowing other commodities)
--
-- I suspect we want:
--
-- 1. partial, subaccount-exclusive, Ledger-compatible assertions. Because
-- they're what we've always had, and removing them would break some
-- journals unnecessarily. Implemented with = syntax.
--
-- 2. total assertions. Because otherwise assertions are a bit leaky.
-- Implemented with == syntax.
--
-- 3. subaccount-inclusive assertions. Because that's something folks need.
-- Not implemented.
--
-- 4. flexible assertions allowing custom criteria (perhaps arbitrary
-- queries). Because power users have diverse needs and want to try out
-- different schemes (assert cleared balances, assert balance from real or
-- virtual postings, etc.). Not implemented.
--
-- 5. multicommodity assertions, asserting the balance of multiple commodities
-- at once. Not implemented, requires #934.
--
data BalanceAssertion = BalanceAssertion {
baamount :: Amount, -- ^ the expected balance in a particular commodity
batotal :: Bool, -- ^ disallow additional non-asserted commodities ?
bainclusive :: Bool, -- ^ include subaccounts when calculating the actual balance ?
baposition :: SourcePos -- ^ the assertion's file position, for error reporting
} deriving (Eq,Generic,Show)
data Posting = Posting {
pdate :: Maybe Day, -- ^ this posting's date, if different from the transaction's
pdate2 :: Maybe Day, -- ^ this posting's secondary date, if different from the transaction's
pstatus :: Status,
paccount :: AccountName,
pamount :: MixedAmount,
pcomment :: Text, -- ^ this posting's comment lines, as a single non-indented multi-line string
ptype :: PostingType,
ptags :: [Tag], -- ^ tag names and values, extracted from the posting comment
-- and (after finalisation) the posting account's directive if any
pbalanceassertion :: Maybe BalanceAssertion, -- ^ an expected balance in the account after this posting,
-- in a single commodity, excluding subaccounts.
ptransaction :: Maybe Transaction, -- ^ this posting's parent transaction (co-recursive types).
-- Tying this knot gets tedious, Maybe makes it easier/optional.
poriginal :: Maybe Posting -- ^ When this posting has been transformed in some way
-- (eg its amount or price was inferred, or the account name was
-- changed by a pivot or budget report), this references the original
-- untransformed posting (which will have Nothing in this field).
} deriving (Generic)
-- The equality test for postings ignores the parent transaction's
-- identity, to avoid recursing ad infinitum.
-- XXX could check that it's Just or Nothing.
instance Eq Posting where
(==) (Posting a1 b1 c1 d1 e1 f1 g1 h1 i1 _ _) (Posting a2 b2 c2 d2 e2 f2 g2 h2 i2 _ _) = a1==a2 && b1==b2 && c1==c2 && d1==d2 && e1==e2 && f1==f2 && g1==g2 && h1==h2 && i1==i2
-- | Posting's show instance elides the parent transaction so as not to recurse forever.
instance Show Posting where
show Posting{..} = "PostingPP {" ++ intercalate ", " [
"pdate=" ++ show (show pdate)
,"pdate2=" ++ show (show pdate2)
,"pstatus=" ++ show (show pstatus)
,"paccount=" ++ show paccount
,"pamount=" ++ show pamount
,"pcomment=" ++ show pcomment
,"ptype=" ++ show ptype
,"ptags=" ++ show ptags
,"pbalanceassertion=" ++ show pbalanceassertion
,"ptransaction=" ++ show (ptransaction $> "txn")
,"poriginal=" ++ show poriginal
] ++ "}"
data Transaction = Transaction {
tindex :: Integer, -- ^ this transaction's 1-based position in the transaction stream, or 0 when not available
tprecedingcomment :: Text, -- ^ any comment lines immediately preceding this transaction
tsourcepos :: (SourcePos, SourcePos), -- ^ the file position where the date starts, and where the last posting ends
tdate :: Day,
tdate2 :: Maybe Day,
tstatus :: Status,
tcode :: Text,
tdescription :: Text,
tcomment :: Text, -- ^ this transaction's comment lines, as a single non-indented multi-line string
ttags :: [Tag], -- ^ tag names and values, extracted from the comment
tpostings :: [Posting] -- ^ this transaction's postings
} deriving (Eq,Generic,Show)
-- | A transaction modifier rule. This has a query which matches postings
-- in the journal, and a list of transformations to apply to those
-- postings or their transactions. Currently there is one kind of transformation:
-- the TMPostingRule, which adds a posting ("auto posting") to the transaction,
-- optionally setting its amount to the matched posting's amount multiplied by a constant.
data TransactionModifier = TransactionModifier {
tmquerytxt :: Text,
tmpostingrules :: [TMPostingRule]
} deriving (Eq,Generic,Show)
nulltransactionmodifier = TransactionModifier{
tmquerytxt = ""
,tmpostingrules = []
}
-- | A transaction modifier transformation, which adds an extra posting
-- to the matched posting's transaction.
-- Can be like a regular posting, or can have the tmprIsMultiplier flag set,
-- indicating that it's a multiplier for the matched posting's amount.
data TMPostingRule = TMPostingRule
{ tmprPosting :: Posting
, tmprIsMultiplier :: Bool
} deriving (Eq,Generic,Show)
-- | A periodic transaction rule, describing a transaction that recurs.
data PeriodicTransaction = PeriodicTransaction {
ptperiodexpr :: Text, -- ^ the period expression as written
ptinterval :: Interval, -- ^ the interval at which this transaction recurs
ptspan :: DateSpan, -- ^ the (possibly unbounded) period during which this transaction recurs. Contains a whole number of intervals.
--
ptstatus :: Status, -- ^ some of Transaction's fields
ptcode :: Text,
ptdescription :: Text,
ptcomment :: Text,
pttags :: [Tag],
ptpostings :: [Posting]
} deriving (Eq,Generic) -- , Show in PeriodicTransaction.hs
nullperiodictransaction = PeriodicTransaction{
ptperiodexpr = ""
,ptinterval = def
,ptspan = def
,ptstatus = Unmarked
,ptcode = ""
,ptdescription = ""
,ptcomment = ""
,pttags = []
,ptpostings = []
}
data TimeclockCode = SetBalance | SetRequiredHours | In | Out | FinalOut deriving (Eq,Ord,Generic)
data TimeclockEntry = TimeclockEntry {
tlsourcepos :: SourcePos,
tlcode :: TimeclockCode,
tldatetime :: LocalTime,
tlaccount :: AccountName,
tldescription :: Text
} deriving (Eq,Ord,Generic)
-- | A market price declaration made by the journal format's P directive.
-- It declares two things: a historical exchange rate between two commodities,
-- and an amount display style for the second commodity.
data PriceDirective = PriceDirective {
pddate :: Day
,pdcommodity :: CommoditySymbol
,pdamount :: Amount
} deriving (Eq,Ord,Generic,Show)
-- | A historical market price (exchange rate) from one commodity to another.
-- A more concise form of a PriceDirective, without the amount display info.
data MarketPrice = MarketPrice {
mpdate :: Day -- ^ Date on which this price becomes effective.
,mpfrom :: CommoditySymbol -- ^ The commodity being converted from.
,mpto :: CommoditySymbol -- ^ The commodity being converted to.
,mprate :: Quantity -- ^ One unit of the "from" commodity is worth this quantity of the "to" commodity.
} deriving (Eq,Ord,Generic, Show)
-- additional valuation-related types in Valuation.hs
-- | A Journal, containing transactions and various other things.
-- The basic data model for hledger.
--
-- This is used during parsing (as the type alias ParsedJournal), and
-- then finalised/validated for use as a Journal. Some extra
-- parsing-related fields are included for convenience, at least for
-- now. In a ParsedJournal these are updated as parsing proceeds, in a
-- Journal they represent the final state at end of parsing (used eg
-- by the add command).
--
data Journal = Journal {
-- parsing-related data
jparsedefaultyear :: Maybe Year -- ^ the current default year, specified by the most recent Y directive (or current date)
,jparsedefaultcommodity :: Maybe (CommoditySymbol,AmountStyle) -- ^ the current default commodity and its format, specified by the most recent D directive
,jparsedecimalmark :: Maybe DecimalMark -- ^ the character to always parse as decimal point, if set by CsvReader's decimal-mark (or a future journal directive)
,jparseparentaccounts :: [AccountName] -- ^ the current stack of parent account names, specified by apply account directives
,jparsealiases :: [AccountAlias] -- ^ the current account name aliases in effect, specified by alias directives (& options ?)
-- ,jparsetransactioncount :: Integer -- ^ the current count of transactions parsed so far (only journal format txns, currently)
,jparsetimeclockentries :: [TimeclockEntry] -- ^ timeclock sessions which have not been clocked out
,jincludefilestack :: [FilePath]
-- principal data
,jdeclaredpayees :: [(Payee,PayeeDeclarationInfo)] -- ^ Payees declared by payee directives, in parse order (after journal finalisation)
,jdeclaredaccounts :: [(AccountName,AccountDeclarationInfo)] -- ^ Accounts declared by account directives, in parse order (after journal finalisation)
,jdeclaredaccounttags :: M.Map AccountName [Tag] -- ^ Accounts which have tags declared in their directives, and those tags. (Does not include parents' tags.)
,jdeclaredaccounttypes :: M.Map AccountType [AccountName] -- ^ Accounts whose type has been explicitly declared in their account directives, grouped by type.
,jaccounttypes :: M.Map AccountName AccountType -- ^ All accounts for which a type has been declared or can be inferred from its parent or its name.
,jglobalcommoditystyles :: M.Map CommoditySymbol AmountStyle -- ^ per-commodity display styles declared globally, eg by command line option or import command
,jcommodities :: M.Map CommoditySymbol Commodity -- ^ commodities and formats declared by commodity directives
,jinferredcommodities :: M.Map CommoditySymbol AmountStyle -- ^ commodities and formats inferred from journal amounts
,jpricedirectives :: [PriceDirective] -- ^ Declarations of market prices by P directives, in parse order (after journal finalisation)
,jinferredmarketprices :: [MarketPrice] -- ^ Market prices implied by transactions, in parse order (after journal finalisation)
,jtxnmodifiers :: [TransactionModifier]
,jperiodictxns :: [PeriodicTransaction]
,jtxns :: [Transaction]
,jfinalcommentlines :: Text -- ^ any final trailing comments in the (main) journal file
,jfiles :: [(FilePath, Text)] -- ^ the file path and raw text of the main and
-- any included journal files. The main file is first,
-- followed by any included files in the order encountered.
-- TODO: FilePath is a sloppy type here, don't assume it's a
-- real file; values like "", "-", "(string)" can be seen
,jlastreadtime :: POSIXTime -- ^ when this journal was last read from its file(s)
-- NOTE: after adding new fields, eg involving account names, consider updating
  -- the Anon instance in Hledger.Cli.Anon
} deriving (Eq, Generic)
-- | A journal in the process of being parsed, not yet finalised.
-- The data is partial, and list fields are in reverse order.
type ParsedJournal = Journal
-- | The id of a data format understood by hledger, eg @journal@ or @csv@.
-- The --output-format option selects one of these for output.
type StorageFormat = String
-- | Extra information found in a payee directive.
data PayeeDeclarationInfo = PayeeDeclarationInfo {
pdicomment :: Text -- ^ any comment lines following the payee directive
,pditags :: [Tag] -- ^ tags extracted from the comment, if any
} deriving (Eq,Show,Generic)
nullpayeedeclarationinfo = PayeeDeclarationInfo {
pdicomment = ""
,pditags = []
}
-- | Extra information about an account that can be derived from
-- its account directive (and the other account directives).
data AccountDeclarationInfo = AccountDeclarationInfo {
adicomment :: Text -- ^ any comment lines following an account directive for this account
,aditags :: [Tag] -- ^ tags extracted from the account comment, if any
,adideclarationorder :: Int -- ^ the order in which this account was declared,
-- relative to other account declarations, during parsing (1..)
} deriving (Eq,Show,Generic)
nullaccountdeclarationinfo = AccountDeclarationInfo {
adicomment = ""
,aditags = []
,adideclarationorder = 0
}
-- | An account, with its balances, parent/subaccount relationships, etc.
-- Only the name is required; the other fields are added when needed.
data Account = Account {
aname :: AccountName -- ^ this account's full name
,adeclarationinfo :: Maybe AccountDeclarationInfo -- ^ optional extra info from account directives
-- relationships in the tree
,asubs :: [Account] -- ^ this account's sub-accounts
,aparent :: Maybe Account -- ^ parent account
,aboring :: Bool -- ^ used in the accounts report to label elidable parents
-- balance information
,anumpostings :: Int -- ^ the number of postings to this account
,aebalance :: MixedAmount -- ^ this account's balance, excluding subaccounts
,aibalance :: MixedAmount -- ^ this account's balance, including subaccounts
} deriving (Generic)
-- | Whether an account's balance is normally a positive number (in
-- accounting terms, a debit balance) or a negative number (credit balance).
-- Assets and expenses are normally positive (debit), while liabilities, equity
-- and income are normally negative (credit).
-- https://en.wikipedia.org/wiki/Normal_balance
data NormalSign = NormallyPositive | NormallyNegative deriving (Show, Eq)
-- | A Ledger has the journal it derives from, and the accounts
-- derived from that. Accounts are accessible both list-wise and
-- tree-wise, since each one knows its parent and subs; the first
-- account is the root of the tree and always exists.
data Ledger = Ledger {
ljournal :: Journal
,laccounts :: [Account]
} deriving (Generic)
| simonmichael/hledger | hledger-lib/Hledger/Data/Types.hs | gpl-3.0 | 29,659 | 0 | 22 | 7,517 | 3,911 | 2,315 | 1,596 | 380 | 7 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.AutoScaling.CreateOrUpdateTags
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates or updates tags for the specified Auto Scaling group.
--
-- A tag is defined by its resource ID, resource type, key, value, and
-- propagate flag. The value and the propagate flag are optional
-- parameters. The only supported resource type is 'auto-scaling-group',
-- and the resource ID must be the name of the group. The
-- 'PropagateAtLaunch' flag determines whether the tag is added to
-- instances launched in the group. Valid values are 'true' or 'false'.
--
-- When you specify a tag with a key that already exists, the operation
-- overwrites the previous tag definition, and you do not get an error
-- message.
--
-- For more information, see
-- <http://docs.aws.amazon.com/AutoScaling/latest/DeveloperGuide/ASTagging.html Tagging Auto Scaling Groups and Instances>
-- in the /Auto Scaling Developer Guide/.
--
-- /See:/ <http://docs.aws.amazon.com/AutoScaling/latest/APIReference/API_CreateOrUpdateTags.html AWS API Reference> for CreateOrUpdateTags.
module Network.AWS.AutoScaling.CreateOrUpdateTags
(
-- * Creating a Request
createOrUpdateTags
, CreateOrUpdateTags
-- * Request Lenses
, coutTags
-- * Destructuring the Response
, createOrUpdateTagsResponse
, CreateOrUpdateTagsResponse
) where
import Network.AWS.AutoScaling.Types
import Network.AWS.AutoScaling.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'createOrUpdateTags' smart constructor.
newtype CreateOrUpdateTags = CreateOrUpdateTags'
{ _coutTags :: [Tag]
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'CreateOrUpdateTags' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'coutTags'
createOrUpdateTags
:: CreateOrUpdateTags
createOrUpdateTags =
CreateOrUpdateTags'
{ _coutTags = mempty
}
-- | One or more tags.
coutTags :: Lens' CreateOrUpdateTags [Tag]
coutTags = lens _coutTags (\ s a -> s{_coutTags = a}) . _Coerce;
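-- A hedged usage sketch (added for illustration, not part of the generated
-- module): inside this module a request can be built with a plain record
-- update; callers outside it would normally set the same field through the
-- 'coutTags' lens with their preferred lens library. 'ts' stands for a
-- hypothetical list of tags built elsewhere.
_exampleRequest :: [Tag] -> CreateOrUpdateTags
_exampleRequest ts = createOrUpdateTags { _coutTags = ts }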
instance AWSRequest CreateOrUpdateTags where
type Rs CreateOrUpdateTags =
CreateOrUpdateTagsResponse
request = postQuery autoScaling
response = receiveNull CreateOrUpdateTagsResponse'
instance ToHeaders CreateOrUpdateTags where
toHeaders = const mempty
instance ToPath CreateOrUpdateTags where
toPath = const "/"
instance ToQuery CreateOrUpdateTags where
toQuery CreateOrUpdateTags'{..}
= mconcat
["Action" =: ("CreateOrUpdateTags" :: ByteString),
"Version" =: ("2011-01-01" :: ByteString),
"Tags" =: toQueryList "member" _coutTags]
-- | /See:/ 'createOrUpdateTagsResponse' smart constructor.
data CreateOrUpdateTagsResponse =
CreateOrUpdateTagsResponse'
deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'CreateOrUpdateTagsResponse' with the minimum fields required to make a request.
--
createOrUpdateTagsResponse
:: CreateOrUpdateTagsResponse
createOrUpdateTagsResponse = CreateOrUpdateTagsResponse'
| fmapfmapfmap/amazonka | amazonka-autoscaling/gen/Network/AWS/AutoScaling/CreateOrUpdateTags.hs | mpl-2.0 | 3,835 | 0 | 10 | 724 | 385 | 241 | 144 | 51 | 1 |
module HN.Optimizer.FormalArgumentsDeleter (runB) where
import Compiler.Hoopl hiding ((<*>))
import Safe.Exact
import HN.Intermediate
import HN.Optimizer.Node
import HN.Optimizer.Pass
import HN.Optimizer.ExpressionRewriter
import HN.Optimizer.ArgumentValues (ArgFact)
import HN.Optimizer.Utils
rewriteB :: DefinitionNode -> FactBase ArgFact -> Maybe DefinitionNode
rewriteB = rewriteNode WithoutChildren $ \f n -> case n of
Application aa @ (Atom a) b -> fmap (smartApplication aa . map fst) . rewrite WithChildren deleteArg
=<< zipExactMay b
=<< convertFact
=<< lookupFact a f
_ -> Nothing
convertFact :: ArgFact -> Maybe [WithTopAndBot ExpressionFix]
convertFact ((PElem a, _), _) = Just a
convertFact _ = Nothing
smartApplication a [] = a
smartApplication a b = Application a b
deleteArg :: Rewrite [(ExpressionFix, WithTopAndBot ExpressionFix)]
deleteArg ((_, PElem _) : tail) = Just tail
deleteArg _ = Nothing
runB :: Pass ArgFact ArgFact
runB = runPassB PassParams
{ ppConvertFacts = const . convertFactBase
, ppTransfer = noTransferMapB
, ppRewrite = pureBRewrite $ rewriteExitB rewriteB
}
| kayuri/HNC | HN/Optimizer/FormalArgumentsDeleter.hs | lgpl-3.0 | 1,125 | 5 | 17 | 177 | 371 | 198 | 173 | 29 | 2 |
module System.Console.Haskeline.Prefs(
Prefs(..),
defaultPrefs,
readPrefs,
CompletionType(..),
BellStyle(..),
EditMode(..),
HistoryDuplicates(..),
lookupKeyBinding
) where
import Control.Monad.Catch (handle)
import Control.Exception (IOException)
import Data.Char(isSpace,toLower)
import Data.List(foldl')
import qualified Data.Map as Map
import System.Console.Haskeline.Key
{- |
'Prefs' allow the user to customize the terminal-style line-editing interface. They are
read by default from @~/.haskeline@; to override that behavior, use
'readPrefs' and @runInputTWithPrefs@.
Each line of a @.haskeline@ file defines
one field of the 'Prefs' datatype; field names are case-insensitive and
unparseable lines are ignored. For example:
> editMode: Vi
> completionType: MenuCompletion
> maxhistorysize: Just 40
-}
data Prefs = Prefs { bellStyle :: !BellStyle,
editMode :: !EditMode,
maxHistorySize :: !(Maybe Int),
historyDuplicates :: HistoryDuplicates,
completionType :: !CompletionType,
completionPaging :: !Bool,
-- ^ When listing completion alternatives, only display
-- one screen of possibilities at a time.
completionPromptLimit :: !(Maybe Int),
-- ^ If more than this number of completion
-- possibilities are found, then ask before listing
-- them.
listCompletionsImmediately :: !Bool,
-- ^ If 'False', completions with multiple possibilities
-- will ring the bell and only display them if the user
-- presses @TAB@ again.
customBindings :: Map.Map Key [Key],
-- (termName, keysequence, key)
customKeySequences :: [(Maybe String, String,Key)]
}
deriving Show
data CompletionType = ListCompletion | MenuCompletion
deriving (Read,Show)
data BellStyle = NoBell | VisualBell | AudibleBell
deriving (Show, Read)
data EditMode = Vi | Emacs
deriving (Show,Read)
data HistoryDuplicates = AlwaysAdd | IgnoreConsecutive | IgnoreAll
deriving (Show,Read)
-- | The default preferences which may be overwritten in the
-- @.haskeline@ file.
defaultPrefs :: Prefs
defaultPrefs = Prefs {bellStyle = AudibleBell,
maxHistorySize = Just 100,
editMode = Emacs,
completionType = ListCompletion,
completionPaging = True,
completionPromptLimit = Just 100,
listCompletionsImmediately = True,
historyDuplicates = AlwaysAdd,
customBindings = Map.empty,
customKeySequences = []
}
mkSettor :: Read a => (a -> Prefs -> Prefs) -> String -> Prefs -> Prefs
mkSettor f str = maybe id f (readMaybe str)
readMaybe :: Read a => String -> Maybe a
readMaybe str = case reads str of
[(x,_)] -> Just x
_ -> Nothing
settors :: [(String, String -> Prefs -> Prefs)]
settors = [("bellstyle", mkSettor $ \x p -> p {bellStyle = x})
,("editmode", mkSettor $ \x p -> p {editMode = x})
,("maxhistorysize", mkSettor $ \x p -> p {maxHistorySize = x})
,("completiontype", mkSettor $ \x p -> p {completionType = x})
,("completionpaging", mkSettor $ \x p -> p {completionPaging = x})
,("completionpromptlimit", mkSettor $ \x p -> p {completionPromptLimit = x})
,("listcompletionsimmediately", mkSettor $ \x p -> p {listCompletionsImmediately = x})
,("historyduplicates", mkSettor $ \x p -> p {historyDuplicates = x})
,("bind", addCustomBinding)
,("keyseq", addCustomKeySequence)
]
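-- A hedged example (added for illustration, not part of haskeline): applying
-- the "maxhistorysize" settor to its raw field text, much as 'readPrefs'
-- below does for a configuration line "maxhistorysize: Just 40", yields
-- prefs with @maxHistorySize == Just 40@.
_examplePrefs :: Prefs
_examplePrefs = case lookup "maxhistorysize" settors of
                    Just set -> set " Just 40" defaultPrefs
                    Nothing -> defaultPrefs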
addCustomBinding :: String -> Prefs -> Prefs
addCustomBinding str p = case mapM parseKey (words str) of
Just (k:ks) -> p {customBindings = Map.insert k ks (customBindings p)}
_ -> p
addCustomKeySequence :: String -> Prefs -> Prefs
addCustomKeySequence str = maybe id addKS maybeParse
where
maybeParse :: Maybe (Maybe String, String,Key)
maybeParse = case words str of
[cstr,kstr] -> parseWords Nothing cstr kstr
[term,cstr,kstr] -> parseWords (Just term) cstr kstr
_ -> Nothing
parseWords mterm cstr kstr = do
k <- parseKey kstr
cs <- readMaybe cstr
return (mterm,cs,k)
addKS ks p = p {customKeySequences = ks:customKeySequences p}
lookupKeyBinding :: Key -> Prefs -> [Key]
lookupKeyBinding k = Map.findWithDefault [k] k . customBindings
-- | Read 'Prefs' from a given file. If there is an error reading the file,
-- the 'defaultPrefs' will be returned.
readPrefs :: FilePath -> IO Prefs
readPrefs file = handle (\(_::IOException) -> return defaultPrefs) $ do
ls <- fmap lines $ readFile file
return $! foldl' applyField defaultPrefs ls
where
applyField p l = case break (==':') l of
(name,val) -> case lookup (map toLower $ trimSpaces name) settors of
Nothing -> p
Just set -> set (drop 1 val) p -- drop initial ":", don't crash if val==""
trimSpaces = dropWhile isSpace . reverse . dropWhile isSpace . reverse
| judah/haskeline | System/Console/Haskeline/Prefs.hs | bsd-3-clause | 5,774 | 0 | 15 | 1,979 | 1,295 | 730 | 565 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
module Stencil2 where
import Control.Monad
import Control.Exception
import System.Random.MWC
import Data.Array.Unboxed hiding (Array)
import Data.Array.Accelerate hiding (round, min, max, fromIntegral)
import qualified Data.Array.IArray as IArray
stencil2D2 :: Floating (Exp a) => Stencil3x3 a -> Stencil3x3 a -> Exp a
stencil2D2 ((_,t,_), (_,x,_), (_,b,_))
((_,_,_), (l,y,r), (_,_,_)) = t + b + l + r - ((x+y) / 2)
test_stencil2_2D :: Int -> IO (() -> UArray (Int,Int) Float, () -> Acc (Array DIM2 Float))
test_stencil2_2D n2 = withSystemRandom $ \gen -> do
let n = round $ sqrt (fromIntegral n2 :: Double)
m = n * 2
u = m `div` 3
v = n + m
m1 <- listArray ((0,0),(n-1,m-1)) `fmap` replicateM (n*m) (uniformR (-1,1) gen) :: IO (UArray (Int,Int) Float)
m2 <- listArray ((0,0),(u-1,v-1)) `fmap` replicateM (u*v) (uniformR (-1,1) gen) :: IO (UArray (Int,Int) Float)
m1' <- let m1' = fromIArray m1 in evaluate (m1' `indexArray` (Z:.0:.0)) >> return m1'
m2' <- let m2' = fromIArray m2 in evaluate (m2' `indexArray` (Z:.0:.0)) >> return m2'
--
return (\() -> run_ref m1 m2, \() -> run_acc m1' m2')
where
run_acc xs ys = stencil2 stencil2D2 Mirror (use xs) Wrap (use ys)
run_ref xs ys =
let (_,(n,m)) = bounds xs
(_,(u,v)) = bounds ys
sh = ((0,0), (n `min` u, m `min` v))
-- boundary conditions are placed on the *source* arrays
--
get1 (x,y) = xs IArray.! (mirror n x, mirror m y)
get2 (x,y) = ys IArray.! (wrap u x, wrap v y)
mirror sz i
| i < 0 = -i
| i > sz = sz - (i-sz)
| otherwise = i
wrap sz i
| i < 0 = sz + i + 1
| i > sz = i - sz - 1
| otherwise = i
f (ix,iy) = let t = get1 (ix, iy-1)
b = get1 (ix, iy+1)
x = get1 (ix, iy)
l = get2 (ix-1,iy)
r = get2 (ix+1,iy)
y = get2 (ix, iy)
in
t + b + l + r - ((x+y) / 2)
in
array sh [(ix, f ix) | ix <- range sh]
-- Main
-- ----
run2D :: String -> Int -> IO (() -> UArray (Int,Int) Float, () -> Acc (Array DIM2 Float))
run2D "2D" = test_stencil2_2D
run2D x = error $ "unknown variant: " ++ x
| wilbowma/accelerate | accelerate-examples/tests/primitives/Stencil2.hs | bsd-3-clause | 2,440 | 0 | 18 | 858 | 1,227 | 665 | 562 | 48 | 1 |
-- -----------------------------------------------------------------------------
--
-- CharSet.hs, part of Alex
--
-- (c) Chris Dornan 1995-2000, Simon Marlow 2003
--
-- An abstract CharSet type for Alex. To begin with we'll use Alex's
-- original definition of sets as functions, then later will
-- transition to something that will work better with Unicode.
--
-- -----------------------------------------------------------------------------
module CharSet (
setSingleton,
Encoding(..),
Byte,
ByteSet,
byteSetSingleton,
byteRanges,
byteSetRange,
CharSet, -- abstract
emptyCharSet,
charSetSingleton,
charSet,
charSetMinus,
charSetComplement,
charSetRange,
charSetUnion,
charSetQuote,
setUnions,
byteSetToArray,
byteSetElems,
byteSetElem
) where
import Data.Array
import Data.Ranged
import Data.Word
import Data.Maybe (catMaybes)
import Data.Char (chr,ord)
import UTF8
type Byte = Word8
-- Implementation as functions
type CharSet = RSet Char
type ByteSet = RSet Byte
-- type Utf8Set = RSet [Byte]
type Utf8Range = Span [Byte]
data Encoding = Latin1 | UTF8
emptyCharSet :: CharSet
emptyCharSet = rSetEmpty
byteSetElem :: ByteSet -> Byte -> Bool
byteSetElem = rSetHas
charSetSingleton :: Char -> CharSet
charSetSingleton = rSingleton
setSingleton :: DiscreteOrdered a => a -> RSet a
setSingleton = rSingleton
charSet :: [Char] -> CharSet
charSet = setUnions . fmap charSetSingleton
charSetMinus :: CharSet -> CharSet -> CharSet
charSetMinus = rSetDifference
charSetUnion :: CharSet -> CharSet -> CharSet
charSetUnion = rSetUnion
setUnions :: DiscreteOrdered a => [RSet a] -> RSet a
setUnions = foldr rSetUnion rSetEmpty
charSetComplement :: CharSet -> CharSet
charSetComplement = rSetNegation
charSetRange :: Char -> Char -> CharSet
charSetRange c1 c2 = makeRangedSet [Range (BoundaryBelow c1) (BoundaryAbove c2)]
byteSetToArray :: ByteSet -> Array Byte Bool
byteSetToArray set = array (fst (head ass), fst (last ass)) ass
where ass = [(c,rSetHas set c) | c <- [0..0xff]]
byteSetElems :: ByteSet -> [Byte]
byteSetElems set = [c | c <- [0 .. 0xff], rSetHas set c]
charToRanges :: Encoding -> CharSet -> [Utf8Range]
charToRanges Latin1 =
map (fmap ((: []).fromIntegral.ord)) -- Span [Byte]
. catMaybes
. fmap (charRangeToCharSpan False)
. rSetRanges
charToRanges UTF8 =
concat -- Span [Byte]
. fmap toUtfRange -- [Span [Byte]]
. fmap (fmap UTF8.encode) -- Span [Byte]
. catMaybes
. fmap (charRangeToCharSpan True)
. rSetRanges
-- | Turns a range of characters expressed as a pair of UTF-8 byte sequences into a set of ranges, in which each range of the resulting set is between pairs of sequences of the same length
toUtfRange :: Span [Byte] -> [Span [Byte]]
toUtfRange (Span x y) = fix x y
fix :: [Byte] -> [Byte] -> [Span [Byte]]
fix x y
| length x == length y = [Span x y]
| length x == 1 = Span x [0x7F] : fix [0xC2,0x80] y
| length x == 2 = Span x [0xDF,0xBF] : fix [0xE0,0x80,0x80] y
| length x == 3 = Span x [0xEF,0xBF,0xBF] : fix [0xF0,0x80,0x80,0x80] y
| otherwise = error "fix: incorrect input given"
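-- A hedged example (added for illustration, not part of alex): splitting the
-- span from '\x7E' (UTF-8 [0x7E]) to '\x100' (UTF-8 [0xC4,0x80]) should give
-- a single-byte span followed by a two-byte span, ie
-- [Span [0x7E] [0x7F], Span [0xC2,0x80] [0xC4,0x80]].
_fixExample :: [Span [Byte]]
_fixExample = fix [0x7E] [0xC4,0x80]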
byteRangeToBytePair :: Span [Byte] -> ([Byte],[Byte])
byteRangeToBytePair (Span x y) = (x,y)
data Span a = Span a a -- lower bound inclusive, higher bound exclusive
-- (SDM: upper bound inclusive, surely??)
instance Functor Span where
fmap f (Span x y) = Span (f x) (f y)
charRangeToCharSpan :: Bool -> Range Char -> Maybe (Span Char)
charRangeToCharSpan _ (Range BoundaryAboveAll _) = Nothing
charRangeToCharSpan _ (Range _ BoundaryBelowAll) = Nothing
charRangeToCharSpan uni (Range x y) = Just (Span (l x) (h y))
where l b = case b of
BoundaryBelowAll -> '\0'
BoundaryBelow a -> a
BoundaryAbove a -> succ a
BoundaryAboveAll -> error "panic: charRangeToCharSpan"
h b = case b of
BoundaryBelowAll -> error "panic: charRangeToCharSpan"
BoundaryBelow a -> pred a
BoundaryAbove a -> a
BoundaryAboveAll | uni -> chr 0x10ffff
| otherwise -> chr 0xff
byteRanges :: Encoding -> CharSet -> [([Byte],[Byte])]
byteRanges enc = fmap byteRangeToBytePair . charToRanges enc
byteSetRange :: Byte -> Byte -> ByteSet
byteSetRange c1 c2 = makeRangedSet [Range (BoundaryBelow c1) (BoundaryAbove c2)]
byteSetSingleton :: Byte -> ByteSet
byteSetSingleton = rSingleton
instance DiscreteOrdered Word8 where
adjacent x y = x + 1 == y
adjacentBelow 0 = Nothing
adjacentBelow x = Just (x-1)
-- TODO: More efficient generated code!
charSetQuote :: CharSet -> String
charSetQuote s = "(\\c -> " ++ foldr (\x y -> x ++ " || " ++ y) "False" (map quoteRange (rSetRanges s)) ++ ")"
where quoteRange (Range l h) = quoteL l ++ " && " ++ quoteH h
quoteL (BoundaryAbove a) = "c > " ++ show a
quoteL (BoundaryBelow a) = "c >= " ++ show a
quoteL (BoundaryAboveAll) = "False"
quoteL (BoundaryBelowAll) = "True"
quoteH (BoundaryAbove a) = "c <= " ++ show a
quoteH (BoundaryBelow a) = "c < " ++ show a
quoteH (BoundaryAboveAll) = "True"
quoteH (BoundaryBelowAll) = "False"
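-- A hedged example (added for illustration, not part of alex): quoting a
-- simple range should produce the source text of a predicate over it,
-- something like "(\\c -> c >= 'a' && c <= 'c' || False)".
_charSetQuoteExample :: String
_charSetQuoteExample = charSetQuote (charSetRange 'a' 'c')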
| beni55/alex | src/CharSet.hs | bsd-3-clause | 5,261 | 0 | 15 | 1,183 | 1,610 | 847 | 763 | 120 | 7 |
{-# OPTIONS_GHC -fno-implicit-prelude -#include "HsBase.h" #-}
#undef DEBUG_DUMP
-----------------------------------------------------------------------------
-- |
-- Module : GHC.IO
-- Copyright : (c) The University of Glasgow, 1992-2001
-- License : see libraries/base/LICENSE
--
-- Maintainer : [email protected]
-- Stability : internal
-- Portability : non-portable
--
-- String I\/O functions
--
-----------------------------------------------------------------------------
-- #hide
module GHC.IO (
hWaitForInput, hGetChar, hGetLine, hGetContents, hPutChar, hPutStr,
commitBuffer', -- hack, see below
hGetcBuffered, -- needed by ghc/compiler/utils/StringBuffer.lhs
hGetBuf, hGetBufNonBlocking, hPutBuf, hPutBufNonBlocking, slurpFile,
memcpy_ba_baoff,
memcpy_ptr_baoff,
memcpy_baoff_ba,
memcpy_baoff_ptr,
) where
import Foreign
import Foreign.C
import System.IO.Error
import Data.Maybe
import Control.Monad
import System.Posix.Internals
import GHC.Enum
import GHC.Base
import GHC.IOBase
import GHC.Handle -- much of the real stuff is in here
import GHC.Real
import GHC.Num
import GHC.Show
import GHC.List
import GHC.Exception ( ioError, catch )
#ifdef mingw32_HOST_OS
import GHC.Conc
#endif
-- ---------------------------------------------------------------------------
-- Simple input operations
-- If hWaitForInput finds anything in the Handle's buffer, it
-- immediately returns. If not, it tries to read from the underlying
-- OS handle. Notice that for buffered Handles connected to terminals
-- this means waiting until a complete line is available.
-- | Computation 'hWaitForInput' @hdl t@
-- waits until input is available on handle @hdl@.
-- It returns 'True' as soon as input is available on @hdl@,
-- or 'False' if no input is available within @t@ milliseconds.
--
-- If @t@ is less than zero, then @hWaitForInput@ waits indefinitely.
-- NOTE: in the current implementation, this is the only case that works
-- correctly (if @t@ is non-zero, then all other concurrent threads are
-- blocked until data is available).
--
-- This operation may fail with:
--
-- * 'isEOFError' if the end of file has been reached.
hWaitForInput :: Handle -> Int -> IO Bool
hWaitForInput h msecs = do
wantReadableHandle "hWaitForInput" h $ \ handle_ -> do
let ref = haBuffer handle_
buf <- readIORef ref
if not (bufferEmpty buf)
then return True
else do
if msecs < 0
then do buf' <- fillReadBuffer (haFD handle_) True
(haIsStream handle_) buf
writeIORef ref buf'
return True
else do r <- throwErrnoIfMinus1Retry "hWaitForInput" $
inputReady (fromIntegral (haFD handle_))
(fromIntegral msecs) (haIsStream handle_)
return (r /= 0)
foreign import ccall safe "inputReady"
inputReady :: CInt -> CInt -> Bool -> IO CInt
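-- A hedged usage sketch (added for illustration, not part of this module):
-- poll a handle for up to one second, reading a character only if input
-- becomes available in time.
_readIfReady :: Handle -> IO (Maybe Char)
_readIfReady h = do
  ready <- hWaitForInput h 1000
  if ready
        then do c <- hGetChar h; return (Just c)
        else return Nothing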
-- ---------------------------------------------------------------------------
-- hGetChar
-- | Computation 'hGetChar' @hdl@ reads a character from the file or
-- channel managed by @hdl@, blocking until a character is available.
--
-- This operation may fail with:
--
-- * 'isEOFError' if the end of file has been reached.
hGetChar :: Handle -> IO Char
hGetChar handle =
wantReadableHandle "hGetChar" handle $ \handle_ -> do
let fd = haFD handle_
ref = haBuffer handle_
buf <- readIORef ref
if not (bufferEmpty buf)
then hGetcBuffered fd ref buf
else do
-- buffer is empty.
case haBufferMode handle_ of
LineBuffering -> do
new_buf <- fillReadBuffer fd True (haIsStream handle_) buf
hGetcBuffered fd ref new_buf
BlockBuffering _ -> do
new_buf <- fillReadBuffer fd True (haIsStream handle_) buf
-- ^^^^
-- don't wait for a completely full buffer.
hGetcBuffered fd ref new_buf
NoBuffering -> do
-- make use of the minimal buffer we already have
let raw = bufBuf buf
r <- readRawBuffer "hGetChar" (fromIntegral fd) (haIsStream handle_) raw 0 1
if r == 0
then ioe_EOF
else do (c,_) <- readCharFromBuffer raw 0
return c
hGetcBuffered fd ref buf@Buffer{ bufBuf=b, bufRPtr=r, bufWPtr=w }
= do (c,r) <- readCharFromBuffer b r
let new_buf | r == w = buf{ bufRPtr=0, bufWPtr=0 }
| otherwise = buf{ bufRPtr=r }
writeIORef ref new_buf
return c
-- ---------------------------------------------------------------------------
-- hGetLine
-- ToDo: the unbuffered case is wrong: it doesn't lock the handle for
-- the duration.
-- | Computation 'hGetLine' @hdl@ reads a line from the file or
-- channel managed by @hdl@.
--
-- This operation may fail with:
--
-- * 'isEOFError' if the end of file is encountered when reading
-- the /first/ character of the line.
--
-- If 'hGetLine' encounters end-of-file at any other point while reading
-- in a line, it is treated as a line terminator and the (partial)
-- line is returned.
hGetLine :: Handle -> IO String
hGetLine h = do
m <- wantReadableHandle "hGetLine" h $ \ handle_ -> do
case haBufferMode handle_ of
NoBuffering -> return Nothing
LineBuffering -> do
l <- hGetLineBuffered handle_
return (Just l)
BlockBuffering _ -> do
l <- hGetLineBuffered handle_
return (Just l)
case m of
Nothing -> hGetLineUnBuffered h
Just l -> return l
hGetLineBuffered handle_ = do
let ref = haBuffer handle_
buf <- readIORef ref
hGetLineBufferedLoop handle_ ref buf []
hGetLineBufferedLoop handle_ ref
buf@Buffer{ bufRPtr=r, bufWPtr=w, bufBuf=raw } xss =
let
-- find the end-of-line character, if there is one
loop raw r
| r == w = return (False, w)
| otherwise = do
(c,r') <- readCharFromBuffer raw r
if c == '\n'
then return (True, r) -- NB. not r': don't include the '\n'
else loop raw r'
in do
(eol, off) <- loop raw r
#ifdef DEBUG_DUMP
puts ("hGetLineBufferedLoop: r=" ++ show r ++ ", w=" ++ show w ++ ", off=" ++ show off ++ "\n")
#endif
xs <- unpack raw r off
-- if eol == True, then off is the offset of the '\n'
-- otherwise off == w and the buffer is now empty.
if eol
then do if (w == off + 1)
then writeIORef ref buf{ bufRPtr=0, bufWPtr=0 }
else writeIORef ref buf{ bufRPtr = off + 1 }
return (concat (reverse (xs:xss)))
else do
maybe_buf <- maybeFillReadBuffer (haFD handle_) True (haIsStream handle_)
buf{ bufWPtr=0, bufRPtr=0 }
case maybe_buf of
-- Nothing indicates we caught an EOF, and we may have a
-- partial line to return.
Nothing -> do
writeIORef ref buf{ bufRPtr=0, bufWPtr=0 }
let str = concat (reverse (xs:xss))
if not (null str)
then return str
else ioe_EOF
Just new_buf ->
hGetLineBufferedLoop handle_ ref new_buf (xs:xss)
maybeFillReadBuffer fd is_line is_stream buf
= catch
(do buf <- fillReadBuffer fd is_line is_stream buf
return (Just buf)
)
(\e -> do if isEOFError e
then return Nothing
else ioError e)
unpack :: RawBuffer -> Int -> Int -> IO [Char]
unpack buf r 0 = return ""
unpack buf (I# r) (I# len) = IO $ \s -> unpack [] (len -# 1#) s
where
unpack acc i s
| i <# r = (# s, acc #)
| otherwise =
case readCharArray# buf i s of
(# s, ch #) -> unpack (C# ch : acc) (i -# 1#) s
hGetLineUnBuffered :: Handle -> IO String
hGetLineUnBuffered h = do
c <- hGetChar h
if c == '\n' then
return ""
else do
l <- getRest
return (c:l)
where
getRest = do
c <-
catch
(hGetChar h)
(\ err -> do
if isEOFError err then
return '\n'
else
ioError err)
if c == '\n' then
return ""
else do
s <- getRest
return (c:s)
-- -----------------------------------------------------------------------------
-- hGetContents
-- hGetContents on a DuplexHandle only affects the read side: you can
-- carry on writing to it afterwards.
-- | Computation 'hGetContents' @hdl@ returns the list of characters
-- corresponding to the unread portion of the channel or file managed
-- by @hdl@, which is put into an intermediate state, /semi-closed/.
-- In this state, @hdl@ is effectively closed,
-- but items are read from @hdl@ on demand and accumulated in a special
-- list returned by 'hGetContents' @hdl@.
--
-- Any operation that fails because a handle is closed,
-- also fails if a handle is semi-closed. The only exception is 'hClose'.
-- A semi-closed handle becomes closed:
--
-- * if 'hClose' is applied to it;
--
-- * if an I\/O error occurs when reading an item from the handle;
--
-- * or once the entire contents of the handle has been read.
--
-- Once a semi-closed handle becomes closed, the contents of the
-- associated list becomes fixed. The contents of this final list is
-- only partially specified: it will contain at least all the items of
-- the stream that were evaluated prior to the handle becoming closed.
--
-- Any I\/O errors encountered while a handle is semi-closed are simply
-- discarded.
--
-- This operation may fail with:
--
-- * 'isEOFError' if the end of file has been reached.
hGetContents :: Handle -> IO String
hGetContents handle =
withHandle "hGetContents" handle $ \handle_ ->
case haType handle_ of
ClosedHandle -> ioe_closedHandle
SemiClosedHandle -> ioe_closedHandle
AppendHandle -> ioe_notReadable
WriteHandle -> ioe_notReadable
_ -> do xs <- lazyRead handle
return (handle_{ haType=SemiClosedHandle}, xs )
-- Note that someone may close the semi-closed handle (or change its
-- buffering), so each time these lazy read functions are pulled on,
-- they have to check whether the handle has indeed been closed.
lazyRead :: Handle -> IO String
lazyRead handle =
unsafeInterleaveIO $
withHandle "lazyRead" handle $ \ handle_ -> do
case haType handle_ of
ClosedHandle -> return (handle_, "")
SemiClosedHandle -> lazyRead' handle handle_
_ -> ioException
(IOError (Just handle) IllegalOperation "lazyRead"
"illegal handle type" Nothing)
lazyRead' h handle_ = do
let ref = haBuffer handle_
fd = haFD handle_
-- even a NoBuffering handle can have a char in the buffer...
-- (see hLookAhead)
buf <- readIORef ref
if not (bufferEmpty buf)
then lazyReadHaveBuffer h handle_ fd ref buf
else do
case haBufferMode handle_ of
NoBuffering -> do
-- make use of the minimal buffer we already have
let raw = bufBuf buf
r <- readRawBuffer "lazyRead" (fromIntegral fd) (haIsStream handle_) raw 0 1
if r == 0
then do handle_ <- hClose_help handle_
return (handle_, "")
else do (c,_) <- readCharFromBuffer raw 0
rest <- lazyRead h
return (handle_, c : rest)
LineBuffering -> lazyReadBuffered h handle_ fd ref buf
BlockBuffering _ -> lazyReadBuffered h handle_ fd ref buf
-- we never want to block during the read, so we call fillReadBuffer with
-- is_line==True, which tells it to "just read what there is".
lazyReadBuffered h handle_ fd ref buf = do
catch
(do buf <- fillReadBuffer fd True{-is_line-} (haIsStream handle_) buf
lazyReadHaveBuffer h handle_ fd ref buf
)
-- all I/O errors are discarded. Additionally, we close the handle.
(\e -> do handle_ <- hClose_help handle_
return (handle_, "")
)
lazyReadHaveBuffer h handle_ fd ref buf = do
more <- lazyRead h
writeIORef ref buf{ bufRPtr=0, bufWPtr=0 }
s <- unpackAcc (bufBuf buf) (bufRPtr buf) (bufWPtr buf) more
return (handle_, s)
unpackAcc :: RawBuffer -> Int -> Int -> [Char] -> IO [Char]
unpackAcc buf r 0 acc = return acc
unpackAcc buf (I# r) (I# len) acc = IO $ \s -> unpack acc (len -# 1#) s
where
unpack acc i s
| i <# r = (# s, acc #)
| otherwise =
case readCharArray# buf i s of
(# s, ch #) -> unpack (C# ch : acc) (i -# 1#) s
-- ---------------------------------------------------------------------------
-- hPutChar
-- | Computation 'hPutChar' @hdl ch@ writes the character @ch@ to the
-- file or channel managed by @hdl@. Characters may be buffered if
-- buffering is enabled for @hdl@.
--
-- This operation may fail with:
--
-- * 'isFullError' if the device is full; or
--
-- * 'isPermissionError' if another system resource limit would be exceeded.
hPutChar :: Handle -> Char -> IO ()
hPutChar handle c = do
c `seq` return ()
wantWritableHandle "hPutChar" handle $ \ handle_ -> do
let fd = haFD handle_
case haBufferMode handle_ of
LineBuffering -> hPutcBuffered handle_ True c
BlockBuffering _ -> hPutcBuffered handle_ False c
NoBuffering ->
with (castCharToCChar c) $ \buf -> do
writeRawBufferPtr "hPutChar" (fromIntegral fd) (haIsStream handle_) buf 0 1
return ()
hPutcBuffered handle_ is_line c = do
let ref = haBuffer handle_
buf <- readIORef ref
let w = bufWPtr buf
w' <- writeCharIntoBuffer (bufBuf buf) w c
let new_buf = buf{ bufWPtr = w' }
if bufferFull new_buf || is_line && c == '\n'
then do
flushed_buf <- flushWriteBuffer (haFD handle_) (haIsStream handle_) new_buf
writeIORef ref flushed_buf
else do
writeIORef ref new_buf
hPutChars :: Handle -> [Char] -> IO ()
hPutChars handle [] = return ()
hPutChars handle (c:cs) = hPutChar handle c >> hPutChars handle cs
-- ---------------------------------------------------------------------------
-- hPutStr
-- We go to some trouble to avoid keeping the handle locked while we're
-- evaluating the string argument to hPutStr, in case doing so triggers another
-- I/O operation on the same handle which would lead to deadlock. The classic
-- case is
--
-- putStr (trace "hello" "world")
--
-- so the basic scheme is this:
--
-- * copy the string into a fresh buffer,
-- * "commit" the buffer to the handle.
--
-- Committing may involve simply copying the contents of the new
-- buffer into the handle's buffer, flushing one or both buffers, or
-- maybe just swapping the buffers over (if the handle's buffer was
-- empty). See commitBuffer below.
-- | Computation 'hPutStr' @hdl s@ writes the string
-- @s@ to the file or channel managed by @hdl@.
--
-- This operation may fail with:
--
-- * 'isFullError' if the device is full; or
--
-- * 'isPermissionError' if another system resource limit would be exceeded.
hPutStr :: Handle -> String -> IO ()
hPutStr handle str = do
buffer_mode <- wantWritableHandle "hPutStr" handle
(\ handle_ -> do getSpareBuffer handle_)
case buffer_mode of
(NoBuffering, _) -> do
hPutChars handle str -- v. slow, but we don't care
(LineBuffering, buf) -> do
writeLines handle buf str
(BlockBuffering _, buf) -> do
writeBlocks handle buf str
getSpareBuffer :: Handle__ -> IO (BufferMode, Buffer)
getSpareBuffer Handle__{haBuffer=ref,
haBuffers=spare_ref,
haBufferMode=mode}
= do
case mode of
NoBuffering -> return (mode, error "no buffer!")
_ -> do
bufs <- readIORef spare_ref
buf <- readIORef ref
case bufs of
BufferListCons b rest -> do
writeIORef spare_ref rest
return ( mode, newEmptyBuffer b WriteBuffer (bufSize buf))
BufferListNil -> do
new_buf <- allocateBuffer (bufSize buf) WriteBuffer
return (mode, new_buf)
writeLines :: Handle -> Buffer -> String -> IO ()
writeLines hdl Buffer{ bufBuf=raw, bufSize=len } s =
let
shoveString :: Int -> [Char] -> IO ()
-- check n == len first, to ensure that shoveString is strict in n.
shoveString n cs | n == len = do
new_buf <- commitBuffer hdl raw len n True{-needs flush-} False
writeLines hdl new_buf cs
shoveString n [] = do
commitBuffer hdl raw len n False{-no flush-} True{-release-}
return ()
shoveString n (c:cs) = do
n' <- writeCharIntoBuffer raw n c
if (c == '\n')
then do
new_buf <- commitBuffer hdl raw len n' True{-needs flush-} False
writeLines hdl new_buf cs
else
shoveString n' cs
in
shoveString 0 s
writeBlocks :: Handle -> Buffer -> String -> IO ()
writeBlocks hdl Buffer{ bufBuf=raw, bufSize=len } s =
let
shoveString :: Int -> [Char] -> IO ()
-- check n == len first, to ensure that shoveString is strict in n.
shoveString n cs | n == len = do
new_buf <- commitBuffer hdl raw len n True{-needs flush-} False
writeBlocks hdl new_buf cs
shoveString n [] = do
commitBuffer hdl raw len n False{-no flush-} True{-release-}
return ()
shoveString n (c:cs) = do
n' <- writeCharIntoBuffer raw n c
shoveString n' cs
in
shoveString 0 s
-- -----------------------------------------------------------------------------
-- commitBuffer handle buf sz count flush release
--
-- Write the contents of the buffer 'buf' ('sz' bytes long, containing
-- 'count' bytes of data) to handle (handle must be block or line buffered).
--
-- Implementation:
--
-- for block/line buffering,
-- 1. If there isn't room in the handle buffer, flush the handle
-- buffer.
--
-- 2. If the handle buffer is empty,
-- if flush,
-- then write buf directly to the device.
-- else swap the handle buffer with buf.
--
-- 3. If the handle buffer is non-empty, copy buf into the
-- handle buffer. Then, if flush != 0, flush
-- the buffer.
commitBuffer
:: Handle -- handle to commit to
-> RawBuffer -> Int -- address and size (in bytes) of buffer
-> Int -- number of bytes of data in buffer
-> Bool -- True <=> flush the handle afterward
-> Bool -- release the buffer?
-> IO Buffer
commitBuffer hdl raw sz@(I# _) count@(I# _) flush release = do
wantWritableHandle "commitAndReleaseBuffer" hdl $
commitBuffer' raw sz count flush release
-- Explicitly lambda-lift this function to subvert GHC's full laziness
-- optimisations, which otherwise tends to float out subexpressions
-- past the \handle, which is really a pessimisation in this case because
-- that lambda is a one-shot lambda.
--
-- Don't forget to export the function, to stop it being inlined too
-- (this appears to be better than NOINLINE, because the strictness
-- analyser still gets to worker-wrapper it).
--
-- This hack is a fairly big win for hPutStr performance. --SDM 18/9/2001
--
commitBuffer' raw sz@(I# _) count@(I# _) flush release
handle_@Handle__{ haFD=fd, haBuffer=ref, haBuffers=spare_buf_ref } = do
#ifdef DEBUG_DUMP
puts ("commitBuffer: sz=" ++ show sz ++ ", count=" ++ show count
++ ", flush=" ++ show flush ++ ", release=" ++ show release ++"\n")
#endif
old_buf@Buffer{ bufBuf=old_raw, bufRPtr=r, bufWPtr=w, bufSize=size }
<- readIORef ref
buf_ret <-
-- enough room in handle buffer?
if (not flush && (size - w > count))
-- The > is to be sure that we never exactly fill
-- up the buffer, which would require a flush. So
-- if copying the new data into the buffer would
-- make the buffer full, we just flush the existing
-- buffer and the new data immediately, rather than
-- copying before flushing.
-- not flushing, and there's enough room in the buffer:
-- just copy the data in and update bufWPtr.
then do memcpy_baoff_ba old_raw w raw (fromIntegral count)
writeIORef ref old_buf{ bufWPtr = w + count }
return (newEmptyBuffer raw WriteBuffer sz)
-- else, we have to flush
else do flushed_buf <- flushWriteBuffer fd (haIsStream handle_) old_buf
let this_buf =
Buffer{ bufBuf=raw, bufState=WriteBuffer,
bufRPtr=0, bufWPtr=count, bufSize=sz }
-- if: (a) we don't have to flush, and
-- (b) size(new buffer) == size(old buffer), and
-- (c) new buffer is not full,
-- we can just just swap them over...
if (not flush && sz == size && count /= sz)
then do
writeIORef ref this_buf
return flushed_buf
-- otherwise, we have to flush the new data too,
-- and start with a fresh buffer
else do
flushWriteBuffer fd (haIsStream handle_) this_buf
writeIORef ref flushed_buf
-- if the sizes were different, then allocate
-- a new buffer of the correct size.
if sz == size
then return (newEmptyBuffer raw WriteBuffer sz)
else allocateBuffer size WriteBuffer
-- release the buffer if necessary
case buf_ret of
Buffer{ bufSize=buf_ret_sz, bufBuf=buf_ret_raw } -> do
if release && buf_ret_sz == size
then do
spare_bufs <- readIORef spare_buf_ref
writeIORef spare_buf_ref
(BufferListCons buf_ret_raw spare_bufs)
return buf_ret
else
return buf_ret
-- ---------------------------------------------------------------------------
-- Reading/writing sequences of bytes.
-- ---------------------------------------------------------------------------
-- hPutBuf
-- | 'hPutBuf' @hdl buf count@ writes @count@ 8-bit bytes from the
-- buffer @buf@ to the handle @hdl@. It returns ().
--
-- This operation may fail with:
--
-- * 'ResourceVanished' if the handle is a pipe or socket, and the
-- reading end is closed. (If this is a POSIX system, and the program
-- has not asked to ignore SIGPIPE, then a SIGPIPE may be delivered
-- instead, whose default action is to terminate the program).
hPutBuf :: Handle -- handle to write to
-> Ptr a -- address of buffer
-> Int -- number of bytes of data in buffer
-> IO ()
hPutBuf h ptr count = do hPutBuf' h ptr count True; return ()
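-- A hedged usage sketch (added for illustration, not part of this module):
-- marshal a list of bytes with withArrayLen (re-exported by Foreign) and
-- write them with a single hPutBuf call.
_hPutBufExample :: Handle -> [Word8] -> IO ()
_hPutBufExample h bytes =
  withArrayLen bytes $ \len buf -> hPutBuf h buf len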
hPutBufNonBlocking
:: Handle -- handle to write to
-> Ptr a -- address of buffer
-> Int -- number of bytes of data in buffer
-> IO Int -- returns: number of bytes written
hPutBufNonBlocking h ptr count = hPutBuf' h ptr count False
hPutBuf':: Handle -- handle to write to
-> Ptr a -- address of buffer
-> Int -- number of bytes of data in buffer
-> Bool -- allow blocking?
-> IO Int
hPutBuf' handle ptr count can_block
| count == 0 = return 0
| count < 0 = illegalBufferSize handle "hPutBuf" count
| otherwise =
wantWritableHandle "hPutBuf" handle $
\ handle_@Handle__{ haFD=fd, haBuffer=ref, haIsStream=is_stream } ->
bufWrite fd ref is_stream ptr count can_block
bufWrite fd ref is_stream ptr count can_block =
seq count $ seq fd $ do -- strictness hack
old_buf@Buffer{ bufBuf=old_raw, bufRPtr=r, bufWPtr=w, bufSize=size }
<- readIORef ref
-- enough room in handle buffer?
if (size - w > count)
-- There's enough room in the buffer:
-- just copy the data in and update bufWPtr.
then do memcpy_baoff_ptr old_raw w ptr (fromIntegral count)
writeIORef ref old_buf{ bufWPtr = w + count }
return count
-- else, we have to flush
else do flushed_buf <- flushWriteBuffer fd is_stream old_buf
-- TODO: we should do a non-blocking flush here
writeIORef ref flushed_buf
-- if we can fit in the buffer, then just loop
if count < size
then bufWrite fd ref is_stream ptr count can_block
else if can_block
then do writeChunk fd is_stream (castPtr ptr) count
return count
else writeChunkNonBlocking fd is_stream ptr count
writeChunk :: FD -> Bool -> Ptr CChar -> Int -> IO ()
writeChunk fd is_stream ptr bytes = loop 0 bytes
where
loop :: Int -> Int -> IO ()
loop _ bytes | bytes <= 0 = return ()
loop off bytes = do
r <- fromIntegral `liftM`
writeRawBufferPtr "writeChunk" (fromIntegral fd) is_stream ptr
off (fromIntegral bytes)
-- write can't return 0
loop (off + r) (bytes - r)
writeChunkNonBlocking :: FD -> Bool -> Ptr a -> Int -> IO Int
writeChunkNonBlocking fd is_stream ptr bytes = loop 0 bytes
where
loop :: Int -> Int -> IO Int
loop off bytes | bytes <= 0 = return off
loop off bytes = do
#ifndef mingw32_HOST_OS
ssize <- c_write (fromIntegral fd) (ptr `plusPtr` off) (fromIntegral bytes)
let r = fromIntegral ssize :: Int
if (r == -1)
then do errno <- getErrno
if (errno == eAGAIN || errno == eWOULDBLOCK)
then return off
else throwErrno "writeChunk"
else loop (off + r) (bytes - r)
#else
(ssize, rc) <- asyncWrite fd (fromIntegral $ fromEnum is_stream)
(fromIntegral bytes)
(ptr `plusPtr` off)
let r = fromIntegral ssize :: Int
if r == (-1)
then ioError (errnoToIOError "hPutBufNonBlocking" (Errno (fromIntegral rc)) Nothing Nothing)
else loop (off + r) (bytes - r)
#endif
-- ---------------------------------------------------------------------------
-- hGetBuf
-- | 'hGetBuf' @hdl buf count@ reads data from the handle @hdl@
-- into the buffer @buf@ until either EOF is reached or
-- @count@ 8-bit bytes have been read.
-- It returns the number of bytes actually read. This may be zero if
-- EOF was reached before any data was read (or if @count@ is zero).
--
-- 'hGetBuf' never raises an EOF exception, instead it returns a value
-- smaller than @count@.
--
-- If the handle is a pipe or socket, and the writing end
-- is closed, 'hGetBuf' will behave as if EOF was reached.
hGetBuf :: Handle -> Ptr a -> Int -> IO Int
hGetBuf h ptr count
| count == 0 = return 0
| count < 0 = illegalBufferSize h "hGetBuf" count
| otherwise =
wantReadableHandle "hGetBuf" h $
\ handle_@Handle__{ haFD=fd, haBuffer=ref, haIsStream=is_stream } -> do
bufRead fd ref is_stream ptr 0 count
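-- A hedged usage sketch (added for illustration, not part of this module):
-- allocate a temporary buffer with allocaBytes (re-exported by Foreign),
-- fill it from the handle, and pass the filled prefix to a continuation.
_hGetBufExample :: Handle -> (Ptr Word8 -> Int -> IO r) -> IO r
_hGetBufExample h k =
  allocaBytes 4096 $ \buf -> do
    n <- hGetBuf h buf 4096 -- n may be smaller than 4096 at end of file
    k buf n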
-- small reads go through the buffer, large reads are satisfied by
-- taking data first from the buffer and then direct from the file
-- descriptor.
bufRead fd ref is_stream ptr so_far count =
seq fd $ seq so_far $ seq count $ do -- strictness hack
buf@Buffer{ bufBuf=raw, bufWPtr=w, bufRPtr=r, bufSize=sz } <- readIORef ref
if bufferEmpty buf
then if count > sz -- small read?
then do rest <- readChunk fd is_stream ptr count
return (so_far + rest)
else do mb_buf <- maybeFillReadBuffer fd True is_stream buf
case mb_buf of
Nothing -> return so_far -- got nothing, we're done
Just buf' -> do
writeIORef ref buf'
bufRead fd ref is_stream ptr so_far count
else do
let avail = w - r
if (count == avail)
then do
memcpy_ptr_baoff ptr raw r (fromIntegral count)
writeIORef ref buf{ bufWPtr=0, bufRPtr=0 }
return (so_far + count)
else do
if (count < avail)
then do
memcpy_ptr_baoff ptr raw r (fromIntegral count)
writeIORef ref buf{ bufRPtr = r + count }
return (so_far + count)
else do
memcpy_ptr_baoff ptr raw r (fromIntegral avail)
writeIORef ref buf{ bufWPtr=0, bufRPtr=0 }
let remaining = count - avail
so_far' = so_far + avail
ptr' = ptr `plusPtr` avail
if remaining < sz
then bufRead fd ref is_stream ptr' so_far' remaining
else do
rest <- readChunk fd is_stream ptr' remaining
return (so_far' + rest)
readChunk :: FD -> Bool -> Ptr a -> Int -> IO Int
readChunk fd is_stream ptr bytes = loop 0 bytes
where
loop :: Int -> Int -> IO Int
loop off bytes | bytes <= 0 = return off
loop off bytes = do
r <- fromIntegral `liftM`
readRawBufferPtr "readChunk" (fromIntegral fd) is_stream
(castPtr ptr) off (fromIntegral bytes)
if r == 0
then return off
else loop (off + r) (bytes - r)
-- | 'hGetBufNonBlocking' @hdl buf count@ reads data from the handle @hdl@
-- into the buffer @buf@ until either EOF is reached, or
-- @count@ 8-bit bytes have been read, or there is no more data available
-- to read immediately.
--
-- 'hGetBufNonBlocking' is identical to 'hGetBuf', except that it will
-- never block waiting for data to become available, instead it returns
-- only whatever data is available. To wait for data to arrive before
-- calling 'hGetBufNonBlocking', use 'hWaitForInput'.
--
-- If the handle is a pipe or socket, and the writing end
-- is closed, 'hGetBufNonBlocking' will behave as if EOF was reached.
--
hGetBufNonBlocking :: Handle -> Ptr a -> Int -> IO Int
hGetBufNonBlocking h ptr count
| count == 0 = return 0
| count < 0 = illegalBufferSize h "hGetBufNonBlocking" count
| otherwise =
wantReadableHandle "hGetBufNonBlocking" h $
\ handle_@Handle__{ haFD=fd, haBuffer=ref, haIsStream=is_stream } -> do
bufReadNonBlocking fd ref is_stream ptr 0 count
bufReadNonBlocking fd ref is_stream ptr so_far count =
seq fd $ seq so_far $ seq count $ do -- strictness hack
buf@Buffer{ bufBuf=raw, bufWPtr=w, bufRPtr=r, bufSize=sz } <- readIORef ref
if bufferEmpty buf
then if count > sz -- large read?
then do rest <- readChunkNonBlocking fd is_stream ptr count
return (so_far + rest)
else do buf' <- fillReadBufferWithoutBlocking fd is_stream buf
case buf' of { Buffer{ bufWPtr=w } ->
if (w == 0)
then return so_far
else do writeIORef ref buf'
bufReadNonBlocking fd ref is_stream ptr
so_far (min count w)
-- NOTE: new count is 'min count w'
-- so we will just copy the contents of the
-- buffer in the recursive call, and not
-- loop again.
}
else do
let avail = w - r
if (count == avail)
then do
memcpy_ptr_baoff ptr raw r (fromIntegral count)
writeIORef ref buf{ bufWPtr=0, bufRPtr=0 }
return (so_far + count)
else do
if (count < avail)
then do
memcpy_ptr_baoff ptr raw r (fromIntegral count)
writeIORef ref buf{ bufRPtr = r + count }
return (so_far + count)
else do
memcpy_ptr_baoff ptr raw r (fromIntegral avail)
writeIORef ref buf{ bufWPtr=0, bufRPtr=0 }
let remaining = count - avail
so_far' = so_far + avail
ptr' = ptr `plusPtr` avail
-- we haven't attempted to read anything yet if we get to here.
if remaining < sz
then bufReadNonBlocking fd ref is_stream ptr' so_far' remaining
else do
rest <- readChunkNonBlocking fd is_stream ptr' remaining
return (so_far' + rest)
readChunkNonBlocking :: FD -> Bool -> Ptr a -> Int -> IO Int
readChunkNonBlocking fd is_stream ptr bytes = do
#ifndef mingw32_HOST_OS
ssize <- c_read (fromIntegral fd) (castPtr ptr) (fromIntegral bytes)
let r = fromIntegral ssize :: Int
if (r == -1)
then do errno <- getErrno
if (errno == eAGAIN || errno == eWOULDBLOCK)
then return 0
else throwErrno "readChunk"
else return r
#else
(ssize, rc) <- asyncRead fd (fromIntegral $ fromEnum is_stream)
(fromIntegral bytes) ptr
let r = fromIntegral ssize :: Int
if r == (-1)
then ioError (errnoToIOError "hGetBufNonBlocking" (Errno (fromIntegral rc)) Nothing Nothing)
else return r
#endif
slurpFile :: FilePath -> IO (Ptr (), Int)
slurpFile fname = do
handle <- openFile fname ReadMode
sz <- hFileSize handle
if sz > fromIntegral (maxBound::Int) then
ioError (userError "slurpFile: file too big")
else do
let sz_i = fromIntegral sz
if sz_i == 0 then return (nullPtr, 0) else do
chunk <- mallocBytes sz_i
r <- hGetBuf handle chunk sz_i
hClose handle
return (chunk, r)
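-- A hypothetical usage note (not from the original source): the buffer
-- returned by 'slurpFile' is allocated with 'mallocBytes', so the caller is
-- responsible for releasing it, e.g. with 'free' from Foreign.Marshal.Alloc:
--
-- > (ptr, len) <- slurpFile "input.dat"
-- > -- ... consume len bytes starting at ptr ...
-- > free ptr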
-- ---------------------------------------------------------------------------
-- memcpy wrappers
foreign import ccall unsafe "__hscore_memcpy_src_off"
memcpy_ba_baoff :: RawBuffer -> RawBuffer -> Int -> CSize -> IO (Ptr ())
foreign import ccall unsafe "__hscore_memcpy_src_off"
memcpy_ptr_baoff :: Ptr a -> RawBuffer -> Int -> CSize -> IO (Ptr ())
foreign import ccall unsafe "__hscore_memcpy_dst_off"
memcpy_baoff_ba :: RawBuffer -> Int -> RawBuffer -> CSize -> IO (Ptr ())
foreign import ccall unsafe "__hscore_memcpy_dst_off"
memcpy_baoff_ptr :: RawBuffer -> Int -> Ptr a -> CSize -> IO (Ptr ())
-----------------------------------------------------------------------------
-- Internal Utils
illegalBufferSize :: Handle -> String -> Int -> IO a
illegalBufferSize handle fn (sz :: Int) =
ioException (IOError (Just handle)
InvalidArgument fn
("illegal buffer size " ++ showsPrec 9 sz [])
Nothing)
| FranklinChen/hugs98-plus-Sep2006 | packages/base/GHC/IO.hs | bsd-3-clause | 31,744 | 362 | 28 | 7,309 | 7,180 | 3,814 | 3,366 | -1 | -1 |
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE RankNTypes #-}
module Diagrams.Backend.OpenGL.TwoD.Attributes
  ( GLRenderM, GLRenderState (..), withStyleState, initialGLRenderState)
where
-- General Haskell
import Control.Monad.State
import Control.Lens (op, Lens', (.~))
import Control.Lens.TH
-- From Diagrams
import Diagrams.Prelude as D hiding (Attribute)
import Diagrams.TwoD.Path
import Diagrams.Backend.OpenGL.TwoD.Outlines (trlVertices, Convex(..))
import Diagrams.Backend.OpenGL.TwoD.Tesselate
type GLRenderM a = State GLRenderState a
data GLRenderState =
GLRenderState{ _currentLineColor :: AlphaColour Double
, _currentFillColor :: AlphaColour Double
, _currentOpacity :: Double
, _currentLineWidth :: Double
, _currentLineCap :: LineCap
, _currentLineJoin :: LineJoin
, _currentFillRule :: TessWinding
, _currentDashing :: Dashing
, _currentClip :: [Convex]
}
makeLenses ''GLRenderState
initialGLRenderState :: GLRenderState
initialGLRenderState = GLRenderState
{ _currentLineColor = (opaque black)
, _currentFillColor = transparent
, _currentOpacity = 1
, _currentLineWidth = 0.01
, _currentLineCap = LineCapButt
, _currentLineJoin = LineJoinMiter
, _currentFillRule = TessWindingNonzero
, _currentDashing = Dashing [] 0
, _currentClip = []
}
{- Style changes -}
withStyleState :: Style R2 -> GLRenderM a -> GLRenderM a
withStyleState s act = do
prev <- get
modify . foldr1 (.) . map ($ s) $
[ changeWith (toAlphaColour . getLineColor) currentLineColor
, changeWith (toAlphaColour . getFillColor) currentFillColor
, changeWith getOpacity currentOpacity
, changeWith getLineWidth currentLineWidth
, changeWith getLineCap currentLineCap
, changeWith getLineJoin currentLineJoin
, changeWith (fr . getFillRule) currentFillRule
, changeWith getDashing currentDashing
, changeClip
]
r <- act
put prev -- TODO restore only changed values?
return r
-- | @changeWith get set sty@ is @id@ if @sty@ does not have the
-- 'Attribute' specified by @get@. If the @Attribute@ is available,
-- @changeWith@ returns a function which sets it.
changeWith :: AttributeClass a =>
(a -> b) -> (Lens' GLRenderState b) -> Style R2 -> GLRenderState -> GLRenderState
changeWith g s sty = case g <$> getAttr sty of
Just v -> s .~ v
Nothing -> id
changeClip :: Style R2 -> GLRenderState -> GLRenderState
changeClip s = case op Clip <$> getAttr s of
Just (Path trs:_) ->
currentClip .~ (tessRegion TessWindingNonzero $ map trlVertices trs)
_ -> id
fr :: FillRule -> TessWinding
fr Winding = TessWindingNonzero
fr EvenOdd = TessWindingOdd
| bergey/diagrams-opengl | src/Diagrams/Backend/OpenGL/TwoD/Attributes.hs | bsd-3-clause | 3,142 | 0 | 11 | 942 | 655 | 367 | 288 | 64 | 2 |
{-# Language RankNTypes, ViewPatterns, PatternSynonyms, TypeOperators, ScopedTypeVariables,
KindSignatures, PolyKinds, DataKinds, TypeFamilies, TypeInType, GADTs #-}
module T14552 where
import Data.Kind
import Data.Proxy
data family Sing a
type a --> b = (a, b) -> Type
type family F (f::a --> b) (x::a) :: b
newtype Limit :: (k --> Type) -> Type where
Limit :: (forall xx. Proxy xx -> F f xx) -> Limit f
data Exp :: [Type] -> Type -> Type where
TLam :: (forall aa. Proxy aa -> Exp xs (F w aa))
-> Exp xs (Limit w)
pattern FOO f <- TLam (($ Proxy) -> f)
{-
TLam :: forall (xs::[Type]) (b::Type). -- Universal
forall k (w :: k --> Type). -- Existential
(b ~ Limit w) =>
=> (forall (aa :: k). Proxy aa -> Exp xs (F w aa))
-> Exp xs b
-}
{-
mfoo :: Exp xs b
-> (forall k (w :: k --> Type).
(b ~ Limit w)
=> Exp xs (F w aa)
-> r)
-> r
mfoo scrut k = case srcut of
TLam g -> k (g Proxy)
-}
| shlevy/ghc | testsuite/tests/patsyn/should_fail/T14552.hs | bsd-3-clause | 1,019 | 0 | 12 | 316 | 208 | 120 | 88 | -1 | -1 |
data Accessor t a = Accessor {
getVal :: t -> a,
setVal :: t -> a -> t
}
data Foo = Foo { p1score_ :: Bar, p2score_ :: Int } deriving Show
data Bar = Bar { bscore_ :: Int, bcheat_ :: Bool } deriving Show
accessor a b = Accessor (\x -> a x) (\x v -> b x v)
p1score = accessor p1score_ (\x v -> x { p1score_ = v })
p2score = accessor p2score_ (\x v -> x { p2score_ = v })
bscore = accessor bscore_ (\x v -> x { bscore_ = v })
bcheat = accessor bcheat_ (\x v -> x { bcheat_ = v })
(.:) :: Accessor a b -> Accessor b c -> Accessor a c
(.:) f g = Accessor (\x -> (getVal g) (getVal f x)) (\x v -> setVal f x (setVal g (getVal f x) v))
{-# INLINE (.:) #-}
(.@) d a = (getVal a) d
(.=) (d,a) v = (setVal a) d v
{-# INLINE (.=) #-}
(.->) = (,)
simple = Foo (Bar 3 False) 5
simple2 = simple .-> (p1score .: bcheat) .= True
main = do
let res = show simple2
putStrLn res
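{- A hypothetical GHCi session, derived only from the definitions above, to
   show how composed accessors read and write the nested Bar field:
   > simple .@ (p1score .: bscore)
   3
   > getVal (p1score .: bcheat) simple2
   True
-}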
| gcfavorites/hiccup | attic/Accessor.hs | lgpl-2.1 | 905 | 0 | 12 | 249 | 467 | 257 | 210 | 22 | 1 |
module FFI
(module Fay.FFI)
where
import Fay.FFI
| fpco/fay-base | src/FFI.hs | bsd-3-clause | 54 | 0 | 5 | 12 | 17 | 11 | 6 | 3 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
-- | Test suite for GHCi like applications including both GHCi and Intero.
module Stack.GhciSpec where
import qualified Data.ByteString.Lazy as LBS
import qualified Data.Map as M
import qualified Data.Set as S
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Distribution.License (License (BSD3))
import qualified Distribution.ModuleName as ModuleName
import Stack.Types.Package
import Stack.Types.PackageName
import Stack.Types.Version
import Test.Hspec
import NeatInterpolation
import Path
import Path.Extra (pathToText)
import qualified System.FilePath as FP
import Stack.Ghci
import Stack.Ghci.Script (scriptToLazyByteString)
import Stack.Ghci.PortableFakePaths
textToLazy :: Text -> LBS.ByteString
textToLazy = LBS.fromStrict . T.encodeUtf8
-- | Matches two strings, after converting line-ends in the second to Unix ones
-- (in a hacky way) and converting both to the same type. Workaround for
-- https://github.com/nikita-volkov/neat-interpolation/issues/14.
shouldBeLE :: LBS.ByteString -> Text -> Expectation
shouldBeLE actual expected = shouldBe actual (textToLazy $ T.filter (/= '\r') expected)
baseProjDir, projDirA, projDirB :: Path Abs Dir
baseProjDir = $(mkAbsDir $ defaultDrive FP.</> "Users" FP.</> "someone" FP.</> "src")
projDirA = baseProjDir </> $(mkRelDir "project-a")
projDirB = baseProjDir </> $(mkRelDir "project-b")
relFile :: Path Rel File
relFile = $(mkRelFile $ "exe" FP.</> "Main.hs")
absFile :: Path Abs File
absFile = projDirA </> relFile
projDirAT, projDirBT, relFileT, absFileT :: Text
projDirAT = pathToText projDirA
projDirBT = pathToText projDirB
relFileT = pathToText relFile
absFileT = pathToText absFile
spec :: Spec
spec = do
describe "GHCi" $ do
describe "Script rendering" $ do
describe "should render GHCi scripts" $ do
it "with one library package" $ do
let res = scriptToLazyByteString $ renderScriptGhci packages_singlePackage Nothing
res `shouldBeLE` ghciScript_projectWithLib
it "with one main package" $ do
let res = scriptToLazyByteString $ renderScriptGhci []
(Just absFile)
res `shouldBeLE` ghciScript_projectWithMain
it "with one library and main package" $ do
let res = scriptToLazyByteString $ renderScriptGhci packages_singlePackage
(Just absFile)
res `shouldBeLE` ghciScript_projectWithLibAndMain
it "with multiple library packages" $ do
let res = scriptToLazyByteString $ renderScriptGhci packages_multiplePackages Nothing
res `shouldBeLE` ghciScript_multipleProjectsWithLib
describe "should render intero scripts" $ do
it "with one library package" $ do
let res = scriptToLazyByteString $ renderScriptIntero packages_singlePackage Nothing
res `shouldBeLE` interoScript_projectWithLib
it "with one main package" $ do
let res = scriptToLazyByteString $ renderScriptIntero packages_singlePackage
(Just absFile)
res `shouldBeLE` interoScript_projectWithMain
it "with one library and main package" $ do
let res = scriptToLazyByteString $ renderScriptIntero packages_singlePackage
(Just absFile)
res `shouldBeLE` interoScript_projectWithLibAndMain
it "with multiple library packages" $ do
let res = scriptToLazyByteString $ renderScriptIntero packages_multiplePackages Nothing
res `shouldBeLE` interoScript_multipleProjectsWithLib
-- Expected Intero scripts
interoScript_projectWithLib :: Text
interoScript_projectWithLib = [text|
:cd-ghc $projDirAT
:add Lib.A
:module + Lib.A
|]
interoScript_projectWithMain :: Text
interoScript_projectWithMain = [text|
:cd-ghc $projDirAT
:add Lib.A
:cd-ghc $projDirAT
:add $absFileT
:module + Lib.A
|]
interoScript_projectWithLibAndMain :: Text
interoScript_projectWithLibAndMain = [text|
:cd-ghc $projDirAT
:add Lib.A
:cd-ghc $projDirAT
:add $absFileT
:module + Lib.A
|]
interoScript_multipleProjectsWithLib :: Text
interoScript_multipleProjectsWithLib = [text|
:cd-ghc $projDirAT
:add Lib.A
:cd-ghc $projDirBT
:add Lib.B
:module + Lib.A Lib.B
|]
-- Expected GHCi Scripts
ghciScript_projectWithLib :: Text
ghciScript_projectWithLib = [text|
:add Lib.A
:module + Lib.A
|]
ghciScript_projectWithMain :: Text
ghciScript_projectWithMain = [text|
:add $absFileT
:module +
|]
ghciScript_projectWithLibAndMain :: Text
ghciScript_projectWithLibAndMain = [text|
:add Lib.A
:add $absFileT
:module + Lib.A
|]
ghciScript_multipleProjectsWithLib :: Text
ghciScript_multipleProjectsWithLib = [text|
:add Lib.A
:add Lib.B
:module + Lib.A Lib.B
|]
-- Expected Legacy GHCi scripts
ghciLegacyScript_projectWithMain :: Text
ghciLegacyScript_projectWithMain = [text|
:add
:add $absFileT
:module +
|]
ghciLegacyScript_projectWithLibAndMain :: Text
ghciLegacyScript_projectWithLibAndMain = [text|
:add Lib.A
:add $absFileT
:module + Lib.A
|]
ghciLegacyScript_multipleProjectsWithLib :: Text
ghciLegacyScript_multipleProjectsWithLib = [text|
:add Lib.A Lib.B
:module + Lib.A Lib.B
|]
-- Sample GHCi load configs
packages_singlePackage :: [GhciPkgInfo]
packages_singlePackage =
[ GhciPkgInfo
{ ghciPkgModules = S.fromList [ModuleName.fromString "Lib.A"]
, ghciPkgDir = projDirA
, ghciPkgName = $(mkPackageName "package-a")
, ghciPkgOpts = []
, ghciPkgModFiles = S.empty
, ghciPkgCFiles = S.empty
, ghciPkgMainIs = M.empty
, ghciPkgTargetFiles = Nothing
, ghciPkgPackage =
Package
{ packageName = $(mkPackageName "package-a")
, packageVersion = $(mkVersion "0.1.0.0")
, packageLicense = BSD3
, packageFiles = GetPackageFiles undefined
, packageDeps = M.empty
, packageTools = []
, packageAllDeps = S.empty
, packageGhcOptions = []
, packageFlags = M.empty
, packageDefaultFlags = M.empty
, packageHasLibrary = True
, packageTests = M.empty
, packageBenchmarks = S.empty
, packageExes = S.empty
, packageOpts = GetPackageOpts undefined
, packageHasExposedModules = True
, packageSimpleType = True
, packageSetupDeps = Nothing
}
}
]
packages_multiplePackages :: [GhciPkgInfo]
packages_multiplePackages =
[ GhciPkgInfo
{ ghciPkgModules = S.fromList [ModuleName.fromString "Lib.A"]
, ghciPkgDir = projDirA
, ghciPkgName = $(mkPackageName "package-a")
, ghciPkgOpts = []
, ghciPkgModFiles = S.empty
, ghciPkgCFiles = S.empty
, ghciPkgMainIs = M.empty
, ghciPkgTargetFiles = Nothing
, ghciPkgPackage =
Package
{ packageName = $(mkPackageName "package-a")
, packageVersion = $(mkVersion "0.1.0.0")
, packageLicense = BSD3
, packageFiles = GetPackageFiles undefined
, packageDeps = M.empty
, packageTools = []
, packageAllDeps = S.empty
, packageGhcOptions = []
, packageFlags = M.empty
, packageDefaultFlags = M.empty
, packageHasLibrary = True
, packageTests = M.empty
, packageBenchmarks = S.empty
, packageExes = S.empty
, packageOpts = GetPackageOpts undefined
, packageHasExposedModules = True
, packageSimpleType = True
, packageSetupDeps = Nothing
}
}
, GhciPkgInfo
{ ghciPkgModules = S.fromList [ModuleName.fromString "Lib.B"]
, ghciPkgDir = projDirB
, ghciPkgName = $(mkPackageName "package-b")
, ghciPkgOpts = []
, ghciPkgModFiles = S.empty
, ghciPkgCFiles = S.empty
, ghciPkgMainIs = M.empty
, ghciPkgTargetFiles = Nothing
, ghciPkgPackage =
Package
{ packageName = $(mkPackageName "package-b")
, packageVersion = $(mkVersion "0.1.0.0")
, packageLicense = BSD3
, packageFiles = GetPackageFiles undefined
, packageDeps = M.empty
, packageTools = []
, packageAllDeps = S.empty
, packageGhcOptions = []
, packageFlags = M.empty
, packageDefaultFlags = M.empty
, packageHasLibrary = True
, packageTests = M.empty
, packageBenchmarks = S.empty
, packageExes = S.empty
, packageOpts = GetPackageOpts undefined
, packageHasExposedModules = True
, packageSimpleType = True
, packageSetupDeps = Nothing
}
}
]
| AndreasPK/stack | src/test/Stack/GhciSpec.hs | bsd-3-clause | 8,765 | 0 | 25 | 2,097 | 1,657 | 965 | 692 | 187 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-
Created : 2015 Aug 26 (Wed) 11:56:37 by Harold Carr.
Last Modified : 2015 Sep 12 (Sat) 11:39:39 by Harold Carr.
-}
module Msg where
import Data.Aeson (FromJSON, ToJSON)
import GHC.Generics
type Name = String
type MsgId = Int
data Msg = Msg { name :: Name, msgId :: MsgId, txt :: String } deriving (Generic, Show)
instance ToJSON Msg
instance FromJSON Msg
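-- A hedged example (not part of the original module): with the generic
-- instances above, Data.Aeson's 'encode' can serialise a 'Msg'. 'encode' is
-- assumed to be imported from Data.Aeson, and the field order in the output
-- may vary between aeson versions.
--
-- > encode (Msg "alice" 1 "hello")
-- > -- e.g. "{\"name\":\"alice\",\"msgId\":1,\"txt\":\"hello\"}"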
| splodingsocks/utah-haskell | infrastructure/src/Msg.hs | apache-2.0 | 464 | 0 | 8 | 112 | 89 | 54 | 35 | 10 | 0 |
{-# LANGUAGE StandaloneKindSignatures #-}
{-# LANGUAGE PolyKinds, ExplicitForAll #-}
module SAKS_015 where
import Data.Kind (Type)
type T :: forall k -> k -> Type
data T (k :: Type) (a :: k)
| sdiehl/ghc | testsuite/tests/saks/should_compile/saks015.hs | bsd-3-clause | 194 | 0 | 6 | 35 | 48 | 32 | 16 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="pt-BR">
<title>All In One Notes Add-On</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/allinonenotes/src/main/javahelp/org/zaproxy/zap/extension/allinonenotes/resources/help_pt_BR/helpset_pt_BR.hs | apache-2.0 | 968 | 77 | 67 | 159 | 417 | 211 | 206 | -1 | -1 |
{-# OPTIONS -fno-warn-redundant-constraints #-}
{-# LANGUAGE TypeFamilies, GeneralizedNewtypeDeriving, StandaloneDeriving, FlexibleInstances #-}
-- Test #2856
module T2856 where
import Data.Ratio
----------------------
class C a where
data D a
instance C Bool where
newtype D Bool = DInt Int deriving (Eq, Show, Num)
instance C a => C [a] where
newtype D [a] = DList (Ratio a) deriving (Eq, Show, Num)
----------------------
data family W a
newtype instance W Bool = WInt Int deriving( Eq, Show )
newtype instance W [a] = WList (Ratio a) deriving( Eq, Show )
deriving instance Num (W Bool)
deriving instance (Integral a, Num a) => Num (W [a])
-- Integral needed because superclass Eq needs it,
-- because of the stupid context on Ratio
| sdiehl/ghc | testsuite/tests/deriving/should_compile/T2856.hs | bsd-3-clause | 761 | 0 | 8 | 143 | 226 | 125 | 101 | 15 | 0 |
module WhereIn7 where
--A definition can be demoted to the local 'where' binding of a friend declaration,
--if it is only used by this friend declaration.
--Demoting a definition narrows down the scope of the definition.
--In this example, demote the top level 'sq' to 'sumSquares'
--This example also aims to test the split of type signature.
sumSquares x y = sq x + sq y
sq,anotherFun :: Int -> Int
sq 0 = 0
sq z = z^pow
where pow=2
anotherFun x = x^2
| kmate/HaRe | test/testdata/Demote/WhereIn7.hs | bsd-3-clause | 472 | 0 | 6 | 103 | 79 | 44 | 35 | 7 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeFamilies #-}
module Distribution.Types.ComponentLocalBuildInfo (
ComponentLocalBuildInfo(..),
componentIsIndefinite,
maybeComponentInstantiatedWith,
) where
import Prelude ()
import Distribution.Compat.Prelude
import Distribution.ModuleName
import Distribution.Backpack
import Distribution.Compat.Graph
import Distribution.Types.ComponentId
import Distribution.Types.MungedPackageId
import Distribution.Types.UnitId
import Distribution.Types.ComponentName
import Distribution.Types.MungedPackageName
import Distribution.PackageDescription
import qualified Distribution.InstalledPackageInfo as Installed
-- | The first five fields are common across all algebraic variants.
data ComponentLocalBuildInfo
= LibComponentLocalBuildInfo {
-- | It would be very convenient to store the literal Library here,
        -- but if we do that, it will get serialized (via the Binary
        -- instance) twice. So instead we just provide the ComponentName,
-- which can be used to find the Component in the
-- PackageDescription. NB: eventually, this will NOT uniquely
-- identify the ComponentLocalBuildInfo.
componentLocalName :: ComponentName,
-- | The computed 'ComponentId' of this component.
componentComponentId :: ComponentId,
-- | The computed 'UnitId' which uniquely identifies this
-- component. Might be hashed.
componentUnitId :: UnitId,
-- | Is this an indefinite component (i.e. has unfilled holes)?
componentIsIndefinite_ :: Bool,
-- | How the component was instantiated
componentInstantiatedWith :: [(ModuleName, OpenModule)],
-- | Resolved internal and external package dependencies for this component.
-- The 'BuildInfo' specifies a set of build dependencies that must be
-- satisfied in terms of version ranges. This field fixes those dependencies
-- to the specific versions available on this machine for this compiler.
componentPackageDeps :: [(UnitId, MungedPackageId)],
-- | The set of packages that are brought into scope during
-- compilation, including a 'ModuleRenaming' which may used
-- to hide or rename modules. This is what gets translated into
-- @-package-id@ arguments. This is a modernized version of
-- 'componentPackageDeps', which is kept around for BC purposes.
componentIncludes :: [(OpenUnitId, ModuleRenaming)],
componentExeDeps :: [UnitId],
-- | The internal dependencies which induce a graph on the
-- 'ComponentLocalBuildInfo' of this package. This does NOT
-- coincide with 'componentPackageDeps' because it ALSO records
-- 'build-tool' dependencies on executables. Maybe one day
-- @cabal-install@ will also handle these correctly too!
componentInternalDeps :: [UnitId],
-- | Compatibility "package key" that we pass to older versions of GHC.
componentCompatPackageKey :: String,
-- | Compatibility "package name" that we register this component as.
componentCompatPackageName :: MungedPackageName,
-- | A list of exposed modules (either defined in this component,
-- or reexported from another component.)
componentExposedModules :: [Installed.ExposedModule],
-- | Convenience field, specifying whether or not this is the
-- "public library" that has the same name as the package.
componentIsPublic :: Bool
}
-- TODO: refactor all these duplicates
| FLibComponentLocalBuildInfo {
componentLocalName :: ComponentName,
componentComponentId :: ComponentId,
componentUnitId :: UnitId,
componentPackageDeps :: [(UnitId, MungedPackageId)],
componentIncludes :: [(OpenUnitId, ModuleRenaming)],
componentExeDeps :: [UnitId],
componentInternalDeps :: [UnitId]
}
| ExeComponentLocalBuildInfo {
componentLocalName :: ComponentName,
componentComponentId :: ComponentId,
componentUnitId :: UnitId,
componentPackageDeps :: [(UnitId, MungedPackageId)],
componentIncludes :: [(OpenUnitId, ModuleRenaming)],
componentExeDeps :: [UnitId],
componentInternalDeps :: [UnitId]
}
| TestComponentLocalBuildInfo {
componentLocalName :: ComponentName,
componentComponentId :: ComponentId,
componentUnitId :: UnitId,
componentPackageDeps :: [(UnitId, MungedPackageId)],
componentIncludes :: [(OpenUnitId, ModuleRenaming)],
componentExeDeps :: [UnitId],
componentInternalDeps :: [UnitId]
}
| BenchComponentLocalBuildInfo {
componentLocalName :: ComponentName,
componentComponentId :: ComponentId,
componentUnitId :: UnitId,
componentPackageDeps :: [(UnitId, MungedPackageId)],
componentIncludes :: [(OpenUnitId, ModuleRenaming)],
componentExeDeps :: [UnitId],
componentInternalDeps :: [UnitId]
}
deriving (Generic, Read, Show)
instance Binary ComponentLocalBuildInfo
instance IsNode ComponentLocalBuildInfo where
type Key ComponentLocalBuildInfo = UnitId
nodeKey = componentUnitId
nodeNeighbors = componentInternalDeps
componentIsIndefinite :: ComponentLocalBuildInfo -> Bool
componentIsIndefinite LibComponentLocalBuildInfo{ componentIsIndefinite_ = b } = b
componentIsIndefinite _ = False
maybeComponentInstantiatedWith :: ComponentLocalBuildInfo -> Maybe [(ModuleName, OpenModule)]
maybeComponentInstantiatedWith
LibComponentLocalBuildInfo { componentInstantiatedWith = insts } = Just insts
maybeComponentInstantiatedWith _ = Nothing
| mydaum/cabal | Cabal/Distribution/Types/ComponentLocalBuildInfo.hs | bsd-3-clause | 5,430 | 0 | 10 | 934 | 670 | 438 | 232 | 78 | 1 |
{-# LANGUAGE CPP #-}
#include "MachDeps.h"
module Main where
import Data.Bits
#if WORD_SIZE_IN_BITS != 64 && WORD_SIZE_IN_BITS != 32
# error unsupported WORD_SIZE_IN_BITS config
#endif
-- a negative integer the size of GMP_LIMB_BITS*2
negativeBigInteger :: Integer
negativeBigInteger = 1 - (1 `shiftL` (64 * 2))
main = do
  -- right shift by GMP_LIMB_BITS
print $ negativeBigInteger `shiftR` 64
| ezyang/ghc | testsuite/tests/numeric/should_run/T12136.hs | bsd-3-clause | 406 | 0 | 9 | 72 | 64 | 41 | 23 | -1 | -1 |
module Geometry.SpatialHash where
import Algebra.Vector as V
import Data.List as List
import Data.List.Extensions as ListExt
import Data.Map as Map
import Data.Ratio as Ratio
import Data.Ratio.Extensions as RatioExt
import Data.Tuple.Extensions as TupleExt
import Geometry.AABB as AABB
type SpatialHash a = (Map Vector a, Vector, Vector)
positionMap = first3
origin = second3
binDimensions = third3
setPositionMap = setFirst3
setOrigin = setSecond3
setBinDimensions = setThird3
pointHash :: Vector -> Vector -> Vector
pointHash = \bin_dimensions point -> let
divided = (ListExt.map2 (/) (V.toList point) (V.toList bin_dimensions))
rounded = (List.map (RatioExt.setPrecision 1) divided)
in (V.fromList rounded)
aabbHash :: Vector -> AABB -> [Vector]
aabbHash = \bin_dimensions aabb -> let
min_hash = (V.toList (pointHash bin_dimensions (minCorner aabb)))
max_hash = (V.toList (pointHash bin_dimensions (maxCorner aabb)))
uniformSequence = \min max -> (ListExt.uniformSequence 1 min ((+) max ((%) 1 2)))
ranges = (ListExt.map2 uniformSequence min_hash max_hash)
hashes = (ListExt.crossProducts ranges)
in (List.map V.fromList hashes)
centeredPointHash = \spatial_hash point -> let
in (pointHash (binDimensions spatial_hash) (V.subtract point (origin spatial_hash)))
centeredAABBHash = \spatial_hash aabb -> let
centered_aabb = (AABB.translate aabb (V.negate (origin spatial_hash)))
in (aabbHash (binDimensions spatial_hash) centered_aabb)
insert = \position bin spatial_hash -> let
hash = (centeredPointHash spatial_hash position)
in (setPositionMap spatial_hash (Map.insert hash bin (positionMap spatial_hash)))
lookupPoint = \position spatial_hash -> let
in ((!) (positionMap spatial_hash) (centeredPointHash spatial_hash position))
lookupAABB = \aabb spatial_hash -> let
hashes = (centeredAABBHash spatial_hash aabb) :: [Vector]
in (List.map ((!) (positionMap spatial_hash)) hashes)
| stevedonnelly/haskell | code/Geometry/SpatialHash.hs | mit | 1,967 | 0 | 16 | 313 | 656 | 362 | 294 | 42 | 1 |
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE TemplateHaskell #-}
-- | Warning! This module is considered internal and may have breaking changes
module Routes.TH.Types
( -- * Data types
Resource (..)
, ResourceTree (..)
, Piece (..)
, Dispatch (..)
, CheckOverlap
, FlatResource (..)
-- ** Helper functions
, resourceMulti
, resourceTreePieces
, resourceTreeName
, flatten
) where
import Language.Haskell.TH.Syntax
data ResourceTree typ
= ResourceLeaf (Resource typ)
| ResourceParent String CheckOverlap [Piece typ] [ResourceTree typ]
deriving Functor
resourceTreePieces :: ResourceTree typ -> [Piece typ]
resourceTreePieces (ResourceLeaf r) = resourcePieces r
resourceTreePieces (ResourceParent _ _ x _) = x
resourceTreeName :: ResourceTree typ -> String
resourceTreeName (ResourceLeaf r) = resourceName r
resourceTreeName (ResourceParent x _ _ _) = x
instance Lift t => Lift (ResourceTree t) where
lift (ResourceLeaf r) = [|ResourceLeaf $(lift r)|]
lift (ResourceParent a b c d) = [|ResourceParent $(lift a) $(lift b) $(lift c) $(lift d)|]
data Resource typ = Resource
{ resourceName :: String
, resourcePieces :: [Piece typ]
, resourceDispatch :: Dispatch typ
, resourceAttrs :: [String]
, resourceCheck :: CheckOverlap
}
deriving (Show, Functor)
type CheckOverlap = Bool
instance Lift t => Lift (Resource t) where
lift (Resource a b c d e) = [|Resource a b c d e|]
data Piece typ = Static String | Dynamic typ
deriving Show
instance Functor Piece where
fmap _ (Static s) = Static s
fmap f (Dynamic t) = Dynamic (f t)
instance Lift t => Lift (Piece t) where
lift (Static s) = [|Static $(lift s)|]
lift (Dynamic t) = [|Dynamic $(lift t)|]
data Dispatch typ =
Methods
{ methodsMulti :: Maybe typ -- ^ type of the multi piece at the end
, methodsMethods :: [String] -- ^ supported request methods
}
| Subsite
{ subsiteType :: typ
, subsiteFunc :: String
}
deriving Show
instance Functor Dispatch where
fmap f (Methods a b) = Methods (fmap f a) b
fmap f (Subsite a b) = Subsite (f a) b
instance Lift t => Lift (Dispatch t) where
lift (Methods Nothing b) = [|Methods Nothing $(lift b)|]
lift (Methods (Just t) b) = [|Methods (Just $(lift t)) $(lift b)|]
lift (Subsite t b) = [|Subsite $(lift t) $(lift b)|]
resourceMulti :: Resource typ -> Maybe typ
resourceMulti Resource { resourceDispatch = Methods (Just t) _ } = Just t
resourceMulti _ = Nothing
data FlatResource a = FlatResource
{ frParentPieces :: [(String, [Piece a])]
, frName :: String
, frPieces :: [Piece a]
, frDispatch :: Dispatch a
, frCheck :: Bool
}
flatten :: [ResourceTree a] -> [FlatResource a]
flatten =
concatMap (go id True)
where
go front check' (ResourceLeaf (Resource a b c _ check)) = [FlatResource (front []) a b c (check' && check)]
go front check' (ResourceParent name check pieces children) =
concatMap (go (front . ((name, pieces):)) (check && check')) children
| ajnsit/snap-routes | src/Routes/TH/Types.hs | mit | 3,114 | 0 | 13 | 746 | 999 | 550 | 449 | 76 | 2 |
module Lambency.Shader.Var where
--------------------------------------------------------------------------------
import Lambency.Shader.Base
import Linear
--------------------------------------------------------------------------------
matrix2Ty :: ShaderVarTy (M22 Float)
matrix2Ty = ShaderVarTy Matrix2Ty
matrix3Ty :: ShaderVarTy (M33 Float)
matrix3Ty = ShaderVarTy Matrix3Ty
matrix4Ty :: ShaderVarTy (M44 Float)
matrix4Ty = ShaderVarTy Matrix4Ty
vector2fTy :: ShaderVarTy (V2 Float)
vector2fTy = ShaderVarTy Vector2Ty
vector3fTy :: ShaderVarTy (V3 Float)
vector3fTy = ShaderVarTy Vector3Ty
vector4fTy :: ShaderVarTy (V4 Float)
vector4fTy = ShaderVarTy Vector4Ty
vector2iTy :: ShaderVarTy (V2 Int)
vector2iTy = ShaderVarTy Vector2Ty
vector3iTy :: ShaderVarTy (V3 Int)
vector3iTy = ShaderVarTy Vector3Ty
vector4iTy :: ShaderVarTy (V4 Int)
vector4iTy = ShaderVarTy Vector4Ty
intTy :: ShaderVarTy Int
intTy = ShaderVarTy IntTy
floatTy :: ShaderVarTy Float
floatTy = ShaderVarTy FloatTy
sampler1DTy :: ShaderVarTy Sampler1D
sampler1DTy = ShaderVarTy Sampler1DTy
sampler2DTy :: ShaderVarTy Sampler2D
sampler2DTy = ShaderVarTy Sampler2DTy
sampler3DTy :: ShaderVarTy Sampler3D
sampler3DTy = ShaderVarTy Sampler3DTy
shadow2DTy :: ShaderVarTy Shadow2D
shadow2DTy = ShaderVarTy Shadow2DTy
| Mokosha/Lambency | lib/Lambency/Shader/Var.hs | mit | 1,299 | 0 | 7 | 155 | 312 | 160 | 152 | 33 | 1 |
module Physie.List(
maximumByNeighbors
, boolToList
) where
import Control.Lens (view, _2)
import Data.Function (on)
import Data.List (maximumBy)
maximumByNeighbors :: Ord a => (a -> a -> Ordering) -> [a] -> (a,a,a)
maximumByNeighbors f ls = let cls = cycle ls
in maximumBy (f `on` view _2) $ zip3 (drop (length ls - 1) cls) ls (drop 1 cls)
boolToList :: Bool -> a -> [a]
boolToList b a = [a | b]
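{- A hypothetical GHCi sketch based on the definitions above:
   > maximumByNeighbors compare [3,1,4,1,5]
   (1,5,3)   -- the maximum element 5 together with its wrap-around neighbours
   > boolToList False 'x'
   ""
-}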
| pmiddend/physie | src/Physie/List.hs | mit | 469 | 0 | 13 | 144 | 199 | 108 | 91 | 11 | 1 |
import Utils
nNumbers nDig = 9 * 10^(nDig-1)
lens = map nNumbers [1..]
relativeShifts = zipWith (*) lens [1..]
absoluteShifts = zip [1..] $ scanl (+) 0 relativeShifts
nthDigit n = digit
--nthDigit n = (nDigits, shift, numberShift, nAsDigits, digitShift, digit)
where (nDigits, shift) = last $ takeWhile (\(a,b) -> b<n) absoluteShifts
numberShift = ((n - shift - 1) `div` nDigits) + 1
nAsDigits = numberToDigits $ 10^(nDigits-1) + numberShift - 1
digitShift = (n - shift - 1) `mod` nDigits
digit = nAsDigits !! (fromIntegral digitShift)
answer = product digits
digits = map nthDigit [1, 10, 100, 1000, 10000, 100000, 1000000]
| arekfu/project_euler | p0040/p0040.hs | mit | 696 | 0 | 12 | 173 | 264 | 148 | 116 | 13 | 1 |
{-# LANGUAGE RankNTypes #-}
{- |
Module : Orville.PostgreSQL.Connection
Copyright : Flipstone Technology Partners 2016-2021
License : MIT
-}
module Orville.PostgreSQL.Connection
( Connection,
Pool,
ConnectionUsedAfterCloseError,
ConnectionError,
SqlExecutionError (..),
NoticeReporting (EnableNoticeReporting, DisableNoticeReporting),
createConnectionPool,
executeRaw,
executeRawVoid,
escapeStringLiteral,
)
where
import Control.Concurrent (threadWaitRead, threadWaitWrite)
import Control.Concurrent.MVar (MVar, newMVar, tryReadMVar, tryTakeMVar)
import Control.Exception (Exception, mask, throwIO)
import Control.Monad (void)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as B8
import Data.Maybe (fromMaybe)
import Data.Pool (Pool, createPool)
import qualified Data.Text as T
import qualified Data.Text.Encoding as Enc
import Data.Time (NominalDiffTime)
import qualified Database.PostgreSQL.LibPQ as LibPQ
import Orville.PostgreSQL.Internal.PgTextFormatValue (NULByteFoundError (NULByteFoundError), PgTextFormatValue, toBytesForLibPQ)
{- |
  An option for 'createConnectionPool' that indicates whether LibPQ should
  print notice reports for warnings to the console.
-}
data NoticeReporting
= EnableNoticeReporting
| DisableNoticeReporting
{- |
  'createConnectionPool' allocates a pool of connections to a PostgreSQL server.
-}
createConnectionPool ::
-- | Whether or not notice reporting from LibPQ should be enabled
NoticeReporting ->
-- | Number of stripes in the connection pool
Int ->
-- | Linger time before closing an idle connection
NominalDiffTime ->
-- | Max number of connections to allocate per stripe
Int ->
-- | A PostgreSQL connection string
BS.ByteString ->
IO (Pool Connection)
createConnectionPool noticeReporting stripes linger maxRes connectionString =
createPool (connect noticeReporting connectionString) close stripes linger maxRes
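{- A minimal usage sketch (assumptions: the connection string is a placeholder
   and 'withResource' comes from Data.Pool, which is not re-exported here):

   > pool <- createConnectionPool EnableNoticeReporting 1 60 10
   >           (B8.pack "host=localhost dbname=test")
   > withResource pool $ \conn ->
   >   executeRawVoid conn (B8.pack "SELECT 1") []
-}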
{- |
'executeRaw' runs a given SQL statement returning the raw underlying result.
All handling of stepping through the result set is left to the caller. This
potentially leaves connections open much longer than one would expect if all
of the results are not iterated through immediately *and* the data copied.
Use with caution.
-}
executeRaw ::
Connection ->
BS.ByteString ->
[Maybe PgTextFormatValue] ->
IO LibPQ.Result
executeRaw connection bs params =
case traverse (traverse toBytesForLibPQ) params of
Left NULByteFoundError ->
throwIO NULByteFoundError
Right paramBytes ->
underlyingExecute bs paramBytes connection
{- |
  'executeRawVoid' is a version of 'executeRaw' that completely ignores the result.
If an error occurs it is raised as an exception.
Use with caution.
-}
executeRawVoid :: Connection -> BS.ByteString -> [Maybe PgTextFormatValue] -> IO ()
executeRawVoid connection bs params =
void $ executeRaw connection bs params
{- |
The basic connection interface.
-}
newtype Connection = Connection (MVar LibPQ.Connection)
{- |
'connect' is the internal, primitive connection function.
This should not be exposed to end users, but instead wrapped in something to create a pool.
Note that handling the libpq connection with the polling is described at
<https://hackage.haskell.org/package/postgresql-libpq-0.9.4.2/docs/Database-PostgreSQL-LibPQ.html>.
-}
connect :: NoticeReporting -> BS.ByteString -> IO Connection
connect noticeReporting connectionString =
let checkSocketAndThreadWait conn threadWaitFn = do
fd <- LibPQ.socket conn
case fd of
Nothing -> do
throwConnectionError "connect: failed to get file descriptor for socket" conn
Just fd' -> do
threadWaitFn fd'
poll conn
poll conn = do
pollStatus <- LibPQ.connectPoll conn
case pollStatus of
LibPQ.PollingFailed -> do
throwConnectionError "connect: polling failed while connecting to database server" conn
LibPQ.PollingReading ->
checkSocketAndThreadWait conn threadWaitRead
LibPQ.PollingWriting ->
checkSocketAndThreadWait conn threadWaitWrite
LibPQ.PollingOk -> do
connectionHandle <- newMVar conn
pure (Connection connectionHandle)
in do
connection <- LibPQ.connectStart connectionString
case noticeReporting of
DisableNoticeReporting -> LibPQ.disableNoticeReporting connection
EnableNoticeReporting -> LibPQ.enableNoticeReporting connection
poll connection
{- |
'close' has many subtleties to it.
  First note that async exceptions are masked. 'mask', though, only works for
things that are not interruptible
<https://www.stackage.org/haddock/lts-16.15/base-4.13.0.0/Control-Exception.html#g:13>
From the previous link, 'tryTakeMVar' is not interruptible, where 'takeMVar'
*is*. So by using 'tryTakeMVar' along with 'mask', we should be safe from
async exceptions causing us to not finish an underlying connection. Notice
that the only place the MVar is ever taken is here so 'tryTakeMVar' gives us
both the non-blocking semantics to protect from async exceptions with 'mask'
  _and_ should never truly return an empty result unless two threads were racing to
  close the connection, in which case one of them will close the connection.
-}
close :: Connection -> IO ()
close (Connection handle') =
let underlyingFinish :: (forall a. IO a -> IO a) -> IO (Maybe ())
underlyingFinish restore = do
underlyingConnection <- tryTakeMVar handle'
restore (traverse LibPQ.finish underlyingConnection)
in void $ mask underlyingFinish
{- |
'underlyingExecute' is the internal, primitive execute function.
This is not intended to be directly exposed to end users, but instead wrapped
in something using a pool. Note there are potential dragons here in that
  this calls `tryReadMVar` and then returns an error if the MVar is not full.
The intent is to never expose the ability to empty the `MVar` outside of this
module, so unless a connection has been closed it *should* never be empty.
And a connection should be closed upon removal from a resource pool (in which
case it can't be used for this function in the first place).
-}
underlyingExecute ::
BS.ByteString ->
[Maybe BS.ByteString] ->
Connection ->
IO LibPQ.Result
underlyingExecute bs params connection = do
libPQConn <- readLibPQConnectionOrFailIfClosed connection
mbResult <-
LibPQ.execParams libPQConn bs (map mkInferredTextParam params) LibPQ.Text
case mbResult of
Nothing -> do
throwConnectionError "No result returned from exec by libpq" libPQConn
Just result -> do
execStatus <- LibPQ.resultStatus result
if isRowReadableStatus execStatus
then pure result
else do
throwLibPQResultError result execStatus bs
{- |
Escapes a string for use as a literal within a SQL command that will be
execute on the given connection. This uses the @PQescapeStringConn@ function
from libpq, which takes the character encoding of the connection into
  account. This function only escapes the characters to be used in a string
literal -- it does not add the surrounding quotes.
-}
escapeStringLiteral :: Connection -> BS.ByteString -> IO BS.ByteString
escapeStringLiteral connection unescapedString = do
libPQConn <- readLibPQConnectionOrFailIfClosed connection
mbEscapedString <- LibPQ.escapeStringConn libPQConn unescapedString
case mbEscapedString of
Nothing ->
throwConnectionError "Error while escaping string literal" libPQConn
Just escapedString ->
pure escapedString
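-- A hypothetical follow-up sketch: the surrounding quotes remain the caller's
-- responsibility, e.g.
--
-- > escaped <- escapeStringLiteral conn (B8.pack "it's")
-- > let literal = B8.concat [B8.pack "'", escaped, B8.pack "'"]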
readLibPQConnectionOrFailIfClosed :: Connection -> IO LibPQ.Connection
readLibPQConnectionOrFailIfClosed (Connection handle) = do
mbConn <- tryReadMVar handle
case mbConn of
Nothing ->
throwIO ConnectionUsedAfterCloseError
Just conn ->
pure conn
throwConnectionError :: String -> LibPQ.Connection -> IO a
throwConnectionError message conn = do
mbLibPQError <- LibPQ.errorMessage conn
throwIO $
ConnectionError
{ connectionErrorMessage = message
, connectionErrorLibPQMessage = mbLibPQError
}
throwLibPQResultError ::
LibPQ.Result ->
LibPQ.ExecStatus ->
BS.ByteString ->
IO a
throwLibPQResultError result execStatus queryBS = do
mbLibPQError <- LibPQ.resultErrorMessage result
mbSqlState <- LibPQ.resultErrorField result LibPQ.DiagSqlstate
throwIO $
SqlExecutionError
{ sqlExecutionErrorExecStatus = execStatus
, sqlExecutionErrorMessage = fromMaybe (B8.pack "No error message available from LibPQ") mbLibPQError
, sqlExecutionErrorSqlState = mbSqlState
, sqlExecutionErrorSqlQuery = queryBS
}
isRowReadableStatus :: LibPQ.ExecStatus -> Bool
isRowReadableStatus status =
case status of
LibPQ.CommandOk -> True -- ??
LibPQ.TuplesOk -> True -- Returned on successful query, even if there are 0 rows.
    LibPQ.SingleTuple -> True -- Only returned when a query is executed in single row mode
LibPQ.EmptyQuery -> False
LibPQ.CopyOut -> False
LibPQ.CopyIn -> False
LibPQ.CopyBoth -> False -- CopyBoth is only used for streaming replication, so should not occur in ordinary applications
LibPQ.BadResponse -> False
    LibPQ.NonfatalError -> False -- NonfatalError is never returned from LibPQ query execution functions; libpq passes these to the notice processor instead.
LibPQ.FatalError -> False
{- |
Packages a bytestring parameter value (which is assumed to be a value encoded
as text that the database can use) as a parameter for executing a query.
  This uses Oid 0 to cause the database to infer the type of the parameter and
explicitly marks the parameter as being in Text format.
-}
mkInferredTextParam :: Maybe BS.ByteString -> Maybe (LibPQ.Oid, BS.ByteString, LibPQ.Format)
mkInferredTextParam mbValue =
case mbValue of
Nothing ->
Nothing
Just value ->
Just (LibPQ.Oid 0, value, LibPQ.Text)
data ConnectionError = ConnectionError
{ connectionErrorMessage :: String
, connectionErrorLibPQMessage :: Maybe BS.ByteString
}
instance Show ConnectionError where
show err =
let libPQErrorMsg =
case connectionErrorLibPQMessage err of
            Nothing ->
              "<no underlying error available>"
Just libPQMsg ->
case Enc.decodeUtf8' libPQMsg of
Right decoded ->
T.unpack decoded
Left decodingErr ->
"Error decoding libPQ messages as utf8: " <> show decodingErr
in connectionErrorMessage err <> ": " <> libPQErrorMsg
instance Exception ConnectionError
data SqlExecutionError = SqlExecutionError
{ sqlExecutionErrorExecStatus :: LibPQ.ExecStatus
, sqlExecutionErrorMessage :: BS.ByteString
, sqlExecutionErrorSqlState :: Maybe BS.ByteString
, sqlExecutionErrorSqlQuery :: BS.ByteString
}
deriving (Show)
instance Exception SqlExecutionError
data ConnectionUsedAfterCloseError
= ConnectionUsedAfterCloseError
deriving (Show)
instance Exception ConnectionUsedAfterCloseError
| flipstone/orville | orville-postgresql-libpq/src/Orville/PostgreSQL/Connection.hs | mit | 11,198 | 0 | 18 | 2,243 | 1,687 | 862 | 825 | 194 | 10 |
{-
Copyright (c) 2008, 2013
Russell O'Connor
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-}
-- |Defines the Y'CbCr and Y'PbPr colour spaces in accordance with
-- ITU-R Recommendation BT.601 used for 625-line (PAL) standard
-- definition television (SDTV).
--
-- For high definition television (HDTV) see "Data.Colour.HDTV".
module Data.Colour.SDTV625
{-
(Colour
,luma
,y'PbPr, toY'PbPr
,y'CbCr, toY'CbCr
)
-}
where
import Data.Word
import Data.Colour.RGBSpace
import Data.Colour.SRGB (sRGBSpace)
import Data.Colour.CIE.Illuminant (d65)
import Data.Colour.CIE
import Data.Colour.SDTV
import qualified Data.Colour.Luma as L
space :: (Ord a, Floating a) => RGBSpace a
space = mkRGBSpace gamut transfer
where
gamut = mkRGBGamut (RGB (mkChromaticity 0.64 0.33)
(mkChromaticity 0.29 0.60)
(mkChromaticity 0.15 0.06))
d65
transfer = transferFunction sRGBSpace
{- rec 601 luma -}
-- |Luma (Y') approximates the 'Data.Colour.CIE.lightness' of a 'Colour'.
luma :: (Ord a, Floating a) => Colour a -> a
luma = L.luma lumaCoef space
-- |Construct a 'Colour' from Y'PbPr coordinates.
y'PbPr :: (Ord a, Floating a) => a -> a -> a -> Colour a
y'PbPr = L.y'PbPr lumaCoef space
-- |Returns the Y'PbPr coordinates of a 'Colour'.
toY'PbPr :: (Ord a, Floating a) => Colour a -> (a, a, a)
toY'PbPr = L.toY'PbPr lumaCoef space
-- |Construct a 'Colour' from Y'CbCr studio 8-bit coordinates.
y'CbCr :: (Floating a, RealFrac a) => Word8 -> Word8 -> Word8 -> Colour a
y'CbCr = L.y'CbCr lumaCoef space
-- |Returns the Y'CbCr studio 8-bit coordinates of a 'Colour'.
toY'CbCr :: (Floating a, RealFrac a) => Colour a -> (Word8, Word8, Word8)
toY'CbCr = L.toY'CbCr lumaCoef space
-- |Construct a 'Colour' from R'G'B' studio 8-bit coordinates.
r'g'b' :: (Floating a, RealFrac a) => Word8 -> Word8 -> Word8 -> Colour a
r'g'b' = L.r'g'b' space
-- |Returns the R'G'B' studio 8-bit coordinates of a 'Colour'.
toR'G'B' :: (Floating a, RealFrac a) => Colour a -> RGB Word8
toR'G'B' = L.toR'G'B' space
| haasn/colour | Data/Colour/SDTV625.hs | mit | 3,037 | 0 | 11 | 559 | 478 | 264 | 214 | 29 | 1 |
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
-- | This module creates a class for automating the construction and
-- destruction of JSON objects
module JSON where
import Haste
import Haste.Graphics.Canvas
import Haste.JSON
import Prelude hiding (head, tail, init, last, read, (!!))
import Safe (atMay,headMay)
-- | Values which can be converted back and forth from JSON. The main
-- class law is that
--
-- > fromJSON . toJSON = id
--
-- Note that
--
-- > toJSON . fromJSON == id
--
-- does *NOT* hold, as that would imply preserving whitespace and
-- ordering of generic JSON files.
class JSONable a where
toJSON :: a -> JSON -- ^ Convert the value into JSON
fromJSON :: JSON -> Maybe a -- ^ Extract a value from JSON or return Nothing on a failure
-- | Turns a Double into a generic JSON number
instance JSONable Double where
toJSON = Num
fromJSON (Num x) = Just x
fromJSON _ = Nothing
-- | Turns a string into a generic JSON string. Note that it doesn't
-- actually work, since String = [Char], so Haskell can't distinguish it
-- from a [Char], even though [Char] is not an instance of JSONable
instance JSONable String where
toJSON x = Str $ toJSString x
fromJSON (Str x) = Just $ fromJSStr x
fromJSON _ = Nothing
-- | Turns a list of JSONable objects into a JSON array
instance JSONable a => JSONable [a] where
toJSON = Arr . map toJSON
fromJSON (Arr x) = mapM fromJSON x
fromJSON _ = Nothing
-- | Turns a Point into a two element JSON array
instance JSONable Point where
toJSON (x,y) = Arr . map toJSON $ [x,y]
fromJSON (Arr ps) = (,) <$> (headMay ps >>= fromJSON)
<*> (ps `atMay` 1 >>= fromJSON)
fromJSON _ = Nothing
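-- A hedged round-trip example (hypothetical, using only definitions from this
-- module): a point becomes a two element JSON array and back.
--
-- > fromJSON (toJSON ((1, 2) :: Point)) == Just ((1, 2) :: Point)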
-- | Pull a value from a JSON object
(~~>) :: (JSONable a) => JSON -> JSString -> Maybe a
d ~~> key = do
v <- d ~> key
fromJSON v
| rprospero/PhotoAlign | JSON.hs | mit | 1,880 | 0 | 10 | 434 | 406 | 226 | 180 | 32 | 1 |
-- Double all integers in a list.
module Double where
double :: [Integer] -> [Integer]
double [] = []
double (integer : remainingIntegers)
= (2 * integer) : double remainingIntegers
{- GHCi>
double []
double [1]
double [1, 1]
-}
-- []
-- [2]
-- [2, 2]
| pascal-knodel/haskell-craft | Examples/· Recursion/· Primitive Recursion/Lists/Double.hs | mit | 266 | 0 | 7 | 61 | 65 | 38 | 27 | 5 | 1 |
-- | Understanding the article about profunctors
-- source: https://www.fpcomplete.com/user/liyang/profunctors
module Profunctors where
-- import Data.Functor
-- class Functor f where
-- fmap :: (a -> b) -> f a -> f b
-- import Data.Functor.Contravariant
class Contravariant f where
contramap :: (b -> a) -> f a -> f b
-- type Predicate a = a -> Bool
newtype Predicate a = Predicate { getPredicate :: a -> Bool }
instance Contravariant Predicate where
contramap g (Predicate p) = Predicate (p . g)
veryOdd :: Predicate Integer
veryOdd = contramap (`div` 2) (Predicate odd)
main :: IO ()
main = print $ getPredicate veryOdd <$> [0..11]
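-- Expected output (derived from the definitions above): 'veryOdd' tests
-- whether @x `div` 2@ is odd, so the printed list is
-- [False,False,True,True,False,False,True,True,False,False,True,True]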
| ardumont/haskell-lab | src/Profunctors.hs | gpl-2.0 | 659 | 0 | 9 | 132 | 159 | 89 | 70 | 10 | 1 |
module Logic.FirstOrderLogic where
import Notes
import qualified Prelude as P (map)
import Functions.Application.Macro
import Sets.Basics.Terms
import Logic.AbstractLogic.Macro
import Logic.AbstractLogic.Terms
import Logic.PropositionalLogic.Macro
import Logic.PropositionalLogic.Resolution
import Logic.PropositionalLogic.Sentence
import Logic.PropositionalLogic.Terms
import Logic.PropositionalLogic.TruthTables
import Logic.FirstOrderLogic.Macro
import Logic.FirstOrderLogic.Terms
firstOrderLogicS :: Note
firstOrderLogicS = section "First Order Logic" $ do
firstOrderLogicDefinition
termDefinition
atomicSentence
quantifiers
modelFOL
translationExamples
situationalCalculusSS
inferenceInFOL
firstOrderLogicDefinition :: Note
firstOrderLogicDefinition = de $ do
lab firstOrderLogicDefinitionLabel
s ["While propositional logic is about simple facts, first order logic is about complex facts involving objects, relations, functions, etc..."]
s [firstOrderLogic', " is a ", theory]
s ["It is an extension of propositional logic with predicates, functions, variables and their quantifiers"]
s ["Remember that these symbols are just that, symbols"]
termDefinition :: Note
termDefinition = do
    de $ s ["A ", defineTerm "term", " in first order logic is either a constant symbol, a variable or a ", m "k", "-ary function symbol applied to terms"]
ex $ s [cs $ P.map (dquoted . m) [1, 2, 3, x, f x], " are terms in first order logic"]
where
x = "x"
f = fn "f"
atomicSentence :: Note
atomicSentence = do
de $ do
lab atomicDefinitionLabel
s ["A sentence in first order logic is called ", atomic', " if it is a constant symbol or a function of only constant symbols"]
ex $ s [cs $ P.map (dquoted . m) [1, small 1, smaller 1 2], " are atomic sentences in ", firstOrderLogic]
where
small = fn "Small"
smaller = fn2 "Smaller"
quantifiers :: Note
quantifiers = subsection "Quantifiers" $ do
s ["Quantifiers bind free variables"]
existentialQuantifierDefinition
universalQuantifierDefinition
note "composite-sentence" $ do
compositeSentence
compositeSentenceExamples
propertiesOfQuantifiers
x, y :: Note
x = "x"
y = "y"
pp :: Note -> Note
pp = fn "P"
ppp :: Note -> Note -> Note
ppp x y = fn p $ cs [x, y]
existentialQuantifierDefinition :: Note
existentialQuantifierDefinition = de $ do
s ["The ", existentialQuantifier', " ", m thereExistsSign, " "]
s ["A sentence ", m (te x $ pp x), ", in the context of a model ", m "m", " is defined to hold true if there exists a ", m x, " in ", m "m", " such that the predicate ", m p, " holds for ", m x]
universalQuantifierDefinition :: Note
universalQuantifierDefinition = de $ do
s ["The ", universalQuantifier', " ", m forallSign, " "]
s ["A sentence ", m (fa x $ pp x), ", in the context of a model ", m "m", " is defined to hold true if the predicate ", m p, " holds for every instantiation of ", m x, " in ", m "m"]
compositeSentence :: Note
compositeSentence = do
    de $ s ["A sentence in first order logic is called ", composite', " if it is atomic, contains free variables and quantifiers, or is composed of composite sentences joined by connectives"]
ex $ s [cs $ P.map (dquoted . m) [1, greater 2 1, great x, fa y (great x ∨ greater x y)], " are composite sentences in first order logic"]
where
x = "x"
great = fn "Great"
greater = fn2 "Greater"
compositeSentenceExamples :: Note
compositeSentenceExamples = do
ex $ do
let (mom, mmoc) = ("ManOnMoon", "MoonMadeOfCheese")
        s ["The following is the translation to ", firstOrderLogic, " of the ", composite, " sentence ", dquoted $ s ["If there is a man on the moon, then the moon is made of cheese"]]
ma $ mom ⇒ mmoc
ex $ do
let (x, par, joan, fem) = ("x", fn2 "Parent", "Joan", fn "Female")
        s ["The following is the translation to ", firstOrderLogic, " of the ", composite, " sentence ", dquoted $ s ["Joan has a daughter"]]
ma $ te x $ par joan x ∧ fem x
propertiesOfQuantifiers :: Note
propertiesOfQuantifiers = subsubsection "Properties of quantifiers" $ do
switchExistentials
switchUniversals
switchMixed
dualityOfQuantifiers
switchExistentials :: Note
switchExistentials = thm $ do
s ["The order of multiple contiguous existential quantifiers does not matter"]
ma $ (pars $ te x $ te y $ ppp x y) ⇔ (pars $ te y $ te x $ ppp x y)
toprove
switchUniversals :: Note
switchUniversals = thm $ do
s ["The order of multiple contiguous universal quantifiers does not matter"]
ma $ (pars $ fa x $ fa y $ ppp x y) ⇔ (pars $ fa y $ fa x $ ppp x y)
toprove
switchMixed :: Note
switchMixed = cex $ do
s ["The order of different quantifiers ", textbf "does", " matter"]
ma $ not . pars $ (pars $ te x $ fa y $ ppp x y) ⇔ (pars $ fa y $ te x $ ppp x y)
toprove
dualityOfQuantifiers :: Note
dualityOfQuantifiers = thm $ do
s ["Each quantifier can be expressed in terms of the other"]
ma $ (pars $ fa x $ pp x) ⇔ (pars $ not $ te x $ not $ pp x)
toprove
modelFOL :: Note
modelFOL = de $ do
s ["A ", model, " in first order logic consists of instantiations of objects, relations and functions and their interpretations in terms of their symbols"]
    s ["Often any constant not in the model is asserted to be false"]
translationExamples :: Note
translationExamples = do
ex $ do
        examq eth "Probabilistic Artificial Intelligence" "January 2014"
let mh = fn "StudiesMath"
cs = fn "StudiesCS"
ts = fn2 "Teaches"
itemize $ do
item $ do
s ["Given that all elements of the universe are students, express the following facts in first-order logic"]
enumerate $ do
let t = "t"
let u = "u"
let v = "v"
item $ do
s ["Every student teaches some student"]
ma $ fa u $ te t $ ts u t
item $ do
s ["Every student teaches exactly one student"]
ma $ fa u $ te t $ (ts u t) ∧ (pars $ fa v (ts u v) ⇒ t =: v)
item $ do
s ["No one is both a maths student and a computer science student"]
ma $ fa t $ neg $ (pars $ mh t ∧ cs t)
item $ do
s ["Describe a model that satisfies the above statements and the following"]
ma $ do
let (x, y, z) = ("x", "y", "z")
fa x $ fa y $ fa z $ pars $ (((ts x z) ∧ (ts y z)) ⇒ x =: y)
s ["In this model, every student is taught by at most one student"]
situationalCalculusSS :: Note
situationalCalculusSS = subsection "Situational Calculus" $ do
s ["The use of ", situationalCalculus', " is to model situations"]
s ["In situational calculus, facts hold at a certain moment and/or in a certain situation"]
s ["This is modeled by adding a situational argument to every non-eternal ", predicate]
s ["Situational calculus can be used to model change, non-change, actions, perceptions, etc..."]
frameProblem
planning
frameProblem :: Note
frameProblem = subsubsection "The frame problem" $ do
s ["Now that we can model situations using frames, there is a need for so called ", effectAxiom', "s that model changes due to actions"]
s ["In addition to modeling change, one must also model non-change"]
s ["The frame problem is that the number of frame axioms can be become large and even infinite"]
s ["This poses problems in automated inference"]
s ["To solve the problem, we will use so called ", successorStateAxiom', "s that model how each non-eternal predicate is affected or not affected by actions"]
s ["These successor state axioms model the fact that a predicate is true if and only if an action made it true or it was already true and no action made it false"]
planning :: Note
planning = subsubsection "Planning using first order logic" $ do
s ["First order logic can be used to plan actions based on a knowledge base of known facts"]
s ["The idea is to decide what the goal situation is and to model it"]
s ["Then, automated inference can be used to find out whether the given knowledge base entails the goal situation"]
inferenceInFOL :: Note
inferenceInFOL = subsection "Inference in first order logic" $ do
s ["Inference in first order logic is more complicated than inference in ", propositionalLogic]
s ["In general, there are two approaches: Propositionalisation and ", dquoted "lifted", " inference"]
propositionalisationSS
liftedInferenceSS
propositionalisationSS :: Note
propositionalisationSS = subsubsection "propositionalisation" $ do
de $ do
s [propositionalisation', " is an ", inference, " in first order logic"]
s ["It consists of replacing all quantified variables with so called ", groundingVariable', "s using each possible defineTerm"]
s ["This turns the problem into a propositional logic problem and it can then be solved as discussed before"]
s ["The problem with proportionalisation is that the solver may need to create a lot of unnecessary symbols"]
s ["Even worse, the amount of created symbols could be infinite"]
propositionalisationExamples
herbrandTheorem
s ["Given this theorem, we can propose a naive algorithm to test whether a given sentence ", m lsen, " is entailed by a given first order logic knowledge base"]
s ["The algorithm consists of enumerating all finite subsets of the propositionalised knowledge base ", m (lkb ∪ not lsen), " and checking whether they are satisfiable one by one using propositional resolution"]
s ["Note that this algorithm will stop if the given sentence is entailed by the given knowledge base but will never stop otherwise"]
s ["This is intrinsic to the problem"]
s ["First order logic is only semi-decidable"]
herbrandReference :: Reference
herbrandReference = Reference article "herbrand-theorem" $
[
("author" , "Jacques Herbrand")
, ("title" , "Recherches sur la theorie de la demonstration.")
, ("year" , "1930")
, ("journal", "Travaux de la Societe des Sciences et des Lettres de Varsovie")
, ("volume" , "3")
, ("number" , "33")
]
propositionalisationExamples :: Note
propositionalisationExamples = do
propositionalisationExampleBirds
propositionalisationExampleCompany
propositionalisationExampleBirds :: Note
propositionalisationExampleBirds = ex $ do
s ["Suppose we are given this set of facts"]
itemize $ do
item $ s ["Birds are winged animals"]
item $ s ["Birds are characterized by feathers and beaks"]
item $ s ["Bob is a bird watcher, but he doesn't like birds with long beaks"]
item $ s ["All woodpeckers have long beaks"]
item $ s ["A bird with a read beak is either a woodpecker or a cardinal"]
s ["One day, Bob finds a read-beaked bird"]
s ["He likes it very much and names it Aly"]
s ["To find out whether Aly is a Cardinal, we first have to transform these sentences into a ", firstOrderLogic, " ", knowledgeBase]
let x = "x"
bird = fn "Bird"
bob = "Bob"
likes = fn2 "Likes"
lb = fn "LongBeak"
rb = fn "RedBeak"
wp = fn "Woodpecker"
cd = fn "Cardinal"
aly = "Aly"
itemize $ do
item $ m $ neg $ bird bob
item $ m $ fa x $ bird x ∧ lb x ⇒ neg (bob `likes` x)
item $ m $ fa x $ bird x ∧ wp x ⇒ lb x
item $ m $ fa x $ bird x ∧ rb x ⇒ (wp x ∨ cd x)
item $ m $ bird aly
item $ m $ rb aly
item $ m $ bob `likes` aly
s ["Next, we apply ", propositionalisation, " to obtain sentences in propositonal logic"]
s ["Remember: quantified variables must be replaced by all possible constants"]
s ["However, for the sake of the example, we will not try to replace the quantified variables by ", m bob, " as all of the resulting sentences would evaluate to ", m true]
s ["A system without this knowledge would still have to try replacing the variables with ", m bob]
s ["Sentences without quantifiers stay the same"]
enumerate $ do
item $ m $ bird aly ∧ lb aly ⇒ neg (bob `likes` aly)
item $ m $ bird aly ∧ wp aly ⇒ lb aly
item $ m $ bird aly ∧ rb aly ⇒ (wp aly ∨ cd aly)
s ["Now we are left with the following propositional knowledge base"]
let sym = Literal . Symbol
s1 = Implies (And (sym "B(A)") (sym "LB(A)")) (Not (sym "L(Bob, A)"))
s2 = Implies (And (sym "B(A)") (sym "WP(A)")) (sym "LB(A)")
s3 = Implies (And (sym "B(A)") (sym "RB(A)")) (Or (sym "WP(A)") (sym "C(A)"))
s4 = sym "B(A)"
s5 = sym "RB(A)"
s6 = sym "L(Bob, A)"
query = sym "C(A)"
itemize $ do
item $ m $ renderSentence s1 --bird aly ∧ lb aly ⇒ neg (bob `likes` aly)
item $ m $ renderSentence s2
item $ m $ renderSentence s3
item $ m $ renderSentence s4
item $ m $ renderSentence s5
item $ m $ renderSentence s6
s ["The query is ", m $ renderSentence query]
s ["To answer it, we first have to tranfrom the sentences in the ", knowledgeBase, " to ", conjunctiveNormalForm]
renderTransformation s1
renderTransformation s2
renderTransformation s3
let kb = [s1, s2, s3, s4, s5, s6]
s ["Now we can solve this with resolution"]
proofUnsatisfiable 20.0 kb query
propositionalisationExampleCompany :: Note
propositionalisationExampleCompany = ex $ do
examq eth "Probabillistic Artificial Intelligence" "January 2013"
s ["James, Henry and David are working in a company"]
s ["We know that they hold the jobs of manager, programmer and engineer; but we don’t know which person has which job"]
itemize $ do
let james = "James"
henry = "Henry"
david = "David"
bor = fn2 "Borrowed"
mar = fn "Married"
job = fn2 "Job"
prog = "Programmer"
man = "Manager"
x = "x"
item $ do
s ["Generate a ", firstOrderLogic, " ", knowledgeBase, " considering the information below"]
itemize $ do
item $ do
s ["James has borrowed money from the programmer"]
ma $ bor james prog
item $ do
s ["The manager is married"]
ma $ fa x $ job x man ⇒ mar x
item $ do
s ["The Manager doesn’t like to borrow money from somebody else"]
newline
s ["There is no useful translation of this sentence"]
item $ do
s ["David is single"]
ma $ neg $ mar $ david
item $ do
s ["Prove, using resolution, that David is not the manager"]
s ["First we propositionalise the sentence with quantifiers"]
s [m $ fa x $ job x man ⇒ mar x, " becomes the conjunction of the following three sentences"]
itemize $ do
item $ m $ job david man ⇒ mar david
item $ m $ job henry man ⇒ mar henry
item $ m $ job james man ⇒ mar james
let kb =
[
"B(J,P)"
, Implies "Job(D, M)" "M(D)"
-- , Implies "Job(H, M)" "M(H)"
-- , Implies "Job(J, M)" "M(J)"
, Not "M(D)"
]
let query = Not "Job(D, M)"
s ["Now we can apply resolution to prove ", m $ renderSentence query]
s ["Two of the propositionalised sentences are omited for the sake of brevity"]
proofUnsatisfiable 3 kb query
herbrandTheorem :: Note
herbrandTheorem = thm $ do
s [herbrandsTheorem']
newline
s ["If a sentence in entailed by a first order logic knowledge base, then there exists a proof using only a finite subset of the propositionalized knowledge base"]
cite herbrandReference
liftedInferenceSS :: Note
liftedInferenceSS = subsubsection "Lifted inference" $ do
s [the, liftedInference', "s are a ", set, " of ", inference, "s in first order logic"]
s ["It consists of trying to infer sentences ", emph "without", " instantiating variables at all using propositional inference by lifting its inferences"]
de $ do
s [the, generalizedModusPonens', " is an ", inference, " in first order logic"]
s ["Let ", m (cs [pp 1, dotsc, pp n]), and, m (cs [p 1, dotsc, p n]), " be sentences in first order logic"]
s ["Let ", m t, " be a substitution and ", m (subst t q), " its application to ", m q]
s ["Suppose ", m (subst t (pp i) =: subst t (p i)), " holds"]
ma $ linf [cs [pp 1, dotsc, pp n], ((p 1) ∧ dotsb ∧ (p n)) ⇒ q] $ subst t q
thm $ do
s ["The generalized modus ponens is not ", complete]
noproof
s ["There also exists a lifted variant of resolution"]
todo "Describe this variant"
s ["It is ", sound, " and refutation-complete but not ", complete]
todo "define refutation-complete"
where
subst_ = "Subst"
subst = fn2 subst_
n = "n"
t = theta
i = "i"
q = "q"
p n = "p" !: n
pp n = "p'" !: n
| NorfairKing/the-notes | src/Logic/FirstOrderLogic.hs | gpl-2.0 | 17,782 | 15 | 28 | 5,142 | 4,301 | 2,124 | 2,177 | 341 | 1 |
{- |
Module : $Header$
Description : Interface to the Vampire theorem prover via MathServe.
Copyright : (c) Rene Wagner, Klaus Luettich, Rainer Grabbe,
Uni Bremen 2005-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : needs POSIX
Interface for the Vampire service, uses GUI.GenericATP.
See <http://spass.mpi-sb.mpg.de/> for details on SPASS.
-}
module SoftFOL.ProveVampire (vampire, vampireCMDLautomaticBatch) where
import Logic.Prover
import SoftFOL.Sign
import SoftFOL.Translate
import SoftFOL.MathServMapping
import SoftFOL.MathServParsing
import SoftFOL.ProverState
import qualified Common.AS_Annotation as AS_Anno
import qualified Common.Result as Result
import Common.ProofTree
import Control.Monad (when)
import qualified Control.Concurrent as Concurrent
import qualified Control.Exception as Exception
import GUI.GenericATP
import Interfaces.GenericATPState
import Proofs.BatchProcessing
-- * Prover implementation
{- |
The Prover implementation. First runs the batch prover (with graphical
feedback), then starts the GUI prover.
-}
vampire :: Prover Sign Sentence SoftFOLMorphism () ProofTree
vampire = mkAutomaticProver "Vampire" () vampireGUI vampireCMDLautomaticBatch
vampireHelpText :: String
vampireHelpText =
"No help yet available.\n" ++
"email [email protected] " ++
"for more information.\n"
-- * Main prover functions
-- ** Utility functions
{- |
Record for prover specific functions. This is used by both GUI and command
line interface.
-}
atpFun :: String -- ^ theory name
-> ATPFunctions Sign Sentence SoftFOLMorphism ProofTree SoftFOLProverState
atpFun thName = ATPFunctions
{ initialProverState = spassProverState,
atpTransSenName = transSenName,
atpInsertSentence = insertSentenceGen,
goalOutput = showTPTPProblem thName,
proverHelpText = vampireHelpText,
batchTimeEnv = "HETS_SPASS_BATCH_TIME_LIMIT",
fileExtensions = FileExtensions {problemOutput = ".tptp",
proverOutput = ".vamp",
theoryConfiguration = ".spcf"},
runProver = runVampire,
createProverOptions = extraOpts}
-- ** GUI
{- |
Invokes the generic prover GUI. SPASS specific functions are omitted by
data type ATPFunctions.
-}
vampireGUI :: String -- ^ theory name
-> Theory Sign Sentence ProofTree
{- ^ theory consisting of a SoftFOL.Sign.Sign
and a list of Named SoftFOL.Sign.Sentence -}
-> [FreeDefMorphism SPTerm SoftFOLMorphism] -- ^ freeness constraints
-> IO [ProofStatus ProofTree] -- ^ proof status for each goal
vampireGUI thName th freedefs =
genericATPgui (atpFun thName) True (proverName vampire) thName th
freedefs emptyProofTree
-- ** command line function
{- |
Implementation of 'Logic.Prover.proveCMDLautomaticBatch' which provides an
automatic command line interface to the Vampire prover via MathServe.
Vampire specific functions are omitted by data type ATPFunctions.
-}
vampireCMDLautomaticBatch ::
Bool -- ^ True means include proved theorems
-> Bool -- ^ True means save problem file
-> Concurrent.MVar (Result.Result [ProofStatus ProofTree])
-- ^ used to store the result of the batch run
-> String -- ^ theory name
-> TacticScript -- ^ default tactic script
-> Theory Sign Sentence ProofTree {- ^ theory consisting of a
'SoftFOL.Sign.Sign' and a list of Named 'SoftFOL.Sign.Sentence' -}
-> [FreeDefMorphism SPTerm SoftFOLMorphism] -- ^ freeness constraints
-> IO (Concurrent.ThreadId, Concurrent.MVar ())
{- ^ fst: identifier of the batch thread for killing it
snd: MVar to wait for the end of the thread -}
vampireCMDLautomaticBatch inclProvedThs saveProblem_batch resultMVar
thName defTS th freedefs =
genericCMDLautomaticBatch (atpFun thName) inclProvedThs saveProblem_batch
resultMVar (proverName vampire) thName
(parseTacticScript batchTimeLimit [] defTS) th freedefs emptyProofTree
{- |
Runs the Vampire service.
-}
runVampire :: SoftFOLProverState
{- ^ logical part containing the input Sign and axioms and possibly
goals that have been proved earlier as additional axioms -}
-> GenericConfig ProofTree -- ^ configuration to use
-> Bool -- ^ True means save TPTP file
-> String -- ^ name of the theory in the DevGraph
-> AS_Anno.Named SPTerm -- ^ goal to prove
-> IO (ATPRetval, GenericConfig ProofTree)
-- ^ (retval, configuration with proof status and complete output)
runVampire sps cfg saveTPTP thName nGoal =
Exception.catch (do
prob <- showTPTPProblem thName sps nGoal $ extraOpts cfg ++
["Requested prover: Vampire"]
when saveTPTP
(writeFile (thName ++ '_' : AS_Anno.senAttr nGoal ++ ".tptp") prob)
mathServOut <- callMathServ
MathServCall { mathServService = VampireService,
mathServOperation = TPTPProblem,
problem = prob,
proverTimeLimit = configTimeLimit cfg,
extraOptions = Just $ unwords $ extraOpts cfg}
msResponse <- parseMathServOut mathServOut
return (mapMathServResponse (getAxioms sps) msResponse cfg nGoal
$ proverName vampire))
$ excepToATPResult (proverName vampire) $ AS_Anno.senAttr nGoal
| nevrenato/HetsAlloy | SoftFOL/ProveVampire.hs | gpl-2.0 | 5,647 | 0 | 18 | 1,373 | 777 | 427 | 350 | 80 | 1 |
module PigLatin (plugin) where
-- This plugin converts a page to pig latin if the 'language' metadata
-- field is set to 'pig latin'. This demonstrates how to get access to
-- metadata in a plugin.
import Network.Gitit.Interface
import Data.Char (toLower, toUpper, isLower, isUpper, isLetter)
plugin :: Plugin
plugin = PageTransform $ \doc -> do
meta <- askMeta
case lookup "language" meta of
Just s | map toLower s == "pig latin" ->
return $ processWith pigLatinStr doc
_ -> return doc
pigLatinStr :: Inline -> Inline
pigLatinStr (Str "") = Str ""
pigLatinStr (Str (c:cs)) | isLower c && isConsonant c =
Str (cs ++ (c : "ay"))
pigLatinStr (Str (c:cs)) | isUpper c && isConsonant c =
Str (capitalize cs ++ (toLower c : "ay"))
pigLatinStr (Str x@(c:cs)) | isLetter c = Str (x ++ "yay")
pigLatinStr x = x
isConsonant :: Char -> Bool
isConsonant c = c `notElem` "aeiouAEIOU"
capitalize :: String -> String
capitalize "" = ""
capitalize (c:cs) = toUpper c : cs
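-- A small usage sketch (not part of the original plugin; shown only to
-- illustrate the rules above). Consonant-initial words rotate their first
-- letter to the end and gain "ay", vowel-initial words simply gain "yay":
--
-- > pigLatinStr (Str "hello")
-- Str "ellohay"
-- > pigLatinStr (Str "Apple")
-- Str "Appleyay"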
| tphyahoo/gititpt | plugins/PigLatin.hs | gpl-2.0 | 973 | 0 | 16 | 208 | 364 | 184 | 180 | 22 | 2 |
{-|
Module : Config
Description : Functions for handling Config files
Copyright : (c) Jason Mittertreiner, 2015
License : GPL-3
Maintainer : [email protected]
Stability : experimental
Portability : Unix
This module contains various functions for reading and handling config files
-}
module Config where
import Utils
import System.IO
import System.IO.Error
import Control.Exception
import Data.Monoid
-- | The list of possible locations for the config file
files = ["/etc/shush/shush.conf"]
-- | Key type
type Key = String
-- | Value type
type Value = String
-- | A Config is a list of String String tuples
type Config = [(Key,Value)]
-- | Returns the Value of a Config for a given Key
getValue :: Config -> Key -> Value
getValue config key =
let match = filter (\(x,_) -> key == x) config in
(if null match then "" else (snd.head) match)
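-- Minimal usage sketch, based only on the definitions in this module:
--
-- > getValue [("http_version","1.0"),("http_path","/var/www/html")] "http_path"
-- "/var/www/html"
-- > getValue [] "missing_key"
-- ""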
-- | Returns a default config
defaultConfig :: Config
defaultConfig = [("http_version","1.0"),("http_path","/var/www/html")]
-- | Given a File name, returns a Config
parseConfigFiles :: IO Config
parseConfigFiles = do
configs <- getConfigs
case getFirst . mconcat . map First $ configs of
Just a -> return a
Nothing -> do
hPutStrLn stderr "WARNING: No config found, using defaults"
return defaultConfig
-- | Returns a list of possible config files
getConfigs :: IO [Maybe Config]
getConfigs = mapM parseConfigFile files
-- | Given a file name, parses it into a config
parseConfigFile :: Filename -> IO (Maybe Config)
parseConfigFile file = do
text <- tryIOError (readFile file)
case text of
Right config -> do
hPutStrLn stderr $ "Loaded config: " ++ file
return $ Just $ (parseLines.lines) config
Left err -> return Nothing
-- | Helper method to parse a lines into a Config
parseLines :: [String] -> Config
parseLines ls = map parseLine (filter (\x -> (not.null) x && (head x /= '#')) ls)
| jmittert/shush | src/Config.hs | gpl-3.0 | 1,960 | 0 | 15 | 433 | 445 | 237 | 208 | 36 | 2 |
module Main where
import Prelude hiding (Left, Right)
import Control.Monad
import Data.List
import Data.Tree
import System.Directory
import System.Environment
import System.Exit
import System.IO
import Data.Board
import Data.WayTree
main :: IO ()
main = do
hSetBuffering stdout NoBuffering
boardNames <- getArgs
existingBoardNames <- filterM doesFileExist boardNames
hPrint stderr existingBoardNames
mapM solve existingBoardNames
return ()
solve :: FilePath -> IO ()
solve fp = do
putStrLn fp
thePath <- liftM (toList . cut . growTree) $ parseFile fp
mapM (putStrLn . (\ (y,x) -> show x ++ "," ++ show y)) thePath
return ()
parseFile :: FilePath -> IO Board
parseFile fp = liftM (parseBoard . lines) $ readFile fp
parseBoard :: [String] -> Board
parseBoard [[]] = []
parseBoard [l]    = [parseLine l]
parseBoard (l:ls) = parseLine l : parseBoard ls
parseLine l = map parseTile l
parseTile t = case t of
'S' -> Start
'E' -> Exit
'X' -> Wall
' ' -> Way
growTree :: Board -> WayTree
growTree b = Node (Walkable (findStart b) Start) (map (growTree' b (findStart b)) [Up, Right, Down, Left] )
where
findStart b = head $ [(x,y) | x <- [0..length b], y <- [0..length (b!!x)], b!!x!!y == Start]
growTree' b (x,y) dir | x' < 0 || y' < 0 = Node (NotWalkable (x',y')) []
| b!!x'!!y' == Wall = Node (NotWalkable (x',y')) []
| otherwise = Node (Walkable (x',y') (b!!x'!!y')) (map (growTree' b (x',y')) [Up, Right, Down, Left] )
where (x',y') = (x,y) +: dir
cut :: WayTree -> WayTree
cut (Node (Walkable c Start) dirs) = cut'' (Node (Walkable c Start) (map (cut' [c]) dirs))
where
cut' cs (Node (NotWalkable (x,y)) dirs) = Node (NotWalkable (x,y)) []
cut' cs (Node (Walkable (x,y) Exit) _) = Node (Walkable (x,y) Exit) []
cut' cs (Node (Walkable (x,y) t) dirs) | x < 0 || y < 0 = Node { rootLabel = NotWalkable (x,y)
, subForest = []
}
| (x,y) `elem` cs = Node { rootLabel = NotWalkable (x,y)
, subForest = []
}
| otherwise = Node { rootLabel = Walkable (x,y) t
, subForest = map (cut' ((x,y):cs)) dirs -- $ filter reachesExit dirs
}
cut'' (Node (Walkable (x,y) t) dirs) = Node { rootLabel = Walkable (x,y) t
, subForest = filter reachesExit dirs
}
cut'' t = t
reachesExit :: WayTree -> Bool
reachesExit (Node (Walkable _ Exit) _) = True
reachesExit (Node (NotWalkable _) _) = False
reachesExit (Node (Walkable _ _) dirs) = any reachesExit dirs
toList :: WayTree -> [Coords]
toList (Node (NotWalkable c) _) = []
toList (Node (Walkable c Exit) _) = [c]
toList (Node (Walkable c t) dirs) = c:toList ((maximumBy depthOrd) . filter reachesExit $ filter isWalkable dirs)
depthOrd :: Tree a -> Tree a -> Ordering
depthOrd l r | depth l < depth r = LT
| depth l == depth r = EQ
| otherwise = GT
depth :: Tree a -> Int
depth (Node _ []) = 0
depth (Node _ ts) = 1 + maximum (map depth ts)
isWalkable :: WayTree -> Bool
isWalkable (Node (NotWalkable _) _) = False
isWalkable (Node (Walkable _ _) _) = True
| NobbZ/pathfinder | Main.hs | gpl-3.0 | 3,742 | 0 | 15 | 1,380 | 1,551 | 799 | 752 | 75 | 4 |
module Language.Dockerfile.Parser where
import Control.Monad (void)
import Data.ByteString.Char8 (pack)
import Data.String
import Text.Parsec hiding (label)
import Text.Parsec.String (Parser)
import qualified Text.Parsec.Token as Token
import Language.Dockerfile.Lexer
import Language.Dockerfile.Normalize
import Language.Dockerfile.Syntax
comment :: Parser Instruction
comment = do
void $ char '#'
text <- untilEol
return $ Comment text
taggedImage :: Parser BaseImage
taggedImage = do
name <- untilOccurrence ":\n"
void $ oneOf ":"
tag <- untilEol
return $ TaggedImage name tag
digestedImage :: Parser BaseImage
digestedImage = do
name <- untilOccurrence "@\n"
void $ oneOf "@"
digest <- untilEol
return $ DigestedImage name (pack digest)
untaggedImage :: Parser BaseImage
untaggedImage = do
name <- many (noneOf "\n")
return $ UntaggedImage name
baseImage :: Parser BaseImage
baseImage = try taggedImage
<|> try digestedImage
<|> try untaggedImage
from :: Parser Instruction
from = do
reserved "FROM"
image <- baseImage
return $ From image
cmd :: Parser Instruction
cmd = do
reserved "CMD"
args <- arguments
return $ Cmd args
copy :: Parser Instruction
copy = do
reserved "COPY"
src <- many (noneOf " ")
Token.whiteSpace lexer
dst <- many (noneOf "\n")
return $ Copy src dst
stopsignal :: Parser Instruction
stopsignal = do
reserved "STOPSIGNAL"
args <- many (noneOf "\n")
return $ Stopsignal args
-- We cannot use the lexer's string literal because it swallows trailing
-- whitespace, so we have to implement quoted values ourselves
quotedValue :: Parser String
quotedValue = do
void $ char '"'
literal <- untilOccurrence "\""
void $ char '"'
return literal
rawValue :: Parser String
rawValue = many1 (noneOf [' ','=','\n'])
singleValue :: Parser String
singleValue = try quotedValue <|> try rawValue
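-- Illustration of the hand-rolled value parsers (a sketch; 'parseTest' is
-- Parsec's debugging helper and is mentioned here only for demonstration):
--
-- > parseTest quotedValue "\"hello world\""
-- "hello world"
-- > parseTest rawValue "key=value"
-- "key"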
pair :: Parser (String, String)
pair = do
key <- rawValue
void $ oneOf "= "
value <- singleValue
return (key, value)
pairs :: Parser Pairs
pairs = do
first <- pair
next <- remainingPairs
return (first:next)
remainingPairs :: Parser Pairs
remainingPairs =
try (char ' ' >> pairs)
<|> try (return [])
label :: Parser Instruction
label = do
reserved "LABEL"
p <- pairs
return $ Label p
arg :: Parser Instruction
arg = do
reserved "ARG"
p <- untilEol
return $ Arg p
env :: Parser Instruction
env = do
reserved "ENV"
p <- pairs
return $ Env p
user :: Parser Instruction
user = do
reserved "USER"
username <- untilEol
return $ User username
add :: Parser Instruction
add = do
reserved "ADD"
src <- untilOccurrence " "
Token.whiteSpace lexer
dst <- untilOccurrence "\n"
return $ Add src dst
expose :: Parser Instruction
expose = do
reserved "EXPOSE"
sports <- untilEol
let port = fromString sports
return $ Expose port
run :: Parser Instruction
run = do
reserved "RUN"
c <- arguments
return $ Run c
-- Parse value until end of line is reached
untilEol :: Parser String
untilEol = many (noneOf "\n")
untilOccurrence :: String -> Parser String
untilOccurrence t = many $ noneOf t
workdir :: Parser Instruction
workdir = do
reserved "WORKDIR"
directory <- many (noneOf "\n")
return $ Workdir directory
volume :: Parser Instruction
volume = do
reserved "VOLUME"
directory <- many (noneOf "\n")
return $ Volume directory
maintainer :: Parser Instruction
maintainer = do
reserved "MAINTAINER"
name <- untilEol
return $ Maintainer name
-- Parse arguments of a command in the exec form
argumentsExec :: Parser Arguments
argumentsExec = brackets $ commaSep stringLiteral
-- Parse arguments of a command in the shell form
argumentsShell :: Parser Arguments
argumentsShell = do
args <- untilEol
return $ words args
arguments :: Parser Arguments
arguments = try argumentsExec <|> try argumentsShell
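-- A sketch of the two accepted argument syntaxes (Docker's exec form and
-- shell form), again using Parsec's 'parseTest' purely for illustration:
--
-- > parseTest arguments "[\"echo\", \"hello\"]"
-- ["echo","hello"]
-- > parseTest arguments "echo hello"
-- ["echo","hello"]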
entrypoint :: Parser Instruction
entrypoint = do
reserved "ENTRYPOINT"
args <- arguments
return $ Entrypoint args
onbuild :: Parser Instruction
onbuild = do
reserved "ONBUILD"
i <- parseInstruction
return $ OnBuild i
eolInstruction :: Parser Instruction
eolInstruction = do
eol
return EOL
parseInstruction :: Parser Instruction
parseInstruction
= try onbuild
<|> try from
<|> try copy
<|> try run
<|> try workdir
<|> try entrypoint
<|> try volume
<|> try expose
<|> try env
<|> try arg
<|> try user
<|> try label
<|> try stopsignal
<|> try cmd
<|> try maintainer
<|> try add
<|> try comment
<|> try eolInstruction
contents :: Parser a -> Parser a
contents p = do
Token.whiteSpace lexer
r <- p
eof
return r
eol :: Parser ()
eol = void $ char '\n' <|> (char '\r' >> option '\n' (char '\n'))
dockerfile :: Parser Dockerfile
dockerfile = many $ do
-- deal with empty lines that only contain spaces or tabs
-- skipMany space
-- skipMany $ char '\t'
pos <- getPosition
i <- parseInstruction
optional eol
-- skipMany eol
return $ InstructionPos i (sourceName pos) (sourceLine pos)
parseString :: String -> Either ParseError Dockerfile
parseString s = parse (contents dockerfile) "<string>" $ normalizeEscapedLines s
parseFile :: String -> IO (Either ParseError Dockerfile)
parseFile file = do
program <- readFile file
return $ parse (contents dockerfile) file $ normalizeEscapedLines program
| beijaflor-io/haskell-language-dockerfile | src/Language/Dockerfile/Parser.hs | gpl-3.0 | 5,563 | 0 | 22 | 1,316 | 1,761 | 822 | 939 | 201 | 1 |
area d = pi * (r * r)
where r = d / 2
| dkensinger/haskell | haskellbook/test.hs | gpl-3.0 | 41 | 0 | 7 | 17 | 31 | 16 | 15 | 2 | 1 |
{-
Using the functions map and concatMap, define a function perms that returns
all permutations that can be obtained from the given list, in any order.
GHCi> perms [1,2,3]
[[1,2,3],[1,3,2],[2,1,3],[2,3,1],[3,1,2],[3,2,1]]
Assume that all elements of the list are distinct, and that the empty list
has exactly one permutation.
-}
module Demo where
perms :: [a] -> [[a]]
perms [] = [[]]
perms (x:xs) = [y | p <- perms xs, y <- interleave p]
where
interleave [] = [[x]]
interleave (y:ys) = (x:y:ys) : map (y:) (interleave ys)
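-- An equivalent formulation written with concatMap, as the exercise statement
-- suggests; 'permsAlt' is a name introduced here only for comparison with the
-- list-comprehension version above.
permsAlt :: [a] -> [[a]]
permsAlt [] = [[]]
permsAlt (x:xs) = concatMap interleave (permsAlt xs)
  where
    interleave []     = [[x]]
    interleave (y:ys) = (x:y:ys) : map (y:) (interleave ys)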
-- http://stackoverflow.com/a/24564307/2289640 | devtype-blogspot-com/Haskell-Examples | Perms/Demo.hs | gpl-3.0 | 604 | 0 | 10 | 105 | 142 | 78 | 64 | 6 | 2 |
{-# LANGUAGE MultiParamTypeClasses #-}
module Data.FNLP.Common
( Corpus
, corpus
, UBlock
, ublock
, UBlocks (uBlockList)
, TriGram
, trigram
, TriGrams (triGramList)
) where
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Set as S
import Data.CharSet.Unicode.Block (Block(..), blocks)
import Data.CharSet (member)
import Data.Char (isAlpha, toLower)
import Data.FNLP
----------------------------------------------------------------------
newtype Corpus = Corpus Text
deriving (Show, Read, Eq, Ord)
corpus = Corpus
instance Convertible Corpus Text where
safeConvert (Corpus text) = Right text
instance Convertible Text Corpus where
safeConvert = Right . corpus
----------------------------------------------------------------------
-- Unicode Blocks
----------------------------------------------------------------------
blocksUsed :: Char -> [String]
blocksUsed c =
if isAlpha c
then (fmap blockName
. filter (\b -> member c (blockCharSet b))) (blocks)
else []
data UBlock = UBlock String deriving (Show, Read, Eq, Ord)
ublock = UBlock
newtype UBlocks = UBlocks { uBlockList :: [UBlock] }
instance PState Corpus UBlocks PClosed
instance AutoLink Corpus UBlocks where
linkstep = UBlocks
. fmap UBlock
. foldr (\s -> (++) (blocksUsed s)) []
. T.unpack
. convert
----------------------------------------------------------------------
-- TriGrams
----------------------------------------------------------------------
newtype TriGram = TriGram Text deriving (Show, Read, Eq, Ord)
trigram = TriGram
instance Convertible Text TriGram where
safeConvert text = if T.length text == 3
then Right (TriGram text)
else convError "Not Three Characters" text
instance Convertible TriGram Text where
safeConvert (TriGram text) = Right text
newtype TriGrams = TriGrams { triGramList :: [TriGram] }
instance PState Corpus TriGrams PClosed
instance AutoLink Corpus TriGrams where
linkstep = TriGrams . concat . map trigrams . prepWords . convert
newtype PrepWord = PrepWord Text
trigrams :: PrepWord -> [TriGram]
trigrams (PrepWord w) =
if T.compareLength w 3 == LT
then []
else TriGram (T.take 3 w) : trigrams (PrepWord (T.drop 1 w))
prepWords :: Text -> [PrepWord]
prepWords = map PrepWord
. map ends
. map (T.map toLower)
. map (T.filter ok)
. T.words
where ok :: Char -> Bool
ok c = isAlpha c || c == '\'' || c == '-'
ends :: Text -> Text
ends w = T.cons '^' (T.snoc w '$')
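-- Worked sketch of the trigram window (T.pack is used only because this
-- module does not enable OverloadedStrings): prepWords lower-cases a word,
-- strips non-letters and wraps it in '^'/'$' markers, e.g. "Hi there!"
-- yields the prepared words "^hi$" and "^there$", and then
--
-- > trigrams (PrepWord (T.pack "^hi$"))
-- [TriGram "^hi",TriGram "hi$"]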
| RoboNickBot/fnlp | src/Data/FNLP/Common.hs | gpl-3.0 | 2,675 | 0 | 14 | 621 | 788 | 430 | 358 | 73 | 2 |
{-# OPTIONS -cpp #-}
module MD5Compare (plugin) where
import Data.ByteString.Lazy as BSL
import Data.ByteString as BS
import Crypto.Hash.MD5
import System.IO
import PluginAPI
import Control.Monad
import Data.Dynamic
import Strings as S
plugin = PluginI {
magic = None,
pluginName = "MD5 Compare",
getFileRepresentation = (liftM $ S.b2s) . hashFile,
displayFile = printFileName
}
hashFile :: FilePath -> IO BS.ByteString
hashFile = liftM hashlazy . BSL.readFile
compareMd5 :: FilePath -> FilePath -> IO Bool
compareMd5 fp1 fp2 = liftM2 (==) (hashFile fp1) (hashFile fp2)
printFileName :: FilePath -> IO ()
printFileName fp = putStrLn (show fp)
| glueckself/mhaskell-ss14 | MD5ComparePlugin.hs | gpl-3.0 | 665 | 0 | 10 | 116 | 204 | 117 | 87 | 21 | 1 |
{-# LANGUAGE ExistentialQuantification, TemplateHaskell, TypeSynonymInstances, FlexibleInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module HEphem.Data where
import Data.Angle
import Test.QuickCheck
import Data.Vector.V3
import Control.Lens hiding (element)
import Text.Printf
import Data.Vector.Class
import Data.Fixed (mod', div')
type Deg = Degrees Double
instance Arbitrary Deg where
arbitrary =
do
d <- suchThat arbitrary (\x -> x >= 0 && x <= 360)
return $ Degrees d
type Interval = (Deg, Deg)
standardizeDeg:: Deg -> Deg
standardizeDeg (Degrees d) = Degrees $ d `mod'` 360
undeg:: Deg -> Double
undeg (Degrees s) = s
toMinutesSeconds :: Deg -> (Int, Int, Int)
toMinutesSeconds (Degrees d) = (i, m, s)
where
i = floor d
r = d - fromIntegral i
m = r `div'` (1 / 60)
r' = r - fromIntegral m * (1 / 60)
s = r' `div'` (1 / 3600)
printDeg :: Deg -> String
printDeg deg = printf "%d\x00B0 %d'%d\"" d m s -- degrees, arc minutes ('), arc seconds (")
where
(d, m, s) = toMinutesSeconds deg
printDegAsTime :: Deg -> String
printDegAsTime deg = printf "%dh %dm%ds" d m s
where
(d, m, s) = toMinutesSeconds (deg/15)
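-- Worked sketch of the formatting helpers above:
--
-- > toMinutesSeconds (Degrees 12.5)
-- (12,30,0)
-- > printDegAsTime (Degrees 180)
-- "12h 0m0s"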
data EqPos = EqPos { eRA,eDec :: Deg }
deriving (Eq, Show)
instance Arbitrary EqPos where
arbitrary = do
ra <- suchThat arbitrary (\x -> x >= 0 && x <= 360)
d <- suchThat arbitrary (\x -> x >= -89 && x < 89)
return $ EqPos (Degrees ra) (Degrees d)
data HorPos = HorPos { _hAzimuth,_hAltitude :: Deg }
deriving (Eq, Show)
instance Arbitrary HorPos where
arbitrary = do
az <- suchThat arbitrary (\x -> x >= 0 && x <= 360)
al <- suchThat arbitrary (\x -> x >= 0 && x <= 90)
return $ HorPos (Degrees az) (Degrees al)
makeLenses ''HorPos
instance Ord HorPos where
compare a b = compare (a ^. hAltitude) (b ^. hAltitude)
data GeoLoc = GeoLoc { _gLatitude,_gLongitude :: Deg }
deriving (Eq, Show)
instance Arbitrary GeoLoc where
arbitrary = do
az <- suchThat arbitrary (\x -> x >= -90 && x <= 90)
al <- suchThat arbitrary (\x -> x >= -180 && x <= 180)
return $ GeoLoc (Degrees az) (Degrees al)
makeLenses ''GeoLoc
data BrightStar = BrightStar
{ bFlamsteed::Maybe Int
, bBayer::String
, bConst::String
, bHRNo :: Int
, bRA :: Deg
, bDec :: Deg
, bNotes :: String
, bMag :: Float
, bUminB :: Maybe Float
, bBminV :: Float
, bSpectralType :: String
}
deriving (Show,Eq)
data NGCObject = NGCObject
{ nID :: String
, nPGC:: String
, nMessier:: String
, nType:: String
, nClass:: String
, nRA :: Deg
, nDec :: Deg
, nMag :: Float
} deriving (Show,Eq)
data SkyObject = NGC NGCObject|Star BrightStar deriving (Show)
instance Eq SkyObject where
Star a == Star b = a == b
NGC a == NGC b = a == b
_ == _ = False
equatorial:: SkyObject -> EqPos
equatorial (Star (BrightStar _ _ _ _ r d _ _ _ _ _)) = EqPos r d
equatorial (NGC (NGCObject _ _ _ _ _ r d _ )) = EqPos r d
magnitude:: SkyObject -> Float
magnitude (Star a) = bMag a
magnitude (NGC a) = nMag a
description :: SkyObject -> String
description (Star(BrightStar f b c hr _ _ _ m _ _ _)) =
case f of Nothing -> if b == "" then if c == "" then printf "HR# %v Mag %.1f" hr m
else printf "%s HR# %v Mag %.1f" c hr m
else printf "%s %s HR# %v Mag %.1f" b c hr m
Just fl -> printf "%d %s %s HR# %v Mag %.1f" fl b c hr m
description (NGC (NGCObject i _ me t _ _ _ m )) =
if me == "" then printf "%s Type %s Mag %.1f" i t m
else printf "%s %s Type %s Mag %.1f" me i t m
data Rectangle = Rectangle{
_rAzimuth::Interval,
_rAltitude::Interval
} deriving (Show)
makeLenses ''Rectangle
instance Arbitrary Rectangle where
arbitrary =
do
az0 <- suchThat arbitrary (\x -> x >= 0 && x < 360)
az1 <- suchThat arbitrary (\x -> x >= 0 && x < 360)
al0 <- suchThat arbitrary (\x -> x >= 0 && x < 90)
al1 <- suchThat arbitrary (\x -> x >= al0 && x < 90)
return $ Rectangle (az0, az1) (al0, al1)
class AEq a where
(=~) :: a -> a -> Bool
instance AEq Double where -- the tolerance is this large because of solveTrigonom
x =~ y = abs (x - y) < (1.0e-2 :: Double)
instance AEq Deg where
x =~ y = abs (x - y) < (1.0e-4 :: Deg)
instance AEq HorPos where
(HorPos x y) =~ (HorPos x' y') =
vmag (cartesian (HorPos x y, 1) - cartesian (HorPos x' y', 1)) < d
where
      -- has to be this large for the manual test data
d = 1e-2
instance AEq EqPos where
(EqPos x y) =~ (EqPos x' y') =
vmag (cartesian (HorPos x y, 1) - cartesian (HorPos x' y', 1)) < d
where
d = 1e-4
instance AEq Vector3 where
v =~ w = vmag (v - w) < d
where
d = 1.0e-8 :: Double
instance AEq Float where
x =~ y = abs (x - y) < (1.0e-4 :: Float)
instance (AEq a) => AEq (Radians a) where
(Radians x) =~ (Radians y) = x =~ y
-- | Given cos A and sin A, solve for A.
solveAngle :: Double -> Double -> Radians Double
solveAngle c s = solveAngle' (cutoff c) (cutoff s)
solveAngle' :: Double -> Double -> Radians Double
solveAngle' c s
| s > 0 = arccosine c
| c > 0 = Radians (2 * pi) + arcsine s
| otherwise = Radians (2 * pi) - arccosine c
cutoff :: Double -> Double
cutoff d | d < -1 = -1
| d > 1 = 1
| otherwise = d
cartesian :: (HorPos, Double) -> Vector3
cartesian (HorPos az al, r) = Vector3
{ v3x = r * sine incl * cosine az
, v3y = r * sine incl * sine az
, v3z = r * cosine incl
}
where
incl = Degrees 90 - al
polair :: Vector3 -> (HorPos, Double)
polair v = (HorPos (degrees (solveAngle cosfi sinfi)) al, r)
where
r = vmag v
incl = arccosine (v3z v/r)
cosfi = v3x v/ sqrt (v3x v * v3x v + v3y v * v3y v)
sinfi = v3y v/ sqrt (v3x v * v3x v + v3y v * v3y v)
al = 90 - incl
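-- Round-trip sketch (illustration only): 'polair' inverts 'cartesian' up to
-- floating-point noise, which is exactly the kind of comparison the AEq
-- instances defined above tolerate:
--
-- > let (h, _) = polair (cartesian (HorPos (Degrees 45) (Degrees 30), 1))
-- > h =~ HorPos (Degrees 45) (Degrees 30)
-- True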
| slspeek/hephem | src/HEphem/Data.hs | gpl-3.0 | 5,980 | 0 | 15 | 1,792 | 2,503 | 1,301 | 1,202 | 160 | 5 |
{-# LANGUAGE CPP #-}
{-|
Module : CmdArgs
Description : Command line options of the voogie executable.
Copyright : (c) Evgenii Kotelnikov, 2019
License : GPL-3
Maintainer : [email protected]
Stability : provisional
-}
module CmdArgs (
Action(..),
CmdArgs(..),
cmdArgsParserInfo
) where
import Control.Applicative ((<|>))
import Options.Applicative (
Parser, ParserInfo, ParseError(..),
headerDoc, fullDesc, long, short, info, helper, help, hidden, switch, value,
maybeReader, metavar, (<**>), option, abortOption, strArgument, flag'
)
import Options.Applicative.Help.Pretty (Doc, vsep, text)
#if !MIN_VERSION_base(4, 11, 0)
import Data.Semigroup ((<>))
#endif
import Paths_voogie (version)
import Data.Version (showVersion)
data Action
= Parse
| Check
| Translate
deriving (Show, Eq, Ord, Enum, Bounded)
actions :: [(String, Action)]
actions = [("parse", Parse), ("check", Check), ("translate", Translate)]
data CmdArgs = CmdArgs {
filePath :: Maybe FilePath,
action :: Action,
noArrayTheory :: Bool
} deriving (Show, Eq, Ord)
parser :: Parser CmdArgs
parser = CmdArgs
<$> filePathOption
<*> actionOption
<*> noArrayTheoryOption
where
filePathOption = file <|> stdIn
file = Just <$> strArgument (metavar "FILE")
stdIn = flag' Nothing
$ long "stdin"
<> help "Read from the standart input rather than a file"
actionOption = option (maybeReader $ \s -> lookup s actions)
$ long "action"
<> metavar "ACTION"
<> value Translate
<> help ("Action to perform, can be one of the following: " ++
"parse, check, translate (default)")
noArrayTheoryOption = switch
$ long "no_array_theory"
<> help "Do not use polymorhic theory of arrays"
banner :: Doc
banner = vsep $ fmap text [
" _ ",
" __ _____ ___ __ _(_) ___ ",
" \\ \\ / / _ \\ / _ \\ / _` | |/ _ \\",
" \\ V / (_) | (_) | (_| | | __/",
" \\_/ \\___/ \\___/ \\__, |_|\\___|",
" |___/ ",
"",
"Voogie - a verification conditions generator for simple Boogie programs"
]
cmdArgsParserInfo :: ParserInfo CmdArgs
cmdArgsParserInfo = info (parser <**> versionOption <**> helper)
$ fullDesc
<> headerDoc (Just banner)
where
versionOption = abortOption (InfoMsg $ showVersion version)
$ long "version"
<> short 'v'
<> help "Display the version number"
<> hidden
| aztek/voogie | src/Voogie/Executable/CmdArgs.hs | gpl-3.0 | 2,512 | 0 | 15 | 643 | 574 | 329 | 245 | 64 | 1 |
{-
mtlstats
Copyright (C) 1984, 1985, 2019, 2020, 2021 Rhéal Lamothe
<[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or (at
your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
-}
module FormatSpec (spec) where
import Data.Ratio ((%))
import Test.Hspec (Spec, context, describe, it, shouldBe)
import Mtlstats.Format
import Mtlstats.Types
spec :: Spec
spec = describe "Mtlstats.Format" $ do
padNumSpec
leftSpec
rightSpec
centreSpec
padRightSpec
overlaySpec
monthSpec
labelTableSpec
numTableSpec
tableWithSpec
complexTableSpec
overlayLastSpec
showFloatingSpec
padNumSpec :: Spec
padNumSpec = describe "padNum" $ do
context "zero, four digits" $
it "should be 0000" $
padNum 4 0 `shouldBe` "0000"
context "123, four digits" $
it "should be 0123" $
padNum 4 123 `shouldBe` "0123"
context "12345, four digits" $
it "should be 12345" $
padNum 4 12345 `shouldBe` "12345"
context "-12, four digits" $
it "should be -012" $
padNum 4 (-12) `shouldBe` "-012"
context "-1234, four digits" $
it "should be -1234" $
padNum 4 (-1234) `shouldBe` "-1234"
leftSpec :: Spec
leftSpec = describe "left" $ do
context "fit" $
it "should pad the text" $
left 5 "foo" `shouldBe` "foo "
context "overflow" $
it "should truncate the text" $
left 2 "foo" `shouldBe` "fo"
rightSpec :: Spec
rightSpec = describe "right" $ do
context "fit" $
it "should pad the text" $
right 5 "foo" `shouldBe` " foo"
context "overflow" $
it "should truncate the text" $
right 2 "foo" `shouldBe` "oo"
centreSpec :: Spec
centreSpec = describe "centre" $ do
context "fit" $
it "should pad the text" $
centre 5 "foo" `shouldBe` " foo "
context "overflow" $
it "should truncate the text" $
centre 2 "foo" `shouldBe` "fo"
padRightSpec :: Spec
padRightSpec = describe "padRight" $ mapM_
(\(label, width, str, expected) -> context label $
it ("should be " ++ show expected) $
padRight width str `shouldBe` expected)
-- label, width, input string, expected
[ ( "text shorter", 5, "foo", "foo " )
, ( "text longer", 3, "foobar", "foobar" )
]
overlaySpec :: Spec
overlaySpec = describe "overlay" $ do
context "first string shorter" $
it "should overlay" $
overlay "foo" "abc123" `shouldBe` "foo123"
context "first string longer" $
it "should overlay" $
overlay "abc123" "foo" `shouldBe` "abc123"
monthSpec :: Spec
monthSpec = describe "month" $ do
context "January" $
it "should return \"JAN\"" $
month 1 `shouldBe` "JAN"
context "invalid" $
it "should return an empty string" $
month 0 `shouldBe` ""
labelTableSpec :: Spec
labelTableSpec = describe "labelTable" $
it "should format the table" $ let
input =
[ ( "foo", "bar" )
, ( "baz", "quux" )
, ( "longer", "x" )
]
expected =
[ " foo: bar "
, " baz: quux"
, "longer: x "
]
in labelTable input `shouldBe` expected
numTableSpec :: Spec
numTableSpec = describe "numTable" $
it "should format the table" $ let
headers = ["foo", "bar", "baz"]
rows =
[ ( "quux", [ 1, 2, 3 ] )
, ( "xyzzy", [ 9, 99, 999 ] )
]
expected =
[ " foo bar baz"
, " quux 1 2 3"
, "xyzzy 9 99 999"
]
in numTable headers rows `shouldBe` expected
tableWithSpec :: Spec
tableWithSpec = describe "tableWith" $ let
vals =
[ [ "foo", "bar", "baz" ]
, [ "quux", "xyzzy", "x" ]
]
in mapM_
(\(label, func, expected) -> context label $
it "should format the table" $
tableWith func vals `shouldBe` expected)
[ ( "align left"
, left
, [ "foo bar baz"
, "quux xyzzy x "
]
)
, ( "align right"
, right
, [ " foo bar baz"
, "quux xyzzy x"
]
)
]
complexTableSpec :: Spec
complexTableSpec = describe "complexTable" $ mapM_
(\(label, pFuncs, cells, expected) -> context label $
it "should format correctly" $
complexTable pFuncs cells `shouldBe` expected)
[ ( "no fill"
, [left, right]
, [ [ CellText "foo", CellText "bar" ]
, [ CellText "baaz", CellText "quux" ]
]
, [ "foo bar"
, "baaz quux"
]
)
, ( "with fill"
, [left, left, left]
, [ [ CellText "foo", CellText "bar", CellText "baz" ]
, [ CellText "quux", CellFill '-', CellFill '@' ]
]
, [ "foo bar baz"
, "quux ----@@@"
]
)
]
overlayLastSpec :: Spec
overlayLastSpec = describe "overlayLast" $ let
text = "foo"
sample =
[ "line 1"
, "line 2"
]
edited =
[ "line 1"
, "fooe 2"
]
in mapM_
(\(label, input, expected) -> context label $
it ("should be " ++ show expected) $
overlayLast text input `shouldBe` expected)
-- label, input, expected
[ ( "empty list", [], [] )
, ( "non-empty list", sample, edited )
]
showFloatingSpec :: Spec
showFloatingSpec = describe "showFloating" $ let
input = 3 % 2 :: Rational
expected = "1.50"
in it ("should be " ++ expected) $
showFloating input `shouldBe` expected
| mtlstats/mtlstats | test/FormatSpec.hs | gpl-3.0 | 5,835 | 0 | 17 | 1,684 | 1,471 | 787 | 684 | 163 | 1 |
module Language.Untyped.Lexer
(
reserved
, parens
, dot
, identifier
) where
import Data.Functor.Identity
import Text.Parsec
import qualified Text.Parsec.Token as T
languageDef
= T.LanguageDef { T.commentStart = ""
, T.commentEnd = ""
, T.commentLine = ""
, T.nestedComments = False
, T.identStart = letter
, T.identLetter = alphaNum
, T.opStart = letter
, T.opLetter = alphaNum
, T.reservedOpNames = ["\\", "λ"]
, T.reservedNames = []
, T.caseSensitive = True
}
lexer :: T.GenTokenParser String u Identity
lexer = T.makeTokenParser languageDef
--dot :: ParsecT String u Identity String
dot = T.dot lexer
--identifier :: ParsecT String u Identity String
identifier = T.identifier lexer
--parens :: ParsecT String u Identity a -> ParsecT String u Identity a
parens = T.parens lexer
--reserved :: String -> ParsecT String u Identity ()
reserved = T.reserved lexer
| juanbono/tapl-haskell | untyped/src/Language/Untyped/Lexer.hs | gpl-3.0 | 1,109 | 0 | 7 | 378 | 214 | 129 | 85 | 27 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Directory.Roles.Update
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates a role.
--
-- /See:/ <https://developers.google.com/admin-sdk/directory/ Admin Directory API Reference> for @directory.roles.update@.
module Network.Google.Resource.Directory.Roles.Update
(
-- * REST Resource
RolesUpdateResource
-- * Creating a Request
, rolesUpdate
, RolesUpdate
-- * Request Lenses
, ruPayload
, ruRoleId
, ruCustomer
) where
import Network.Google.Directory.Types
import Network.Google.Prelude
-- | A resource alias for @directory.roles.update@ method which the
-- 'RolesUpdate' request conforms to.
type RolesUpdateResource =
"admin" :>
"directory" :>
"v1" :>
"customer" :>
Capture "customer" Text :>
"roles" :>
Capture "roleId" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] Role :> Put '[JSON] Role
-- | Updates a role.
--
-- /See:/ 'rolesUpdate' smart constructor.
data RolesUpdate = RolesUpdate'
{ _ruPayload :: !Role
, _ruRoleId :: !Text
, _ruCustomer :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'RolesUpdate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ruPayload'
--
-- * 'ruRoleId'
--
-- * 'ruCustomer'
rolesUpdate
:: Role -- ^ 'ruPayload'
-> Text -- ^ 'ruRoleId'
-> Text -- ^ 'ruCustomer'
-> RolesUpdate
rolesUpdate pRuPayload_ pRuRoleId_ pRuCustomer_ =
RolesUpdate'
{ _ruPayload = pRuPayload_
, _ruRoleId = pRuRoleId_
, _ruCustomer = pRuCustomer_
}
-- | Multipart request metadata.
ruPayload :: Lens' RolesUpdate Role
ruPayload
= lens _ruPayload (\ s a -> s{_ruPayload = a})
-- | Immutable ID of the role.
ruRoleId :: Lens' RolesUpdate Text
ruRoleId = lens _ruRoleId (\ s a -> s{_ruRoleId = a})
-- | Immutable ID of the Google Apps account.
ruCustomer :: Lens' RolesUpdate Text
ruCustomer
= lens _ruCustomer (\ s a -> s{_ruCustomer = a})
instance GoogleRequest RolesUpdate where
type Rs RolesUpdate = Role
type Scopes RolesUpdate =
'["https://www.googleapis.com/auth/admin.directory.rolemanagement"]
requestClient RolesUpdate'{..}
= go _ruCustomer _ruRoleId (Just AltJSON) _ruPayload
directoryService
where go
= buildClient (Proxy :: Proxy RolesUpdateResource)
mempty
| rueshyna/gogol | gogol-admin-directory/gen/Network/Google/Resource/Directory/Roles/Update.hs | mpl-2.0 | 3,266 | 0 | 16 | 809 | 465 | 277 | 188 | 71 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Chrononaut.Schema (
-- * Queries
tableExists
, createTable
, getRevision
, setRevision
-- -- * Load/Run Files
-- , runFile
) where
import Chrononaut.Types
import Control.Monad
import Control.Monad.CatchIO
import Data.Int
import Data.Maybe
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.FromField (FromField)
import Safe
tableExists :: HasDB m => m Bool
tableExists = fromMaybe False `liftM` maybeQuery sql ()
where
sql = "SELECT true FROM pg_tables \
\WHERE schemaname = 'public' \
\AND tablename = 'migration_revision';"
createTable :: HasDB m => m Bool
createTable = (1 ==) `liftM` dbExecute sql ()
where
sql = "CREATE TABLE IF NOT EXISTS migration_revision (\
\revision bigint NOT NULL);"
getRevision :: HasDB m => m (Maybe Int)
getRevision = maybeQuery sql ()
where
sql = "SELECT COALESCE (revision, 0) \
\FROM migration_revision \
\LIMIT 1;"
setRevision :: HasDB m => Int -> m Bool
setRevision n = (1 ==) `liftM` dbExecute sql (Only n)
where
sql = "DELETE FROM migration_revision; \
\INSERT INTO migration_revision VALUES (?);"
maybeQuery :: (HasDB m, ToRow q, FromField r) => Query -> q -> m (Maybe r)
maybeQuery q ps = do
rs <- dbQuery q ps
return $! (\(Only n) -> n) `liftM` headMay rs
dbQuery :: (HasDB m, ToRow q, FromRow r) => Query -> q -> m [r]
dbQuery q ps = dbTransaction (withDB $ \c -> query c q ps)
dbExecute :: (HasDB m, ToRow q) => Query -> q -> m Int64
dbExecute q ps = dbTransaction (withDB $ \c -> execute c q ps)
dbTransaction :: HasDB m => m a -> m a
dbTransaction io = do
withDB begin
r <- io `onException` withDB rollback
withDB commit
return $! r
| brendanhay/chrononaut | src/Chrononaut/Schema.hs | mpl-2.0 | 1,794 | 0 | 12 | 434 | 548 | 288 | 260 | 40 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.CreateVolume
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Creates an Amazon EBS volume that can be attached to an instance in the same
-- Availability Zone. The volume is created in the regional endpoint that you
-- send the HTTP request to. For more information see <http://docs.aws.amazon.com/general/latest/gr/rande.html Regions and Endpoints>.
--
-- You can create a new empty volume or restore a volume from an Amazon EBS
-- snapshot. Any AWS Marketplace product codes from the snapshot are propagated
-- to the volume.
--
-- You can create encrypted volumes with the 'Encrypted' parameter. Encrypted
-- volumes may only be attached to instances that support Amazon EBS encryption.
-- Volumes that are created from encrypted snapshots are also automatically
-- encrypted. For more information, see <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSEncryption.html Amazon EBS Encryption> in the /AmazonElastic Compute Cloud User Guide for Linux/.
--
-- For more information, see <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-creating-volume.html Creating or Restoring an Amazon EBS Volume> in the /Amazon Elastic Compute Cloud User Guide for Linux/.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-CreateVolume.html>
module Network.AWS.EC2.CreateVolume
(
-- * Request
CreateVolume
-- ** Request constructor
, createVolume
-- ** Request lenses
, cv1AvailabilityZone
, cv1DryRun
, cv1Encrypted
, cv1Iops
, cv1KmsKeyId
, cv1Size
, cv1SnapshotId
, cv1VolumeType
-- * Response
, CreateVolumeResponse
-- ** Response constructor
, createVolumeResponse
-- ** Response lenses
, cvrAttachments
, cvrAvailabilityZone
, cvrCreateTime
, cvrEncrypted
, cvrIops
, cvrKmsKeyId
, cvrSize
, cvrSnapshotId
, cvrState
, cvrTags
, cvrVolumeId
, cvrVolumeType
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
data CreateVolume = CreateVolume
{ _cv1AvailabilityZone :: Text
, _cv1DryRun :: Maybe Bool
, _cv1Encrypted :: Maybe Bool
, _cv1Iops :: Maybe Int
, _cv1KmsKeyId :: Maybe Text
, _cv1Size :: Maybe Int
, _cv1SnapshotId :: Maybe Text
, _cv1VolumeType :: Maybe VolumeType
} deriving (Eq, Read, Show)
-- | 'CreateVolume' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'cv1AvailabilityZone' @::@ 'Text'
--
-- * 'cv1DryRun' @::@ 'Maybe' 'Bool'
--
-- * 'cv1Encrypted' @::@ 'Maybe' 'Bool'
--
-- * 'cv1Iops' @::@ 'Maybe' 'Int'
--
-- * 'cv1KmsKeyId' @::@ 'Maybe' 'Text'
--
-- * 'cv1Size' @::@ 'Maybe' 'Int'
--
-- * 'cv1SnapshotId' @::@ 'Maybe' 'Text'
--
-- * 'cv1VolumeType' @::@ 'Maybe' 'VolumeType'
--
createVolume :: Text -- ^ 'cv1AvailabilityZone'
-> CreateVolume
createVolume p1 = CreateVolume
{ _cv1AvailabilityZone = p1
, _cv1DryRun = Nothing
, _cv1Size = Nothing
, _cv1SnapshotId = Nothing
, _cv1VolumeType = Nothing
, _cv1Iops = Nothing
, _cv1Encrypted = Nothing
, _cv1KmsKeyId = Nothing
}
-- | The Availability Zone in which to create the volume. Use 'DescribeAvailabilityZones' to list the Availability Zones that are currently available to you.
cv1AvailabilityZone :: Lens' CreateVolume Text
cv1AvailabilityZone =
lens _cv1AvailabilityZone (\s a -> s { _cv1AvailabilityZone = a })
cv1DryRun :: Lens' CreateVolume (Maybe Bool)
cv1DryRun = lens _cv1DryRun (\s a -> s { _cv1DryRun = a })
-- | Specifies whether the volume should be encrypted. Encrypted Amazon EBS
-- volumes may only be attached to instances that support Amazon EBS encryption.
-- Volumes that are created from encrypted snapshots are automatically
-- encrypted. There is no way to create an encrypted volume from an unencrypted
-- snapshot or vice versa. If your AMI uses encrypted volumes, you can only
-- launch it on supported instance types. For more information, see <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSEncryption.html Amazon EBSEncryption> in the /Amazon Elastic Compute Cloud User Guide for Linux/.
cv1Encrypted :: Lens' CreateVolume (Maybe Bool)
cv1Encrypted = lens _cv1Encrypted (\s a -> s { _cv1Encrypted = a })
-- | Only valid for Provisioned IOPS (SSD) volumes. The number of I/O operations
-- per second (IOPS) to provision for the volume, with a maximum ratio of 30
-- IOPS/GiB.
--
-- Constraint: Range is 100 to 20000 for Provisioned IOPS (SSD) volumes
cv1Iops :: Lens' CreateVolume (Maybe Int)
cv1Iops = lens _cv1Iops (\s a -> s { _cv1Iops = a })
-- | The full ARN of the AWS Key Management Service (KMS) master key to use when
-- creating the encrypted volume. This parameter is only required if you want to
-- use a non-default master key; if this parameter is not specified, the default
-- master key is used. The ARN contains the 'arn:aws:kms' namespace, followed by
-- the region of the master key, the AWS account ID of the master key owner, the 'key' namespace, and then the master key ID. For example, arn:aws:kms:/us-east-1/:/012345678910/:key//abcd1234-a123-456a-a12b-a123b4cd56ef/.
cv1KmsKeyId :: Lens' CreateVolume (Maybe Text)
cv1KmsKeyId = lens _cv1KmsKeyId (\s a -> s { _cv1KmsKeyId = a })
-- | The size of the volume, in GiBs.
--
-- Constraints: '1-1024' for 'standard' volumes, '1-16384' for 'gp2' volumes, and '4-16384' for 'io1' volumes. If you specify a snapshot, the volume size must be equal to
-- or larger than the snapshot size.
--
-- Default: If you're creating the volume from a snapshot and don't specify a
-- volume size, the default is the snapshot size.
cv1Size :: Lens' CreateVolume (Maybe Int)
cv1Size = lens _cv1Size (\s a -> s { _cv1Size = a })
-- | The snapshot from which to create the volume.
cv1SnapshotId :: Lens' CreateVolume (Maybe Text)
cv1SnapshotId = lens _cv1SnapshotId (\s a -> s { _cv1SnapshotId = a })
-- | The volume type. This can be 'gp2' for General Purpose (SSD) volumes, 'io1' for
-- Provisioned IOPS (SSD) volumes, or 'standard' for Magnetic volumes.
--
-- Default: 'standard'
cv1VolumeType :: Lens' CreateVolume (Maybe VolumeType)
cv1VolumeType = lens _cv1VolumeType (\s a -> s { _cv1VolumeType = a })
data CreateVolumeResponse = CreateVolumeResponse
{ _cvrAttachments :: List "item" VolumeAttachment
, _cvrAvailabilityZone :: Text
, _cvrCreateTime :: ISO8601
, _cvrEncrypted :: Bool
, _cvrIops :: Maybe Int
, _cvrKmsKeyId :: Maybe Text
, _cvrSize :: Int
, _cvrSnapshotId :: Text
, _cvrState :: VolumeState
, _cvrTags :: List "item" Tag
, _cvrVolumeId :: Text
, _cvrVolumeType :: VolumeType
} deriving (Eq, Read, Show)
-- | 'CreateVolumeResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'cvrAttachments' @::@ ['VolumeAttachment']
--
-- * 'cvrAvailabilityZone' @::@ 'Text'
--
-- * 'cvrCreateTime' @::@ 'UTCTime'
--
-- * 'cvrEncrypted' @::@ 'Bool'
--
-- * 'cvrIops' @::@ 'Maybe' 'Int'
--
-- * 'cvrKmsKeyId' @::@ 'Maybe' 'Text'
--
-- * 'cvrSize' @::@ 'Int'
--
-- * 'cvrSnapshotId' @::@ 'Text'
--
-- * 'cvrState' @::@ 'VolumeState'
--
-- * 'cvrTags' @::@ ['Tag']
--
-- * 'cvrVolumeId' @::@ 'Text'
--
-- * 'cvrVolumeType' @::@ 'VolumeType'
--
createVolumeResponse :: Text -- ^ 'cvrVolumeId'
-> Int -- ^ 'cvrSize'
-> Text -- ^ 'cvrSnapshotId'
-> Text -- ^ 'cvrAvailabilityZone'
-> VolumeState -- ^ 'cvrState'
-> UTCTime -- ^ 'cvrCreateTime'
-> VolumeType -- ^ 'cvrVolumeType'
-> Bool -- ^ 'cvrEncrypted'
-> CreateVolumeResponse
createVolumeResponse p1 p2 p3 p4 p5 p6 p7 p8 = CreateVolumeResponse
{ _cvrVolumeId = p1
, _cvrSize = p2
, _cvrSnapshotId = p3
, _cvrAvailabilityZone = p4
, _cvrState = p5
, _cvrCreateTime = withIso _Time (const id) p6
, _cvrVolumeType = p7
, _cvrEncrypted = p8
, _cvrAttachments = mempty
, _cvrTags = mempty
, _cvrIops = Nothing
, _cvrKmsKeyId = Nothing
}
cvrAttachments :: Lens' CreateVolumeResponse [VolumeAttachment]
cvrAttachments = lens _cvrAttachments (\s a -> s { _cvrAttachments = a }) . _List
-- | The Availability Zone for the volume.
cvrAvailabilityZone :: Lens' CreateVolumeResponse Text
cvrAvailabilityZone =
lens _cvrAvailabilityZone (\s a -> s { _cvrAvailabilityZone = a })
-- | The time stamp when volume creation was initiated.
cvrCreateTime :: Lens' CreateVolumeResponse UTCTime
cvrCreateTime = lens _cvrCreateTime (\s a -> s { _cvrCreateTime = a }) . _Time
-- | Indicates whether the volume will be encrypted.
cvrEncrypted :: Lens' CreateVolumeResponse Bool
cvrEncrypted = lens _cvrEncrypted (\s a -> s { _cvrEncrypted = a })
-- | The number of I/O operations per second (IOPS) that the volume supports. For
-- Provisioned IOPS (SSD) volumes, this represents the number of IOPS that are
-- provisioned for the volume. For General Purpose (SSD) volumes, this
-- represents the baseline performance of the volume and the rate at which the
-- volume accumulates I/O credits for bursting. For more information on General
-- Purpose (SSD) baseline performance, I/O credits, and bursting, see <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSVolumeTypes.html Amazon EBS Volume Types> in the /Amazon Elastic Compute Cloud User Guide for Linux/.
--
-- Constraint: Range is 100 to 20000 for Provisioned IOPS (SSD) volumes and 3
-- to 10000 for General Purpose (SSD) volumes.
--
-- Condition: This parameter is required for requests to create 'io1' volumes; it
-- is not used in requests to create 'standard' or 'gp2' volumes.
cvrIops :: Lens' CreateVolumeResponse (Maybe Int)
cvrIops = lens _cvrIops (\s a -> s { _cvrIops = a })
-- | The full ARN of the AWS Key Management Service (KMS) master key that was used
-- to protect the volume encryption key for the volume.
cvrKmsKeyId :: Lens' CreateVolumeResponse (Maybe Text)
cvrKmsKeyId = lens _cvrKmsKeyId (\s a -> s { _cvrKmsKeyId = a })
-- | The size of the volume, in GiBs.
cvrSize :: Lens' CreateVolumeResponse Int
cvrSize = lens _cvrSize (\s a -> s { _cvrSize = a })
-- | The snapshot from which the volume was created, if applicable.
cvrSnapshotId :: Lens' CreateVolumeResponse Text
cvrSnapshotId = lens _cvrSnapshotId (\s a -> s { _cvrSnapshotId = a })
-- | The volume state.
cvrState :: Lens' CreateVolumeResponse VolumeState
cvrState = lens _cvrState (\s a -> s { _cvrState = a })
-- | Any tags assigned to the volume.
cvrTags :: Lens' CreateVolumeResponse [Tag]
cvrTags = lens _cvrTags (\s a -> s { _cvrTags = a }) . _List
-- | The ID of the volume.
cvrVolumeId :: Lens' CreateVolumeResponse Text
cvrVolumeId = lens _cvrVolumeId (\s a -> s { _cvrVolumeId = a })
-- | The volume type. This can be 'gp2' for General Purpose (SSD) volumes, 'io1' for
-- Provisioned IOPS (SSD) volumes, or 'standard' for Magnetic volumes.
cvrVolumeType :: Lens' CreateVolumeResponse VolumeType
cvrVolumeType = lens _cvrVolumeType (\s a -> s { _cvrVolumeType = a })
instance ToPath CreateVolume where
toPath = const "/"
instance ToQuery CreateVolume where
toQuery CreateVolume{..} = mconcat
[ "AvailabilityZone" =? _cv1AvailabilityZone
, "DryRun" =? _cv1DryRun
, "Encrypted" =? _cv1Encrypted
, "Iops" =? _cv1Iops
, "KmsKeyId" =? _cv1KmsKeyId
, "Size" =? _cv1Size
, "SnapshotId" =? _cv1SnapshotId
, "VolumeType" =? _cv1VolumeType
]
instance ToHeaders CreateVolume
instance AWSRequest CreateVolume where
type Sv CreateVolume = EC2
type Rs CreateVolume = CreateVolumeResponse
request = post "CreateVolume"
response = xmlResponse
instance FromXML CreateVolumeResponse where
parseXML x = CreateVolumeResponse
<$> x .@? "attachmentSet" .!@ mempty
<*> x .@ "availabilityZone"
<*> x .@ "createTime"
<*> x .@ "encrypted"
<*> x .@? "iops"
<*> x .@? "kmsKeyId"
<*> x .@ "size"
<*> x .@ "snapshotId"
<*> x .@ "status"
<*> x .@? "tagSet" .!@ mempty
<*> x .@ "volumeId"
<*> x .@ "volumeType"
| kim/amazonka | amazonka-ec2/gen/Network/AWS/EC2/CreateVolume.hs | mpl-2.0 | 13,576 | 0 | 31 | 3,165 | 1,774 | 1,065 | 709 | 171 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.ServiceNetworking.Services.DisableVPCServiceControls
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Disables VPC service controls for a connection.
--
-- /See:/ <https://cloud.google.com/service-infrastructure/docs/service-networking/getting-started Service Networking API Reference> for @servicenetworking.services.disableVpcServiceControls@.
module Network.Google.Resource.ServiceNetworking.Services.DisableVPCServiceControls
(
-- * REST Resource
ServicesDisableVPCServiceControlsResource
-- * Creating a Request
, servicesDisableVPCServiceControls
, ServicesDisableVPCServiceControls
-- * Request Lenses
, sdvscParent
, sdvscXgafv
, sdvscUploadProtocol
, sdvscAccessToken
, sdvscUploadType
, sdvscPayload
, sdvscCallback
) where
import Network.Google.Prelude
import Network.Google.ServiceNetworking.Types
-- | A resource alias for @servicenetworking.services.disableVpcServiceControls@ method which the
-- 'ServicesDisableVPCServiceControls' request conforms to.
type ServicesDisableVPCServiceControlsResource =
"v1" :>
CaptureMode "parent" "disableVpcServiceControls" Text
:>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] DisableVPCServiceControlsRequest :>
Patch '[JSON] Operation
-- | Disables VPC service controls for a connection.
--
-- /See:/ 'servicesDisableVPCServiceControls' smart constructor.
data ServicesDisableVPCServiceControls =
ServicesDisableVPCServiceControls'
{ _sdvscParent :: !Text
, _sdvscXgafv :: !(Maybe Xgafv)
, _sdvscUploadProtocol :: !(Maybe Text)
, _sdvscAccessToken :: !(Maybe Text)
, _sdvscUploadType :: !(Maybe Text)
, _sdvscPayload :: !DisableVPCServiceControlsRequest
, _sdvscCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ServicesDisableVPCServiceControls' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sdvscParent'
--
-- * 'sdvscXgafv'
--
-- * 'sdvscUploadProtocol'
--
-- * 'sdvscAccessToken'
--
-- * 'sdvscUploadType'
--
-- * 'sdvscPayload'
--
-- * 'sdvscCallback'
servicesDisableVPCServiceControls
:: Text -- ^ 'sdvscParent'
-> DisableVPCServiceControlsRequest -- ^ 'sdvscPayload'
-> ServicesDisableVPCServiceControls
servicesDisableVPCServiceControls pSdvscParent_ pSdvscPayload_ =
ServicesDisableVPCServiceControls'
{ _sdvscParent = pSdvscParent_
, _sdvscXgafv = Nothing
, _sdvscUploadProtocol = Nothing
, _sdvscAccessToken = Nothing
, _sdvscUploadType = Nothing
, _sdvscPayload = pSdvscPayload_
, _sdvscCallback = Nothing
}
-- | The service that is managing peering connectivity for a service
-- producer\'s organization. For Google services that support this
-- functionality, this value is
-- \`services\/servicenetworking.googleapis.com\`.
sdvscParent :: Lens' ServicesDisableVPCServiceControls Text
sdvscParent
= lens _sdvscParent (\ s a -> s{_sdvscParent = a})
-- | V1 error format.
sdvscXgafv :: Lens' ServicesDisableVPCServiceControls (Maybe Xgafv)
sdvscXgafv
= lens _sdvscXgafv (\ s a -> s{_sdvscXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
sdvscUploadProtocol :: Lens' ServicesDisableVPCServiceControls (Maybe Text)
sdvscUploadProtocol
= lens _sdvscUploadProtocol
(\ s a -> s{_sdvscUploadProtocol = a})
-- | OAuth access token.
sdvscAccessToken :: Lens' ServicesDisableVPCServiceControls (Maybe Text)
sdvscAccessToken
= lens _sdvscAccessToken
(\ s a -> s{_sdvscAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
sdvscUploadType :: Lens' ServicesDisableVPCServiceControls (Maybe Text)
sdvscUploadType
= lens _sdvscUploadType
(\ s a -> s{_sdvscUploadType = a})
-- | Multipart request metadata.
sdvscPayload :: Lens' ServicesDisableVPCServiceControls DisableVPCServiceControlsRequest
sdvscPayload
= lens _sdvscPayload (\ s a -> s{_sdvscPayload = a})
-- | JSONP
sdvscCallback :: Lens' ServicesDisableVPCServiceControls (Maybe Text)
sdvscCallback
= lens _sdvscCallback
(\ s a -> s{_sdvscCallback = a})
instance GoogleRequest
ServicesDisableVPCServiceControls
where
type Rs ServicesDisableVPCServiceControls = Operation
type Scopes ServicesDisableVPCServiceControls =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/service.management"]
requestClient ServicesDisableVPCServiceControls'{..}
= go _sdvscParent _sdvscXgafv _sdvscUploadProtocol
_sdvscAccessToken
_sdvscUploadType
_sdvscCallback
(Just AltJSON)
_sdvscPayload
serviceNetworkingService
where go
= buildClient
(Proxy ::
Proxy ServicesDisableVPCServiceControlsResource)
mempty
| brendanhay/gogol | gogol-servicenetworking/gen/Network/Google/Resource/ServiceNetworking/Services/DisableVPCServiceControls.hs | mpl-2.0 | 5,996 | 0 | 16 | 1,250 | 782 | 457 | 325 | 119 | 1 |
-- gold 28 -> (5, 23)
-- Goldbach conjecture: every even n > 2 can be written as n = p + q for primes p and q
module Pr40 (gold) where
import Pr31
import Pr35
import Text.Printf
gold :: Int -> (Int, Int)
gold n =
if not (n `mod` 2 == 0) then
error (printf "%d not even" n)
else
    let
      ps = takeWhile (<= (n `div` 2)) primes -- <= so that n = p + p (e.g. 6 = 3 + 3) is found
      ss = filter (\x -> isprime (n-x)) ps
      s = head ss -- lazily evaluated -> only apply filter until the first hit
    in
      (s, n-s)
-- issumprime n p = isprime (n-p)
| ekalosak/haskell-practice | Pr40.hs | lgpl-3.0 | 540 | 0 | 15 | 183 | 157 | 90 | 67 | 13 | 2 |
module Identity where
import Test.QuickCheck
import Test.QuickCheck.Checkers
import Test.QuickCheck.Classes
newtype Identity a = Identity a
deriving (Eq, Ord, Show)
instance Functor Identity where
fmap f (Identity a) = Identity (f a)
instance Applicative Identity where
pure = Identity
(Identity f) <*> (Identity a) = Identity (f a)
instance Monad Identity where
return = pure
(>>=) (Identity a) f = f a
instance Arbitrary a => Arbitrary (Identity a) where
arbitrary = do
a <- arbitrary
return $ Identity a
instance Eq a => EqProp (Identity a) where
(=-=) = eq
main = do
let trigger = undefined :: Identity (Int, String, Int)
quickBatch $ functor trigger
quickBatch $ applicative trigger
quickBatch $ monad trigger
| thewoolleyman/haskellbook | 18/07/haskell-club/Identity.hs | unlicense | 756 | 0 | 11 | 155 | 287 | 147 | 140 | 25 | 1 |
-- Core.hs: The core λ calculus of simpl.
-- Copyright 2014 Jack Pugmire
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module Simpl.Core where
import Data.List (find)
type Name = String
type Result a = Either String a
data Global = Global { gName :: Name
, gValue :: Value
, gType :: Type
}
type Env = [Global]
data Term = EAnn Term Type
| EApp Term Term
| EGlobal Name
| ELam Term
| EVar Int
| EUnit Bool
data Type = TUnit
| TFun Type Type
| TVar Name
deriving (Eq)
instance Show Type where
show TUnit = "Unit"
show (TFun t t') = case t of
(TFun _ _) -> "(" ++ show t ++ ") -> " ++ show t'
_ -> show t ++ " -> " ++ show t'
show (TVar n) = n
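-- For illustration, the Show instance only parenthesises the argument side of a
-- function type: show (TFun (TFun TUnit TUnit) TUnit) == "(Unit -> Unit) -> Unit",
-- while show (TFun TUnit (TFun TUnit TUnit)) == "Unit -> Unit -> Unit".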
data Value = VUnit Bool
| VLam (Value -> Result Value)
instance Show Value where
show (VUnit True) = "T"
show (VUnit False) = "F"
show (VLam _) = "λ"
-- Look up the value of a global
lookupGlobal :: String -> Env -> Result Value
lookupGlobal n = f . find (\(Global n' _ _) -> n == n')
where f = maybe (Left ("No such global " ++ show n)) (Right . gValue)
-- Look up the type of a global
lookupGlobalT :: Name -> Env -> Result Type
lookupGlobalT n = f . find (\(Global n' _ _) -> n == n')
where f = maybe (Left ("No such global " ++ show n)) (Right . gType)
| jepugs/simpl | Simpl/Core.hs | apache-2.0 | 1,932 | 0 | 12 | 566 | 502 | 274 | 228 | 36 | 1 |
{- |
Module : Bio.Motions.Utils.Geometry
Description : Utility geometry functions.
License : Apache
Stability : experimental
Portability : unportable
-}
{-# LANGUAGE RecordWildCards #-}
module Bio.Motions.Utils.Geometry where
import Control.Monad
import Control.Applicative
import Data.Maybe
import Linear
type Point = V3 Int
data Triangle = Triangle
{ p1 :: Point
, p2 :: Point
, p3 :: Point
}
data Segment = Segment
{ p :: Point
, q :: Point
}
data Angle = Angle
{ v1 :: V3 Int
, v2 :: V3 Int
}
data Ray = Ray
{ o :: Point
, dir :: V3 Int
}
data Cube = Cube
{ minCorner :: Point
, maxCorner :: Point
}
-- |Tests whether a segment intersects a triangle in 3D.
-- Gives correct results when the triangle is actually a segment.
-- Works well with all nondegenerate edge cases, e.g. intersection at a vertex.
-- Gives unspecified results in more degenerate cases, i.e. the triangle or the segment is a point.
-- Warning: possible integer overflows with large distances.
intersectsTriangle :: Triangle -> Segment -> Bool
intersectsTriangle tri@Triangle{..} seg
| w /= 0 = isJust $ do
let sgn = signum w
guard $ sgn * s <= 0
let w2 = a `cross` d
t = w2 `dot` c
guard $ sgn * t >= 0
let u = - w2 `dot` b
guard $ sgn * u >= 0
let v = w - s - t - u
guard $ sgn * v >= 0
| s /= 0 = isJust $ do
let sgn = signum s
let w2 = d `cross` a
t = w2 `dot` c
guard $ sgn * t >= 0
let u = - w2 `dot` b
guard $ sgn * u >= 0
let v = s - w - t - u
guard $ sgn * v >= 0
| otherwise = any (intersectsSegment seg) [Segment p1 p2, Segment p1 p3, Segment p2 p3]
|| pointInsideTriangle (p seg) tri
where
a = p seg - p3
b = p1 - p3
c = p2 - p3
d = q seg - p3
w1 = b `cross` c
w = a `dot` w1
s = d `dot` w1
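-- A small usage sketch, not part of the original module: the segment from the
-- origin to (1,1,1) crosses the triangle spanned by the three unit points,
-- so this evaluates to True.
_exampleIntersection :: Bool
_exampleIntersection =
    intersectsTriangle
        (Triangle (V3 1 0 0) (V3 0 1 0) (V3 0 0 1))
        (Segment (V3 0 0 0) (V3 1 1 1))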
-- |Tests whether two segments in 3D intersect.
-- Returns true for all nondegenerate edge cases, e.g. intersection at a vertex.
-- Gives unspecified results in degenerate cases.
-- Warning: possible integer overflows with large distances.
intersectsSegment :: Segment -> Segment -> Bool
intersectsSegment s1 s2 =
let r = q s1 - p s1
s = q s2 - p s2
rxs = r `cross` s
p1p2 = p s2 - p s1
in if rxs == 0 && p1p2 `cross` r == 0 then
let t0' = p1p2 `dot` r
t1' = t0' + s `dot` r
(t0, t1) = if s `dot` r < 0 then (t1', t0') else (t0', t1')
dr = r `dot` r
in (0 <= t0 && t0 <= dr) || (0 <= t1 && t1 <= dr) || (t0 <= 0 && dr <= t1)
else
let d1 = (p1p2 `cross` s) `dot` rxs
d2 = (p1p2 `cross` r) `dot` rxs
drxs = rxs `dot` rxs
in rxs /= 0 && 0 <= d1 && d1 <= drxs && 0 <= d2 && d2 <= drxs
&& drxs *^ p1p2 == d1 *^ r - d2 *^ s
-- |Tests whether a segment goes through a point.
-- Assumes that the segment is nondegenerate, i.e. it is not a point.
-- Warning: possible integer overflows with large distances.
pointInsideSegment :: Point -> Segment -> Bool
pointInsideSegment v Segment{..} =
pv `cross` pq == 0 && 0 <= dpvpq && dpvpq <= dpqpq
where
pv = v - p
pq = q - p
dpvpq = pv `dot` pq
dpqpq = pq `dot` pq
-- |Tests whether a point is (not necessarily strictly) inside a triangle.
-- Assumes that the three points defining the triangle are pairwise different.
-- Works well when the triangle is actually a segment.
-- Warning: possible integer overflows with large distances.
pointInsideTriangle :: Point -> Triangle -> Bool
pointInsideTriangle p t@Triangle{..} =
det33 (V3 (p2 - p1) (p3 - p1) (p - p1)) == 0 && pointInsideTriangle2D p t
-- |Tests whether a point is (not necessarily strictly) inside a triangle.
-- Assumes that the three points defining the triangle are pairwise different.
-- Works well when the triangle is actually a segment.
-- Assumes that the point and the triangle are coplanar.
-- Warning: possible integer overflows with large distances.
pointInsideTriangle2D :: Point -> Triangle -> Bool
pointInsideTriangle2D p Triangle{..} =
vectorInsideAngle p1p (Angle p1p2 p1p3) &&
vectorInsideAngle p2p (Angle p2p1 p2p3) &&
vectorInsideAngle p3p (Angle p3p1 p3p2)
where
p1p = p - p1
p2p = p - p2
p3p = p - p3
p1p2 = p2 - p1
p1p3 = p3 - p1
p2p3 = p3 - p2
p2p1 = - p1p2
p3p1 = - p1p3
p3p2 = - p2p3
-- |Tests whether a vector is (not necessarily strictly) inside an angle.
-- When any of the angle vectors is zero or the angle vectors have opposing directions,
-- returns true for all vectors.
-- Warning: possible integer overflows with large distances.
vectorInsideAngle :: V3 Int -> Angle -> Bool
vectorInsideAngle v a@Angle{..} = det33 (V3 v v1 v2) == 0 && vectorInsideAngle2D v a
-- |Tests whether a vector is (not necessarily strictly) inside an angle.
-- When any of the angle vectors is zero or the angle vectors have opposing directions,
-- returns true for all vectors.
-- Assumes that the vector and the angle are coplanar.
-- Warning: possible integer overflows with large distances.
vectorInsideAngle2D :: V3 Int -> Angle -> Bool
vectorInsideAngle2D v Angle{..} =
v1cv2 * v1cv >= 0 && v2cv1 * v2cv >= 0 && v1cv * v2cv <= 0 &&
(v1cv2 /= 0 || v `dot` v1 >= 0 || v `dot` v2 >= 0)
where
v1cv2 = v1 `cross` v2
v1cv = v1 `cross` v
v2cv = v2 `cross` v
v2cv1 = - v1cv2
-- |Tests whether the given ray goes through the given point.
-- Assumes that the ray's vector is nonzero.
-- Warning: possible integer overflows with large distances.
pointInsideRay :: Point -> Ray -> Bool
pointInsideRay p Ray{..} =
op `cross` dir == 0 && 0 <= op `dot` dir
where op = p - o
-- |The bounding cube for a set of points.
-- Warning: possible integer overflows with large distances.
boundingCube :: [Point] -> Cube
boundingCube ps = Cube{..}
where
minCorner = foldr1 (liftA2 min) ps
maxCorner = foldr1 (liftA2 max) ps
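-- For example, boundingCube [V3 0 0 5, V3 2 (-1) 3] yields a cube with
-- minCorner = V3 0 (-1) 3 and maxCorner = V3 2 0 5: minima and maxima are
-- taken per coordinate.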
-- |A cube extended in each direction by the given radius.
-- Warning: possible integer overflows with large distances.
extendedCube :: Int -> Cube -> Cube
extendedCube radius Cube{..} = Cube minCorner' maxCorner'
where
minCorner' = fmap (\x -> x - radius) minCorner
maxCorner' = fmap (+ radius) maxCorner
| Motions/motions | src/Bio/Motions/Utils/Geometry.hs | apache-2.0 | 6,405 | 0 | 24 | 1,768 | 1,746 | 953 | 793 | 116 | 3 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Application
( makeApplication
, getApplicationDev
, makeFoundation
) where
import Import
import Settings
import Yesod.Auth
import Yesod.Default.Config
import Yesod.Default.Main
import Yesod.Default.Handlers
import Network.Wai.Middleware.RequestLogger (logStdout, logStdoutDev)
import qualified Database.Persist.Store
import Network.HTTP.Conduit (newManager, def)
import Database.Persist.GenericSql (runMigration)
import Data.HashMap.Strict as H
import Data.Aeson.Types as AT
#ifndef DEVELOPMENT
import qualified Web.Heroku
#endif
-- Import all relevant handler modules here.
-- Don't forget to add new modules to your cabal file!
import Handler.Home
import Handler.Feedings
import Handler.User
-- This line actually creates our YesodDispatch instance. It is the second half
-- of the call to mkYesodData which occurs in Foundation.hs. Please see the
-- comments there for more details.
mkYesodDispatch "App" resourcesApp
-- This function allocates resources (such as a database connection pool),
-- performs initialization and creates a WAI application. This is also the
-- place to put your migrate statements to have automatic database
-- migrations handled by Yesod.
makeApplication :: AppConfig DefaultEnv Extra -> IO Application
makeApplication conf = do
foundation <- makeFoundation conf
app <- toWaiAppPlain foundation
return $ logWare app
where
logWare = if development then logStdoutDev
else logStdout
{- commented out for heroku
makeFoundation :: AppConfig DefaultEnv Extra -> IO App
makeFoundation conf = do
manager <- newManager def
s <- staticSite
dbconf <- withYamlEnvironment "config/mongoDB.yml" (appEnv conf)
Database.Persist.Store.loadConfig >>=
Database.Persist.Store.applyEnv
p <- Database.Persist.Store.createPoolConfig (dbconf :: Settings.PersistConfig)
return $ App conf s p manager dbconf
-}
-- for yesod devel
getApplicationDev :: IO (Int, Application)
getApplicationDev =
defaultDevelApp loader makeApplication
where
loader = loadConfig (configSettings Development)
{ csParseExtra = parseExtra
}
makeFoundation :: AppConfig DefaultEnv Extra -> IO App
makeFoundation conf = do
manager <- newManager def
s <- staticSite
hconfig <- loadHerokuConfig
dbconf <- withYamlEnvironment "config/postgresql.yml" (appEnv conf)
(Database.Persist.Store.loadConfig . combineMappings hconfig) >>=
Database.Persist.Store.applyEnv
p <- Database.Persist.Store.createPoolConfig (dbconf :: Settings.PersistConfig)
Database.Persist.Store.runPool dbconf (runMigration migrateAll) p
return $ App conf s p manager dbconf
#ifndef DEVELOPMENT
canonicalizeKey :: (Text, val) -> (Text, val)
canonicalizeKey ("dbname", val) = ("database", val)
canonicalizeKey pair = pair
toMapping :: [(Text, Text)] -> AT.Value
toMapping xs = AT.Object $ H.fromList $ Import.map (\(key, val) -> (key, AT.String val)) xs
#endif
combineMappings :: AT.Value -> AT.Value -> AT.Value
combineMappings (AT.Object m1) (AT.Object m2) = AT.Object $ m1 `H.union` m2
combineMappings _ _ = error "Data.Object is not a Mapping."
loadHerokuConfig :: IO AT.Value
loadHerokuConfig = do
#ifdef DEVELOPMENT
return $ AT.Object H.empty
#else
Web.Heroku.dbConnParams >>= return . toMapping . Import.map canonicalizeKey
#endif
| svdberg/yesod-milk | Application.hs | bsd-2-clause | 3,427 | 0 | 12 | 602 | 646 | 360 | 286 | -1 | -1 |
module Main where
import System.Console.CmdArgs
import Network.HTTP.Neon.ProgType
import Network.HTTP.Neon.Command
main :: IO ()
main = do
putStrLn "hneon"
param <- cmdArgs mode
commandLineProcess param | wavewave/hneon | exe/hneon.hs | bsd-2-clause | 214 | 0 | 8 | 35 | 61 | 33 | 28 | 9 | 1 |
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-missing-import-lists #-}
{-# OPTIONS_GHC -fno-warn-implicit-prelude #-}
module Paths_ChatServer (
version,
getBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir,
getDataFileName, getSysconfDir
) where
import qualified Control.Exception as Exception
import Data.Version (Version(..))
import System.Environment (getEnv)
import Prelude
#if defined(VERSION_base)
#if MIN_VERSION_base(4,0,0)
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
#else
catchIO :: IO a -> (Exception.Exception -> IO a) -> IO a
#endif
#else
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
#endif
catchIO = Exception.catch
version :: Version
version = Version [0,1,0,0] []
bindir, libdir, dynlibdir, datadir, libexecdir, sysconfdir :: FilePath
bindir = "C:\\Users\\diarm\\IdeaProjects\\ChatServer\\.cabal-sandbox\\bin"
libdir = "C:\\Users\\diarm\\IdeaProjects\\ChatServer\\.cabal-sandbox\\x86_64-windows-ghc-8.2.1\\ChatServer-0.1.0.0-I7LJzhEVoAY7jb523Kzp1o"
dynlibdir = "C:\\Users\\diarm\\IdeaProjects\\ChatServer\\.cabal-sandbox\\x86_64-windows-ghc-8.2.1"
datadir = "C:\\Users\\diarm\\IdeaProjects\\ChatServer\\.cabal-sandbox\\x86_64-windows-ghc-8.2.1\\ChatServer-0.1.0.0"
libexecdir = "C:\\Users\\diarm\\IdeaProjects\\ChatServer\\.cabal-sandbox\\ChatServer-0.1.0.0-I7LJzhEVoAY7jb523Kzp1o\\x86_64-windows-ghc-8.2.1\\ChatServer-0.1.0.0"
sysconfdir = "C:\\Users\\diarm\\IdeaProjects\\ChatServer\\.cabal-sandbox\\etc"
getBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath
getBinDir = catchIO (getEnv "ChatServer_bindir") (\_ -> return bindir)
getLibDir = catchIO (getEnv "ChatServer_libdir") (\_ -> return libdir)
getDynLibDir = catchIO (getEnv "ChatServer_dynlibdir") (\_ -> return dynlibdir)
getDataDir = catchIO (getEnv "ChatServer_datadir") (\_ -> return datadir)
getLibexecDir = catchIO (getEnv "ChatServer_libexecdir") (\_ -> return libexecdir)
getSysconfDir = catchIO (getEnv "ChatServer_sysconfdir") (\_ -> return sysconfdir)
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = do
dir <- getDataDir
return (dir ++ "\\" ++ name)
| mcdonndi/ChatServer | dist/dist-sandbox-a117d482/build/ChatServer/autogen/Paths_ChatServer.hs | bsd-3-clause | 2,170 | 0 | 10 | 239 | 410 | 238 | 172 | 33 | 1 |
{-# Language OverloadedStrings #-}
--------------------------------------------------------------------
-- |
-- Module : Utils.Katt.Upload
--
-- Upload submodule providing submissions of solutions and parsing of results.
--
-- A submission is done by including all recursively found files and filtering
-- using a file filter given as an argument.
-- This is followed by polling for a submission result until some final
-- submission state has been reached (e.g. accepted).
--
-- Currently, multipart data upload is implemented by hand, since https-streams
-- (the HTTP client being used) does not support it (yet?).
module Utils.Katt.Upload
(makeSubmission)
where
import Control.Applicative ((<$>))
import Control.Concurrent (threadDelay)
import Control.Error hiding (tryIO)
import Control.Lens
import Control.Monad (join, liftM2, void)
import qualified Control.Monad.State as S
import qualified Data.ByteString.Char8 as B
import Data.List ((\\), union, findIndex)
import Data.Maybe (fromJust)
import Data.Monoid ((<>))
import qualified Data.Text as T
import qualified Network.Wreq as W
import qualified Network.Wreq.Session as WS
import Text.Parsec hiding (token)
import Text.Parsec.ByteString
import qualified Utils.Katt.Configuration as C
import Utils.Katt.SourceHandler
import Utils.Katt.Utils
-- | Submission page URL, relative 'Utils.host', from which specific submission can be requested.
submissionPage :: B.ByteString
submissionPage = "submission"
-- | Possible states of a submission, with unknowns being grouped into 'Other'.
data SubmissionState
-- | Submission is queued.
= Queued
-- | Submission is compiling.
| Compiling
-- | Submission is running.
| Running
-- | Wrong answer.
| WrongAnswer
-- | Time limit exceeded.
| TimeLimitExceeded
-- | Submission was accepted (only success state).
| Accepted
-- | Compile error.
| CompileError
-- | Run time error.
| RunTimeError
-- | Some other, unmatched error code. Only used when parsing fails.
| Other
deriving (Eq, Show)
-- | Possible states of a single test case, i.e. an (input, output) data pair.
data TestCase
-- | Test case passed.
= TestPassed
-- | Test case failed (state /= Accepted)
| TestFailed
-- | Test case has not been executed.
| NotTested
deriving (Eq, Show)
-- | Check if a given state is final, i.e. it will not transition into any other state.
-- Note that 'Other' is listed as final.
finalSubmissionState :: SubmissionState -> Bool
finalSubmissionState s = elem s
[WrongAnswer, TimeLimitExceeded, Accepted, CompileError, RunTimeError, Other]
-- | Make a submission of the project in the working directory.
-- Accepts a list of filters on the form /+file1 -file2 ../, which are
-- taken into account when locating all the source files.
-- /+file/ implies adding the specified file.
-- /-file/ implies removing the specified file.
--
-- In addition to the filters, all recursively found source code files
-- will be included in the submission.
makeSubmission :: [String] -> ConfigEnv IO ()
makeSubmission filterArguments = do
exists <- tryIO C.projectConfigExists
tryAssert "No project configuration could be found."
exists
C.loadProjectConfig
problem <- fromJust <$> S.gets project
conf <- S.get
-- Locate all source files, filter based on filter list.
files <- tryIOMsg "Failed to locate source files" findFiles
let adjusted = adjust (parseFilter filterArguments) files
tryIO $ mapM_ (putStrLn . ("Adding file: "++)) adjusted
-- Authenticate, submit files, and retrieve submission id.
let url = buildURL (host conf) (submitPage conf)
toState sess = (sess, host conf)
submission <- withAuth $ \sess ->
submitSolution (toState sess) url (problem, adjusted)
tryIO $ do
putStrLn $ "Made submission: " <> show submission
threadDelay initialTimeout
withAuth $ \sess ->
checkSubmission (toState sess) submission
where
adjust Nothing files = files
adjust (Just (add, sub)) files = union (files \\ sub) add
-- Initial timeout before requesting updates is 2 s.
initialTimeout = 2000000
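-- For reference, 'adjust' above combines the recursively found files with the
-- parsed filters; with made-up file names:
--   adjust (Just (["Extra.hs"], ["Skip.hs"])) ["Skip.hs", "Main.hs"]
--     == ["Main.hs", "Extra.hs"]
-- i.e. removed files are dropped first, then explicitly added files are unioned in.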
-- | Poll kattis for updates on a submission.
-- This function returns when the submission has reached one of the final states.
-- TODO: Consider exponential back-off and timeout
checkSubmission :: Session -> SubmissionId -> EitherT ErrorDesc IO ()
checkSubmission sess submission = do
page <- retrievePrivatePage sess $
submissionPage <> "?id=" <> B.pack (show submission)
let (state, tests) = parseSubmission page
if finalSubmissionState state
then
tryIO $ printResult tests state
else do
tryIO $ putStrLn "Waiting for completion.." >> threadDelay interval
checkSubmission sess submission
where
-- Default poll interval is 1 s.
interval = 1000000
-- | Parse the supplied submission page into:
-- (1) Current submission state
-- (2) Status of all test cases
parseSubmission :: B.ByteString -> (SubmissionState, [TestCase])
parseSubmission contents =
case res of
    Left err' -> error $ "Internal parser error: " <> show err'
    Right res' -> res'
where
res = parse parser "Submission parser" contents
parser = liftM2 (,) parseStatus parseTestCases
-- | String separator parser.
strSep :: GenParser Char st ()
strSep = void (char '\'' <|> char '"')
-- | End-of-tag parser, ignores everything up to the end of the current tag.
endTag :: GenParser Char st ()
endTag = void $ manyTill anyChar (char '>')
-- | Parse the submission status field, beginning from any offset in the page data.
parseStatus :: GenParser Char st SubmissionState
parseStatus = skip >> status
where
beginStatus = do
void $ string "<td class="
strSep >> string "status" >> strSep >> endTag
void $ string "<span class=" >> strSep
    -- Skip to the appropriate <td> tag.
skip = manyTill anyChar (void (try beginStatus) <|> eof)
-- Parse contents in <td>...</td>.
-- TODO: check if manyTill can be rewritten to the endTag pattern
status = do
void $ manyTill anyChar strSep
endTag
statusStr <- manyTill (letter <|> space) (char '<')
return $ conv statusStr
conv "Time Limit Exceeded" = TimeLimitExceeded
conv "Wrong Answer" = WrongAnswer
conv "Accepted" = Accepted
conv "Memory Limit Exceeded" = Other
conv "Compiling" = Compiling
conv "Running" = Running
conv "Compile Error" = CompileError
conv "Run Time Error" = RunTimeError
conv _ = Other
-- | Parse the status of all test cases, beginning from any offset in the page data.
-- May return zero test cases when a submission fails
-- with certain status values, e.g. /Compile Error/.
parseTestCases :: GenParser Char st [TestCase]
parseTestCases = skip >> tests
where
beginTests = do
void $ string "<div class="
strSep >> string "testcases" >> strSep
endTag
-- Locate surrounding div tag.
skip = manyTill anyChar (void (try beginTests) <|> eof)
-- Parse all test cases.
tests = many testCase
-- Each test case is basically <span [class="status"]>...</span>
-- where a missing class attribute implies that it hasn't been executed.
testCase = do
void . try $ string "<span "
classResult <- optionMaybe $ do
string "class=" >> strSep
manyTill anyChar strSep
void . manyTill anyChar $ string "</span>"
fromMaybe (return NotTested) (mapResult <$> classResult)
mapResult "accepted" = return TestPassed
mapResult "rejected" = return TestFailed
mapResult _ = parserZero
-- | Print the result of a submission.
-- Will also take care of the special case when no test cases were parsed.
printResult :: [TestCase] -> SubmissionState -> IO ()
printResult tests state
| state == Accepted = putStrLn $ "Accepted, " <> numTests <> " test(s) passed."
| null tests = putStrLn resultStr
| otherwise = putStrLn $ resultStr <> testCaseStr
where
numTests = show $ length tests
firstFailed = show . (+1) . fromMaybe 0 $ findIndex (/= TestPassed) tests
resultStr = "Result: " <> show state
testCaseStr = ", failed on test case " <> firstFailed <> " of " <> numTests
-- | Submit a solution, given problem name and source code files.
submitSolution :: Session -> String -> Submission -> EitherT ErrorDesc IO SubmissionId
submitSolution (sess, _) url (problem, files) = do
-- Determine language in submission.
language <- noteT ("\nFailed to decide submission language\n" <>
"Please use either Java or some union of C++ and C")
. hoistMaybe $ determineLanguage files
let languageStr = languageKattisName language
-- Locate main class, if any
mainClassStr <- join . tryIO $
(noteT "Failed to locate the \"public static void main\" method - is there any?" . hoistMaybe)
<$> findMainClass (files, language)
-- Construct POST data
problemName <- tryIO $ retrieveProblemName problem
let files' = map (W.partFile "sub_file[]") files
conv = T.pack . B.unpack
postFields = [W.partText "submit" "true"]
<> [W.partText "submit_ctr" "2"]
<> [W.partText "language" (conv languageStr)]
<> [W.partText "mainclass" (T.pack mainClassStr)]
<> [W.partText "problem" (conv problemName)]
<> [W.partText "tag" ""]
<> [W.partText "script" "true"]
-- Submit the request
reply <- tryIO $ WS.postWith
defaultOpts
sess
url
(files' <> postFields)
-- Extract the submission ID
let body = reply ^. W.responseBody
(EitherT . return . fmapL (B.pack . show)) $
parse parseSubmissionId "Submission ID parser" body
where
parseSubmissionId = manyTill anyChar (lookAhead identifier) >> identifier
identifier = read <$> many1 digit
| davnils/katt | katt-lib/src/Utils/Katt/Upload.hs | bsd-3-clause | 9,827 | 0 | 18 | 2,110 | 1,985 | 1,047 | 938 | 164 | 9 |
module IOFunc
( ioPrimitives
) where
import Control.Applicative ((<$>))
import Control.Monad.Trans
import Env (bindVars)
import Eval (apply, eval)
import Lib (liftThrows)
import Parser (readExpr, readExprList)
import System.IO
import Types
discardEnv :: ([LispVal] -> LispEval) -> (Env -> [LispVal] -> LispEval)
discardEnv func _ = func
ioPrimitives :: [(String, Env -> [LispVal] -> LispEval)]
ioPrimitives = [("apply", applyProc),
("open-input-file", discardEnv $ makePort ReadMode),
("open-output-file", discardEnv $ makePort WriteMode),
("close-input-port", discardEnv closePort),
("close-output-port", discardEnv closePort),
("read", discardEnv readProc),
("write", discardEnv writeProc),
("read-contents", discardEnv readContents),
("read-all", discardEnv readAll),
("load", loadFrm),
("display", displayFrm)]
displayFrm :: Env -> [LispVal] -> LispEval
displayFrm env [arg] = do
_ <- liftIO $ print arg
return Unit
loadFrm :: Env -> [LispVal] -> LispEval
loadFrm env [String filename] = lift (load filename) >>= fmap last . mapM (eval env)
applyProc :: Env -> [LispVal] -> LispEval
applyProc env [func, List args] = apply env func args
applyProc env (func : args) = apply env func args
makePort :: IOMode -> [LispVal] -> LispEval
makePort mode [String filename] = fmap Port $ liftIO $ openFile filename mode
closePort :: [LispVal] -> LispEval
closePort [Port port] = liftIO $ hClose port >> return (Bool True)
closePort _ = return $ Bool False
readProc :: [LispVal] -> LispEval
readProc [] = readProc [Port stdin]
readProc [Port port] = liftIO (hGetLine port) >>= (lift . liftThrows) . readExpr
writeProc :: [LispVal] -> LispEval
writeProc [obj] = writeProc [obj, Port stdout]
writeProc [obj, Port port] = liftIO $ hPrint port obj >> return (Bool True)
readContents :: [LispVal] -> LispEval
readContents [String filename] = fmap String $ liftIO $ readFile filename
load :: String -> IOThrowsError [LispVal]
load filename = liftIO (readFile filename) >>= liftThrows . readExprList
readAll :: [LispVal] -> LispEval
readAll [String filename] = List <$> lift (load filename)
| cheng81/schemish | src/IOFunc.hs | bsd-3-clause | 2,435 | 0 | 9 | 657 | 824 | 442 | 382 | 50 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Polycephaly.TH (
units
) where
import Test.Tasty
import Test.Tasty.HUnit
import Language.Haskell.TH
import Language.Haskell.TH.Alpha ( areExpAEq )
import Language.Haskell.Polycephaly.TH
units = units_mkFlagDC
units_mkFlagDC =
[ testCase "mkFlagDC adds flag var to class declaration" $
do
(clsD:_) <- runQ [d| class Print a where { print :: a -> IO ()}|]
let ClassD [] clsName tyVarBndrs [] decs = mkFlagDC defaultRules clsD
assertBool "Incorrect class name!" (show clsName == "Print'")
-- TODO: where clause check (fix th-alpha)
]
| jkarni/polycephalous-instances | tests/Polycephaly/TH.hs | bsd-3-clause | 676 | 0 | 13 | 186 | 138 | 77 | 61 | 15 | 1 |
{-------------------------------------------------------------------------------
MorphGrammar.Hofm.Transf.TTransf
Transformation function represented as a tree of replacement operators
Jan Snajder <[email protected]>
(c) 2009 TakeLab
University of Zagreb
Faculty of Electrical Engineering and Computing
-------------------------------------------------------------------------------}
module MorphGrammar.Hofm.Transf.TTransf (TTransf) where
import MorphGrammar.Hofm.Transf
import MorphGrammar.Hofm.Transf.StringOp
import Data.Maybe
import Data.List
import Control.Monad
import Data.Ord (comparing)
--------------------------------------------------------------------------------
-- TTransf instance: Transformation in tree-form
--------------------------------------------------------------------------------
data Op =
RP String String |
RS String String |
RI String String |
NO
deriving (Eq,Show)
data TTransf =
Else TTransf TTransf |
Ilse TTransf TTransf |
Or TTransf TTransf |
Comp TTransf TTransf |
Tip Op |
Fail
deriving (Eq,Show)
applyOp :: (MonadPlus m) => Op -> String -> m String
applyOp (RP p1 p2) = replacePrefixCS p1 p2
applyOp (RS s1 s2) = replaceSuffix s1 s2
applyOp (RI i1 i2) = replaceInfixes i1 i2
applyOp NO = return
invOp :: Op -> Op
invOp (RP p1 p2) = RP p2 p1
invOp (RS s1 s2) = RS s2 s1
invOp (RI i1 i2) = RI i2 i1
invOp NO = NO
apply :: (MonadPlus m) => TTransf -> String -> m String
apply t x = msum . map return $ app t
where app Fail = mzero
app (Tip op) = applyOp op x
app (Else t1 t2) = case app t1 of
[] -> app t2
ys -> ys
app (Or t1 t2) = app t1 `mplus` app t2
app (Comp t1 t2) = apply t2 x >>= apply t1
app (Ilse t1 t2) =
app t1 `mplus` [r | r <- apply t2 x, apply (inv t1) x == []]
          -- NB: t1 and t2 are already inverted here
          -- (t1||t2)^-1(s) = t1^-1(s) U t2^-1(s) if (t1 . t2^-1)(s) = []
          --                = t1^-1(s) otherwise
          -- i.e. if t1 could be applied to t2^-1(s), it would certainly have been
          -- applied, so in that case t2 is shadowed by t1;
          -- but if that is not the case, then t2^-1(s) is also a possible inverse
          --
          -- e.g.:
-- > let t = rsfx "a" "x" .||. nul :: TTransf
-- > t $$ "bra" :: [] String
-- ["brx"]
-- > t $$ "brx" :: [] String
-- ["brx"]
-- > t $$ "bry" :: [] String
-- ["bry"]
--
-- > (inv t) $$ "bra" :: [] String
-- []
-- *Hofm.Transf.TTransf> (inv t) $$ "brx" :: [] String
-- ["bra","brx"]
-- *Hofm.Transf.TTransf> (inv t) $$ "bry" :: [] String
-- ["bry"]
compose :: TTransf -> TTransf -> TTransf
compose (Tip NO) t2 = t2
compose t1 (Tip NO) = t1
compose t1 t2 = t1 `Comp` t2
orelse :: TTransf -> TTransf -> TTransf
orelse t1 t2 = t1 `Else` t2
inverse :: TTransf -> TTransf
inverse Fail = Fail
inverse (Tip op) = Tip $ invOp op
inverse (Else t1 t2) = Ilse (inverse t1) (inverse t2) -- !!!
inverse (Ilse t1 t2) = Else (inverse t1) (inverse t2)
inverse (Or t1 t2) = Or (inverse t1) (inverse t2)
inverse (Comp t1 t2) = Comp (inverse t2) (inverse t1)
-- instance definitions
instance Transf TTransf where
t $$ s = apply t s
(&) = Comp
rsfx s1 s2 = Tip $ RS s1 s2
rpfx p1 p2 = Tip $ RP p1 p2
rifx "" _ = Fail
rifx _ "" = Fail
rifx i1 i2 = Tip $ RI i1 i2
nul = Tip NO
fail = Fail
instance InvTransf TTransf where
inv = inverse
instance OptTransf TTransf where
(.||.) = Else
(.|.) = Or
| jsnajder/hofm | src/MorphGrammar/Hofm/Transf/TTransf.hs | bsd-3-clause | 3,477 | 0 | 13 | 849 | 997 | 525 | 472 | 70 | 7 |
-----------------------------------------------------------
-- |
-- Module : Database.HaskellDB.Sql.Print
-- Copyright : Daan Leijen (c) 1999, [email protected]
-- HWT Group (c) 2003, [email protected]
-- License : BSD-style
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable
--
-- Pretty-print SQL
--
-----------------------------------------------------------
module Database.HaskellDB.Sql.Print (
ppSql,
ppUpdate,
ppDelete,
ppInsert,
ppCreate,
ppDrop,
ppSqlExpr
) where
import Prelude hiding ((<>))
import Database.HaskellDB.Sql
import Data.List (intersperse)
import Text.PrettyPrint.HughesPJ
-- * SELECT
-- | Pretty prints a 'SqlSelect'
ppSql :: SqlSelect -> Doc
ppSql (SqlSelect options attrs tables criteria groupby orderby extra)
= text "SELECT"
<+> hsep (map text options)
<+> ppAttrs attrs
$$ ppTables tables
$$ ppWhere criteria
$$ maybe empty ppGroupBy groupby
$$ ppOrderBy orderby
$$ hsep (map text extra)
ppSql (SqlBin op q1 q2) = parens (ppSql q1) $$ text op $$ parens (ppSql q2)
ppSql (SqlTable name) = text name
ppSql (SqlEmpty) = text ""
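-- A small usage sketch (illustrative, not part of the original module):
-- pretty-printing a trivial query. Table aliases are generated, so this renders
-- as "SELECT name" followed by "FROM users as T1".
_exampleSelect :: Doc
_exampleSelect = ppSql $
    SqlSelect [] [("name", ColumnSqlExpr "name")] [("users", SqlTable "users")]
              [] Nothing [] []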
ppAttrs :: [(SqlColumn,SqlExpr)] -> Doc
ppAttrs [] = text "*"
ppAttrs xs = commaV nameAs xs
where
-- | Print a name-value binding, or just the name if
-- name and value are the same.
nameAs :: (SqlColumn,SqlExpr) -> Doc
nameAs (name, ColumnSqlExpr c) | name == c = text name
nameAs (name,expr) = ppAs name (ppSqlExpr expr)
-- FIXME: table aliases start from 1 in every select, which means that
-- with binary RelOps we can get table alias clashes.
ppTables :: [(SqlTable,SqlSelect)] -> Doc
ppTables [] = empty
ppTables ts = text "FROM" <+> commaV ppTable (zipWith tableAlias [1..] ts)
where
tableAlias :: Int -> (SqlTable,SqlSelect) -> (SqlTable,SqlSelect)
tableAlias i (_,sql) = ("T" ++ show i,sql)
ppTable :: (SqlTable,SqlSelect) -> Doc
ppTable (alias,(SqlTable name)) = ppAs alias (text name)
ppTable (alias,sql) = ppAs alias (parens (ppSql sql))
ppWhere :: [SqlExpr] -> Doc
ppWhere [] = empty
ppWhere es = text "WHERE"
<+> hsep (intersperse (text "AND")
(map (parens . ppSqlExpr) es))
ppGroupBy :: Mark -> Doc
ppGroupBy All = error "Should not ever print GroupBy all."
ppGroupBy (Columns es) = text "GROUP BY" <+> ppGroupAttrs es
where
ppGroupAttrs :: [(SqlColumn, SqlExpr)] -> Doc
ppGroupAttrs cs = commaV nameOrExpr cs
nameOrExpr :: (SqlColumn, SqlExpr) -> Doc
nameOrExpr (_, ColumnSqlExpr col) = text col
nameOrExpr (_, expr) = parens (ppSqlExpr expr)
ppOrderBy :: [(SqlExpr,SqlOrder)] -> Doc
ppOrderBy [] = empty
ppOrderBy ord = text "ORDER BY" <+> commaV ppOrd ord
where
ppOrd (e,o) = ppSqlExpr e <+> ppSqlOrder o
ppSqlOrder :: SqlOrder -> Doc
ppSqlOrder SqlAsc = text "ASC"
ppSqlOrder SqlDesc = text "DESC"
ppAs :: String -> Doc -> Doc
ppAs alias expr | null alias = expr
| otherwise = expr <+> (hsep . map text) ["as",alias]
-- * UPDATE
-- | Pretty prints a 'SqlUpdate'
ppUpdate :: SqlUpdate -> Doc
ppUpdate (SqlUpdate name assigns criteria)
= text "UPDATE" <+> text name
$$ text "SET" <+> commaV ppAssign assigns
$$ ppWhere criteria
where
ppAssign (c,e) = text c <+> equals <+> ppSqlExpr e
-- * DELETE
-- | Pretty prints a 'SqlDelete'
ppDelete :: SqlDelete -> Doc
ppDelete (SqlDelete name criteria) =
text "DELETE FROM" <+> text name $$ ppWhere criteria
-- * INSERT
ppInsert :: SqlInsert -> Doc
ppInsert (SqlInsert table names values)
= text "INSERT INTO" <+> text table
<+> parens (commaV text names)
$$ text "VALUES" <+> parens (commaV ppSqlExpr values)
ppInsert (SqlInsertQuery table names select)
= text "INSERT INTO" <+> text table
<+> parens (commaV text names)
$$ ppSql select
-- * CREATE
-- | Pretty prints a 'SqlCreate'.
ppCreate :: SqlCreate -> Doc
ppCreate (SqlCreateDB name) = text "CREATE DATABASE" <+> text name
ppCreate (SqlCreateTable name xs)
= text "CREATE TABLE" <+> text name
<+> parens (commaV ppF xs)
where
ppF (fname,t) = text fname <+> ppSqlTypeNull t
ppSqlTypeNull :: (SqlType,Bool) -> Doc
ppSqlTypeNull (t,nullable) = ppSqlType t <+> text (if nullable then " null" else " not null")
ppSqlType :: SqlType -> Doc
ppSqlType (SqlType t) = text t
ppSqlType (SqlType1 t x) = text t <> parens (int x)
ppSqlType (SqlType2 t x y) = text t <> parens (commaH int [x,y])
-- * DROP
-- | Pretty prints a 'SqlDrop'.
ppDrop :: SqlDrop -> Doc
ppDrop (SqlDropDB name) = text "DROP DATABASE" <+> text name
ppDrop (SqlDropTable name) = text "DROP TABLE" <+> text name
-- * Expressions
-- | Pretty prints a 'SqlExpr'
ppSqlExpr :: SqlExpr -> Doc
ppSqlExpr e =
case e of
ColumnSqlExpr c -> text c
ParensSqlExpr e -> parens (ppSqlExpr e)
BinSqlExpr op e1 e2 -> ppSqlExpr e1 <+> text op <+> ppSqlExpr e2
PrefixSqlExpr op e -> text op <+> ppSqlExpr e
PostfixSqlExpr op e -> ppSqlExpr e <+> text op
FunSqlExpr f es -> text f <> parens (commaH ppSqlExpr es)
AggrFunSqlExpr f es -> text f <> parens (commaH ppSqlExpr es)
ConstSqlExpr c -> text c
CaseSqlExpr cs el -> text "CASE" <+> vcat (map ppWhen cs)
<+> text "ELSE" <+> ppSqlExpr el <+> text "END"
where ppWhen (w,t) = text "WHEN" <+> ppSqlExpr w
<+> text "THEN" <+> ppSqlExpr t
ListSqlExpr es -> parens (commaH ppSqlExpr es)
ExistsSqlExpr s -> text "EXISTS" <+> parens (ppSql s)
ParamSqlExpr n v -> ppSqlExpr v
PlaceHolderSqlExpr -> text "?"
CastSqlExpr typ expr -> text "CAST" <> parens (ppSqlExpr expr <+> text "AS" <+> text typ)
commaH :: (a -> Doc) -> [a] -> Doc
commaH f = hcat . punctuate comma . map f
commaV :: (a -> Doc) -> [a] -> Doc
commaV f = vcat . punctuate comma . map f
| m4dc4p/haskelldb | src/Database/HaskellDB/Sql/Print.hs | bsd-3-clause | 6,411 | 6 | 14 | 1,833 | 2,014 | 1,012 | 1,002 | 119 | 14 |
module Generator (
generateSource
) where
import Control.Monad hiding (join)
import Control.Monad.State hiding (join)
import Data.Char
import Data.List
import qualified Data.Map as M
import qualified Data.Set as S
import Data.String.Utils
import Data.Tuple
import Module
import Parser
import Registry
import Text.Printf
data Entry = F String
| E String
deriving (Eq, Ord, Show)
data Category = C String (S.Set String)
deriving (Eq, Ord, Show)
saneEnum :: String -> String
saneEnum = ("gl_"++) . join "_" . tail . split "_"
saneModule :: String -> String
saneModule "422Pixels" = "FourTwoTwoPixels"
saneModule x = x
sanePrefix :: String -> String
sanePrefix "3DFX" = "ThreeDFX"
sanePrefix x = x
commandSignature :: String -> Command -> String
commandSignature monad command =
join " -> " $
(parameterSignature $ commandParameters command) ++
[returnSignature $ commandType command]
where
parameterSignature :: [(Type, String)] -> [String]
parameterSignature params = map (typeSignature . fst) params
returnSignature :: Type -> String
returnSignature t = wrap monad True . wrap "Ptr" (typePointer t) $
case typeName t of
Nothing -> "()"
Just "GLvoid" -> "()"
Just x -> x
wrap :: String -> Bool -> String -> String
wrap w True s | any isSpace s = printf "%s (%s)" w s
wrap w True s = printf "%s %s" w s
wrap w False s = s
typeSignature :: Type -> String
typeSignature t = wrap "Ptr" (typePointer t) $
case typeName t of
Nothing -> "()"
Just "GLvoid" -> "()"
Just "struct _cl_context" -> "()"
Just "struct _cl_event" -> "()"
Just x -> x
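-- For illustration: a command taking a single GLuint pointer and returning
-- GLenum is rendered as "Ptr GLuint -> m GLenum", while a parameterless
-- command returning GLvoid becomes just "m ()".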
ffiCommandName :: String -> String
ffiCommandName
= ("ffi"++)
. join ""
. split "GL"
. join ""
. map (filter isAlphaNum)
. map (replace "()" "V")
. split " -> "
. replace "m (" "IO ("
. replace "m GL" "IO GL"
ffiCommandSignature :: String -> String
ffiCommandSignature cmd = printf "FunPtr (%s) -> %s" x x
where x
= replace "m (" "IO ("
$ replace "m GL" "IO GL" cmd
extensionModuleName :: String -> String
extensionModuleName name =
printf "Graphics.OpenGL.Extension.%s.%s"
(sanePrefix prefix) (saneModule $ camelCase (join "_" rest))
where
(gl:prefix:rest) = split "_" name
camelCase :: String -> String
camelCase str = concat . map (\(x:xs) -> toUpper x : xs) $
split "_" str
profileModuleName :: String -> String -> (String, Maybe String)
profileModuleName feature profile =
( printf "Graphics.OpenGL.Profile.%s" $ fst submodule
, snd submodule >>= return . printf "Graphics.OpenGL.Profile.%s"
)
where
submodule = case (feature, profile) of
("GL_VERSION_1_0", _) -> ("Standard10", Nothing)
("GL_VERSION_1_1", _) -> ("Standard11", Nothing)
("GL_VERSION_1_2", _) -> ("Standard12", Nothing)
("GL_VERSION_1_3", _) -> ("Standard13", Nothing)
("GL_VERSION_1_4", _) -> ("Standard14", Nothing)
("GL_VERSION_1_5", _) -> ("Standard15", Nothing)
("GL_VERSION_2_0", _) -> ("Standard20", Nothing)
("GL_VERSION_2_1", _) -> ("Standard21", Nothing)
("GL_VERSION_3_0", _) -> ("Standard30", Nothing)
("GL_VERSION_3_1", _) -> ("Standard31", Nothing)
("GL_VERSION_3_2", "core") -> ("Core32", Just "Compatibility32")
("GL_VERSION_3_2", "compatibility") -> ("Compatibility32", Nothing)
("GL_VERSION_3_2", _) -> ("Core32", Nothing)
("GL_VERSION_3_3", "core") -> ("Core33", Just "Compatibility33")
("GL_VERSION_3_3", "compatibility") -> ("Compatibility33", Nothing)
("GL_VERSION_3_3", _) -> ("Core33", Nothing)
("GL_VERSION_4_0", "core") -> ("Core40", Just "Compatibility40")
("GL_VERSION_4_0", "compatibility") -> ("Compatibility40", Nothing)
("GL_VERSION_4_0", _) -> ("Core40", Nothing)
("GL_VERSION_4_1", "core") -> ("Core41", Just "Compatibility41")
("GL_VERSION_4_1", "compatibility") -> ("Compatibility41", Nothing)
("GL_VERSION_4_1", _) -> ("Core41", Nothing)
("GL_VERSION_4_2", "core") -> ("Core42", Just "Compatibility42")
("GL_VERSION_4_2", "compatibility") -> ("Compatibility42", Nothing)
("GL_VERSION_4_2", _) -> ("Core42", Nothing)
("GL_VERSION_4_3", "core") -> ("Core43", Just "Compatibility43")
("GL_VERSION_4_3", "compatibility") -> ("Compatibility43", Nothing)
("GL_VERSION_4_3", _) -> ("Core43", Nothing)
("GL_VERSION_4_4", "core") -> ("Core44", Just "Compatibility44")
("GL_VERSION_4_4", "compatibility") -> ("Compatibility44", Nothing)
("GL_VERSION_4_4", _) -> ("Core44", Nothing)
("GL_VERSION_4_5", "core") -> ("Core45", Just "Compatibility45")
("GL_VERSION_4_5", "compatibility") -> ("Compatibility45", Nothing)
("GL_VERSION_4_5", _) -> ("Core45", Nothing)
("GL_VERSION_ES_CM_1_0", "common") -> ("EmbeddedCommon10", Nothing)
("GL_VERSION_ES_CM_1_0", _) -> ("EmbeddedLite10", Nothing)
("GL_ES_VERSION_2_0", _) -> ("Embedded20", Nothing)
("GL_ES_VERSION_3_0", _) -> ("Embedded30", Nothing)
("GL_ES_VERSION_3_1", _) -> ("Embedded31", Nothing)
a -> error $ show a
implicitPrelude :: String -> ([String], [Body])
implicitPrelude m = case m of
"Graphics.OpenGL.Profile.Compatibility32" -> mk [
"Graphics.OpenGL.Profile.Core32"
]
"Graphics.OpenGL.Profile.Compatibility33" -> mk [
"Graphics.OpenGL.Profile.Compatibility32"
, "Graphics.OpenGL.Profile.Core33"
]
"Graphics.OpenGL.Profile.Compatibility40" -> mk [
"Graphics.OpenGL.Profile.Compatibility32"
, "Graphics.OpenGL.Profile.Core40"
]
"Graphics.OpenGL.Profile.Compatibility41" -> mk [
"Graphics.OpenGL.Profile.Compatibility40"
, "Graphics.OpenGL.Profile.Core41"
]
"Graphics.OpenGL.Profile.Compatibility42" -> mk [
"Graphics.OpenGL.Profile.Compatibility41"
, "Graphics.OpenGL.Profile.Core42"
]
"Graphics.OpenGL.Profile.Compatibility43" -> mk [
"Graphics.OpenGL.Profile.Compatibility42"
, "Graphics.OpenGL.Profile.Core43"
]
"Graphics.OpenGL.Profile.Compatibility44" -> mk [
"Graphics.OpenGL.Profile.Compatibility43"
, "Graphics.OpenGL.Profile.Core44"
]
"Graphics.OpenGL.Profile.Compatibility45" -> mk [
"Graphics.OpenGL.Profile.Compatibility44"
, "Graphics.OpenGL.Profile.Core45"
]
"Graphics.OpenGL.Profile.Core33" -> mk [
"Graphics.OpenGL.Profile.Core32"
]
"Graphics.OpenGL.Profile.Core40" -> mk [
"Graphics.OpenGL.Profile.Core33"
]
"Graphics.OpenGL.Profile.Core41" -> mk [
"Graphics.OpenGL.Profile.Core40"
]
"Graphics.OpenGL.Profile.Core42" -> mk [
"Graphics.OpenGL.Profile.Core41"
]
"Graphics.OpenGL.Profile.Core43" -> mk [
"Graphics.OpenGL.Profile.Core42"
]
"Graphics.OpenGL.Profile.Core44" -> mk [
"Graphics.OpenGL.Profile.Core43"
]
"Graphics.OpenGL.Profile.Core45" -> mk [
"Graphics.OpenGL.Profile.Core44"
]
"Graphics.OpenGL.Profile.EmbeddedCommon10" -> mk [
"Graphics.OpenGL.Profile.EmbeddedLite10"
]
"Graphics.OpenGL.Profile.Embedded30" -> mk [
"Graphics.OpenGL.Profile.Embedded20"
]
"Graphics.OpenGL.Profile.Embedded31" -> mk [
"Graphics.OpenGL.Profile.Embedded30"
]
"Graphics.OpenGL.Profile.Standard11" -> mk [
"Graphics.OpenGL.Profile.Standard10"
]
"Graphics.OpenGL.Profile.Standard12" -> mk [
"Graphics.OpenGL.Profile.Standard11"
]
"Graphics.OpenGL.Profile.Standard13" -> mk [
"Graphics.OpenGL.Profile.Standard12"
]
"Graphics.OpenGL.Profile.Standard14" -> mk [
"Graphics.OpenGL.Profile.Standard13"
]
"Graphics.OpenGL.Profile.Standard15" -> mk [
"Graphics.OpenGL.Profile.Standard14"
]
"Graphics.OpenGL.Profile.Standard20" -> mk [
"Graphics.OpenGL.Profile.Standard15"
]
"Graphics.OpenGL.Profile.Standard21" -> mk [
"Graphics.OpenGL.Profile.Standard20"
]
"Graphics.OpenGL.Profile.Standard30" -> mk [
"Graphics.OpenGL.Profile.Standard21"
]
"Graphics.OpenGL.Profile.Standard31" -> mk [
"Graphics.OpenGL.Profile.Standard30"
]
_ -> ([], [])
where
mk names = (map ("module "++) names, [Import names])
requires :: String -> Require -> State (M.Map Entry Category) ()
requires name req = do
forM_ (requireEnums req) $ \e -> do
modify $ M.adjust (\(C v m) -> C v $ S.insert name m) (E $ saneEnum e)
forM_ (requireCommands req) $ \f -> do
modify $ M.adjust (\(C v m) -> C v $ S.insert name m) (F f)
entries :: Registry -> State (M.Map Entry Category) ()
entries registry = do
forM_ (registryCommands registry) $ \f -> do
modify $ M.insert
(F $ commandName f)
(C (commandSignature "m" f) S.empty)
forM_ (registryEnums registry) $ \e -> do
modify $ M.insert
(E . saneEnum $ enumName e)
(C (enumValue e) S.empty)
forM_ (registryExtensions registry) $ \ext -> do
forM_ (extensionRequires ext) $ \req -> do
requires (extensionModuleName $ extensionName ext) req
forM_ (registryFeatures registry) $ \fe -> do
let feature = featureName fe
forM_ (featureRequires fe) $ \req -> do
let name = fst . profileModuleName feature $ requireProfile req
requires name req
when (startswith "Graphics.OpenGL.Profile.Standard" name) $
requires "Graphics.OpenGL.Profile.Core32" req
forM_ (featureRemoves fe) $ \rm -> do
let profile = removeProfile rm
let (name, removeName) = profileModuleName feature profile
forM_ (removeEnums rm) $ \e -> do
modify $ M.adjust
(\(C v m) -> C v $ S.delete name m)
(E $ saneEnum e)
case removeName of
Just name' -> modify $ M.adjust
(\(C v m) -> C v $ S.insert name' m) (E $ saneEnum e)
Nothing -> return ()
forM_ (removeCommands rm) $ \f -> do
modify $ M.adjust
(\(C v m) -> C v $ S.delete name m)
(F f)
case removeName of
Just name' -> modify $ M.adjust
(\(C v m) -> C v $ S.insert name' m) (F f)
Nothing -> return ()
modules :: Registry
-> M.Map Entry Category
-> State (M.Map String [(Bool, Entry, String)]) ()
modules registry entr = do
forM_ (registryExtensions registry) $ \ext -> do
modify $ M.insert (extensionModuleName $ extensionName ext) []
forM_ profiles $ \profile -> do
modify $ M.insert (printf "Graphics.OpenGL.Profile.%s" profile) []
forM_ (M.toList entr) $ \(k, C v ms) -> do
forM_ (S.toList ms) $ \m -> do
modify $ M.alter (f (S.size ms > 1, k, v)) m
where
f r Nothing = Just [r]
f r (Just a) = Just $ a ++ [r]
profiles = [
"Compatibility32"
, "Compatibility33"
, "Compatibility40"
, "Compatibility41"
, "Compatibility42"
, "Compatibility43"
, "Compatibility44"
, "Compatibility45"
, "Core32"
, "Core33"
, "Core40"
, "Core41"
, "Core42"
, "Core43"
, "Core44"
, "Core45"
, "EmbeddedCommon11"
, "EmbeddedLite11"
, "Embedded20"
, "Embedded30"
, "Embedded31"
, "Standard10"
, "Standard11"
, "Standard12"
, "Standard13"
, "Standard14"
, "Standard15"
, "Standard20"
, "Standard21"
, "Standard30"
, "Standard31"
]
-- | Map between function name and vector index
data FunMap = FunMap
(M.Map Int String)
(M.Map String Int)
(M.Map Int String)
(M.Map String (Int, String)) -- module name -> extension id / name
deriving (Eq, Show)
funMap :: Registry -> [(Bool, Entry, String)] -> FunMap
funMap registry entr = FunMap
(M.fromList map')
(M.fromList $ map swap map')
(M.fromList map'')
(M.fromList $
map (\(i, x) -> (extensionModuleName x, (i, x))) $ zip [0..] exts)
where
isFunction k = case k of
(_, F _, _) -> True
_ -> False
map' = zip [0..] . map (\(_, F n, s) -> n) . nub $
filter isFunction entr
map'' = zip [0..] . map (\(_, F n, s) -> s) . nub $
filter isFunction entr
exts = map extensionName $ registryExtensions registry
funMapByInt :: Int -> FunMap -> String
funMapByInt i (FunMap m _ _ _) = M.findWithDefault undefined i m
funMapByFunction :: String -> FunMap -> Int
funMapByFunction s (FunMap _ m _ _) = M.findWithDefault undefined s m
funMapSignature :: Int -> FunMap -> String
funMapSignature i (FunMap _ _ m _) = M.findWithDefault undefined i m
funMapFst :: FunMap -> M.Map Int String
funMapFst (FunMap m _ _ _) = m
funMapMax :: FunMap -> Int
funMapMax (FunMap m _ _ _) = M.size m
funExt :: FunMap -> M.Map String (Int, String)
funExt (FunMap _ _ _ m) = m
funExtInfoByModule :: String -> FunMap -> Maybe (Int, String)
funExtInfoByModule s (FunMap _ _ _ m) = M.lookup s m
funMapExtSize :: FunMap -> Int
funMapExtSize (FunMap _ _ _ m) = M.size m
funBody :: FunMap -> String -> String -> Body
funBody fm n v =
Function n ("(MonadIO m, MonadReader e m, HasScope e) => " ++ v) $ strip body
where
numArgs = subtract 2 . length $ split " -> " v
params = join " " $ map (\x -> "v" ++ show x) [0..numArgs]
body = printf "%s = funGL %d >>= \\f -> liftIO $ f %s"
params
(funMapByFunction n fm)
params
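    -- For illustration (the index is made up): a two-argument command yields a
    -- generated binding of the shape
    --   glBindBuffer v0 v1 = funGL 17 >>= \f -> liftIO $ f v0 v1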
mkScope :: FunMap -> [(Bool, Entry, String)] -> Module
mkScope fm entr = Module "Graphics.OpenGL.Internal.Scope" export body
where
numExtensions :: Int
numExtensions = funMapExtSize fm
numFunctions :: Int
numFunctions = funMapMax fm
export =
[ Section "Scope"
[ "module Control.Monad.Reader"
, "Scope"
, "HasScope(..)"
, "GLLoader"
, "extGL"
, "funGL"
, "initScope"
]
]
body =
[ Import
[ "Control.Applicative"
, "Control.Monad.Reader"
, "Data.Maybe"
, "qualified Data.Vector as V"
, "qualified Data.Vector.Unboxed as VU"
, "Foreign.C.String"
, "Foreign.C.Types"
, "Foreign.Marshal.Alloc"
, "Foreign.Ptr"
, "Foreign.Storable"
, "Graphics.OpenGL.Types"
, "Unsafe.Coerce"
]
, Code $
"data Scope = Scope (V.Vector (IO ())) (VU.Vector Bool)"
, Code $
"class HasScope e where\n" ++
"\tscope :: e -> Scope"
, Code $
"instance HasScope Scope where\n" ++
"\tscope = id"
, Code $
"type GLLoader = CString -> IO (Ptr ())"
, Function
"extGL" "(Monad m, MonadReader e m, HasScope e) => Int -> m Bool" $
"n = do\n" ++
"\tScope _ es <- asks scope\n" ++
"\treturn $ VU.unsafeIndex es n"
, Function
"funGL" "(MonadIO m, MonadReader e m, HasScope e) => Int -> m a" $
"n = do\n" ++
"\tScope fs _ <- asks scope\n" ++
"\tfunGL' fs n"
, Function
"funGL'" "MonadIO m => V.Vector (IO ()) -> Int -> m a"
"fs n = return . unsafeCoerce $ V.unsafeIndex fs n"
, Function "initScope" "GLLoader -> IO Scope" $
printf
( "loader = do\n"
++"\tfs <- V.generateM %d (load loader)\n"
++"\tes <- loadExtensions fs\n"
++"\treturn $ Scope fs es"
)
numFunctions
, Function "load'"
"GLLoader -> String -> (FunPtr a -> a) -> IO (IO ())" $
"f s ffi = withCString s f >>= return . unsafeCoerce . ffi . castPtrToFunPtr\n" ++
"{-# NOINLINE load' #-}"
, Function "loadExtensions"
"V.Vector (IO ()) -> IO (VU.Vector Bool)" $ printf
("fs = do\n" ++
"\tglGetString <- funGL' fs %d :: IO (GLenum -> IO (Ptr GLubyte))\n" ++
"\tglGetStringi <- funGL' fs %d :: IO (GLenum -> GLuint -> IO (Ptr GLubyte))\n" ++
"\tglGetIntegerv <- funGL' fs %d :: IO (GLenum -> Ptr GLint -> IO ())\n" ++
"\tnumExtensions <- alloca $ \\p -> glGetIntegerv 0x821D p >> peek p\n" ++
"\tsupported <- forM [0..(fromIntegral numExtensions)-1] $ \\n ->\n" ++
"\t\tmapExtension <$> (peekCString . castPtr =<< glGetStringi 0x1F03 n)\n" ++
"\treturn $\n" ++
"\t\tVU.unsafeUpd (VU.replicate %d False) . zip (map fromJust $ filter isJust supported) $ repeat True")
(funMapByFunction "glGetString" fm)
(funMapByFunction "glGetStringi" fm)
(funMapByFunction "glGetIntegerv" fm)
numExtensions
, Function "mapExtension"
"String -> Maybe Int" $
("ext = case ext of\n"++) .
(++"\n\t_ -> Nothing") .
join "\n" $ map (\(i, n) ->
printf "\t\"%s\" -> Just %d" n i)
(sort . map snd $ M.toList $ funExt fm)
, Function "load" "GLLoader -> Int -> IO (IO ())" $
"f n = case n of\n" ++
concatMap
(\(n, f) -> printf "\t%d -> load' f \"%s\" %s\n"
n f (ffiCommandName $ funMapSignature n fm))
(M.toList $ funMapFst fm)
] ++ ffiBody
ffiBody = nub $
map (\(n, f) -> Code $ printf
("foreign import ccall \"dynamic\"\n" ++
"\t%s :: %s")
(ffiCommandName $ funMapSignature n fm)
(ffiCommandSignature $ funMapSignature n fm))
(M.toList $ funMapFst fm)
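-- | Generate Graphics.OpenGL.Internal.Shared, which defines every enum
-- and command that is used by more than one generated module.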
mkShared :: FunMap -> [(Bool, Entry, String)] -> Module
mkShared fm entr = Module "Graphics.OpenGL.Internal.Shared" [] body
where
imp =
[ Import
[ "Graphics.OpenGL.Internal.Scope"
, "Graphics.OpenGL.Basic"
]
]
body = imp ++ (concat . map bodyF $ nub entr)
bodyF (False, _, _) = []
bodyF (_, E n, v) = [Function n "GLenum" ("= " ++ v)]
bodyF (_, F n, v) = [funBody fm n v]
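-- | Generate a single profile or extension module. Shared entries are
-- not redefined here (they come from the Shared import); an extension
-- module additionally exports a gl_<extension> predicate built on
-- 'extGL'.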
mkModule :: FunMap -> String -> [(Bool, Entry, String)] -> Module
mkModule fm m entr = Module m export body
where
entryName (E n) = n
entryName (F n) = n
(ie, ib) = implicitPrelude m
hasShared = not . null $ filter (\(s, _, _) -> s) entr
shared = case hasShared of
True -> [Import
[ "Graphics.OpenGL.Internal.Shared"
]]
False -> []
export = case funExtInfoByModule m fm of
Just (i, en) ->
[ Section "Extension Support" $
[ "gl_" ++ (join "_" . tail $ split "_" en)
]
, Section en $ ie ++ map (\(s, e, _) -> entryName e) entr
]
Nothing ->
[ Section m $ ie ++ map (\(s, e, _) -> entryName e) entr
]
body =
[ Import
[ "Graphics.OpenGL.Internal.Scope"
, "Graphics.OpenGL.Basic"
]
] ++
shared ++ ib ++ extCheck ++ concatMap bodyF entr
extCheck = case funExtInfoByModule m fm of
Just (i, en) ->
[ Function
("gl_" ++ (join "_" . tail $ split "_" en))
"(Monad m, MonadReader e m, HasScope e) => m Bool"
("= extGL " ++ show i)
]
Nothing -> []
bodyF (True, _, _) = []
bodyF (_, E n, v) = [Function n "GLenum" ("= " ++ v)]
bodyF (_, F n, v) = [funBody fm n v]
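-- | Generate one gathering module per extension vendor (the "ARB",
-- "EXT", ... component of the extension name), re-exporting all of
-- that vendor's extension modules.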
mkExtensionGather :: FunMap -> [Module]
mkExtensionGather fm = (flip map) extensionGroups $
\x -> Module (printf "Graphics.OpenGL.Extension.%s" $ sanePrefix x)
[Section (printf "%s Extensions" x) $ map ("module "++) $ extInGroup x]
[Import $ extInGroup x]
where
extInGroup grp
= map fst
. sort
. filter (\x -> grp == (head . tail . split "_" . snd $ snd x))
. M.toList $ funExt fm
extensionGroups
= sort
. nub
. map (head . tail . split "_" . snd . snd)
. M.toList $ funExt fm
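-- | Generate the top-level Graphics.OpenGL.Extension module,
-- re-exporting every vendor gathering module.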
mkExtensionGroupGather :: [Module] -> Module
mkExtensionGroupGather ms = Module "Graphics.OpenGL.Extension"
[Section "Extensions" $ map (("module "++) . moduleName) ms]
[Import $ map moduleName ms]
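-- | Top-level driver: build the entry and module maps from the
-- registry, derive the 'FunMap', and write out the Shared and Scope
-- modules, one module per profile and extension, the vendor gathering
-- modules, and the top-level extension module.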
generateSource :: Registry -> IO ()
generateSource registry = do
let s = execState (entries registry) M.empty
let m = execState (modules registry s) M.empty
let fm' = concatMap snd $ M.toList m
let fm = funMap registry fm'
saveModule $ mkShared fm fm'
saveModule $ mkScope fm fm'
mapM_ (saveModule . uncurry (mkModule fm)) $ M.toList m
let exts = mkExtensionGather fm
mapM_ saveModule $ exts
saveModule $ mkExtensionGroupGather exts
| polarina/opengl-wrangler | gen/Generator.hs | bsd-3-clause | 18,888 | 378 | 28 | 4,018 | 6,288 | 3,276 | 3,012 | 495 | 40 |
{-# LANGUAGE NoImplicitPrelude #-}
module Protocol.ROC.PointTypes.PointType12 where
import Data.Binary.Get (getWord8,Get)
import Data.Word (Word8)
import Prelude (($),
return,
Bool,
Eq,
Read,
Show)
import Protocol.ROC.Utils (anyButNull,getTime)
data PointType12 = PointType12 {
pointType12Seconds :: !PointType12Seconds
,pointType12Minutes :: !PointType12Minutes
,pointType12Hours :: !PointType12Hours
,pointType12Day :: !PointType12Day
,pointType12Month :: !PointType12Month
,pointType12Year :: !PointType12Year
,pointType12LeapYear :: !PointType12LeapYear
,pointType12DayofWeek :: !PointType12DayofWeek
,pointType12TimeSMHDMY :: !PointType12TimeSMHDMY
,pointType12Century :: !PointType12Century
,pointType12EnableDaySavTime :: !PointType12EnableDaySavTime
} deriving (Read,Eq, Show)
type PointType12Seconds = Word8
type PointType12Minutes = Word8
type PointType12Hours = Word8
type PointType12Day = Word8
type PointType12Month = Word8
type PointType12Year = Word8
type PointType12LeapYear = Word8
type PointType12DayofWeek = Word8
type PointType12TimeSMHDMY = [Word8]
type PointType12Century = Word8
type PointType12EnableDaySavTime = Bool
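-- | Parse one point type 12 record, reading the fields in the order
-- they are declared above.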
pointType12Parser :: Get PointType12
pointType12Parser = do
seconds <- getWord8
minutes <- getWord8
hours <- getWord8
day <- getWord8
month <- getWord8
year <- getWord8
leapYear <- getWord8
dayOfWeek <- getWord8
timeSMHDMY <- getTime
century <- getWord8
enableDaySavTime <- anyButNull
return $ PointType12 seconds minutes hours day month year leapYear dayOfWeek timeSMHDMY century enableDaySavTime
| plow-technologies/roc-translator | src/Protocol/ROC/PointTypes/PointType12.hs | bsd-3-clause | 2,116 | 0 | 9 | 740 | 362 | 205 | 157 | 71 | 1 |
module Main where
import System.Environment (getArgs)
import Network.Factual.API
import Data.Factual.Query.SchemaQuery
import Data.Factual.Response
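-- Expects the OAuth key and secret as the two command-line arguments,
-- e.g. (with placeholder credentials):
--   runhaskell SchemaExample.hs MY_OAUTH_KEY MY_OAUTH_SECRET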
main :: IO ()
main = do
args <- getArgs
let oauthKey = head args
let oauthSecret = last args
let options = Options { token = generateToken oauthKey oauthSecret, timeout = Nothing }
let query = SchemaQuery Places
result <- executeQuery options query
putStrLn $ "Status: " ++ status result
putStrLn $ "Version: " ++ show (version result)
putStrLn $ "Data: " ++ show (response result)
| rudyl313/factual-haskell-driver | examples/SchemaExample.hs | bsd-3-clause | 549 | 0 | 12 | 98 | 185 | 92 | 93 | 16 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Config (Config(..), ConfigReader(..), Domain, get) where
import System.Environment as Sys
import Control.Monad.IO.Class
(MonadIO)
import Control.Monad.Reader
(MonadReader, ReaderT)
type Domain
= String
data Config
= Config
{ domain :: Domain
, port :: Int }
newtype ConfigReader a
= ConfigReader
{ runConfigReader :: ReaderT Config IO a }
deriving (Applicative, Functor, Monad, MonadIO, MonadReader Config)
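-- | Read the configuration from the environment. Both MS_DOMAIN and
-- MS_PORT must be set, and MS_PORT must parse as an Int; otherwise the
-- underlying getEnv/read calls will throw.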
get :: IO Config
get =
Config
<$> Sys.getEnv "MS_DOMAIN"
<*> (read <$> Sys.getEnv "MS_PORT")
| svanderbleek/media-server | src/Config.hs | bsd-3-clause | 660 | 0 | 9 | 124 | 173 | 105 | 68 | 24 | 1 |